Compare commits
2 Commits (fix-linux-... vs. refactor/c...)

| Author | SHA1 | Date |
| --- | --- | --- |
| | 1be0cf0fdf | |
| | f60cb4090b | |
.github/dependabot.yml (10 changes, vendored)

```diff
@@ -5,13 +5,3 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
-  # Requirements to build documentation
-  - package-ecosystem: "pip"
-    directory: "/lib/spack/docs"
-    schedule:
-      interval: "daily"
-  # Requirements to run style checks
-  - package-ecosystem: "pip"
-    directory: "/.github/workflows/style"
-    schedule:
-      interval: "daily"
```
.github/workflows/audit.yaml (15 changes, vendored)

```diff
@@ -17,13 +17,10 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ${{ matrix.operating_system }}
-    strategy:
-      matrix:
-        operating_system: ["ubuntu-latest", "macos-latest"]
+    runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
       with:
         python-version: ${{inputs.python_version}}
    - name: Install Python packages
@@ -34,7 +31,6 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         coverage run $(which spack) audit packages
-        coverage run $(which spack) audit externals
         coverage combine
         coverage xml
     - name: Package audits (without coverage)
@@ -42,8 +38,7 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         $(which spack) audit packages
-        $(which spack) audit externals
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
+    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
       if: ${{ inputs.with_coverage == 'true' }}
       with:
-        flags: unittests,audits
+        flags: unittests,linux,audits
```
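The first hunk above toggles between a single pinned runner and a GitHub Actions build matrix. For reference, a minimal sketch of the matrix pattern in play; the job name here is hypothetical, and only the `strategy`/`runs-on` shape mirrors the hunk:

```yaml
jobs:
  example-audits:                            # hypothetical job name
    runs-on: ${{ matrix.operating_system }}  # one job instance per matrix value
    strategy:
      matrix:
        operating_system: ["ubuntu-latest", "macos-latest"]
    steps:
      - run: echo "auditing on ${{ matrix.operating_system }}"
```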
.github/workflows/bootstrap.yml (26 changes, vendored)

```diff
@@ -24,7 +24,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
             make patch unzip xz-utils python3 python3-dev tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
             bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -179,11 +179,11 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       - name: Bootstrap clingo
         run: |
           set -ex
-          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+          for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            echo "Testing $ver_dir"
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -214,7 +214,7 @@ jobs:
       - name: Bootstrap clingo
         run: |
          set -ex
-          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+          for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            echo "Testing $ver_dir"
@@ -247,7 +247,7 @@ jobs:
             bzip2 curl file g++ gcc patchelf gfortran git gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
             make patch unzip xz-utils python3 python3-dev tree \
             gawk
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
```
.github/workflows/build-containers.yml (16 changes, vendored)

```diff
@@ -49,14 +49,14 @@ jobs:
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
-          [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
+          [rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
           [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
           [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -86,19 +86,19 @@ jobs:
         fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
+        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
         with:
           name: dockerfiles
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1
+        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1
+        uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
+        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
+        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
+        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
```
.github/workflows/ci.yaml (2 changes, vendored)

```diff
@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
```
.github/workflows/nightly-win-builds.yml (31 changes, vendored)

```diff
@@ -1,31 +0,0 @@
-name: Windows Paraview Nightly
-
-on:
-  schedule:
-    - cron: '0 2 * * *'  # Run at 2 am
-
-defaults:
-  run:
-    shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-
-
-jobs:
-  build-paraview-deps:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip six pywin32 setuptools coverage
-    - name: Build Test
-      run: |
-        spack compiler find
-        spack external find cmake ninja win-sdk win-wdk wgl msmpi
-        spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
-        exit 0
```
.github/workflows/style/requirements.txt (7 changes, vendored)

```diff
@@ -1,7 +0,0 @@
-black==23.9.1
-clingo==5.6.2
-flake8==6.1.0
-isort==5.12.0
-mypy==1.5.1
-types-six==1.16.21.9
-vermin==1.5.2
```
.github/workflows/unit_tests.yaml (27 changes, vendored)

```diff
@@ -47,10 +47,10 @@ jobs:
           on_develop: false

     steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -87,17 +87,17 @@ jobs:
        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
      run: |
        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
  # Test shell integration
  shell:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -118,7 +118,7 @@ jobs:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
      with:
        flags: shelltests,linux

@@ -133,7 +133,7 @@ jobs:
        dnf install -y \
          bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
          make patch tcl unzip which xz
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -152,10 +152,10 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -165,7 +165,6 @@ jobs:
    - name: Install Python packages
      run: |
        pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
-        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -177,7 +176,7 @@ jobs:
        SPACK_TEST_SOLVER: clingo
      run: |
        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
+    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
      with:
        flags: unittests,linux,clingo
  # Run unit tests on MacOS
@@ -187,10 +186,10 @@ jobs:
      matrix:
        python-version: ["3.10"]
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
@@ -212,6 +211,6 @@ jobs:
        $(which spack) solve zlib
        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
        $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
      with:
        flags: unittests,macos
```
.github/workflows/valid-style.yml (18 changes, vendored)

```diff
@@ -18,15 +18,15 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
      with:
        python-version: '3.11'
        cache: 'pip'
    - name: Install Python Packages
      run: |
-        pip install --upgrade pip setuptools
-        pip install -r .github/workflows/style/requirements.txt
+        pip install --upgrade pip
+        pip install --upgrade vermin
    - name: vermin (Spack's Core)
      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
    - name: vermin (Repositories)
@@ -35,17 +35,16 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
      with:
        python-version: '3.11'
        cache: 'pip'
    - name: Install Python packages
      run: |
-        pip install --upgrade pip setuptools
-        pip install -r .github/workflows/style/requirements.txt
+        python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -69,7 +68,7 @@ jobs:
        dnf install -y \
          bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
          make patch tcl unzip which xz
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -82,7 +81,6 @@ jobs:
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
-        spack debug report
        spack -d bootstrap now --dev
        spack style -t black
        spack unit-test -V
```
.github/workflows/windows_python.yml (19 changes, vendored)

```diff
@@ -15,10 +15,10 @@ jobs:
  unit-tests:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -33,16 +33,16 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
      with:
        flags: unittests,windows
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -57,16 +57,16 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
      with:
        flags: unittests,windows
  build-abseil:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -75,5 +75,6 @@ jobs:
    - name: Build Test
      run: |
        spack compiler find
-        spack -d external find cmake ninja
+        spack external find cmake
+        spack external find ninja
        spack -d install abseil-cpp
```
```diff
@@ -1,16 +1,10 @@
 version: 2

-build:
-  os: "ubuntu-22.04"
-  apt_packages:
-    - graphviz
-  tools:
-    python: "3.11"
-
 sphinx:
   configuration: lib/spack/docs/conf.py
   fail_on_warning: true

 python:
+  version: 3.7
   install:
     - requirements: lib/spack/docs/requirements.txt
```
CHANGELOG.md (18 changes)

```diff
@@ -1,21 +1,3 @@
-# v0.20.1 (2023-07-10)
-
-## Spack Bugfixes
-
-- Spec removed from an environment where not actually removed if `--force` was not given (#37877)
-- Speed-up module file generation (#37739)
-- Hotfix for a few recipes that treat CMake as a link dependency (#35816)
-- Fix re-running stand-alone test a second time, which was getting a trailing spurious failure (#37840)
-- Fixed reading JSON manifest on Cray, reporting non-concrete specs (#37909)
-- Fixed a few bugs when generating Dockerfiles from Spack (#37766,#37769)
-- Fixed a few long-standing bugs when generating module files (#36678,#38347,#38465,#38455)
-- Fixed issues with building Python extensions using an external Python (#38186)
-- Fixed compiler removal from command line (#38057)
-- Show external status as [e] (#33792)
-- Backported `archspec` fixes (#37793)
-- Improved a few error messages (#37791)
-
-
 # v0.20.0 (2023-05-21)

 `v0.20.0` is a major feature release.
```
CITATION.cff (54 changes)

```diff
@@ -27,53 +27,12 @@
 # And here's the CITATION.cff format:
 #
 cff-version: 1.2.0
-type: software
 message: "If you are referencing Spack in a publication, please cite the paper below."
-title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
-abstract: >-
-  Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
-  Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
-  However, managing many configurations is difficult because the configuration space is combinatorial in size.
-  We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
-  Spack provides a novel, recursive specification syntax to invoke parametric builds of packages and dependencies.
-  It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
-  We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
 preferred-citation:
-  title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
   type: conference-paper
-  url: "https://tgamblin.github.io/pubs/spack-sc15.pdf"
-  authors:
-  - family-names: "Gamblin"
-    given-names: "Todd"
-  - family-names: "LeGendre"
-    given-names: "Matthew"
-  - family-names: "Collette"
-    given-names: "Michael R."
-  - family-names: "Lee"
-    given-names: "Gregory L."
-  - family-names: "Moody"
-    given-names: "Adam"
-  - family-names: "de Supinski"
-    given-names: "Bronis R."
-  - family-names: "Futral"
-    given-names: "Scott"
-  conference:
-    name: "Supercomputing 2015 (SC’15)"
-    city: "Austin"
-    region: "Texas"
-    country: "US"
-    date-start: 2015-11-15
-    date-end: 2015-11-20
-  month: 11
-  year: 2015
-  identifiers:
-  - description: "The concept DOI of the work."
-    type: doi
-    value: 10.1145/2807591.2807623
-  - description: "The DOE Document Release Number of the work"
-    type: other
-    value: "LLNL-CONF-669890"
-authors:
+  doi: "10.1145/2807591.2807623"
+  url: "https://github.com/spack/spack"
+authors:
 - family-names: "Gamblin"
   given-names: "Todd"
 - family-names: "LeGendre"
@@ -88,3 +47,12 @@ authors:
   given-names: "Bronis R."
 - family-names: "Futral"
   given-names: "Scott"
+  title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
+  conference:
+    name: "Supercomputing 2015 (SC’15)"
+    city: "Austin"
+    region: "Texas"
+    country: "USA"
+  month: November 15-20
+  year: 2015
+  notes: LLNL-CONF-669890
```
SECURITY.md (32 changes)

```diff
@@ -2,26 +2,24 @@

 ## Supported Versions

-We provide security updates for `develop` and for the last two
-stable (`0.x`) release series of Spack. Security updates will be
-made available as patch (`0.x.1`, `0.x.2`, etc.) releases.
-
+We provide security updates for the following releases.
 For more on Spack's release structure, see
 [`README.md`](https://github.com/spack/spack#releases).

+| Version | Supported |
+| ------- | ------------------ |
+| develop | :white_check_mark: |
+| 0.19.x  | :white_check_mark: |
+| 0.18.x  | :white_check_mark: |
+
 ## Reporting a Vulnerability

-You can report a vulnerability using GitHub's private reporting
-feature:
-
-1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
-2. Click "Report a vulnerability" in the upper right corner of that page.
-3. Fill out the form and submit your draft security advisory.
-
-More details are available in
-[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
-
-You can expect to hear back about security issues within two days.
-If your security issue is accepted, we will do our best to release
-a fix within a week. If fixing the issue will take longer than
-this, we will discuss timeline options with you.
+To report a vulnerability or other security
+issue, email maintainers@spack.io.
+
+You can expect to hear back within two days.
+If your security issue is accepted, we will do
+our best to release a fix within a week. If
+fixing the issue will take longer than this,
+we will discuss timeline options with you.
```
```diff
@@ -25,6 +25,8 @@ exit 1
 # Line above is a shell no-op, and ends a python multi-line comment.
 # The code above runs this file with our preferred python interpreter.

+from __future__ import print_function
+
 import os
 import os.path
 import sys
```
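The context lines in this hunk come from Spack's bilingual launcher script (presumably `bin/spack`). As a hedged illustration of the sh/Python polyglot those comments describe (a minimal sketch, not Spack's actual header):

```sh
#!/bin/sh
# The quoted line below is a no-op (the ":" command) for the shell, but it
# opens a Python multi-line string, so Python skips the shell code entirely.
""":"
exec python3 "$0" "$@"
exit 1
":"""
# Only Python ever reaches this point.
print("running under python")
```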
```diff
@@ -14,7 +14,7 @@
 ::
 @echo off

-set spack="%SPACK_ROOT%"\bin\spack
+set spack=%SPACK_ROOT%\bin\spack

 ::#######################################################################
 :: This is a wrapper around the spack command that forwards calls to
@@ -51,43 +51,65 @@ setlocal enabledelayedexpansion
 :: subcommands will never start with '-'
 :: everything after the subcommand is an arg
+:: we cannot allow batch "for" loop to directly process CL args
+:: a number of batch reserved characters are commonly passed to
+:: spack and allowing batch's "for" method to process the raw inputs
+:: results in a large number of formatting issues
+:: instead, treat the entire CLI as one string
+:: and split by space manually
+:: capture cl args in variable named cl_args
+set cl_args=%*
 :process_cl_args
-rem Set first cl argument (denoted by %1) to be processed
-set t=%1
-rem shift moves all cl positional arguments left by one
-rem meaning %2 is now %1, this allows us to iterate over each
-rem argument
-shift
-rem assign next "first" cl argument to cl_args, will be null when
-rem there are now further arguments to process
-set cl_args=%1
-if "!t:~0,1!" == "-" (
-    if defined _sp_subcommand (
-        rem We already have a subcommand, processing args now
-        if not defined _sp_args (
-            set "_sp_args=!t!"
-        ) else (
-            set "_sp_args=!_sp_args! !t!"
-        )
-    ) else (
-        if not defined _sp_flags (
-            set "_sp_flags=!t!"
-        ) else (
-            set "_sp_flags=!_sp_flags! !t!"
-        )
-    )
-) else if not defined _sp_subcommand (
-    set "_sp_subcommand=!t!"
-) else (
-    if not defined _sp_args (
-        set "_sp_args=!t!"
-    ) else (
-        set "_sp_args=!_sp_args! !t!"
-    )
-)
-rem if this is not nu;ll, we have more tokens to process
+rem tokens=1* returns the first processed token produced
+rem by tokenizing the input string cl_args on spaces into
+rem the named variable %%g
+rem While this make look like a for loop, it only
+rem executes a single time for each of the cl args
+rem the actual iterative loop is performed by the
+rem goto process_cl_args stanza
+rem we are simply leveraging the "for" method's string
+rem tokenization
+for /f "tokens=1*" %%g in ("%cl_args%") do (
+    set t=%%~g
+    rem remainder of string is composed into %%h
+    rem these are the cl args yet to be processed
+    rem assign cl_args var to only the args to be processed
+    rem effectively discarding the current arg %%g
+    rem this will be nul when we have no further tokens to process
+    set cl_args=%%h
+    rem process the first space delineated cl arg
+    rem of this iteration
+    if "!t:~0,1!" == "-" (
+        if defined _sp_subcommand (
+            rem We already have a subcommand, processing args now
+            if not defined _sp_args (
+                set "_sp_args=!t!"
+            ) else (
+                set "_sp_args=!_sp_args! !t!"
+            )
+        ) else (
+            if not defined _sp_flags (
+                set "_sp_flags=!t!"
+                shift
+            ) else (
+                set "_sp_flags=!_sp_flags! !t!"
+                shift
+            )
+        )
+    ) else if not defined _sp_subcommand (
+        set "_sp_subcommand=!t!"
+        shift
+    ) else (
+        if not defined _sp_args (
+            set "_sp_args=!t!"
+            shift
+        ) else (
+            set "_sp_args=!_sp_args! !t!"
+            shift
+        )
+    )
+)
+rem if this is not nil, we have more tokens to process
 rem start above process again with remaining unprocessed cl args
 if defined cl_args goto :process_cl_args

@@ -192,7 +214,7 @@ goto :end_switch
 if defined _sp_args (
     if NOT "%_sp_args%"=="%_sp_args:--help=%" (
         goto :default_case
-    ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+    ) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
         goto :default_case
     ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
         goto :default_case
```
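The head side of this hunk replaces a `shift`-based loop with `for /f "tokens=1*"` string splitting. A standalone hedged sketch of that splitting technique, with a hypothetical label and variable names, following the comments above:

```bat
@echo off
set cl_args=%*
:next_token
rem tokens=1* puts the first space-delimited token in %%g and the untouched
rem remainder in %%h; the "for" runs once, and goto provides the iteration.
for /f "tokens=1*" %%g in ("%cl_args%") do (
    echo token: %%g
    rem when %%h is empty this unsets cl_args, which ends the loop
    set cl_args=%%h
)
if defined cl_args goto :next_token
```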
bin/spack.ps1 (146 changes)

```diff
@@ -1,146 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-# #######################################################################
-
-function Compare-CommonArgs {
-    $CMDArgs = $args[0]
-    # These aruments take precedence and call for no futher parsing of arguments
-    # invoke actual Spack entrypoint with that context and exit after
-    "--help", "-h", "--version", "-V" | ForEach-Object {
-        $arg_opt = $_
-        if(($CMDArgs) -and ([bool]($CMDArgs.Where({$_ -eq $arg_opt})))) {
-            return $true
-        }
-    }
-    return $false
-}
-
-function Read-SpackArgs {
-    $SpackCMD_params = @()
-    $SpackSubCommand = $NULL
-    $SpackSubCommandArgs = @()
-    $args_ = $args[0]
-    $args_ | ForEach-Object {
-        if (!$SpackSubCommand) {
-            if($_.SubString(0,1) -eq "-")
-            {
-                $SpackCMD_params += $_
-            }
-            else{
-                $SpackSubCommand = $_
-            }
-        }
-        else{
-            $SpackSubCommandArgs += $_
-        }
-    }
-    return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
-}
-
-function Set-SpackEnv {
-    # This method is responsible
-    # for processing the return from $(spack <command>)
-    # which are returned as System.Object[]'s containing
-    # a list of env commands
-    # Invoke-Expression can only handle one command at a time
-    # so we iterate over the list to invoke the env modification
-    # expressions one at a time
-    foreach($envop in $args[0]){
-        Invoke-Expression $envop
-    }
-}
-
-
-function Invoke-SpackCD {
-    if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python "$Env:SPACK_ROOT/bin/spack" cd -h
-    }
-    else {
-        $LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
-        if (($NULL -ne $LOC)){
-            if ( Test-Path -Path $LOC){
-                Set-Location $LOC
-            }
-            else{
-                exit 1
-            }
-        }
-        else {
-            exit 1
-        }
-    }
-}
-
-function Invoke-SpackEnv {
-    if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
-        python "$Env:SPACK_ROOT/bin/spack" env -h
-    }
-    else {
-        $SubCommandSubCommand = $SpackSubCommandArgs[0]
-        $SubCommandSubCommandArgs = $SpackSubCommandArgs[1..$SpackSubCommandArgs.Count]
-        switch ($SubCommandSubCommand) {
-            "activate" {
-                if (Compare-CommonArgs $SubCommandSubCommandArgs) {
-                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
-                }
-                elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
-                }
-                elseif (!$SubCommandSubCommandArgs) {
-                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
-                }
-                else {
-                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
-                    Set-SpackEnv $SpackEnv
-                }
-            }
-            "deactivate" {
-                if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python "$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
-                }
-                elseif($SubCommandSubCommandArgs) {
-                    python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
-                }
-                else {
-                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
-                    Set-SpackEnv $SpackEnv
-                }
-            }
-            default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
-        }
-    }
-}
-
-function Invoke-SpackLoad {
-    if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    }
-    elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
-        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    }
-    else {
-        $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
-        Set-SpackEnv $SpackEnv
-    }
-}
-
-
-$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args
-
-if (Compare-CommonArgs $SpackCMD_params) {
-    python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    exit $LASTEXITCODE
-}
-
-# Process Spack commands with special conditions
-# all other commands are piped directly to Spack
-switch($SpackSubCommand)
-{
-    "cd" {Invoke-SpackCD}
-    "env" {Invoke-SpackEnv}
-    "load" {Invoke-SpackLoad}
-    "unload" {Invoke-SpackLoad}
-    default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
-}
```
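For readers of the removed script: `Read-SpackArgs` splits a raw argument list into global flags, the subcommand, and the subcommand's arguments. A hedged usage sketch (the input values are made up; the semantics follow the function body above):

```powershell
$params, $cmd, $cmdargs = Read-SpackArgs @("-d", "install", "-v", "zlib")
# $params  -> @("-d")         : flags seen before any subcommand
# $cmd     -> "install"       : first token not starting with "-"
# $cmdargs -> @("-v", "zlib") : everything after the subcommand
```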
```diff
@@ -36,9 +36,3 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
-  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
-  duplicates:
-    # "none": allows a single node for any package in the DAG.
-    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
-    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
-    strategy: minimal
```
```diff
@@ -216,11 +216,10 @@ config:
   # manipulation by unprivileged user (e.g. AFS)
   allow_sgid: true

-  # Whether to show status information during building and installing packages.
-  # This gives information about Spack's current progress as well as the current
-  # and total number of packages. Information is shown both in the terminal
-  # title and inline.
-  install_status: true
+  # Whether to set the terminal title to display status information during
+  # building and installing packages. This gives information about Spack's
+  # current progress as well as the current and total number of packages.
+  terminal_title: false

   # Number of seconds a buildcache's index.json is cached locally before probing
   # for updates, within a single Spack invocation. Defaults to 10 minutes.
```
```diff
@@ -1,4 +1,2 @@
 mirrors:
-  spack-public:
-    binary: false
-    url: https://mirror.spack.io
+  spack-public: https://mirror.spack.io
```
```diff
@@ -49,7 +49,6 @@ packages:
     pbs: [openpbs, torque]
     pil: [py-pillow]
     pkgconfig: [pkgconf, pkg-config]
-    qmake: [qt-base, qt]
     rpc: [libtirpc]
     scalapack: [netlib-scalapack, amdscalapack]
     sycl: [hipsycl]
@@ -60,7+59,6 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
-    zlib-api: [zlib-ng+compat, zlib]
   permissions:
     read: world
     write: user
```
lib/spack/docs/.gitignore (1 change, vendored)

```diff
@@ -1,3 +1,4 @@
+package_list.html
 command_index.rst
 spack*.rst
 llnl*.rst
```
```diff
@@ -1,16 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-# The name of the Pygments (syntax highlighting) style to use.
-# We use our own extension of the default style with a few modifications
-from pygments.styles.default import DefaultStyle
-from pygments.token import Generic
-
-
-class SpackStyle(DefaultStyle):
-    styles = DefaultStyle.styles.copy()
-    background_color = "#f4f4f8"
-    styles[Generic.Output] = "#355"
-    styles[Generic.Prompt] = "bold #346ec9"
```
```diff
@@ -45,8 +45,7 @@ Listing available packages

 To install software with Spack, you need to know what software is
 available. You can see a list of available package names at the
-`packages.spack.io <https://packages.spack.io>`_ website, or
-using the ``spack list`` command.
+:ref:`package-list` webpage, or using the ``spack list`` command.

 .. _cmd-spack-list:

@@ -61,7 +60,7 @@ can install:
    :ellipsis: 10

 There are thousands of them, so we've truncated the output above, but you
-can find a `full list here <https://packages.spack.io>`_.
+can find a :ref:`full list here <package-list>`.
 Packages are listed by name in alphabetical order.
 A pattern to match with no wildcards, ``*`` or ``?``,
 will be treated as though it started and ended with
```
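Both sides of this hunk document the same matching rule; as a hedged illustration, a pattern with no wildcards behaves as a substring match:

```console
$ spack list mpi    # matches every package whose name contains "mpi"
```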
```diff
@@ -48,10 +48,14 @@ Here is an example where a build cache is created in a local directory named

 .. code-block:: console

-   $ spack buildcache push ./spack-cache ninja
+   $ spack buildcache push --allow-root ./spack-cache ninja
    ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache

-Note that ``ninja`` must be installed locally for this to work.
+Not that ``ninja`` must be installed locally for this to work.
+
+We're using the ``--allow-root`` flag to tell Spack that is OK when any of
+the binaries we're pushing contain references to the local Spack install
+directory.

 Once you have a build cache, you can add it as a mirror, discussed next.

@@ -143,7 +147,7 @@ and then install from it exclusively, you would do:

    $ spack mirror add E4S https://cache.e4s.io
    $ spack buildcache keys --install --trust
-   $ spack install --use-buildcache only <package>
+   $ spack install --use-buildache only <package>

 We use ``--install`` and ``--trust`` to say that we are installing keys to our
 keyring, and trusting all downloaded keys.
```
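To make the "add it as a mirror" step concrete, a hedged sketch continuing the local-directory example above (the mirror name is illustrative):

```console
$ spack mirror add local-cache ./spack-cache
$ spack install --use-buildcache only ninja
```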
```diff
@@ -32,14 +32,9 @@ can't be found. You can readily check if any prerequisite for using Spack is mis

 Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

-   % echo $?
-   1
-
 In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
 are missing and it's giving detailed information on why they are needed and whether
-they can be bootstrapped. The return code of this command summarizes the results, if any
-dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
-concretizes a spec, like:
+they can be bootstrapped. Running a command that concretize a spec, like:

 .. code-block:: console

@@ -49,7 +44,7 @@ concretizes a spec, like:
    ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
    [ ... ]

-automatically triggers the bootstrapping of clingo from pre-built binaries as expected.
+triggers the bootstrapping of clingo from pre-built binaries as expected.

 Users can also bootstrap all the dependencies needed by Spack in a single command, which
 might be useful to setup containers or other similar environments:
```
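The single command referred to here is presumably `spack bootstrap now`, the same subcommand the `valid-style.yml` workflow earlier in this diff invokes as `spack -d bootstrap now --dev`. A hedged sketch:

```console
% spack bootstrap now
```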
@@ -3,103 +3,6 @@
|
|||||||
|
|
||||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
|
|
||||||
.. _concretizer-options:
|
|
||||||
|
|
||||||
==========================================
|
|
||||||
Concretization Settings (concretizer.yaml)
|
|
||||||
==========================================
|
|
||||||
|
|
||||||
The ``concretizer.yaml`` configuration file allows you to customize aspects of the
|
|
||||||
algorithm used to select the dependencies you install. The default configuration
|
|
||||||
is the following:
|
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
|
|
||||||
:language: yaml
|
|
||||||
|
|
||||||
--------------------------------
|
|
||||||
Reuse already installed packages
|
|
||||||
--------------------------------
|
|
||||||
|
|
||||||
The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
|
|
||||||
whether it will do a "fresh" installation and prefer the latest settings from
|
|
||||||
``package.py`` files and ``packages.yaml`` (``false``).
|
|
||||||
You can use:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
% spack install --reuse <spec>
|
|
||||||
|
|
||||||
to enable reuse for a single installation, and you can use:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
spack install --fresh <spec>
|
|
||||||
|
|
||||||
to do a fresh install if ``reuse`` is enabled by default.
|
|
||||||
``reuse: true`` is the default.
|
|
||||||
|
|
||||||
------------------------------------------
|
|
||||||
Selection of the target microarchitectures
|
|
||||||
------------------------------------------
|
|
||||||
|
|
||||||
The options under the ``targets`` attribute control which targets are considered during a solve.
|
|
||||||
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
|
|
||||||
and there are no corresponding command line arguments to enable them for a single solve.
|
|
||||||
|
|
||||||
The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
|
|
||||||
If set to:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
concretizer:
|
|
||||||
targets:
|
|
||||||
granularity: microarchitectures
|
|
||||||
|
|
||||||
Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
|
|
||||||
compatibility. If instead the option is set to:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
concretizer:
|
|
||||||
targets:
|
|
||||||
granularity: generic
|
|
||||||
|
|
||||||
Spack will consider only generic microarchitectures. For instance, when running on a
|
|
||||||
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
|
|
||||||
``x86_64_v3`` as the best target in the latter case.
|
|
||||||
|
|
||||||
The ``host_compatible`` option is a Boolean option that determines whether or not the
|
|
||||||
microarchitectures considered during the solve are constrained to be compatible with the
|
|
||||||
host Spack is currently running on. For instance, if this option is set to ``true``, a
|
|
||||||
user cannot concretize for ``target=icelake`` while running on a Haswell node.
|
|
||||||
|
|
||||||
---------------
|
|
||||||
Duplicate nodes
|
|
||||||
---------------
|
|
||||||
|
|
||||||
The ``duplicates`` attribute controls whether the DAG can contain multiple configurations of
|
|
||||||
the same package. This is mainly relevant for build dependencies, which may have their version
|
|
||||||
pinned by some nodes, and thus be required at different versions by different nodes in the same
|
|
||||||
DAG.
|
|
||||||
|
|
||||||
The ``strategy`` option controls how the solver deals with duplicates. If the value is ``none``,
|
|
||||||
then a single configuration per package is allowed in the DAG. This means, for instance, that only
|
|
||||||
a single ``cmake`` or a single ``py-setuptools`` version is allowed. The result would be a slightly
|
|
||||||
faster concretization, at the expense of making a few specs unsolvable.
|
|
||||||
|
|
||||||
If the value is ``minimal`` Spack will allow packages tagged as ``build-tools`` to have duplicates.
|
|
||||||
This makes it possible, for instance, to concretize specs whose nodes require different, and incompatible, ranges
|
|
||||||
of some build tool. In the figure below, the latest ``py-shapely`` requires a newer ``py-setuptools``,
|
|
||||||
while ``py-numpy`` still needs an older version:
|
|
||||||
|
|
||||||
.. figure:: images/shapely_duplicates.svg
|
|
||||||
:scale: 70 %
|
|
||||||
:align: center
|
|
||||||
|
|
||||||
Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
|
|
||||||
default behavior is ``duplicates:strategy:minimal``.
|
|
||||||
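A minimal ``concretizer.yaml`` sketch of selecting the strategy explicitly, following the ``duplicates:strategy`` path named above:

.. code-block:: yaml

   concretizer:
     duplicates:
       # "none" allows a single configuration per package; "minimal" is the default
       strategy: minimal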
|
|
||||||
.. _build-settings:
|
.. _build-settings:
|
||||||
|
|
||||||
================================
|
================================
|
||||||
@@ -329,6 +232,76 @@ Specific limitations include:
|
|||||||
then Spack will not add a new external entry (``spack config blame packages``
|
then Spack will not add a new external entry (``spack config blame packages``
|
||||||
can help locate all external entries).
|
can help locate all external entries).
|
||||||
|
|
||||||
|
.. _concretizer-options:
|
||||||
|
|
||||||
|
----------------------
|
||||||
|
Concretizer options
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
``packages.yaml`` gives the concretizer preferences for specific packages,
|
||||||
|
but you can also use ``concretizer.yaml`` to customize aspects of the
|
||||||
|
algorithm it uses to select the dependencies you install:
|
||||||
|
|
||||||
|
.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
|
||||||
|
:language: yaml
|
||||||
|
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
Reuse already installed packages
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
|
||||||
|
whether it will do a "fresh" installation and prefer the latest settings from
|
||||||
|
``package.py`` files and ``packages.yaml`` (``false``).
|
||||||
|
You can use:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
% spack install --reuse <spec>
|
||||||
|
|
||||||
|
to enable reuse for a single installation, and you can use:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
spack install --fresh <spec>
|
||||||
|
|
||||||
|
to do a fresh install if ``reuse`` is enabled by default.
|
||||||
|
``reuse: true`` is the default.
|
||||||
|
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
Selection of the target microarchitectures
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
The options under the ``targets`` attribute control which targets are considered during a solve.
|
||||||
|
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
|
||||||
|
and there are no corresponding command line arguments to enable them for a single solve.
|
||||||
|
|
||||||
|
The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
|
||||||
|
If set to:
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
concretizer:
|
||||||
|
targets:
|
||||||
|
granularity: microarchitectures
|
||||||
|
|
||||||
|
Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
|
||||||
|
compatibility. If instead the option is set to:
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
concretizer:
|
||||||
|
targets:
|
||||||
|
granularity: generic
|
||||||
|
|
||||||
|
Spack will consider only generic microarchitectures. For instance, when running on a
|
||||||
|
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
|
||||||
|
``x86_64_v3`` as the best target in the latter case.
|
||||||
|
|
||||||
|
The ``host_compatible`` option is a Boolean option that determines whether or not the
|
||||||
|
microarchitectures considered during the solve are constrained to be compatible with the
|
||||||
|
host Spack is currently running on. For instance, if this option is set to ``true``, a
|
||||||
|
user cannot concretize for ``target=icelake`` while running on a Haswell node.
|
||||||
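As a sketch, the option is toggled under the same ``targets`` attribute shown above:

.. code-block:: yaml

   concretizer:
     targets:
       # restrict candidate targets to ones compatible with the current host
       host_compatible: true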
|
|
||||||
.. _package-requirements:
|
.. _package-requirements:
|
||||||
|
|
||||||
--------------------
|
--------------------
|
||||||
|
|||||||
@@ -9,32 +9,9 @@
|
|||||||
Bundle
|
Bundle
|
||||||
------
|
------
|
||||||
|
|
||||||
``BundlePackage`` represents a set of packages that are expected to work
|
``BundlePackage`` represents a set of packages that are expected to work well
|
||||||
well together, such as a collection of commonly used software libraries.
|
together, such as a collection of commonly used software libraries. The
|
||||||
The associated software is specified as dependencies.
|
associated software is specified as bundle dependencies.
|
||||||
|
|
||||||
If it makes sense, variants, conflicts, and requirements can be added to
|
|
||||||
the package. :ref:`Variants <variants>` ensure that common build options
|
|
||||||
are consistent across the packages supporting them. :ref:`Conflicts
|
|
||||||
and requirements <packaging_conflicts>` prevent attempts to build with known
|
|
||||||
bugs or limitations.
|
|
||||||
|
|
||||||
For example, if ``MyBundlePackage`` is known to only build on ``linux``,
|
|
||||||
it could use the ``require`` directive as follows:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
require("platform=linux", msg="MyBundlePackage only builds on linux")
|
|
||||||
|
|
||||||
Spack has a number of built-in bundle packages, such as:
|
|
||||||
|
|
||||||
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
|
|
||||||
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
|
|
||||||
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
|
|
||||||
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
|
|
||||||
|
|
||||||
where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
|
|
||||||
``Libc`` is a virtual bundle package for the C standard library.
|
|
||||||
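For orientation, a minimal hypothetical bundle might look like the sketch below; the package name and dependencies are illustrative, not taken from the built-in repository:

.. code-block:: python

   from spack.package import *


   class MySuite(BundlePackage):
       """Hypothetical bundle that pins a set of related tools."""

       homepage = "https://example.com/my-suite"

       # Bundle versions have no source, so only a version name is given
       version("1.0")

       depends_on("cmake", type="build")
       depends_on("zlib")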
|
|
||||||
|
|
||||||
^^^^^^^^
|
^^^^^^^^
|
||||||
|
|||||||
@@ -25,8 +25,8 @@ use Spack to build packages with the tools.
|
|||||||
The Spack Python class ``IntelOneapiPackage`` is a base class that is
|
The Spack Python class ``IntelOneapiPackage`` is a base class that is
|
||||||
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
|
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
|
||||||
``IntelOneapiTbb`` and other classes to implement the oneAPI
|
``IntelOneapiTbb`` and other classes to implement the oneAPI
|
||||||
packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
|
packages. See the :ref:`package-list` for the full list of available
|
||||||
list of available oneAPI packages, or use::
|
oneAPI packages or use::
|
||||||
|
|
||||||
spack list -d oneAPI
|
spack list -d oneAPI
|
||||||
|
|
||||||
@@ -76,55 +76,6 @@ To build with with ``icx``, do ::
|
|||||||
|
|
||||||
spack install patchelf%oneapi
|
spack install patchelf%oneapi
|
||||||
|
|
||||||
|
|
||||||
Using oneAPI Spack environment
|
|
||||||
-------------------------------
|
|
||||||
|
|
||||||
In this example, we build lammps with ``icx`` using a Spack environment for oneAPI packages created by Intel. The
|
|
||||||
compilers are installed with Spack as in the example above.
|
|
||||||
|
|
||||||
Install the oneAPI compilers::
|
|
||||||
|
|
||||||
spack install intel-oneapi-compilers
|
|
||||||
|
|
||||||
Add the compilers to your ``compilers.yaml`` so Spack can use them::
|
|
||||||
|
|
||||||
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
|
|
||||||
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
|
|
||||||
|
|
||||||
Verify that the compilers are available::
|
|
||||||
|
|
||||||
spack compiler list
|
|
||||||
|
|
||||||
Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate Intel oneAPI CPU environment::
|
|
||||||
|
|
||||||
git clone https://github.com/spack/spack-configs
|
|
||||||
spack env activate spack-configs/INTEL/CPU
|
|
||||||
spack concretize -f
|
|
||||||
|
|
||||||
`Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel; the list is constantly extended, and it currently supports:
|
|
||||||
|
|
||||||
- `Devito <https://www.devitoproject.org/>`_
|
|
||||||
- `GROMACS <https://www.gromacs.org/>`_
|
|
||||||
- `HPCG <https://www.hpcg-benchmark.org/>`_
|
|
||||||
- `HPL <https://netlib.org/benchmark/hpl/>`_
|
|
||||||
- `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
|
|
||||||
- `OpenFOAM <https://www.openfoam.com/>`_
|
|
||||||
- `Quantum Espresso <https://www.quantum-espresso.org/>`_
|
|
||||||
- `STREAM <https://www.cs.virginia.edu/stream/>`_
|
|
||||||
- `WRF <https://github.com/wrf-model/WRF>`_
|
|
||||||
|
|
||||||
To build lammps with oneAPI compiler from this environment just run::
|
|
||||||
|
|
||||||
spack install lammps
|
|
||||||
|
|
||||||
Compiled binaries can be found using::
|
|
||||||
|
|
||||||
spack cd -i lammps
|
|
||||||
|
|
||||||
You can do the same for all other applications from this environment.
|
|
||||||
|
|
||||||
|
|
||||||
Using oneAPI MPI to Satisfy a Virtual Dependence
|
Using oneAPI MPI to Satisfy a Virtual Dependence
|
||||||
------------------------------------------------------
|
------------------------------------------------------
|
||||||
|
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ By default, these phases run:
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ sip-build --verbose --target-dir ...
|
$ python configure.py --bindir ... --destdir ...
|
||||||
$ make
|
$ make
|
||||||
$ make install
|
$ make install
|
||||||
|
|
||||||
@@ -41,30 +41,30 @@ By default, these phases run:
|
|||||||
Important files
|
Important files
|
||||||
^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Each SIP package comes with a custom configuration file written in Python.
|
Each SIP package comes with a custom ``configure.py`` build script,
|
||||||
For newer packages, this is called ``project.py``, while in older packages,
|
written in Python. This script contains instructions to build the project.
|
||||||
it may be called ``configure.py``. This script contains instructions to build
|
|
||||||
the project.
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Build system dependencies
|
Build system dependencies
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
``SIPPackage`` requires several dependencies. Python and SIP are needed at build-time
|
``SIPPackage`` requires several dependencies. Python is needed to run
|
||||||
to run the aforementioned configure script. Python is also needed at run-time to
|
the ``configure.py`` build script, and to run the resulting Python
|
||||||
actually use the installed Python library. And as we are building Python bindings
|
libraries. Qt is needed to provide the ``qmake`` command. SIP is also
|
||||||
for C/C++ libraries, Python is also needed as a link dependency. All of these
|
needed to build the package. All of these dependencies are automatically
|
||||||
dependencies are automatically added via the base class.
|
added via the base class
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
extends("python", type=("build", "link", "run"))
|
extends('python')
|
||||||
depends_on("py-sip", type="build")
|
|
||||||
|
|
||||||
|
depends_on('qt', type='build')
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
depends_on('py-sip', type='build')
|
||||||
Passing arguments to ``sip-build``
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
Passing arguments to ``configure.py``
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Each phase comes with a ``<phase_args>`` function that can be used to pass
|
Each phase comes with a ``<phase_args>`` function that can be used to pass
|
||||||
arguments to that particular phase. For example, if you need to pass
|
arguments to that particular phase. For example, if you need to pass
|
||||||
@@ -72,11 +72,11 @@ arguments to the configure phase, you can use:
|
|||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
def configure_args(self):
|
def configure_args(self, spec, prefix):
|
||||||
return ["--no-python-dbus"]
|
return ['--no-python-dbus']
|
||||||
|
|
||||||
|
|
||||||
A list of valid options can be found by running ``sip-build --help``.
|
A list of valid options can be found by running ``python configure.py --help``.
|
||||||
|
|
||||||
^^^^^^^
|
^^^^^^^
|
||||||
Testing
|
Testing
|
||||||
|
|||||||
@@ -48,6 +48,9 @@
|
|||||||
os.environ["COLIFY_SIZE"] = "25x120"
|
os.environ["COLIFY_SIZE"] = "25x120"
|
||||||
os.environ["COLUMNS"] = "120"
|
os.environ["COLUMNS"] = "120"
|
||||||
|
|
||||||
|
# Generate full package list if needed
|
||||||
|
subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])
|
||||||
|
|
||||||
# Generate a command index if an update is needed
|
# Generate a command index if an update is needed
|
||||||
subprocess.call(
|
subprocess.call(
|
||||||
[
|
[
|
||||||
@@ -94,7 +97,9 @@ class PatchedPythonDomain(PythonDomain):
|
|||||||
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
|
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
|
||||||
if "refspecific" in node:
|
if "refspecific" in node:
|
||||||
del node["refspecific"]
|
del node["refspecific"]
|
||||||
return super().resolve_xref(env, fromdocname, builder, typ, target, node, contnode)
|
return super(PatchedPythonDomain, self).resolve_xref(
|
||||||
|
env, fromdocname, builder, typ, target, node, contnode
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
#
|
#
|
||||||
@@ -144,6 +149,7 @@ def setup(sphinx):
|
|||||||
# Get nice vector graphics
|
# Get nice vector graphics
|
||||||
graphviz_output_format = "svg"
|
graphviz_output_format = "svg"
|
||||||
|
|
||||||
|
|
||||||
# Add any paths that contain templates here, relative to this directory.
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
templates_path = ["_templates"]
|
templates_path = ["_templates"]
|
||||||
|
|
||||||
@@ -211,8 +217,6 @@ def setup(sphinx):
|
|||||||
# Spack classes that intersphinx is unable to resolve
|
# Spack classes that intersphinx is unable to resolve
|
||||||
("py:class", "spack.version.StandardVersion"),
|
("py:class", "spack.version.StandardVersion"),
|
||||||
("py:class", "spack.spec.DependencySpec"),
|
("py:class", "spack.spec.DependencySpec"),
|
||||||
("py:class", "spack.spec.InstallStatus"),
|
|
||||||
("py:class", "spack.spec.SpecfileReaderBase"),
|
|
||||||
("py:class", "spack.install_test.Pb"),
|
("py:class", "spack.install_test.Pb"),
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -229,8 +233,30 @@ def setup(sphinx):
|
|||||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||||
# output. They are ignored by default.
|
# output. They are ignored by default.
|
||||||
# show_authors = False
|
# show_authors = False
|
||||||
sys.path.append("./_pygments")
|
|
||||||
pygments_style = "style.SpackStyle"
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
|
# We use our own extension of the default style with a few modifications
|
||||||
|
from pygments.style import Style
|
||||||
|
from pygments.styles.default import DefaultStyle
|
||||||
|
from pygments.token import Comment, Generic, Text
|
||||||
|
|
||||||
|
|
||||||
|
class SpackStyle(DefaultStyle):
|
||||||
|
styles = DefaultStyle.styles.copy()
|
||||||
|
background_color = "#f4f4f8"
|
||||||
|
styles[Generic.Output] = "#355"
|
||||||
|
styles[Generic.Prompt] = "bold #346ec9"
|
||||||
|
|
||||||
|
|
||||||
|
import pkg_resources
|
||||||
|
|
||||||
|
dist = pkg_resources.Distribution(__file__)
|
||||||
|
sys.path.append(".") # make 'conf' module findable
|
||||||
|
ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
|
||||||
|
dist._ep_map = {"pygments.styles": {"plugin1": ep}}
|
||||||
|
pkg_resources.working_set.add(dist)
|
||||||
|
|
||||||
|
pygments_style = "spack"
|
||||||
|
|
||||||
# A list of ignored prefixes for module index sorting.
|
# A list of ignored prefixes for module index sorting.
|
||||||
# modindex_common_prefix = []
|
# modindex_common_prefix = []
|
||||||
@@ -315,15 +341,16 @@ def setup(sphinx):
|
|||||||
# Output file base name for HTML help builder.
|
# Output file base name for HTML help builder.
|
||||||
htmlhelp_basename = "Spackdoc"
|
htmlhelp_basename = "Spackdoc"
|
||||||
|
|
||||||
|
|
||||||
# -- Options for LaTeX output --------------------------------------------------
|
# -- Options for LaTeX output --------------------------------------------------
|
||||||
|
|
||||||
latex_elements = {
|
latex_elements = {
|
||||||
# The paper size ('letterpaper' or 'a4paper').
|
# The paper size ('letterpaper' or 'a4paper').
|
||||||
# 'papersize': 'letterpaper',
|
#'papersize': 'letterpaper',
|
||||||
# The font size ('10pt', '11pt' or '12pt').
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
# 'pointsize': '10pt',
|
#'pointsize': '10pt',
|
||||||
# Additional stuff for the LaTeX preamble.
|
# Additional stuff for the LaTeX preamble.
|
||||||
# 'preamble': '',
|
#'preamble': '',
|
||||||
}
|
}
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
|
|||||||
@@ -292,13 +292,12 @@ It is also worth noting that:
|
|||||||
non_bindable_shared_objects = ["libinterface.so"]
|
non_bindable_shared_objects = ["libinterface.so"]
|
||||||
|
|
||||||
----------------------
|
----------------------
|
||||||
``install_status``
|
``terminal_title``
|
||||||
----------------------
|
----------------------
|
||||||
|
|
||||||
When set to ``true``, Spack will show information about its current progress
|
By setting this option to ``true``, Spack will update the terminal's title to
|
||||||
as well as the current and total package numbers. Progress is shown both
|
provide information about its current progress as well as the current and
|
||||||
in the terminal title and inline. Setting it to ``false`` will not show any
|
total package numbers.
|
||||||
progress information.
|
|
||||||
|
|
||||||
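As a sketch, assuming this option lives under the top-level ``config`` section of ``config.yaml`` (the diff shows it named ``install_status`` on one side and ``terminal_title`` on the other):

.. code-block:: yaml

   config:
     # assumption: the newer option name from this diff
     install_status: true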
To work properly, this requires your terminal to reset its title after
|
To work properly, this requires your terminal to reset its title after
|
||||||
Spack has finished its work, otherwise Spack's status information will
|
Spack has finished its work, otherwise Spack's status information will
|
||||||
|
|||||||
@@ -636,7 +636,7 @@ to customize the generation of container recipes:
|
|||||||
- No
|
- No
|
||||||
* - ``os_packages:command``
|
* - ``os_packages:command``
|
||||||
- Tool used to manage system packages
|
- Tool used to manage system packages
|
||||||
- ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
|
- ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
|
||||||
- Only with custom base images
|
- Only with custom base images
|
||||||
* - ``os_packages:update``
|
* - ``os_packages:update``
|
||||||
- Whether or not to update the list of available packages
|
- Whether or not to update the list of available packages
|
||||||
|
|||||||
@@ -310,11 +310,53 @@ Once all of the dependencies are installed, you can try building the documentati
|
|||||||
$ make clean
|
$ make clean
|
||||||
$ make
|
$ make
|
||||||
|
|
||||||
If you see any warning or error messages, you will have to correct those before your PR
|
If you see any warning or error messages, you will have to correct those before
|
||||||
is accepted. If you are editing the documentation, you should be running the
|
your PR is accepted.
|
||||||
documentation tests to make sure there are no errors. Documentation changes can result
|
|
||||||
in some obfuscated warning messages. If you don't understand what they mean, feel free
|
If you are editing the documentation, you should obviously be running the
|
||||||
to ask when you submit your PR.
|
documentation tests. But even if you are simply adding a new package, your
|
||||||
|
changes could cause the documentation tests to fail:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.
|
||||||
|
|
||||||
|
At first, this error message will mean nothing to you, since you didn't edit
|
||||||
|
that file. Until you look at line 8745 of the file in question:
|
||||||
|
|
||||||
|
.. code-block:: rst
|
||||||
|
|
||||||
|
Description:
|
||||||
|
NetCDF is a set of software libraries and self-describing, machine-
|
||||||
|
independent data formats that support the creation, access, and sharing
|
||||||
|
of array-oriented scientific data.
|
||||||
|
|
||||||
|
Our documentation includes :ref:`a list of all Spack packages <package-list>`.
|
||||||
|
If you add a new package, its docstring is added to this page. The problem in
|
||||||
|
this case was that the docstring looked like:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Netcdf(Package):
|
||||||
|
"""
|
||||||
|
NetCDF is a set of software libraries and self-describing,
|
||||||
|
machine-independent data formats that support the creation,
|
||||||
|
access, and sharing of array-oriented scientific data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
Docstrings cannot start with a newline character, or else Sphinx will complain.
|
||||||
|
Instead, they should look like:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Netcdf(Package):
|
||||||
|
"""NetCDF is a set of software libraries and self-describing,
|
||||||
|
machine-independent data formats that support the creation,
|
||||||
|
access, and sharing of array-oriented scientific data."""
|
||||||
|
|
||||||
|
Documentation changes can result in much more obfuscated warning messages.
|
||||||
|
If you don't understand what they mean, feel free to ask when you submit
|
||||||
|
your PR.
|
||||||
|
|
||||||
--------
|
--------
|
||||||
Coverage
|
Coverage
|
||||||
|
|||||||
@@ -916,9 +916,9 @@ function, as shown in the example below:
|
|||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
projections:
|
projections:
|
||||||
zlib: "{name}-{version}"
|
zlib: {name}-{version}
|
||||||
^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
|
^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
|
||||||
all: "{name}-{version}/{compiler.name}-{compiler.version}"
|
all: {name}-{version}/{compiler.name}-{compiler.version}
|
||||||
|
|
||||||
The entries in the projections configuration file must all be either
|
The entries in the projections configuration file must all be either
|
||||||
specs or the keyword ``all``. For each spec, the projection used will
|
specs or the keyword ``all``. For each spec, the projection used will
|
||||||
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
|
|||||||
example/push/%: example/install/%
|
example/push/%: example/install/%
|
||||||
@mkdir -p $(dir $@)
|
@mkdir -p $(dir $@)
|
||||||
$(info About to push $(SPEC) to a buildcache)
|
$(info About to push $(SPEC) to a buildcache)
|
||||||
$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
|
$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
|
||||||
@touch $@
|
@touch $@
|
||||||
|
|
||||||
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
|
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
|
||||||
$(info Updating the buildcache index)
|
$(info Updating the buildcache index)
|
||||||
$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
|
$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
|
||||||
$(info Done!)
|
$(info Done!)
|
||||||
@touch $@
|
@touch $@
|
||||||
|
|||||||
@@ -317,7 +317,7 @@ installed, but you know that new compilers have been added to your
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ module load gcc/4.9.0
|
$ module load gcc-4.9.0
|
||||||
$ spack compiler find
|
$ spack compiler find
|
||||||
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
|
||||||
gcc@4.9.0
|
gcc@4.9.0
|
||||||
|
|||||||
@@ -1,113 +0,0 @@
|
|||||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
|
||||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
|
||||||
|
|
||||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
|
|
||||||
==========================
|
|
||||||
Using External GPU Support
|
|
||||||
==========================
|
|
||||||
|
|
||||||
Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
|
|
||||||
configuration Spack will download and install the needed components.
|
|
||||||
It may be preferable to use existing system support: the following sections
|
|
||||||
help with using a system installation of GPU libraries.
|
|
||||||
|
|
||||||
-----------------------------------
|
|
||||||
Using an External ROCm Installation
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
Spack breaks down ROCm into many separate component packages. The following
|
|
||||||
is an example ``packages.yaml`` that organizes a consistent set of ROCm
|
|
||||||
components for use by dependent packages:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
packages:
|
|
||||||
all:
|
|
||||||
compiler: [rocmcc@=5.3.0]
|
|
||||||
variants: amdgpu_target=gfx90a
|
|
||||||
hip:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: hip@5.3.0
|
|
||||||
prefix: /opt/rocm-5.3.0/hip
|
|
||||||
hsa-rocr-dev:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: hsa-rocr-dev@5.3.0
|
|
||||||
prefix: /opt/rocm-5.3.0/
|
|
||||||
llvm-amdgpu:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: llvm-amdgpu@5.3.0
|
|
||||||
prefix: /opt/rocm-5.3.0/llvm/
|
|
||||||
comgr:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: comgr@5.3.0
|
|
||||||
prefix: /opt/rocm-5.3.0/
|
|
||||||
hipsparse:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: hipsparse@5.3.0
|
|
||||||
prefix: /opt/rocm-5.3.0/
|
|
||||||
hipblas:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: hipblas@5.3.0
|
|
||||||
prefix: /opt/rocm-5.3.0/
|
|
||||||
rocblas:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: rocblas@5.3.0
|
|
||||||
prefix: /opt/rocm-5.3.0/
|
|
||||||
rocprim:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: rocprim@5.3.0
|
|
||||||
prefix: /opt/rocm-5.3.0/rocprim/
|
|
||||||
|
|
||||||
This is in combination with the following compiler definition:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
compilers:
|
|
||||||
- compiler:
|
|
||||||
spec: rocmcc@=5.3.0
|
|
||||||
paths:
|
|
||||||
cc: /opt/rocm-5.3.0/bin/amdclang
|
|
||||||
cxx: /opt/rocm-5.3.0/bin/amdclang++
|
|
||||||
f77: null
|
|
||||||
fc: /opt/rocm-5.3.0/bin/amdflang
|
|
||||||
operating_system: rhel8
|
|
||||||
target: x86_64
|
|
||||||
|
|
||||||
This includes the following considerations:
|
|
||||||
|
|
||||||
- Each of the listed externals specifies ``buildable: false`` to force Spack
|
|
||||||
to use only the externals we defined.
|
|
||||||
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
|
|
||||||
packages, but not all of them, and furthermore not in a manner that
|
|
||||||
guarantees a complementary set if multiple ROCm installations are available.
|
|
||||||
- The ``prefix`` is the same for several components, but note that others
|
|
||||||
require listing one of the subdirectories as a prefix.
|
|
||||||
|
|
||||||
-----------------------------------
|
|
||||||
Using an External CUDA Installation
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
CUDA is split into fewer components and is simpler to specify:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
packages:
|
|
||||||
all:
|
|
||||||
variants:
|
|
||||||
- cuda_arch=70
|
|
||||||
cuda:
|
|
||||||
buildable: false
|
|
||||||
externals:
|
|
||||||
- spec: cuda@11.0.2
|
|
||||||
prefix: /opt/cuda/cuda-11.0.2/
|
|
||||||
|
|
||||||
where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
|
|
||||||
(File diff suppressed because it is too large.)
(Image diff: before size 108 KiB.)
@@ -54,16 +54,9 @@ or refer to the full manual below.
|
|||||||
features
|
features
|
||||||
getting_started
|
getting_started
|
||||||
basic_usage
|
basic_usage
|
||||||
|
Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
|
||||||
replace_conda_homebrew
|
replace_conda_homebrew
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 2
|
|
||||||
:caption: Links
|
|
||||||
|
|
||||||
Tutorial (spack-tutorial.rtfd.io) <https://spack-tutorial.readthedocs.io>
|
|
||||||
Packages (packages.spack.io) <https://packages.spack.io>
|
|
||||||
Binaries (binaries.spack.io) <https://cache.spack.io>
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
:caption: Reference
|
:caption: Reference
|
||||||
@@ -79,11 +72,10 @@ or refer to the full manual below.
|
|||||||
repositories
|
repositories
|
||||||
binary_caches
|
binary_caches
|
||||||
command_index
|
command_index
|
||||||
|
package_list
|
||||||
chain
|
chain
|
||||||
extensions
|
extensions
|
||||||
pipelines
|
pipelines
|
||||||
signing
|
|
||||||
gpu_configuration
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
|||||||
@@ -35,27 +35,27 @@ showing lots of installed packages:
|
|||||||
$ module avail
|
$ module avail
|
||||||
|
|
||||||
--------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
|
--------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
|
||||||
autoconf/2.69-gcc-4.8-qextxkq hwloc/1.11.6-gcc-6.3.0-akcisez m4/1.4.18-gcc-4.8-ev2znoc openblas/0.2.19-gcc-6.3.0-dhkmed6 py-setuptools/34.2.0-gcc-6.3.0-fadur4s
|
autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
|
||||||
automake/1.15-gcc-4.8-maqvukj isl/0.18-gcc-4.8-afi6taq m4/1.4.18-gcc-6.3.0-uppywnz openmpi/2.1.0-gcc-6.3.0-go2s4z5 py-six/1.10.0-gcc-6.3.0-p4dhkaw
|
automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
|
||||||
binutils/2.28-gcc-4.8-5s7c6rs libiconv/1.15-gcc-4.8-at46wg3 mawk/1.3.4-gcc-4.8-acjez57 openssl/1.0.2k-gcc-4.8-dkls5tk python/2.7.13-gcc-6.3.0-tyehea7
|
binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
|
||||||
bison/3.0.4-gcc-4.8-ek4luo5 libpciaccess/0.13.4-gcc-6.3.0-gmufnvh mawk/1.3.4-gcc-6.3.0-ostdoms openssl/1.0.2k-gcc-6.3.0-gxgr5or readline/7.0-gcc-4.8-xhufqhn
|
bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
|
||||||
bzip2/1.0.6-gcc-4.8-iffrxzn libsigsegv/2.11-gcc-4.8-pp2cvte mpc/1.0.3-gcc-4.8-g5mztc5 pcre/8.40-gcc-4.8-r5pbrxb readline/7.0-gcc-6.3.0-zzcyicg
|
bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
|
||||||
bzip2/1.0.6-gcc-6.3.0-bequudr libsigsegv/2.11-gcc-6.3.0-7enifnh mpfr/3.1.5-gcc-4.8-o7xm7az perl/5.24.1-gcc-4.8-dg5j65u sqlite/3.8.5-gcc-6.3.0-6zoruzj
|
bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
|
||||||
cmake/3.7.2-gcc-6.3.0-fowuuby libtool/2.4.6-gcc-4.8-7a523za mpich/3.2-gcc-6.3.0-dmvd3aw perl/5.24.1-gcc-6.3.0-6uzkpt6 tar/1.29-gcc-4.8-wse2ass
|
cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
|
||||||
curl/7.53.1-gcc-4.8-3fz46n6 libtool/2.4.6-gcc-6.3.0-n7zmbzt ncurses/6.0-gcc-4.8-dcpe7ia pkg-config/0.29.2-gcc-4.8-ib33t75 tcl/8.6.6-gcc-4.8-tfxzqbr
|
curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
|
||||||
expat/2.2.0-gcc-4.8-mrv6bd4 libxml2/2.9.4-gcc-4.8-ryzxnsu ncurses/6.0-gcc-6.3.0-ucbhcdy pkg-config/0.29.2-gcc-6.3.0-jpgubk3 util-macros/1.19.1-gcc-6.3.0-xorz2x2
|
expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
|
||||||
flex/2.6.3-gcc-4.8-yf345oo libxml2/2.9.4-gcc-6.3.0-rltzsdh netlib-lapack/3.6.1-gcc-6.3.0-js33dog py-appdirs/1.4.0-gcc-6.3.0-jxawmw7 xz/5.2.3-gcc-4.8-mew4log
|
flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
|
||||||
gcc/6.3.0-gcc-4.8-24puqve lmod/7.4.1-gcc-4.8-je4srhr netlib-scalapack/2.0.2-gcc-6.3.0-5aidk4l py-numpy/1.12.0-gcc-6.3.0-oemmoeu xz/5.2.3-gcc-6.3.0-3vqeuvb
|
gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
|
||||||
gettext/0.19.8.1-gcc-4.8-yymghlh lua/5.3.4-gcc-4.8-im75yaz netlib-scalapack/2.0.2-gcc-6.3.0-hjsemcn py-packaging/16.8-gcc-6.3.0-i2n3dtl zip/3.0-gcc-4.8-rwar22d
|
gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
|
||||||
gmp/6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem/1_6_3-gcc-4.8-wkey3nl netlib-scalapack/2.0.2-gcc-6.3.0-jva724b py-pyparsing/2.1.10-gcc-6.3.0-tbo6gmw zlib/1.2.11-gcc-4.8-pgxsxv7
|
gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
|
||||||
help2man/1.47.4-gcc-4.8-kcnqmau lua-luaposix/33.4.0-gcc-4.8-mdod2ry netlib-scalapack/2.0.2-gcc-6.3.0-rgqfr6d py-scipy/0.19.0-gcc-6.3.0-kr7nat4 zlib/1.2.11-gcc-6.3.0-7cqp6cj
|
help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
|
||||||
|
|
||||||
The names should look familiar, as they resemble the output from ``spack find``.
|
The names should look familiar, as they resemble the output from ``spack find``.
|
||||||
For example, you could type the following command to load the ``cmake`` module:
|
For example, you could type the following command to load the ``cmake`` module:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ module load cmake/3.7.2-gcc-6.3.0-fowuuby
|
$ module load cmake-3.7.2-gcc-6.3.0-fowuuby
|
||||||
|
|
||||||
Neither of these is particularly pretty, easy to remember, or easy to
|
Neither of these is particularly pretty, easy to remember, or easy to
|
||||||
type. Luckily, Spack offers many facilities for customizing the module
|
type. Luckily, Spack offers many facilities for customizing the module
|
||||||
@@ -275,12 +275,10 @@ of the installed software. For instance, in the snippet below:
|
|||||||
set:
|
set:
|
||||||
BAR: 'bar'
|
BAR: 'bar'
|
||||||
# This anonymous spec selects any package that
|
# This anonymous spec selects any package that
|
||||||
# depends on mpi. The double colon at the
|
# depends on openmpi. The double colon at the
|
||||||
# end clears the set of rules that matched so far.
|
# end clears the set of rules that matched so far.
|
||||||
^mpi::
|
^openmpi::
|
||||||
environment:
|
environment:
|
||||||
prepend_path:
|
|
||||||
PATH: '{^mpi.prefix}/bin'
|
|
||||||
set:
|
set:
|
||||||
BAR: 'baz'
|
BAR: 'baz'
|
||||||
# Selects any zlib package
|
# Selects any zlib package
|
||||||
@@ -295,9 +293,7 @@ of the installed software. For instance, in the snippet below:
|
|||||||
- FOOBAR
|
- FOOBAR
|
||||||
|
|
||||||
you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
|
you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
|
||||||
unless the associated spec satisfies the abstract dependency ``^mpi`` in which case
|
unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
|
||||||
``BAR=baz``, and the directory containing the respective MPI executables is prepended
|
|
||||||
to the ``PATH`` variable.
|
|
||||||
In addition, in any spec that satisfies ``zlib`` the value ``foo`` will be
|
In addition, in any spec that satisfies ``zlib`` the value ``foo`` will be
|
||||||
prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8``
|
prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8``
|
||||||
the variable ``FOOBAR`` will be unset.
|
the variable ``FOOBAR`` will be unset.
|
||||||
@@ -400,30 +396,28 @@ that are already in the Lmod hierarchy.
|
|||||||
|
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
Tcl and Lua modules also allow for explicit conflicts between modulefiles.
|
Tcl modules
|
||||||
|
Tcl modules also allow for explicit conflicts between modulefiles.
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
modules:
|
modules:
|
||||||
default:
|
default:
|
||||||
enable:
|
enable:
|
||||||
- tcl
|
- tcl
|
||||||
tcl:
|
tcl:
|
||||||
projections:
|
projections:
|
||||||
all: '{name}/{version}-{compiler.name}-{compiler.version}'
|
all: '{name}/{version}-{compiler.name}-{compiler.version}'
|
||||||
all:
|
all:
|
||||||
conflict:
|
conflict:
|
||||||
- '{name}'
|
- '{name}'
|
||||||
- 'intel/14.0.1'
|
- 'intel/14.0.1'
|
||||||
|
|
||||||
will create module files that will conflict with ``intel/14.0.1`` and with the
|
will create module files that will conflict with ``intel/14.0.1`` and with the
|
||||||
base directory of the same module, effectively preventing the possibility of
|
base directory of the same module, effectively preventing the possibility of
|
||||||
loading two or more versions of the same software at the same time. The tokens
|
loading two or more versions of the same software at the same time. The tokens
|
||||||
that are available for use in this directive are the same understood by the
|
that are available for use in this directive are the same understood by
|
||||||
:meth:`~spack.spec.Spec.format` method.
|
the :meth:`~spack.spec.Spec.format` method.
|
||||||
|
|
||||||
For Lmod and Environment Modules versions prior 4.2, it is important to
|
|
||||||
express the conflict on both modulefiles conflicting with each other.
|
|
||||||
|
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
@@ -785,35 +779,35 @@ cut-and-pasted into a shell script. For example:
|
|||||||
|
|
||||||
$ spack module tcl loads --dependencies py-numpy git
|
$ spack module tcl loads --dependencies py-numpy git
|
||||||
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
|
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
|
||||||
module load bzip2/1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
|
module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
|
||||||
# ncurses@6.0%gcc@4.9.3=linux-x86_64
|
# ncurses@6.0%gcc@4.9.3=linux-x86_64
|
||||||
module load ncurses/6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
|
module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
|
||||||
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
|
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
|
||||||
module load zlib/1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
|
module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
|
||||||
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
|
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
|
||||||
module load sqlite/3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
|
module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
|
||||||
# readline@6.3%gcc@4.9.3=linux-x86_64
|
# readline@6.3%gcc@4.9.3=linux-x86_64
|
||||||
module load readline/6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
|
module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
|
||||||
# python@3.5.1%gcc@4.9.3=linux-x86_64
|
# python@3.5.1%gcc@4.9.3=linux-x86_64
|
||||||
module load python/3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
|
module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
|
||||||
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
|
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
|
||||||
module load py-setuptools/20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
|
module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
|
||||||
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
|
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
|
||||||
module load py-nose/1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
|
module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
|
||||||
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
|
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
|
||||||
module load openblas/0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
|
module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
|
||||||
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
|
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
|
||||||
module load py-numpy/1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
|
module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
|
||||||
# curl@7.47.1%gcc@4.9.3=linux-x86_64
|
# curl@7.47.1%gcc@4.9.3=linux-x86_64
|
||||||
module load curl/7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
|
module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
|
||||||
# autoconf@2.69%gcc@4.9.3=linux-x86_64
|
# autoconf@2.69%gcc@4.9.3=linux-x86_64
|
||||||
module load autoconf/2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
|
module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
|
||||||
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
|
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
|
||||||
module load cmake/3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
|
module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
|
||||||
# expat@2.1.0%gcc@4.9.3=linux-x86_64
|
# expat@2.1.0%gcc@4.9.3=linux-x86_64
|
||||||
module load expat/2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
|
module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
|
||||||
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
|
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
|
||||||
module load git/2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
|
module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
|
||||||
|
|
||||||
The script may be further edited by removing unnecessary modules.
|
The script may be further edited by removing unnecessary modules.
|
||||||
|
|
||||||
@@ -832,12 +826,12 @@ For example, consider the following on one system:
|
|||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ module avail
|
$ module avail
|
||||||
linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
|
linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||||
|
|
||||||
$ spack module tcl loads antlr # WRONG!
|
$ spack module tcl loads antlr # WRONG!
|
||||||
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
||||||
module load antlr/2.7.7-gcc-5.3.0-bdpl46y
|
module load antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||||
|
|
||||||
$ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
|
$ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
|
||||||
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
|
||||||
module load linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
|
module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
|
||||||
|
|||||||
17
lib/spack/docs/package_list.rst
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||||
|
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
|
.. _package-list:
|
||||||
|
|
||||||
|
============
|
||||||
|
Package List
|
||||||
|
============
|
||||||
|
|
||||||
|
This is a list of things you can install using Spack. It is
|
||||||
|
automatically generated based on the packages in this Spack
|
||||||
|
version.
|
||||||
|
|
||||||
|
.. raw:: html
|
||||||
|
:file: package_list.html
|
||||||
@@ -121,7 +121,7 @@ Since v0.19, Spack supports two ways of writing a package recipe. The most comm
|
|||||||
|
|
||||||
def url_for_version(self, version):
|
def url_for_version(self, version):
|
||||||
if version >= Version("2.1.1"):
|
if version >= Version("2.1.1"):
|
||||||
return super().url_for_version(version)
|
return super(Openjpeg, self).url_for_version(version)
|
||||||
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
|
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
|
||||||
return url_fmt.format(version)
|
return url_fmt.format(version)
|
||||||
|
|
||||||
@@ -155,7 +155,7 @@ builder class explicitly. Using the same example as above, this reads:
|
|||||||
|
|
||||||
def url_for_version(self, version):
|
def url_for_version(self, version):
|
||||||
if version >= Version("2.1.1"):
|
if version >= Version("2.1.1"):
|
||||||
return super().url_for_version(version)
|
return super(Openjpeg, self).url_for_version(version)
|
||||||
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
|
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
|
||||||
return url_fmt.format(version)
|
return url_fmt.format(version)
|
||||||
|
|
||||||
@@ -363,42 +363,6 @@ one of these::
|
|||||||
If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
|
If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
|
||||||
``nano``, and ``notepad``, in that order.
|
``nano``, and ``notepad``, in that order.
|
||||||
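The exact variable names are elided by the hunk above; as a sketch, assuming ``EDITOR`` is among them (the conventional choice on POSIX systems):

.. code-block:: console

   $ export EDITOR=vim   # assumption: EDITOR is one of the variables Spack checks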
|
|
||||||
^^^^^^^^^^^^^^^^^
|
|
||||||
Bundling software
|
|
||||||
^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
If you have a collection of software expected to work well together with
|
|
||||||
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
|
|
||||||
Examples where bundle packages can be useful include defining suites of
|
|
||||||
applications (e.g., `EcpProxyApps
|
|
||||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
|
|
||||||
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
|
|
||||||
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
|
|
||||||
|
|
||||||
These versioned packages primarily consist of dependencies on the associated
|
|
||||||
software packages. They can include :ref:`variants <variants>` to ensure
|
|
||||||
common build options are consistently applied to dependencies. Known build
|
|
||||||
failures, such as not building on a platform or when certain compilers or
|
|
||||||
variants are used, can be flagged with :ref:`conflicts <packaging_conflicts>`.
|
|
||||||
Build requirements, such as only building with specific compilers, can similarly
|
|
||||||
be flagged with :ref:`requires <packaging_conflicts>`.
|
|
||||||
|
|
||||||
The ``spack create --template bundle`` command will create a skeleton
|
|
||||||
``BundlePackage`` ``package.py`` for you:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ spack create --template bundle --name coolsdk
|
|
||||||
|
|
||||||
Now you can fill in the basic package documentation, version(s), and software
|
|
||||||
package dependencies along with any other relevant customizations.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
Remember that bundle packages have no software of their own so there
|
|
||||||
is nothing to download.
|
|
||||||
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Non-downloadable software
|
Non-downloadable software
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
@@ -646,16 +610,7 @@ add a line like this in the package class:
|
|||||||
version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
|
version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
|
||||||
version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")
|
version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")
|
||||||
|
|
||||||
.. note::
|
Versions should be listed in descending order, from newest to oldest.
|
||||||
|
|
||||||
By convention, we list versions in descending order, from newest to oldest.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
:ref:`Bundle packages <bundlepackage>` do not have source code so
|
|
||||||
there is nothing to fetch. Consequently, their version directives
|
|
||||||
consist solely of the version name (e.g., ``version("202309")``).
|
|
||||||
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^
|
^^^^^^^^^^^^^
|
||||||
Date Versions
|
Date Versions
|
||||||
@@ -2288,7 +2243,7 @@ looks like this:
|
|||||||
url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
|
url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
|
||||||
|
|
||||||
version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
|
version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
|
||||||
depends_on("zlib-api")
|
depends_on("zlib")
|
||||||
|
|
||||||
parallel = False
|
parallel = False
|
||||||
|
|
||||||
@@ -2723,7 +2678,7 @@ Conflicts and requirements
|
|||||||
--------------------------
|
--------------------------
|
||||||
|
|
||||||
Sometimes packages have known bugs, or limitations, that would prevent them
|
Sometimes packages have known bugs, or limitations, that would prevent them
|
||||||
from building e.g. against other dependencies or with certain compilers. Spack
|
to build e.g. against other dependencies or with certain compilers. Spack
|
||||||
makes it possible to express such constraints with the ``conflicts`` directive.
|
makes it possible to express such constraints with the ``conflicts`` directive.
|
||||||
|
|
||||||
Adding the following to a package:
|
Adding the following to a package:
|
||||||
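The example itself is cut off by the hunk boundary; a representative use of the directive, with an illustrative spec and message, looks like:

.. code-block:: python

   class Foo(Package):
       # Hypothetical constraint: this package is known not to build with Intel compilers
       conflicts("%intel", when="@1.2.0", msg="Foo 1.2.0 does not build with Intel compilers")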
@@ -3116,7 +3071,7 @@ follows:
   # The library provided by the bar virtual package
   @property
   def bar_libs(self):
-       return find_libraries("libFooBar", root=self.home, recursive=True)
+       return find_libraries("libFooBar", root=sef.home, recursive=True)

   # The baz virtual package home
   @property
@@ -4818,17 +4773,17 @@ For example, running:

results in spack checking that the installation created the following **file**:

-* ``self.prefix.bin.reframe``
+* ``self.prefix/bin/reframe``

and the following **directories**:

-* ``self.prefix.bin``
-* ``self.prefix.config``
-* ``self.prefix.docs``
-* ``self.prefix.reframe``
-* ``self.prefix.tutorials``
-* ``self.prefix.unittests``
-* ``self.prefix.cscs-checks``
+* ``self.prefix/bin``
+* ``self.prefix/config``
+* ``self.prefix/docs``
+* ``self.prefix/reframe``
+* ``self.prefix/tutorials``
+* ``self.prefix/unittests``
+* ``self.prefix/cscs-checks``

If **any** of these paths are missing, then Spack considers the installation
to have failed.
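
The file and directory lists above are driven by the package's install-time sanity checks. As a hedged sketch, such checks are typically declared as class attributes like the following (entries mirror the ReFrame example in the hunk):

.. code-block:: python

   class Reframe(Package):
       # Sketch: Spack verifies these paths exist under the install
       # prefix after installation and fails the install otherwise.
       sanity_check_is_file = [join_path("bin", "reframe")]
       sanity_check_is_dir = [
           "bin",
           "config",
           "docs",
           "reframe",
           "tutorials",
           "unittests",
           "cscs-checks",
       ]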
@@ -4972,7 +4927,7 @@ installed executable. The check is implemented as follows:
   @on_package_attributes(run_tests=True)
   def check_list(self):
       with working_dir(self.stage.source_path):
-           reframe = Executable(self.prefix.bin.reframe)
+           reframe = Executable(join_path(self.prefix, "bin", "reframe"))
           reframe("-l")

.. warning::
@@ -5192,8 +5147,8 @@ embedded test parts.
   for example in ["ex1", "ex2"]:
       with test_part(
           self,
-           f"test_example_{example}",
-           purpose=f"run installed {example}",
+           "test_example_{0}".format(example),
+           purpose="run installed {0}".format(example),
       ):
           exe = which(join_path(self.prefix.bin, example))
           exe()
@@ -5271,10 +5226,11 @@ Below illustrates using this feature to compile an example.
   ...
   cxx = which(os.environ["CXX"])
   cxx(
-       f"-L{self.prefix.lib}",
-       f"-I{self.prefix.include}",
-       f"{exe}.cpp",
-       "-o", exe
+       "-L{0}".format(self.prefix.lib),
+       "-I{0}".format(self.prefix.include),
+       "{0}.cpp".format(exe),
+       "-o",
+       exe
   )
   cxx_example = which(exe)
   cxx_example()
@@ -5291,14 +5247,14 @@ Saving build-time files
We highly recommend re-using build-time test sources and pared down
input files for testing installed software. These files are easier
to keep synchronized with software capabilities since they reside
within the software's repository.

If that is not possible, you can add test-related files to the package
repository (see :ref:`adding custom files <cache_custom_files>`). It
will be important to maintain them so they work across listed or supported
versions of the package.

-You can use the ``cache_extra_test_sources`` helper to copy directories
+You can use the ``cache_extra_test_sources`` method to copy directories
and or files from the source build stage directory to the package's
installation directory.

@@ -5306,15 +5262,10 @@ The signature for ``cache_extra_test_sources`` is:

.. code-block:: python

-   def cache_extra_test_sources(pkg, srcs):
+   def cache_extra_test_sources(self, srcs):

-where each argument has the following meaning:
-
-* ``pkg`` is an instance of the package for the spec under test.
-
-* ``srcs`` is a string *or* a list of strings corresponding to the
-  paths of subdirectories and or files needed for stand-alone testing.
-
+where ``srcs`` is a string *or* a list of strings corresponding to the
+paths of subdirectories and or files needed for stand-alone testing.
The paths must be relative to the staged source directory. Contents of
subdirectories and files are copied to a special test cache subdirectory
of the installation prefix. They are automatically copied to the appropriate
@@ -5335,18 +5286,21 @@ and using ``foo.c`` in a test method is illustrated below.
   srcs = ["tests",
           join_path("examples", "foo.c"),
           join_path("examples", "bar.c")]
-   cache_extra_test_sources(self, srcs)
+   self.cache_extra_test_sources(srcs)

   def test_foo(self):
       exe = "foo"
-       src_dir = self.test_suite.current_test_cache_dir.examples
+       src_dir = join_path(
+           self.test_suite.current_test_cache_dir, "examples"
+       )
       with working_dir(src_dir):
           cc = which(os.environ["CC"])
           cc(
-               f"-L{self.prefix.lib}",
-               f"-I{self.prefix.include}",
-               f"{exe}.c",
-               "-o", exe
+               "-L{0}".format(self.prefix.lib),
+               "-I{0}".format(self.prefix.include),
+               "{0}.c".format(exe),
+               "-o",
+               exe
           )
           foo = which(exe)
           foo()
@@ -5372,9 +5326,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
In our example above, test methods can use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:

-* ``self.test_suite.current_test_cache_dir.tests``
-* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
-* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``
+* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
+* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
+* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``

.. admonition:: Library packages should build stand-alone tests

@@ -5393,7 +5347,7 @@ the copy of each entry listed in ``srcs``, respectively:
If one or more of the copied files needs to be modified to reference
the installed software, it is recommended that those changes be made
to the cached files **once** in the ``copy_test_sources`` method and
-***after** the call to ``cache_extra_test_sources()``. This will
+***after** the call to ``self.cache_extra_test_sources()``. This will
reduce the amount of unnecessary work in the test method **and** avoid
problems testing in shared instances and facility deployments.

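A hedged sketch of the pattern recommended above, using Spack's ``filter_file`` utility; the ``@PREFIX@`` token, the cached file path, and the ``copy_test_sources`` method name are hypothetical:

.. code-block:: python

   def copy_test_sources(self):
       """Sketch: cache a test input once, then patch the cached copy."""
       self.cache_extra_test_sources([join_path("examples", "foo.c")])

       # Patch *after* caching so every later test run sees a file that
       # already refers to the installed software (path is illustrative).
       cached = join_path(self.install_test_root, "examples", "foo.c")
       filter_file("@PREFIX@", self.prefix, cached, string=True)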
@@ -5440,7 +5394,7 @@ property as shown below.
   """build and run custom-example"""
   data_dir = self.test_suite.current_test_data_dir
   exe = "custom-example"
-   src = datadir.join(f"{exe}.cpp")
+   src = datadir.join("{0}.cpp".format(exe))
   ...
   # TODO: Build custom-example using src and exe
   ...
@@ -5456,7 +5410,7 @@ Reading expected output from a file

The helper function ``get_escaped_text_output`` is available for packages
to retrieve and properly format the text from a file that contains the
expected output from running an executable that may contain special
characters.

The signature for ``get_escaped_text_output`` is:
@@ -5490,7 +5444,7 @@ added to the package's ``test`` subdirectory.
       db_filename, ".dump", output=str.split, error=str.split
   )
   for exp in expected:
-       assert re.search(exp, out), f"Expected '{exp}' in output"
+       assert re.search(exp, out), "Expected '{0}' in output".format(exp)

If the file was instead copied from the ``tests`` subdirectory of the staged
source code, the path would be obtained as shown below.
@@ -5503,7 +5457,7 @@ source code, the path would be obtained as shown below.
   db_filename = test_cache_dir.join("packages.db")

Alternatively, if the file was copied to the ``share/tests`` subdirectory
as part of the installation process, the test could access the path as
follows:

.. code-block:: python
@@ -5540,12 +5494,9 @@ Invoking the method is the equivalent of:

.. code-block:: python

-   errors = []
   for check in expected:
       if not re.search(check, actual):
-           errors.append(f"Expected '{check}' in output '{actual}'")
-   if errors:
-       raise RuntimeError("\n    ".join(errors))
+           raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))


.. _accessing-files:
@@ -5585,7 +5536,7 @@ repository, and installation.
     - ``self.test_suite.test_dir_for_spec(self.spec)``
   * - Current Spec's Build-time Files
     - ``self.test_suite.current_test_cache_dir``
-     - ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
+     - ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
   * - Current Spec's Custom Test Files
     - ``self.test_suite.current_test_data_dir``
     - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
@@ -5600,7 +5551,7 @@ Inheriting stand-alone tests
Stand-alone tests defined in parent (.e.g., :ref:`build-systems`) and
virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
packages that inherit from or provide interface implementations for those
packages, respectively.

The table below summarizes the stand-alone tests that will be executed along
with those implemented in the package itself.
@@ -5670,7 +5621,7 @@ for ``openmpi``:
   SKIPPED: test_version_oshcc: oshcc is not installed
   ...
   ==> [2023-03-10-16:04:02.215227] Completed testing
   ==> [2023-03-10-16:04:02.215597]
   ======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
   Openmpi::test_bin_mpirun .. PASSED
   Openmpi::test_bin_ompi_info .. PASSED
@@ -6120,7 +6071,7 @@ in the extra attributes can implement this method like this:
   @classmethod
   def validate_detected_spec(cls, spec, extra_attributes):
       """Check that "compilers" is in the extra attributes."""
-       msg = ("the extra attribute 'compilers' must be set for "
+       msg = ("the extra attribute "compilers" must be set for "
              "the detected spec '{0}'".format(spec))
       assert "compilers" in extra_attributes, msg

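The ``extra_attributes`` checked above are produced during external detection. A hedged sketch of the producing side, loosely modeled on how compiler-like packages report what they found (the variant string and attribute contents are illustrative):

.. code-block:: python

   @classmethod
   def determine_variants(cls, exes, version_str):
       # Sketch: alongside the computed variants, a package may return a
       # dictionary of extra attributes describing the detected install.
       variants = "+clang"
       extra_attributes = {"compilers": {"c": exes[0], "cxx": exes[0]}}
       return variants, extra_attributes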
@@ -6196,100 +6147,7 @@ follows:
          "foo-package@{0}".format(version_str)
      )

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Add detection tests to packages
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-To ensure that software is detected correctly for multiple configurations
-and on different systems users can write a ``detection_test.yaml`` file and
-put it in the package directory alongside the ``package.py`` file.
-This YAML file contains enough information for Spack to mock an environment
-and try to check if the detection logic yields the results that are expected.
-
-As a general rule, attributes at the top-level of ``detection_test.yaml``
-represent search mechanisms and they each map to a list of tests that should confirm
-the validity of the package's detection logic.
-
-The detection tests can be run with the following command:
-
-.. code-block:: console
-
-   $ spack audit externals
-
-Errors that have been detected are reported to screen.
-
-""""""""""""""""""""""""""
-Tests for PATH inspections
-""""""""""""""""""""""""""
-
-Detection tests insisting on ``PATH`` inspections are listed under
-the ``paths`` attribute:
-
-.. code-block:: yaml
-
-   paths:
-   - layout:
-     - executables:
-       - "bin/clang-3.9"
-       - "bin/clang++-3.9"
-       script: |
-         echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
-         echo "Target: x86_64-pc-linux-gnu"
-         echo "Thread model: posix"
-         echo "InstalledDir: /usr/bin"
-     results:
-     - spec: 'llvm@3.9.1 +clang~lld~lldb'
-
-Each test is performed by first creating a temporary directory structure as
-specified in the corresponding ``layout`` and by then running
-package detection and checking that the outcome matches the expected
-``results``. The exact details on how to specify both the ``layout`` and the
-``results`` are reported in the table below:
-
-.. list-table:: Test based on PATH inspections
-   :header-rows: 1
-
-   * - Option Name
-     - Description
-     - Allowed Values
-     - Required Field
-   * - ``layout``
-     - Specifies the filesystem tree used for the test
-     - List of objects
-     - Yes
-   * - ``layout:[0]:executables``
-     - Relative paths for the mock executables to be created
-     - List of strings
-     - Yes
-   * - ``layout:[0]:script``
-     - Mock logic for the executable
-     - Any valid shell script
-     - Yes
-   * - ``results``
-     - List of expected results
-     - List of objects (empty if no result is expected)
-     - Yes
-   * - ``results:[0]:spec``
-     - A spec that is expected from detection
-     - Any valid spec
-     - Yes
-
-"""""""""""""""""""""""""""""""
-Reuse tests from other packages
-"""""""""""""""""""""""""""""""
-
-When using a custom repository, it is possible to customize a package that already exists in ``builtin``
-and reuse its external tests. To do so, just write a ``detection_tests.yaml`` alongside the customized
-``package.py`` with an ``includes`` attribute. For instance the ``detection_tests.yaml`` for
-``myrepo.llvm`` might look like:
-
-.. code-block:: yaml
-
-   includes:
-   - "builtin.llvm"
-
-This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to
-those locally defined in the file.
-
+.. _package-lifecycle:

-----------------------------
Style guidelines for packages
@@ -4,7 +4,7 @@
   SPDX-License-Identifier: (Apache-2.0 OR MIT)

=====================================
-Spack for Homebrew/Conda Users
+Using Spack to Replace Homebrew/Conda
=====================================

Spack is an incredibly powerful package manager, designed for supercomputers
@@ -191,18 +191,18 @@ The ``--fresh`` flag tells Spack to use the latest version of every package
where possible instead of trying to optimize for reuse of existing installed
packages.

The ``--force`` flag in addition tells Spack to overwrite its previous
concretization decisions, allowing you to choose a new version of Python.
If any of the new packages like Bash are already installed, ``spack install``
won't re-install them, it will keep the symlinks in place.

-----------------------------------
Updating & Cleaning Up Old Packages
-----------------------------------

If you're looking to mimic the behavior of Homebrew, you may also want to
clean up out-of-date packages from your environment after an upgrade. To
upgrade your entire software stack within an environment and clean up old
package versions, simply run the following commands:

.. code-block:: console
@@ -212,9 +212,9 @@ package versions, simply run the following commands:
   $ spack concretize --fresh --force
   $ spack install
   $ spack gc

Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells
spack's garbage collection system that these packages should be cleaned up.

Don't worry however, this will not remove your entire environment.
@@ -223,8 +223,8 @@ a fresh concretization and will re-mark any packages that should remain
installed as "explicitly" installed.

**Note:** if you use multiple spack environments you should re-run ``spack install``
in each of your environments prior to running ``spack gc`` to prevent spack
from uninstalling any shared packages that are no longer required by the
environment you just upgraded.

--------------
@@ -1,13 +1,13 @@
-sphinx==7.2.6
-sphinxcontrib-programoutput==0.17
-sphinx_design==0.5.0
-sphinx-rtd-theme==1.3.0
-python-levenshtein==0.22.0
-docutils==0.18.1
-pygments==2.16.1
-urllib3==2.0.6
-pytest==7.4.2
-isort==5.12.0
-black==23.9.1
-flake8==6.1.0
-mypy==1.5.1
+# These dependencies should be installed using pip in order
+# to build the documentation.
+
+sphinx>=3.4,!=4.1.2,!=5.1.0
+sphinxcontrib-programoutput
+sphinx-design
+sphinx-rtd-theme
+python-levenshtein
+# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
+# https://stackoverflow.com/questions/67542699
+docutils <0.17
+pygments <2.13
+urllib3 <2
@@ -1,478 +0,0 @@
-.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-   Spack Project Developers. See the top-level COPYRIGHT file for details.
-
-   SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-.. _signing:
-
-=====================
-Spack Package Signing
-=====================
-
-The goal of package signing in Spack is to provide data integrity
-assurances around official packages produced by the automated Spack CI
-pipelines. These assurances directly address the security of Spack’s
-software supply chain by explaining why a security-conscious user can
-be reasonably justified in the belief that packages installed via Spack
-have an uninterrupted auditable trail back to change management
-decisions judged to be appropriate by the Spack maintainers. This is
-achieved through cryptographic signing of packages built by Spack CI
-pipelines based on code that has been transparently reviewed and
-approved on GitHub. This document describes the signing process for
-interested users.
-
-.. _risks:
-
-------------------------------
-Risks, Impact and Threat Model
-------------------------------
-
-This document addresses the approach taken to safeguard Spack’s
-reputation with regard to the integrity of the package data produced by
-Spack’s CI pipelines. It does not address issues of data confidentiality
-(Spack is intended to be largely open source) or availability (efforts
-are described elsewhere). With that said, the main reputational risk can
-be broadly categorized as a loss of faith in the data integrity due to a
-breach of the private key used to sign packages. Remediation of a
-private key breach would require republishing the public key with a
-revocation certificate, generating a new signing key, an assessment and
-potential rebuild/resigning of all packages since the key was breached,
-and finally direct intervention by every spack user to update their copy
-of Spack’s public keys used for local verification.
-
-The primary threat model used in mitigating the risks of these stated
-impacts is one of individual error, not malicious intent or insider
-threat. The primary objective is to avoid the above impacts by making a
-private key breach nearly impossible due to oversight or configuration
-error. Obvious and straightforward measures are taken to mitigate issues
-of malicious interference in data integrity and insider threats but
-these attack vectors are not systematically addressed. It should be hard
-to exfiltrate the private key intentionally, and almost impossible to
-leak the key by accident.
-
-.. _overview:
-
------------------
-Pipeline Overview
------------------
-
-Spack pipelines build software through progressive stages where packages
-in later stages nominally depend on packages built in earlier stages.
-For both technical and design reasons these dependencies are not
-implemented through the default GitLab artifacts mechanism; instead
-built packages are uploaded to AWS S3 mirrors (buckets) where they are
-retrieved by subsequent stages in the pipeline. Two broad categories of
-pipelines exist: Pull Request (PR) pipelines and Develop/Release
-pipelines.
-
-- PR pipelines are launched in response to pull requests made by
-  trusted and untrusted users. Packages built on these pipelines upload
-  code to quarantined AWS S3 locations which cache the built packages
-  for the purposes of review and iteration on the changes proposed in
-  the pull request. Packages built on PR pipelines can come from
-  untrusted users so signing of these pipelines is not implemented.
-  Jobs in these pipelines are executed via normal GitLab runners both
-  within the AWS GitLab infrastructure and at affiliated institutions.
-- Develop and Release pipelines **sign** the packages they produce and carry
-  strong integrity assurances that trace back to auditable change management
-  decisions. These pipelines only run after members from a trusted group of
-  reviewers verify that the proposed changes in a pull request are appropriate.
-  Once the PR is merged, or a release is cut, a pipeline is run on protected
-  GitLab runners which provide access to the required signing keys within the
-  job. Intermediary keys are used to sign packages in each stage of the
-  pipeline as they are built and a final job officially signs each package
-  external to any specific packages’ build environment. An intermediate key
-  exists in the AWS infrastructure and for each affiliated institution that
-  maintains protected runners. The runners that execute these pipelines
-  exclusively accept jobs from protected branches meaning the intermediate keys
-  are never exposed to unreviewed code and the official keys are never exposed
-  to any specific build environment.
-
-.. _key_architecture:
-
-----------------
-Key Architecture
-----------------
-
-Spack’s CI process uses public-key infrastructure (PKI) based on GNU Privacy
-Guard (gpg) keypairs to sign public releases of spack package metadata, also
-called specs. Two classes of GPG keys are involved in the process to reduce the
-impact of an individual private key compromise; these key classes are the
-*Intermediate CI Key* and *Reputational Key*. Each of these keys has signing
-sub-keys that are used exclusively for signing packages. This can be confusing
-so for the purpose of this explanation we’ll refer to Root and Signing keys.
-Each key has a private and a public component as well as one or more identities
-and zero or more signatures.
-
--------------------
-Intermediate CI Key
--------------------
-
-The Intermediate key class is used to sign and verify packages between stages
-within a develop or release pipeline. An intermediate key exists for the AWS
-infrastructure as well as each affiliated institution that maintains protected
-runners. These intermediate keys are made available to the GitLab execution
-environment building the package so that the package’s dependencies may be
-verified by the Signing Intermediate CI Public Key and the final package may be
-signed by the Signing Intermediate CI Private Key.
-
-
-+---------------------------------------------------------------------------------------------------------+
-| **Intermediate CI Key (GPG)**                                                                            |
-+==================================================+========================================================+
-| Root Intermediate CI Private Key (RSA 4096)#     | Root Intermediate CI Public Key (RSA 4096)             |
-+--------------------------------------------------+--------------------------------------------------------+
-| Signing Intermediate CI Private Key (RSA 4096)   | Signing Intermediate CI Public Key (RSA 4096)          |
-+--------------------------------------------------+--------------------------------------------------------+
-| Identity: “Intermediate CI Key <maintainers@spack.io>”                                                   |
-+---------------------------------------------------------------------------------------------------------+
-| Signatures: None                                                                                         |
-+---------------------------------------------------------------------------------------------------------+
-
-
-The *Root Intermediate CI Private Key* is stripped out of the GPG key and
-stored offline completely separate from Spack’s infrastructure. This allows the
-core development team to append revocation certificates to the GPG key and
-issue new sub-keys for use in the pipeline. It is our expectation that this
-will happen on a semi-regular basis. A corollary of this is that *this key
-should not be used to verify package integrity outside the internal CI process.*
-
-----------------
-Reputational Key
-----------------
-
-The Reputational Key is the public facing key used to sign complete groups of
-development and release packages. Only one key pair exists in this class of
-keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
-used to verify package integrity. At the end of a develop or release pipeline, a
-final pipeline job pulls down all signed package metadata built by the pipeline,
-verifies they were signed with an Intermediate CI Key, then strips the
-Intermediate CI Key signature from the package and re-signs them with the
-Signing Reputational Private Key. The officially signed packages are then
-uploaded back to the AWS S3 mirror. Please note that separating use of the
-reputational key into this final job is done to prevent leakage of the key in a
-spack package. Because the Signing Reputational Private Key is never exposed to
-a build job it cannot accidentally end up in any built package.
-
-
-+---------------------------------------------------------------------------------------------------------+
-| **Reputational Key (GPG)**                                                                               |
-+==================================================+========================================================+
-| Root Reputational Private Key (RSA 4096)#        | Root Reputational Public Key (RSA 4096)                |
-+--------------------------------------------------+--------------------------------------------------------+
-| Signing Reputational Private Key (RSA 4096)      | Signing Reputational Public Key (RSA 4096)             |
-+--------------------------------------------------+--------------------------------------------------------+
-| Identity: “Spack Project <maintainers@spack.io>”                                                         |
-+---------------------------------------------------------------------------------------------------------+
-| Signatures: Signed by core development team [#f1]_                                                       |
-+---------------------------------------------------------------------------------------------------------+
-
-The Root Reputational Private Key is stripped out of the GPG key and stored
-offline completely separate from Spack’s infrastructure. This allows the core
-development team to append revocation certificates to the GPG key in the
-unlikely event that the Signing Reputational Private Key is compromised. In
-general it is the expectation that rotating this key will happen infrequently if
-at all. This should allow relatively transparent verification for the end-user
-community without needing deep familiarity with GnuPG or Public Key
-Infrastructure.
-
-
-.. _build_cache_format:
-
-------------------
-Build Cache Format
-------------------
-
-A binary package consists of a metadata file unambiguously defining the
-built package (and including other details such as how to relocate it)
-and the installation directory of the package stored as a compressed
-archive file. The metadata files can either be unsigned, in which case
-the contents are simply the json-serialized concrete spec plus metadata,
-or they can be signed, in which case the json-serialized concrete spec
-plus metadata is wrapped in a gpg cleartext signature. Built package
-metadata files are named to indicate the operating system and
-architecture for which the package was built as well as the compiler
-used to build it and the package's name and version. For example::
-
-   linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
-
-would contain the concrete spec and binary metadata for a binary package
-of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
-architecture. The id of the built package exists in the name of the file
-as well (after the package name and version) and in this case begins
-with ``llv2ys``. The id distinguishes a particular built package from all
-other built packages with the same os/arch, compiler, name, and version.
-Below is an example of a signed binary package metadata file. Such a
-file would live in the ``build_cache`` directory of a binary mirror::
-
-   -----BEGIN PGP SIGNED MESSAGE-----
-   Hash: SHA512
-
-   {
-       "spec": {
-           <concrete-spec-contents-omitted>
-       },
-
-       "buildcache_layout_version": 1,
-       "binary_cache_checksum": {
-           "hash_algorithm": "sha256",
-           "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
-       }
-   }
-
-   -----BEGIN PGP SIGNATURE-----
-   iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
-   ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
-   4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
-   QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
-   887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
-   4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
-   qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
-   QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
-   Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
-   j3DXty57
-   =3gvm
-   -----END PGP SIGNATURE-----
-
-If a user has trusted the public key associated with the private key
-used to sign the above spec file, the signature can be verified with
-gpg, as follows::
-
-   $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
-
-The metadata (regardless whether signed or unsigned) contains the checksum
-of the ``.spack`` file containing the actual installation. The checksum should
-be compared to a checksum computed locally on the ``.spack`` file to ensure the
-contents have not changed since the binary spec plus metadata were signed. The
-``.spack`` files are actually tarballs containing the compressed archive of the
-install tree. These files, along with the metadata files, live within the
-``build_cache`` directory of the mirror, and together are organized as follows::
-
-   build_cache/
-      # unsigned metadata (for indexing, contains sha256 of .spack file)
-      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
-      # clearsigned metadata (same as above, but signed)
-      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
-      <arch>/
-          <compiler>/
-              <name>-<ver>/
-                  # tar.gz-compressed prefix (may support more compression formats later)
-                  <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
-
-Uncompressing and extracting the ``.spack`` file results in the install tree.
-This is in contrast to previous versions of spack, where the ``.spack`` file
-contained a (duplicated) metadata file, a signature file and a nested tarball
-containing the install tree.
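
The removed text above describes comparing the recorded checksum against one computed locally. A hedged sketch of that verification step in Python (the function name is hypothetical; the JSON layout follows the sample metadata shown earlier):

.. code-block:: python

   import hashlib
   import json

   def verify_spack_archive(metadata_path, archive_path):
       """Sketch: compare a .spack file against its metadata checksum."""
       # Assumes the gpg cleartext armor has already been verified and
       # stripped, leaving plain JSON.
       with open(metadata_path) as f:
           metadata = json.load(f)

       checksum = metadata["binary_cache_checksum"]
       digest = hashlib.new(checksum["hash_algorithm"])  # e.g. "sha256"
       with open(archive_path, "rb") as f:
           for chunk in iter(lambda: f.read(1 << 20), b""):
               digest.update(chunk)

       if digest.hexdigest() != checksum["hash"]:
           raise ValueError("contents changed since the spec was signed")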
-
-.. _internal_implementation:
-
------------------------
-Internal Implementation
------------------------
-
-The technical implementation of the pipeline signing process includes components
-defined in Amazon Web Services, the Kubernetes cluster, at affiliated
-institutions, and the GitLab/GitLab Runner deployment. We present the technical
-implementation in two interdependent sections. The first addresses how secrets
-are managed through the lifecycle of a develop or release pipeline. The second
-section describes how Gitlab Runner and pipelines are configured and managed to
-support secure automated signing.
-
-Secrets Management
-^^^^^^^^^^^^^^^^^^
-
-As stated above the Root Private Keys (intermediate and reputational)
-are stripped from the GPG keys and stored outside Spack’s
-infrastructure.
-
-.. warning::
-   **TODO**
-   - Explanation here about where and how access is handled for these keys.
-   - Both Root private keys are protected with strong passwords
-   - Who has access to these and how?
-
-**Intermediate CI Key**
------------------------
-
-Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
-run in AWS, and one key for each affiliated institution (e.g. University of
-Oregon). Here we describe how the Intermediate CI Key is managed in AWS:
-
-The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
-exported as an ASCII armored file and stored in a Kubernetes secret called
-``spack-intermediate-ci-signing-key``. For convenience sake, this same secret
-contains an ASCII-armored export of just the *public* components of the
-Reputational Key. This secret also contains the *public* components of each of
-the affiliated institutions' Intermediate CI Key. These are potentially needed
-to verify dependent packages which may have been found in the public mirror or
-built by a protected job running on an affiliated institution's infrastructure
-in an earlier stage of the pipeline.
-
-Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
-the following way:
-
-1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
-   a job tagged ``protected`` from a protected GitLab branch. (See
-   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
-2. Based on its configuration, the runner creates a job Pod in the
-   pipeline namespace and mounts the spack-intermediate-ci-signing-key
-   Kubernetes secret into the build container.
-3. The Intermediate CI Key, affiliated institutions' public key and the
-   Reputational Public Key are imported into a keyring by the ``spack gpg …``
-   sub-command. This is initiated by the job’s build script which is created by
-   the generate job at the beginning of the pipeline.
-4. Assuming the package has dependencies those specs are verified using
-   the keyring.
-5. The package is built and the spec.json is generated.
-6. The spec.json is signed by the keyring and uploaded to the mirror’s
-   build cache.
-
-**Reputational Key**
---------------------
-
-Because of the increased impact to end users in the case of a private
-key breach, the Reputational Key is managed separately from the
-Intermediate CI Keys and has additional controls. First, the Reputational
-Key was generated outside of Spack’s infrastructure and has been signed
-by the core development team. The Reputational Key (along with the
-Signing Reputational Private Key) was then ASCII armor exported to a
-file. Unlike the Intermediate CI Key this exported file is not stored as
-a base64 encoded secret in Kubernetes. Instead, *the key file
-itself* is encrypted and stored in Kubernetes as the
-``spack-signing-key-encrypted`` secret in the pipeline namespace.
-
-The encryption of the exported Reputational Key (including the Signing
-Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
-keys
-<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
-The private key material is decrypted and imported at the time of signing into a
-memory mounted temporary directory holding the keychain. The signing job uses
-the `AWS Encryption SDK
-<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
-(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
-decrypt the key is granted to the job Pod through a Kubernetes service account
-specifically used for this, and only this, function. Finally, for convenience
-sake, this same secret contains an ASCII-armored export of the *public*
-components of the Intermediate CI Keys and the Reputational Key. This allows the
-signing script to verify that packages were built by the pipeline (both on AWS
-or at affiliated institutions), or signed previously as a part of a different
-pipeline. This is done *before* decrypting and importing the
-Signing Reputational Private Key material and officially signing the packages.
-
-Procedurally the ``spack-signing-key-encrypted`` secret is used in the
-following way:
-
-1. The ``spack-package-signing-gitlab-runner`` protected runner picks
-   up a job tagged ``notary`` from a protected GitLab branch (See
-   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
-2. Based on its configuration, the runner creates a job pod in the
-   pipeline namespace. The job is run in a stripped down, purpose-built
-   Docker image, ``ghcr.io/spack/notary:latest``. The runner is
-   configured to only allow running jobs with this image.
-3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
-   a path on disk. Note that this becomes several files on disk: the
-   public components of the Intermediate CI Keys, the public components
-   of the Reputational Key, and an AWS KMS encrypted file containing the
-   Signing Reputational Private Key.
-4. In addition to the secret, the runner creates a tmpfs memory mounted
-   directory where the GnuPG keyring will be created to verify, and
-   then resign the package specs.
-5. The job script syncs all spec.json.sig files from the build cache to
-   a working directory in the job’s execution environment.
-6. The job script then runs the ``sign.sh`` script built into the
-   notary Docker image.
-7. The ``sign.sh`` script imports the public components of the
-   Reputational and Intermediate CI Keys and uses them to verify good
-   signatures on the spec.json.sig files. If any signed spec does not
-   verify, the job immediately fails.
-8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
-   the spec json data from the signed file in preparation for being
-   re-signed with the Reputational Key.
-9. The private components of the Reputational Key are decrypted to
-   standard out using ``aws-encryption-cli`` directly into a ``gpg
-   --import ...`` statement which imports the key into the
-   keyring mounted in-memory.
-10. The private key is then used to sign each of the json specs and the
-    keyring is removed from disk.
-11. The re-signed json specs are resynced to the AWS S3 Mirror and the
-    public signing of the packages for the develop or release pipeline
-    that created them is complete.
-
-Non service-account access to the private components of the Reputational
-Key is managed through access to the symmetric secret in KMS used
-to encrypt the data key (which in turn is used to encrypt the GnuPG key
-- see the `Encryption SDK
-Documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
-A small trusted subset of the core development team are the only
-individuals with access to this symmetric key.
-
-.. _protected_runners:
-
-Protected Runners and Reserved Tags
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack has a large number of Gitlab Runners operating in its build farm.
-These include runners deployed in the AWS Kubernetes cluster as well as
-runners deployed at affiliated institutions. The majority of runners are
-shared runners that operate across projects in gitlab.spack.io. These
-runners pick up jobs primarily from the spack/spack project and execute
-them in PR pipelines.
-
-A small number of runners operating on AWS and at affiliated institutions are
-registered as specific *protected* runners on the spack/spack project. In
-addition to protected runners there are protected branches on the spack/spack
-project. These are the ``develop`` branch, any release branch (i.e. managed with
-the ``releases/v*`` wildcard) and any tag branch (managed with the ``v*``
-wildcard). Finally, Spack’s pipeline generation code reserves certain tags to make
-sure jobs are routed to the correct runners; these tags are ``public``,
-``protected``, and ``notary``. Understanding how all this works together to
-protect secrets and provide integrity assurances can be a little confusing so
-let's break these down:
-
-- **Protected Branches**- Protected branches in Spack prevent anyone
-  other than Maintainers in GitLab from pushing code. In the case of
-  Spack the only Maintainer level entity pushing code to protected
-  branches is Spack bot. Protecting branches also marks them in such a
-  way that Protected Runners will only run jobs from those branches.
-- **Protected Runners**- Protected Runners only run jobs from protected
-  branches. Because protected runners have access to secrets, it's critical
-  that they not run Jobs from untrusted code (i.e. PR branches). If they did it
-  would be possible for a PR branch to tag a job in such a way that a protected
-  runner executed that job and mounted secrets into a code execution
-  environment that had not been reviewed by Spack maintainers. Note however
-  that in the absence of tagging used to route jobs, public runners *could* run
-  jobs from protected branches. No secrets would be at risk of being breached
-  because non-protected runners do not have access to those secrets; lack of
-  secrets would, however, cause the jobs to fail.
-- **Reserved Tags**- To mitigate the issue of public runners picking up
-  protected jobs Spack uses a small set of “reserved” job tags (Note that these
-  are *job* tags not git tags). These tags are “public”, “protected”, and
-  “notary.” The majority of jobs executed in Spack’s GitLab instance are
-  executed via a ``generate`` job. The generate job code systematically ensures
-  that no user defined configuration sets these tags. Instead, the ``generate``
-  job sets these tags based on rules related to the branch where this pipeline
-  originated. If the job is a part of a pipeline on a PR branch it sets the
-  ``public`` tag. If the job is part of a pipeline on a protected branch it
-  sets the ``protected`` tag. Finally if the job is the package signing job and
-  it is running on a pipeline that is part of a protected branch then it sets
-  the ``notary`` tag.
-
-Protected Runners are configured to only run jobs from protected branches. Only
-jobs running in pipelines on protected branches are tagged with ``protected`` or
-``notary`` tags. This tightly couples jobs on protected branches to protected
-runners that provide access to the secrets required to sign the built packages.
-The secrets can **only** be accessed via:
-
-1. Runners under direct control of the core development team.
-2. Runners under direct control of trusted maintainers at affiliated institutions.
-3. By code running the automated pipeline that has been reviewed by the
-   Spack maintainers and judged to be appropriate.
-
-Other attempts (either through malicious intent or incompetence) can at
-worst grab jobs intended for protected runners which will cause those
-jobs to fail, alerting both Spack maintainers and the core development
-team.
-
-.. [#f1]
-   The Reputational Key has also cross signed core development team
-   keys.
@@ -1,8 +1,9 @@
Name, Supported Versions, Notes, Requirement Reason
-Python, 3.6--3.12, , Interpreter for Spack
+Python, 3.6--3.11, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
+bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives
428 lib/spack/env/cc vendored
@@ -416,14 +416,30 @@ input_command="$*"
|
|||||||
# The lists are all bell-separated to be as flexible as possible, as their
|
# The lists are all bell-separated to be as flexible as possible, as their
|
||||||
# contents may come from the command line, from ' '-separated lists,
|
# contents may come from the command line, from ' '-separated lists,
|
||||||
# ':'-separated lists, etc.
|
# ':'-separated lists, etc.
|
||||||
|
include_dirs_list=""
|
||||||
|
lib_dirs_list=""
|
||||||
|
rpath_dirs_list=""
|
||||||
|
system_include_dirs_list=""
|
||||||
|
system_lib_dirs_list=""
|
||||||
|
system_rpath_dirs_list=""
|
||||||
|
isystem_system_include_dirs_list=""
|
||||||
|
isystem_include_dirs_list=""
|
||||||
|
libs_list=""
|
||||||
|
other_args_list=""
|
||||||
|
|
||||||
|
# Global state for keeping track of -Wl,-rpath -Wl,/path
|
||||||
|
wl_expect_rpath=no
|
||||||
|
|
||||||
|
# Same, but for -Xlinker -rpath -Xlinker /path
|
||||||
|
xlinker_expect_rpath=no
|
||||||
|
|
||||||
parse_Wl() {
|
parse_Wl() {
|
||||||
while [ $# -ne 0 ]; do
|
while [ $# -ne 0 ]; do
|
||||||
if [ "$wl_expect_rpath" = yes ]; then
|
if [ "$wl_expect_rpath" = yes ]; then
|
||||||
if system_dir "$1"; then
|
if system_dir "$1"; then
|
||||||
append return_system_rpath_dirs_list "$1"
|
append system_rpath_dirs_list "$1"
|
||||||
else
|
else
|
||||||
append return_rpath_dirs_list "$1"
|
append rpath_dirs_list "$1"
|
||||||
fi
|
fi
|
||||||
wl_expect_rpath=no
|
wl_expect_rpath=no
|
||||||
else
|
else
|
||||||
@@ -433,9 +449,9 @@ parse_Wl() {
|
|||||||
if [ -z "$arg" ]; then
|
if [ -z "$arg" ]; then
|
||||||
shift; continue
|
shift; continue
|
||||||
elif system_dir "$arg"; then
|
elif system_dir "$arg"; then
|
||||||
append return_system_rpath_dirs_list "$arg"
|
append system_rpath_dirs_list "$arg"
|
||||||
else
|
else
|
||||||
append return_rpath_dirs_list "$arg"
|
append rpath_dirs_list "$arg"
|
||||||
fi
|
fi
|
||||||
;;
|
;;
|
||||||
--rpath=*)
|
--rpath=*)
|
||||||
@@ -443,9 +459,9 @@ parse_Wl() {
                 if [ -z "$arg" ]; then
                     shift; continue
                 elif system_dir "$arg"; then
-                    append return_system_rpath_dirs_list "$arg"
+                    append system_rpath_dirs_list "$arg"
                 else
-                    append return_rpath_dirs_list "$arg"
+                    append rpath_dirs_list "$arg"
                 fi
                 ;;
             -rpath|--rpath)
@@ -459,7 +475,7 @@ parse_Wl() {
                 return 1
                 ;;
             *)
-                append return_other_args_list "-Wl,$1"
+                append other_args_list "-Wl,$1"
                 ;;
         esac
     fi
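An aside on what these hunks keep adjusting: the wrapper scans linker arguments and, after seeing a bare `-rpath` (spelled via `-Wl,` or `-Xlinker`), treats the next token as a path, sorting it into a system or non-system rpath list. A minimal Python sketch of that state machine, for orientation only (the real logic is the POSIX-sh code above; `is_system_dir` is a stand-in for the wrapper's `system_dir`):

    def categorize_rpaths(args, is_system_dir):
        """Sketch of the wrapper's -rpath tracking across tokens."""
        rpaths, system_rpaths, expect_rpath = [], [], False
        for arg in args:
            if expect_rpath:
                # previous token was -rpath; this token is the path
                (system_rpaths if is_system_dir(arg) else rpaths).append(arg)
                expect_rpath = False
            elif arg in ("-rpath", "--rpath"):
                expect_rpath = True  # value arrives as the next token
            elif arg.startswith(("-rpath=", "--rpath=")):
                path = arg.split("=", 1)[1]
                (system_rpaths if is_system_dir(path) else rpaths).append(path)
        return rpaths, system_rpaths

    # categorize_rpaths(["-rpath", "/usr/lib", "-rpath=/opt/foo/lib"],
    #                   lambda d: d.startswith("/usr"))
    # -> (["/opt/foo/lib"], ["/usr/lib"])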
@@ -467,210 +483,177 @@ parse_Wl() {
     done
 }
 
-categorize_arguments() {
-
-    unset IFS
-
-    return_other_args_list=""
-    return_isystem_was_used=""
-    return_isystem_system_include_dirs_list=""
-    return_isystem_include_dirs_list=""
-    return_system_include_dirs_list=""
-    return_include_dirs_list=""
-    return_system_lib_dirs_list=""
-    return_lib_dirs_list=""
-    return_system_rpath_dirs_list=""
-    return_rpath_dirs_list=""
-
-    # Global state for keeping track of -Wl,-rpath -Wl,/path
-    wl_expect_rpath=no
-
-    # Same, but for -Xlinker -rpath -Xlinker /path
-    xlinker_expect_rpath=no
-
-    while [ $# -ne 0 ]; do
-
-        # an RPATH to be added after the case statement.
-        rp=""
-
-        # Multiple consecutive spaces in the command line can
-        # result in blank arguments
-        if [ -z "$1" ]; then
-            shift
-            continue
-        fi
-
-        if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
-            # NOTE: the eval is required to allow `|` alternatives inside the variable
-            eval "\
-            case \"\$1\" in
-                $SPACK_COMPILER_FLAGS_KEEP)
-                    append return_other_args_list \"\$1\"
-                    shift
-                    continue
-                    ;;
-            esac
-            "
-        fi
-        # the replace list is a space-separated list of pipe-separated pairs,
-        # the first in each pair is the original prefix to be matched, the
-        # second is the replacement prefix
-        if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
-            for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
-                before=${rep%|*}
-                after=${rep#*|}
-                eval "\
-                stripped=\"\${1##$before}\"
-                "
-                if [ "$stripped" = "$1" ] ; then
-                    continue
-                fi
-
-                replaced="$after$stripped"
-
-                # it matched, remove it
-                shift
-
-                if [ -z "$replaced" ] ; then
-                    # completely removed, continue OUTER loop
-                    continue 2
-                fi
-
-                # re-build argument list with replacement
-                set -- "$replaced" "$@"
-            done
-        fi
-
-        case "$1" in
-            -isystem*)
-                arg="${1#-isystem}"
-                return_isystem_was_used=true
-                if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_isystem_system_include_dirs_list "$arg"
-                else
-                    append return_isystem_include_dirs_list "$arg"
-                fi
-                ;;
-            -I*)
-                arg="${1#-I}"
-                if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_include_dirs_list "$arg"
-                else
-                    append return_include_dirs_list "$arg"
-                fi
-                ;;
-            -L*)
-                arg="${1#-L}"
-                if [ -z "$arg" ]; then shift; arg="$1"; fi
-                if system_dir "$arg"; then
-                    append return_system_lib_dirs_list "$arg"
-                else
-                    append return_lib_dirs_list "$arg"
-                fi
-                ;;
-            -l*)
-                # -loopopt=0 is generated erroneously in autoconf <= 2.69,
-                # and passed by ifx to the linker, which confuses it with a
-                # library. Filter it out.
-                # TODO: generalize filtering of args with an env var, so that
-                # TODO: we do not have to special case this here.
-                if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
-                    && [ "$1" != "${1#-loopopt}" ]; then
-                    shift
-                    continue
-                fi
-                arg="${1#-l}"
-                if [ -z "$arg" ]; then shift; arg="$1"; fi
-                append return_other_args_list "-l$arg"
-                ;;
-            -Wl,*)
-                IFS=,
-                if ! parse_Wl ${1#-Wl,}; then
-                    append return_other_args_list "$1"
-                fi
-                unset IFS
-                ;;
-            -Xlinker)
-                shift
-                if [ $# -eq 0 ]; then
-                    # -Xlinker without value: let the compiler error about it.
-                    append return_other_args_list -Xlinker
-                    xlinker_expect_rpath=no
-                    break
-                elif [ "$xlinker_expect_rpath" = yes ]; then
-                    # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
-                    if system_dir "$1"; then
-                        append return_system_rpath_dirs_list "$1"
-                    else
-                        append return_rpath_dirs_list "$1"
-                    fi
-                    xlinker_expect_rpath=no
-                else
-                    case "$1" in
-                        -rpath=*)
-                            arg="${1#-rpath=}"
-                            if system_dir "$arg"; then
-                                append return_system_rpath_dirs_list "$arg"
-                            else
-                                append return_rpath_dirs_list "$arg"
-                            fi
-                            ;;
-                        --rpath=*)
-                            arg="${1#--rpath=}"
-                            if system_dir "$arg"; then
-                                append return_system_rpath_dirs_list "$arg"
-                            else
-                                append return_rpath_dirs_list "$arg"
-                            fi
-                            ;;
-                        -rpath|--rpath)
-                            xlinker_expect_rpath=yes
-                            ;;
-                        "$dtags_to_strip")
-                            ;;
-                        *)
-                            append return_other_args_list -Xlinker
-                            append return_other_args_list "$1"
-                            ;;
-                    esac
-                fi
-                ;;
-            "$dtags_to_strip")
-                ;;
-            *)
-                append return_other_args_list "$1"
-                ;;
-        esac
-        shift
-    done
-
-    # We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
-    # `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
-    # parsing.
-    if [ "$xlinker_expect_rpath" = yes ]; then
-        append return_other_args_list -Xlinker
-        append return_other_args_list -rpath
-    fi
-
-    # Same, but for -Wl flags.
-    if [ "$wl_expect_rpath" = yes ]; then
-        append return_other_args_list -Wl,-rpath
-    fi
-}
-
-categorize_arguments "$@"
-include_dirs_list="$return_include_dirs_list"
-lib_dirs_list="$return_lib_dirs_list"
-rpath_dirs_list="$return_rpath_dirs_list"
-system_include_dirs_list="$return_system_include_dirs_list"
-system_lib_dirs_list="$return_system_lib_dirs_list"
-system_rpath_dirs_list="$return_system_rpath_dirs_list"
-isystem_was_used="$return_isystem_was_used"
-isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
-isystem_include_dirs_list="$return_isystem_include_dirs_list"
-other_args_list="$return_other_args_list"
+while [ $# -ne 0 ]; do
+
+    # an RPATH to be added after the case statement.
+    rp=""
+
+    # Multiple consecutive spaces in the command line can
+    # result in blank arguments
+    if [ -z "$1" ]; then
+        shift
+        continue
+    fi
+
+    if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
+        # NOTE: the eval is required to allow `|` alternatives inside the variable
+        eval "\
+        case \"\$1\" in
+            $SPACK_COMPILER_FLAGS_KEEP)
+                append other_args_list \"\$1\"
+                shift
+                continue
+                ;;
+        esac
+        "
+    fi
+    # the replace list is a space-separated list of pipe-separated pairs,
+    # the first in each pair is the original prefix to be matched, the
+    # second is the replacement prefix
+    if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
+        for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
+            before=${rep%|*}
+            after=${rep#*|}
+            eval "\
+            stripped=\"\${1##$before}\"
+            "
+            if [ "$stripped" = "$1" ] ; then
+                continue
+            fi
+
+            replaced="$after$stripped"
+
+            # it matched, remove it
+            shift
+
+            if [ -z "$replaced" ] ; then
+                # completely removed, continue OUTER loop
+                continue 2
+            fi
+
+            # re-build argument list with replacement
+            set -- "$replaced" "$@"
+        done
+    fi
+
+    case "$1" in
+        -isystem*)
+            arg="${1#-isystem}"
+            isystem_was_used=true
+            if [ -z "$arg" ]; then shift; arg="$1"; fi
+            if system_dir "$arg"; then
+                append isystem_system_include_dirs_list "$arg"
+            else
+                append isystem_include_dirs_list "$arg"
+            fi
+            ;;
+        -I*)
+            arg="${1#-I}"
+            if [ -z "$arg" ]; then shift; arg="$1"; fi
+            if system_dir "$arg"; then
+                append system_include_dirs_list "$arg"
+            else
+                append include_dirs_list "$arg"
+            fi
+            ;;
+        -L*)
+            arg="${1#-L}"
+            if [ -z "$arg" ]; then shift; arg="$1"; fi
+            if system_dir "$arg"; then
+                append system_lib_dirs_list "$arg"
+            else
+                append lib_dirs_list "$arg"
+            fi
+            ;;
+        -l*)
+            # -loopopt=0 is generated erroneously in autoconf <= 2.69,
+            # and passed by ifx to the linker, which confuses it with a
+            # library. Filter it out.
+            # TODO: generalize filtering of args with an env var, so that
+            # TODO: we do not have to special case this here.
+            if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
+                && [ "$1" != "${1#-loopopt}" ]; then
+                shift
+                continue
+            fi
+            arg="${1#-l}"
+            if [ -z "$arg" ]; then shift; arg="$1"; fi
+            append other_args_list "-l$arg"
+            ;;
+        -Wl,*)
+            IFS=,
+            if ! parse_Wl ${1#-Wl,}; then
+                append other_args_list "$1"
+            fi
+            unset IFS
+            ;;
+        -Xlinker)
+            shift
+            if [ $# -eq 0 ]; then
+                # -Xlinker without value: let the compiler error about it.
+                append other_args_list -Xlinker
+                xlinker_expect_rpath=no
+                break
+            elif [ "$xlinker_expect_rpath" = yes ]; then
+                # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
+                if system_dir "$1"; then
+                    append system_rpath_dirs_list "$1"
+                else
+                    append rpath_dirs_list "$1"
+                fi
+                xlinker_expect_rpath=no
+            else
+                case "$1" in
+                    -rpath=*)
+                        arg="${1#-rpath=}"
+                        if system_dir "$arg"; then
+                            append system_rpath_dirs_list "$arg"
+                        else
+                            append rpath_dirs_list "$arg"
+                        fi
+                        ;;
+                    --rpath=*)
+                        arg="${1#--rpath=}"
+                        if system_dir "$arg"; then
+                            append system_rpath_dirs_list "$arg"
+                        else
+                            append rpath_dirs_list "$arg"
+                        fi
+                        ;;
+                    -rpath|--rpath)
+                        xlinker_expect_rpath=yes
+                        ;;
+                    "$dtags_to_strip")
+                        ;;
+                    *)
+                        append other_args_list -Xlinker
+                        append other_args_list "$1"
+                        ;;
+                esac
+            fi
+            ;;
+        "$dtags_to_strip")
+            ;;
+        *)
+            append other_args_list "$1"
+            ;;
+    esac
+    shift
+done
+
+# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
+# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
+# parsing.
+if [ "$xlinker_expect_rpath" = yes ]; then
+    append other_args_list -Xlinker
+    append other_args_list -rpath
+fi
+
+# Same, but for -Wl flags.
+if [ "$wl_expect_rpath" = yes ]; then
+    append other_args_list -Wl,-rpath
+fi
 
 #
 # Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -690,14 +673,12 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
     extend flags_list SPACK_DEBUG_FLAGS
 fi
 
-spack_flags_list=""
-
 # Fortran flags come before CPPFLAGS
 case "$mode" in
     cc|ccld)
        case $lang_flags in
            F)
-               extend spack_flags_list SPACK_FFLAGS
+               extend flags_list SPACK_FFLAGS
               ;;
        esac
       ;;
@@ -706,7 +687,7 @@ esac
 # C preprocessor flags come before any C/CXX flags
 case "$mode" in
     cpp|as|cc|ccld)
-        extend spack_flags_list SPACK_CPPFLAGS
+        extend flags_list SPACK_CPPFLAGS
        ;;
 esac
 
@@ -716,10 +697,10 @@ case "$mode" in
    cc|ccld)
        case $lang_flags in
            C)
-               extend spack_flags_list SPACK_CFLAGS
+               extend flags_list SPACK_CFLAGS
               ;;
            CXX)
-               extend spack_flags_list SPACK_CXXFLAGS
+               extend flags_list SPACK_CXXFLAGS
               ;;
        esac
 
@@ -731,25 +712,10 @@ esac
 # Linker flags
 case "$mode" in
     ld|ccld)
-        extend spack_flags_list SPACK_LDFLAGS
+        extend flags_list SPACK_LDFLAGS
        ;;
 esac
 
-IFS="$lsep"
-    categorize_arguments $spack_flags_list
-unset IFS
-spack_flags_include_dirs_list="$return_include_dirs_list"
-spack_flags_lib_dirs_list="$return_lib_dirs_list"
-spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
-spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
-spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
-spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
-spack_flags_isystem_was_used="$return_isystem_was_used"
-spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
-spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
-spack_flags_other_args_list="$return_other_args_list"
-
-
 # On macOS insert headerpad_max_install_names linker flag
 if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
     if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
@@ -775,8 +741,6 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
     extend lib_dirs_list SPACK_LINK_DIRS
 fi
 
-libs_list=""
-
 # add RPATHs if we're in in any linking mode
 case "$mode" in
     ld|ccld)
@@ -805,16 +769,12 @@ args_list="$flags_list"
 
 # Insert include directories just prior to any system include directories
 # NOTE: adding ${lsep} to the prefix here turns every added element into two
-extend args_list spack_flags_include_dirs_list "-I"
 extend args_list include_dirs_list "-I"
-extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_include_dirs_list "-isystem${lsep}"
 
 case "$mode" in
     cpp|cc|as|ccld)
-        if [ "$spack_flags_isystem_was_used" = "true" ]; then
-            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-        elif [ "$isystem_was_used" = "true" ]; then
+        if [ "$isystem_was_used" = "true" ]; then
             extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
         else
             extend args_list SPACK_INCLUDE_DIRS "-I"
@@ -822,15 +782,11 @@ case "$mode" in
        ;;
 esac
 
-extend args_list spack_flags_system_include_dirs_list -I
 extend args_list system_include_dirs_list -I
-extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
 
 # Library search paths
-extend args_list spack_flags_lib_dirs_list "-L"
 extend args_list lib_dirs_list "-L"
-extend args_list spack_flags_system_lib_dirs_list "-L"
 extend args_list system_lib_dirs_list "-L"
 
 # RPATHs arguments
@@ -839,25 +795,20 @@ case "$mode" in
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$linker_arg$dtags_to_add"
        fi
-       extend args_list spack_flags_rpath_dirs_list "$rpath"
       extend args_list rpath_dirs_list "$rpath"
-       extend args_list spack_flags_system_rpath_dirs_list "$rpath"
       extend args_list system_rpath_dirs_list "$rpath"
       ;;
    ld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$dtags_to_add"
        fi
-       extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
       extend args_list rpath_dirs_list "-rpath${lsep}"
-       extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
       extend args_list system_rpath_dirs_list "-rpath${lsep}"
       ;;
 esac
 
 # Other arguments from the input command
 extend args_list other_args_list
-extend args_list spack_flags_other_args_list
 
 # Inject SPACK_LDLIBS, if supplied
 extend args_list libs_list "-l"
@@ -913,4 +864,3 @@ fi
 # Execute the full command, preserving spaces with IFS set
 # to the alarm bell separator.
 IFS="$lsep"; exec $full_command_list
-
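Context for the `$lsep` lines just above: the wrapper joins every argument list with the ASCII bell character and sets `IFS` to it before `exec`, so arguments containing spaces, commas, or colons survive word splitting. A rough Python illustration of the same trick (the `\a` separator mirrors the wrapper's bell; everything else here is illustrative):

    LSEP = "\a"  # ASCII bell: effectively never appears in real compiler flags

    def join_args(args):
        return LSEP.join(args)

    def split_args(packed):
        return packed.split(LSEP) if packed else []

    args = ["-I/opt/my include/dir", "-DNAME=hello world"]
    assert split_args(join_args(args)) == args  # spaces survive the round trip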
2  lib/spack/external/__init__.py  vendored
@@ -18,7 +18,7 @@
 
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)
+* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)
 
 astunparse
 ----------------
2  lib/spack/external/archspec/__init__.py  vendored
@@ -1,2 +1,2 @@
 """Init file to avoid namespace packages"""
-__version__ = "0.2.1"
+__version__ = "0.2.0"
@@ -79,18 +79,14 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0):
         self.features = features
         self.compilers = compilers
         self.generation = generation
-        # Cache the ancestor computation
-        self._ancestors = None
 
     @property
     def ancestors(self):
         """All the ancestors of this microarchitecture."""
-        if self._ancestors is None:
-            value = self.parents[:]
-            for parent in self.parents:
-                value.extend(a for a in parent.ancestors if a not in value)
-            self._ancestors = value
-        return self._ancestors
+        value = self.parents[:]
+        for parent in self.parents:
+            value.extend(a for a in parent.ancestors if a not in value)
+        return value
 
     def _to_set(self):
         """Returns a set of the nodes in this microarchitecture DAG."""
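The left-hand side of this hunk caches the ancestor walk in `self._ancestors`, while the right-hand side recomputes it on every property access. The memoization pattern in isolation, as a hedged standalone sketch (this is not archspec's API, just the idea):

    class Node:
        def __init__(self, parents):
            self.parents = parents
            self._ancestors = None  # cache, filled on first access

        @property
        def ancestors(self):
            if self._ancestors is None:
                value = self.parents[:]
                for parent in self.parents:
                    # preserve order; skip nodes reachable via multiple paths
                    value.extend(a for a in parent.ancestors if a not in value)
                self._ancestors = value
            return self._ancestors

    root = Node([]); mid = Node([root]); leaf = Node([mid, root])
    assert leaf.ancestors == [mid, root]  # computed once, then served from cache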
@@ -145,13 +145,6 @@
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
           }
         ],
-        "intel": [
-          {
-            "versions": "16.0:",
-            "name": "corei7",
-            "flags": "-march={name} -mtune=generic -mpopcnt"
-          }
-        ],
         "oneapi": [
           {
             "versions": "2021.2.0:",
@@ -224,13 +217,6 @@
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
           }
         ],
-        "intel": [
-          {
-            "versions": "16.0:",
-            "name": "core-avx2",
-            "flags": "-march={name} -mtune={name} -fma -mf16c"
-          }
-        ],
         "oneapi": [
           {
             "versions": "2021.2.0:",
@@ -314,13 +300,6 @@
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
           }
         ],
-        "intel": [
-          {
-            "versions": "16.0:",
-            "name": "skylake-avx512",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ],
         "oneapi": [
           {
             "versions": "2021.2.0:",
@@ -1433,92 +1412,6 @@
         ]
       }
     },
-    "sapphirerapids": {
-      "from": [
-        "icelake"
-      ],
-      "vendor": "GenuineIntel",
-      "features": [
-        "mmx",
-        "sse",
-        "sse2",
-        "ssse3",
-        "sse4_1",
-        "sse4_2",
-        "popcnt",
-        "aes",
-        "pclmulqdq",
-        "avx",
-        "rdrand",
-        "f16c",
-        "movbe",
-        "fma",
-        "avx2",
-        "bmi1",
-        "bmi2",
-        "rdseed",
-        "adx",
-        "clflushopt",
-        "xsavec",
-        "xsaveopt",
-        "avx512f",
-        "avx512vl",
-        "avx512bw",
-        "avx512dq",
-        "avx512cd",
-        "avx512vbmi",
-        "avx512ifma",
-        "sha_ni",
-        "clwb",
-        "rdpid",
-        "gfni",
-        "avx512_vbmi2",
-        "avx512_vpopcntdq",
-        "avx512_bitalg",
-        "avx512_vnni",
-        "vpclmulqdq",
-        "vaes",
-        "avx512_bf16",
-        "cldemote",
-        "movdir64b",
-        "movdiri",
-        "pdcm",
-        "serialize",
-        "waitpkg"
-      ],
-      "compilers": {
-        "gcc": [
-          {
-            "versions": "11.0:",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ],
-        "clang": [
-          {
-            "versions": "12.0:",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ],
-        "intel": [
-          {
-            "versions": "2021.2:",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ],
-        "oneapi": [
-          {
-            "versions": "2021.2:",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ],
-        "dpcpp": [
-          {
-            "versions": "2021.2:",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ]
-      }
-    },
     "k10": {
       "from": ["x86_64"],
       "vendor": "AuthenticAMD",
@@ -2172,6 +2065,8 @@
         "pku",
         "gfni",
         "flush_l1d",
+        "erms",
+        "avic",
         "avx512f",
         "avx512dq",
         "avx512ifma",
@@ -2188,12 +2083,12 @@
       "compilers": {
         "gcc": [
           {
-            "versions": "10.3:12.2",
+            "versions": "10.3:13.0",
            "name": "znver3",
             "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
           },
           {
-            "versions": "12.3:",
+            "versions": "13.1:",
             "name": "znver4",
             "flags": "-march={name} -mtune={name}"
           }
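The `"versions"` strings here are inclusive ranges: this hunk widens znver3 flags to gcc up through 13.0 and starts the dedicated znver4 entry at gcc 13.1 instead of 12.3. A much-simplified checker for the `lo:hi` syntax, under the assumption of plain dotted numeric versions (the real archspec/Spack logic also prefix-matches suffixed versions at range endpoints, which this sketch ignores):

    def in_range(version: str, spec: str) -> bool:
        """True if a dotted version falls in an inclusive 'lo:hi' range; ends may be empty."""
        def key(v):
            return tuple(int(p) for p in v.split("."))
        lo, _, hi = spec.partition(":")
        if lo and key(version) < key(lo):
            return False
        if hi and key(version) > key(hi):
            return False
        return True

    assert in_range("12.2", "10.3:13.0")
    assert not in_range("13.1", "10.3:13.0")
    assert in_range("13.1", "13.1:")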
7  lib/spack/external/ctest_log_parser.py  vendored
@@ -65,6 +65,9 @@
 up to date with CTest, just make sure the ``*_matches`` and
 ``*_exceptions`` lists are kept up to date with CTest's build handler.
 """
+from __future__ import print_function
+from __future__ import division
+
 import re
 import math
 import multiprocessing
@@ -208,7 +211,7 @@
 ]
 
 
-class LogEvent:
+class LogEvent(object):
     """Class representing interesting events (e.g., errors) in a build log."""
     def __init__(self, text, line_no,
                  source_file=None, source_line_no=None,
@@ -345,7 +348,7 @@ def _parse_unpack(args):
     return _parse(*args)
 
 
-class CTestLogParser:
+class CTestLogParser(object):
     """Log file parser that extracts errors and warnings."""
     def __init__(self, profile=False):
         # whether to record timing information
@@ -1,105 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Path primitives that just require Python standard library."""
-import functools
-import sys
-from typing import List, Optional
-from urllib.parse import urlparse
-
-
-class Path:
-    """Enum to identify the path-style."""
-
-    unix: int = 0
-    windows: int = 1
-    platform_path: int = windows if sys.platform == "win32" else unix
-
-
-def format_os_path(path: str, mode: int = Path.unix) -> str:
-    """Formats the input path to use consistent, platform specific separators.
-
-    Absolute paths are converted between drive letters and a prepended '/' as per platform
-    requirement.
-
-    Parameters:
-        path: the path to be normalized, must be a string or expose the replace method.
-        mode: the path file separator style to normalize the passed path to.
-            Default is unix style, i.e. '/'
-    """
-    if not path:
-        return path
-    if mode == Path.windows:
-        path = path.replace("/", "\\")
-    else:
-        path = path.replace("\\", "/")
-    return path
-
-
-def convert_to_posix_path(path: str) -> str:
-    """Converts the input path to POSIX style."""
-    return format_os_path(path, mode=Path.unix)
-
-
-def convert_to_windows_path(path: str) -> str:
-    """Converts the input path to Windows style."""
-    return format_os_path(path, mode=Path.windows)
-
-
-def convert_to_platform_path(path: str) -> str:
-    """Converts the input path to the current platform's native style."""
-    return format_os_path(path, mode=Path.platform_path)
-
-
-def path_to_os_path(*parameters: str) -> List[str]:
-    """Takes an arbitrary number of positional parameters, converts each argument of type
-    string to use a normalized filepath separator, and returns a list of all values.
-    """
-
-    def _is_url(path_or_url: str) -> bool:
-        if "\\" in path_or_url:
-            return False
-        url_tuple = urlparse(path_or_url)
-        return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1
-
-    result = []
-    for item in parameters:
-        if isinstance(item, str) and not _is_url(item):
-            item = convert_to_platform_path(item)
-        result.append(item)
-    return result
-
-
-def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
-    """Filters function arguments to account for platform path separators.
-    Optional slicing range can be specified to select specific arguments
-
-    This decorator takes all (or a slice) of a method's positional arguments
-    and normalizes usage of filepath separators on a per platform basis.
-
-    Note: `**kwargs`, urls, and any type that is not a string are ignored
-    so in such cases where path normalization is required, that should be
-    handled by calling path_to_os_path directly as needed.
-
-    Parameters:
-        arg_slice: a slice object specifying the slice of arguments
-            in the decorated method over which filepath separators are
-            normalized
-    """
-
-    def holder_func(func):
-        @functools.wraps(func)
-        def path_filter_caller(*args, **kwargs):
-            args = list(args)
-            if arg_slice:
-                args[arg_slice] = path_to_os_path(*args[arg_slice])
-            else:
-                args = path_to_os_path(*args)
-            return func(*args, **kwargs)
-
-        return path_filter_caller
-
-    if _func:
-        return holder_func(_func)
-    return holder_func
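For the record, the behavior of these deleted path helpers follows directly from the code above; a quick usage illustration, assuming the module is importable and the interpreter runs on a POSIX platform (so `Path.platform_path` resolves to `Path.unix`):

    # Separator round-tripping; only separators are rewritten, drive letters are kept:
    print(format_os_path("C:\\Users\\spack\\lib", mode=Path.unix))    # C:/Users/spack/lib
    print(format_os_path("C:/Users/spack/lib", mode=Path.windows))    # C:\Users\spack\lib

    @system_path_filter(arg_slice=slice(1))
    def read_manifest(path, encoding="utf-8"):
        # only the first positional argument has been normalized here
        return path

    # On POSIX, backslash separators in the sliced argument are rewritten:
    assert read_manifest("etc\\spack\\defaults") == "etc/spack/defaults"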
@@ -1,67 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""String manipulation functions that do not have other dependencies than Python
-standard library
-"""
-from typing import List, Optional
-
-
-def comma_list(sequence: List[str], article: str = "") -> str:
-    if type(sequence) is not list:
-        sequence = list(sequence)
-
-    if not sequence:
-        return ""
-    if len(sequence) == 1:
-        return sequence[0]
-
-    out = ", ".join(str(s) for s in sequence[:-1])
-    if len(sequence) != 2:
-        out += ","  # oxford comma
-    out += " "
-    if article:
-        out += article + " "
-    out += str(sequence[-1])
-    return out
-
-
-def comma_or(sequence: List[str]) -> str:
-    """Return a string with all the elements of the input joined by comma, but the last
-    one (which is joined by 'or').
-    """
-    return comma_list(sequence, "or")
-
-
-def comma_and(sequence: List[str]) -> str:
-    """Return a string with all the elements of the input joined by comma, but the last
-    one (which is joined by 'and').
-    """
-    return comma_list(sequence, "and")
-
-
-def quote(sequence: List[str], q: str = "'") -> List[str]:
-    """Quotes each item in the input list with the quote character passed as second argument."""
-    return [f"{q}{e}{q}" for e in sequence]
-
-
-def plural(n: int, singular: str, plural: Optional[str] = None, show_n: bool = True) -> str:
-    """Pluralize <singular> word by adding an s if n != 1.
-
-    Arguments:
-        n: number of things there are
-        singular: singular form of word
-        plural: optional plural form, for when it's not just singular + 's'
-        show_n: whether to include n in the result string (default True)
-
-    Returns:
-        "1 thing" if n == 1 or "n things" if n != 1
-    """
-    number = f"{n} " if show_n else ""
-    if n == 1:
-        return f"{number}{singular}"
-    elif plural is not None:
-        return f"{number}{plural}"
-    else:
-        return f"{number}{singular}s"
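The behavior of these deleted string helpers, checked against the code above (pure functions, no platform dependence; outputs follow from the definitions):

    print(comma_and(["a", "b", "c"]))     # "a, b, and c"  (oxford comma for 3+ items)
    print(comma_or(["tar", "zip"]))       # "tar or zip"   (no comma for two items)
    print(quote(["x", "y"]))              # ["'x'", "'y'"]
    print(plural(1, "error"))             # "1 error"
    print(plural(3, "entry", "entries"))  # "3 entries"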
@@ -1,459 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""URL primitives that just require Python standard library."""
-import itertools
-import os.path
-import re
-from typing import Optional, Set, Tuple
-from urllib.parse import urlsplit, urlunsplit
-
-# Archive extensions allowed in Spack
-PREFIX_EXTENSIONS = ("tar", "TAR")
-EXTENSIONS = ("gz", "bz2", "xz", "Z")
-NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
-
-# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
-ALLOWED_ARCHIVE_TYPES = (
-    tuple(".".join(ext) for ext in itertools.product(PREFIX_EXTENSIONS, EXTENSIONS))
-    + PREFIX_EXTENSIONS
-    + EXTENSIONS
-    + NO_TAR_EXTENSIONS
-)
-CONTRACTION_MAP = {"tgz": "tar.gz", "txz": "tar.xz", "tbz": "tar.bz2", "tbz2": "tar.bz2"}
-
-
-def find_list_urls(url: str) -> Set[str]:
-    r"""Find good list URLs for the supplied URL.
-
-    By default, returns the dirname of the archive path.
-
-    Provides special treatment for the following websites, which have a
-    unique list URL different from the dirname of the download URL:
-
-    =========  =======================================================
-    GitHub     https://github.com/<repo>/<name>/releases
-    GitLab     https://gitlab.\*/<repo>/<name>/tags
-    BitBucket  https://bitbucket.org/<repo>/<name>/downloads/?tab=tags
-    CRAN       https://\*.r-project.org/src/contrib/Archive/<name>
-    PyPI       https://pypi.org/simple/<name>/
-    LuaRocks   https://luarocks.org/modules/<repo>/<name>
-    =========  =======================================================
-
-    Note: this function is called by `spack versions`, `spack checksum`,
-    and `spack create`, but not by `spack fetch` or `spack install`.
-
-    Parameters:
-        url (str): The download URL for the package
-
-    Returns:
-        set: One or more list URLs for the package
-    """
-    url_types = [
-        # GitHub
-        # e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
-        (r"(.*github\.com/[^/]+/[^/]+)", lambda m: m.group(1) + "/releases"),
-        # GitLab API endpoint
-        # e.g. https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2
-        (
-            r"(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)",
-            lambda m: m.group(1) + "/" + m.group(2) + "/" + m.group(3) + "/tags",
-        ),
-        # GitLab non-API endpoint
-        # e.g. https://gitlab.dkrz.de/k202009/libaec/uploads/631e85bcf877c2dcaca9b2e6d6526339/libaec-1.0.0.tar.gz
-        (r"(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)", lambda m: m.group(1) + "/tags"),
-        # BitBucket
-        # e.g. https://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2
-        (r"(.*bitbucket.org/[^/]+/[^/]+)", lambda m: m.group(1) + "/downloads/?tab=tags"),
-        # CRAN
-        # e.g. https://cran.r-project.org/src/contrib/Rcpp_0.12.9.tar.gz
-        # e.g. https://cloud.r-project.org/src/contrib/rgl_0.98.1.tar.gz
-        (
-            r"(.*\.r-project\.org/src/contrib)/([^_]+)",
-            lambda m: m.group(1) + "/Archive/" + m.group(2),
-        ),
-        # PyPI
-        # e.g. https://pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
-        # e.g. https://www.pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
-        # e.g. https://pypi.org/packages/source/n/numpy/numpy-1.19.4.zip
-        # e.g. https://pypi.python.org/packages/source/n/numpy/numpy-1.19.4.zip
-        # e.g. https://files.pythonhosted.org/packages/source/n/numpy/numpy-1.19.4.zip
-        # e.g. https://pypi.io/packages/py2.py3/o/opencensus-context/opencensus_context-0.1.1-py2.py3-none-any.whl
-        (
-            r"(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)",
-            lambda m: "https://pypi.org/simple/" + m.group(1) + "/",
-        ),
-        # LuaRocks
-        # e.g. https://luarocks.org/manifests/gvvaughan/lpeg-1.0.2-1.src.rock
-        # e.g. https://luarocks.org/manifests/openresty/lua-cjson-2.1.0-1.src.rock
-        (
-            r"luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/"
-            + r"(?P<name>.+?)-[0-9.-]*\.src\.rock",
-            lambda m: "https://luarocks.org/modules/"
-            + m.group("org")
-            + "/"
-            + m.group("name")
-            + "/",
-        ),
-    ]
-
-    list_urls = {os.path.dirname(url)}
-
-    for pattern, fun in url_types:
-        match = re.search(pattern, url)
-        if match:
-            list_urls.add(fun(match))
-
-    return list_urls
-
-
-def strip_query_and_fragment(url: str) -> Tuple[str, str]:
-    """Strips query and fragment from a url, then returns the base url and the suffix.
-
-    Args:
-        url: URL to be stripped
-
-    Raises:
-        ValueError: when there is any error parsing the URL
-    """
-    components = urlsplit(url)
-    stripped = components[:3] + (None, None)
-
-    query, frag = components[3:5]
-    suffix = ""
-    if query:
-        suffix += "?" + query
-    if frag:
-        suffix += "#" + frag
-
-    return urlunsplit(stripped), suffix
-
-
-SOURCEFORGE_RE = re.compile(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$")
-
-
-def split_url_on_sourceforge_suffix(url: str) -> Tuple[str, ...]:
-    """If the input is a sourceforge URL, returns base URL and "/download" suffix. Otherwise,
-    returns the input URL and an empty string.
-    """
-    match = SOURCEFORGE_RE.search(url)
-    if match is not None:
-        return match.groups()
-    return url, ""
-
-
-def has_extension(path_or_url: str, ext: str) -> bool:
-    """Returns true if the extension in input is present in path, false otherwise."""
-    prefix, _ = split_url_on_sourceforge_suffix(path_or_url)
-    if not ext.startswith(r"\."):
-        ext = rf"\.{ext}$"
-
-    if re.search(ext, prefix):
-        return True
-    return False
-
-
-def extension_from_path(path_or_url: Optional[str]) -> Optional[str]:
-    """Tries to match an allowed archive extension to the input. Returns the first match,
-    or None if no match was found.
-
-    Raises:
-        ValueError: if the input is None
-    """
-    if path_or_url is None:
-        raise ValueError("Can't call extension() on None")
-
-    for t in ALLOWED_ARCHIVE_TYPES:
-        if has_extension(path_or_url, t):
-            return t
-    return None
-
-
-def remove_extension(path_or_url: str, *, extension: str) -> str:
-    """Returns the input with the extension removed"""
-    suffix = rf"\.{extension}$"
-    return re.sub(suffix, "", path_or_url)
-
-
-def check_and_remove_ext(path: str, *, extension: str) -> str:
-    """Returns the input path with the extension removed, if the extension is present in path.
-    Otherwise, returns the input unchanged.
-    """
-    if not has_extension(path, extension):
-        return path
-    path, _ = split_url_on_sourceforge_suffix(path)
-    return remove_extension(path, extension=extension)
-
-
-def strip_extension(path_or_url: str, *, extension: Optional[str] = None) -> str:
-    """If a path contains the extension in input, returns the path stripped of the extension.
-    Otherwise, returns the input path.
-
-    If extension is None, attempts to strip any allowed extension from path.
-    """
-    if extension is None:
-        for t in ALLOWED_ARCHIVE_TYPES:
-            if has_extension(path_or_url, ext=t):
-                extension = t
-                break
-        else:
-            return path_or_url
-
-    return check_and_remove_ext(path_or_url, extension=extension)
-
-
-def split_url_extension(url: str) -> Tuple[str, ...]:
-    """Some URLs have a query string, e.g.:
-
-        1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
-        2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
-        3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
-
-    In (1), the query string needs to be stripped to get at the
-    extension, but in (2) & (3), the filename is IN a single final query
-    argument.
-
-    This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
-    The suffix contains anything that was stripped off the URL to
-    get at the file extension. In (1), it will be ``'?raw=true'``, but
-    in (2), it will be empty. In (3) the suffix is a parameter that follows
-    after the file extension, e.g.:
-
-        1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
-        2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
-        3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
-    """
-    # Strip off sourceforge download suffix.
-    # e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
-    prefix, suffix = split_url_on_sourceforge_suffix(url)
-
-    ext = extension_from_path(prefix)
-    if ext is not None:
-        prefix = strip_extension(prefix)
-        return prefix, ext, suffix
-
-    try:
-        prefix, suf = strip_query_and_fragment(prefix)
-    except ValueError:
-        # FIXME: tty.debug("Got error parsing path %s" % path)
-        # Ignore URL parse errors here
-        return url, ""
-
-    ext = extension_from_path(prefix)
-    prefix = strip_extension(prefix)
-    suffix = suf + suffix
-    if ext is None:
-        ext = ""
-
-    return prefix, ext, suffix
-
-
-def strip_version_suffixes(path_or_url: str) -> str:
-    """Some tarballs contain extraneous information after the version:
-
-        * ``bowtie2-2.2.5-source``
-        * ``libevent-2.0.21-stable``
-        * ``cuda_8.0.44_linux.run``
-
-    These strings are not part of the version number and should be ignored.
-    This function strips those suffixes off and returns the remaining string.
-    The goal is that the version is always the last thing in ``path``:
-
-        * ``bowtie2-2.2.5``
-        * ``libevent-2.0.21``
-        * ``cuda_8.0.44``
-
-    Args:
-        path_or_url: The filename or URL for the package
-
-    Returns:
-        The ``path`` with any extraneous suffixes removed
-    """
-    # NOTE: This could be done with complicated regexes in parse_version_offset
-    # NOTE: The problem is that we would have to add these regexes to the end
-    # NOTE: of every single version regex. Easier to just strip them off
-    # NOTE: permanently
-
-    suffix_regexes = [
-        # Download type
-        r"[Ii]nstall",
-        r"all",
-        r"code",
-        r"[Ss]ources?",
-        r"file",
-        r"full",
-        r"single",
-        r"with[a-zA-Z_-]+",
-        r"rock",
-        r"src(_0)?",
-        r"public",
-        r"bin",
-        r"binary",
-        r"run",
-        r"[Uu]niversal",
-        r"jar",
-        r"complete",
-        r"dynamic",
-        r"oss",
-        r"gem",
-        r"tar",
-        r"sh",
-        # Download version
-        r"release",
-        r"bin",
-        r"stable",
-        r"[Ff]inal",
-        r"rel",
-        r"orig",
-        r"dist",
-        r"\+",
-        # License
-        r"gpl",
-        # Arch
-        # Needs to come before and after OS, appears in both orders
-        r"ia32",
-        r"intel",
-        r"amd64",
-        r"linux64",
-        r"x64",
-        r"64bit",
-        r"x86[_-]64",
-        r"i586_64",
-        r"x86",
-        r"i[36]86",
-        r"ppc64(le)?",
-        r"armv?(7l|6l|64)",
-        # Other
-        r"cpp",
-        r"gtk",
-        r"incubating",
-        # OS
-        r"[Ll]inux(_64)?",
-        r"LINUX",
-        r"[Uu]ni?x",
-        r"[Ss]un[Oo][Ss]",
-        r"[Mm]ac[Oo][Ss][Xx]?",
-        r"[Oo][Ss][Xx]",
-        r"[Dd]arwin(64)?",
-        r"[Aa]pple",
-        r"[Ww]indows",
-        r"[Ww]in(64|32)?",
-        r"[Cc]ygwin(64|32)?",
-        r"[Mm]ingw",
-        r"centos",
-        # Arch
-        # Needs to come before and after OS, appears in both orders
-        r"ia32",
-        r"intel",
-        r"amd64",
-        r"linux64",
-        r"x64",
-        r"64bit",
-        r"x86[_-]64",
-        r"i586_64",
-        r"x86",
-        r"i[36]86",
-        r"ppc64(le)?",
-        r"armv?(7l|6l|64)?",
-        # PyPI
-        r"[._-]py[23].*\.whl",
-        r"[._-]cp[23].*\.whl",
-        r"[._-]win.*\.exe",
-    ]
-
-    for regex in suffix_regexes:
-        # Remove the suffix from the end of the path
-        # This may be done multiple times
-        path_or_url = re.sub(r"[._-]?" + regex + "$", "", path_or_url)
-
-    return path_or_url
-
-
-def expand_contracted_extension(extension: str) -> str:
-    """Returns the expanded version of a known contracted extension.
-
-    This function maps extensions like ".tgz" to ".tar.gz". On unknown extensions,
-    return the input unmodified.
-    """
-    extension = extension.strip(".")
-    return CONTRACTION_MAP.get(extension, extension)
-
-
-def expand_contracted_extension_in_path(
-    path_or_url: str, *, extension: Optional[str] = None
-) -> str:
-    """Returns the input path or URL with any contraction extension expanded.
-
-    Args:
-        path_or_url: path or URL to be expanded
-        extension: if specified, only attempt to expand that extension
-    """
-    extension = extension or extension_from_path(path_or_url)
-    if extension is None:
-        return path_or_url
-
-    expanded = expand_contracted_extension(extension)
-    if expanded != extension:
-        return re.sub(rf"{extension}", rf"{expanded}", path_or_url)
-    return path_or_url
-
-
-def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
-    """Returns compression extension for a compressed archive"""
-    extension = expand_contracted_extension(extension)
-    for ext in [*EXTENSIONS]:
-        if ext in extension:
-            return ext
-    return None
-
-
-def strip_compression_extension(path_or_url: str, ext: Optional[str] = None) -> str:
-    """Strips the compression extension from the input, and returns it. For instance,
-    "foo.tgz" becomes "foo.tar".
-
-    If no extension is given, try a default list of extensions.
-
-    Args:
-        path_or_url: input to be stripped
-        ext: if given, extension to be stripped
-    """
-    if not extension_from_path(path_or_url):
-        return path_or_url
-
-    expanded_path = expand_contracted_extension_in_path(path_or_url)
-    candidates = [ext] if ext is not None else EXTENSIONS
-    for current_extension in candidates:
-        modified_path = check_and_remove_ext(expanded_path, extension=current_extension)
-        if modified_path != expanded_path:
-            return modified_path
-    return expanded_path
-
-
-def allowed_archive(path_or_url: str) -> bool:
-    """Returns true if the input is a valid archive, False otherwise."""
-    return (
-        False if not path_or_url else any(path_or_url.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
-    )
-
-
-def determine_url_file_extension(path: str) -> str:
-    """This returns the type of archive a URL refers to. This is
-    sometimes confusing because of URLs like:
-
-        (1) https://github.com/petdance/ack/tarball/1.93_02
-
-    Where the URL doesn't actually contain the filename. We need
-    to know what type it is so that we can appropriately name files
-    in mirrors.
-    """
-    match = re.search(r"github.com/.+/(zip|tar)ball/", path)
-    if match:
-        if match.group(1) == "zip":
-            return "zip"
-        elif match.group(1) == "tar":
-            return "tar.gz"
-
-    prefix, ext, suffix = split_url_extension(path)
-    return ext
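The examples embedded in the docstrings of this deleted module can be checked directly against its code; a short demo, using only results the module itself documents:

    url = "https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true"
    print(split_url_extension(url))
    # ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')

    print(strip_version_suffixes("cuda_8.0.44_linux.run"))  # 'cuda_8.0.44'
    print(expand_contracted_extension("tgz"))               # 'tar.gz'
    print(strip_compression_extension("foo.tgz"))           # 'foo.tar'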
lib/spack/llnl/util/argparsewriter.py
@@ -3,42 +3,33 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-import abc
+from __future__ import print_function
 
 import argparse
+import errno
 import io
 import re
 import sys
-from argparse import ArgumentParser
-from typing import IO, Any, Iterable, List, Optional, Sequence, Tuple, Union
 
 
-class Command:
+class Command(object):
     """Parsed representation of a command from argparse.
 
-    This is a single command from an argparse parser. ``ArgparseWriter`` creates these and returns
-    them from ``parse()``, and it passes one of these to each call to ``format()`` so that we can
-    take an action for a single command.
+    This is a single command from an argparse parser. ``ArgparseWriter``
+    creates these and returns them from ``parse()``, and it passes one of
+    these to each call to ``format()`` so that we can take an action for
+    a single command.
+
+    Parts of a Command:
+      - prog: command name (str)
+      - description: command description (str)
+      - usage: command usage (str)
+      - positionals: list of positional arguments (list)
+      - optionals: list of optional arguments (list)
+      - subcommands: list of subcommand parsers (list)
     """
 
-    def __init__(
-        self,
-        prog: str,
-        description: Optional[str],
-        usage: str,
-        positionals: List[Tuple[str, Optional[Iterable[Any]], Union[int, str, None], str]],
-        optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
-        subcommands: List[Tuple[ArgumentParser, str, str]],
-    ) -> None:
-        """Initialize a new Command instance.
-
-        Args:
-            prog: Program name.
-            description: Command description.
-            usage: Command usage.
-            positionals: List of positional arguments.
-            optionals: List of optional arguments.
-            subcommands: List of subcommand parsers.
-        """
+    def __init__(self, prog, description, usage, positionals, optionals, subcommands):
         self.prog = prog
         self.description = description
         self.usage = usage
@@ -47,34 +38,35 @@ def __init__(
         self.subcommands = subcommands
 
 
-# NOTE: The only reason we subclass argparse.HelpFormatter is to get access to self._expand_help(),
-# ArgparseWriter is not intended to be used as a formatter_class.
-class ArgparseWriter(argparse.HelpFormatter, abc.ABC):
-    """Analyze an argparse ArgumentParser for easy generation of help."""
+# NOTE: The only reason we subclass argparse.HelpFormatter is to get access
+# to self._expand_help(), ArgparseWriter is not intended to be used as a
+# formatter_class.
+class ArgparseWriter(argparse.HelpFormatter):
+    """Analyzes an argparse ArgumentParser for easy generation of help."""
 
-    def __init__(self, prog: str, out: IO = sys.stdout, aliases: bool = False) -> None:
-        """Initialize a new ArgparseWriter instance.
+    def __init__(self, prog, out=None, aliases=False):
+        """Initializes a new ArgparseWriter instance.
 
-        Args:
-            prog: Program name.
-            out: File object to write to.
-            aliases: Whether or not to include subparsers for aliases.
+        Parameters:
+            prog (str): the program name
+            out (file object): the file to write to (default sys.stdout)
+            aliases (bool): whether or not to include subparsers for aliases
        """
-        super().__init__(prog)
+        super(ArgparseWriter, self).__init__(prog)
         self.level = 0
         self.prog = prog
-        self.out = out
+        self.out = sys.stdout if out is None else out
         self.aliases = aliases
 
-    def parse(self, parser: ArgumentParser, prog: str) -> Command:
-        """Parse the parser object and return the relavent components.
+    def parse(self, parser, prog):
+        """Parses the parser object and returns the relavent components.
 
-        Args:
-            parser: Command parser.
-            prog: Program name.
+        Parameters:
+            parser (argparse.ArgumentParser): the parser
+            prog (str): the command name
 
         Returns:
-            Information about the command from the parser.
+            (Command) information about the command from the parser
         """
         self.parser = parser
 
@@ -88,7 +80,8 @@ def parse(self, parser: ArgumentParser, prog: str) -> Command:
         groups = parser._mutually_exclusive_groups
         usage = fmt._format_usage(None, actions, groups, "").strip()
 
-        # Go through actions and split them into optionals, positionals, and subcommands
+        # Go through actions and split them into optionals, positionals,
+        # and subcommands
         optionals = []
         positionals = []
         subcommands = []
@@ -96,97 +89,74 @@ def parse(self, parser: ArgumentParser, prog: str) -> Command:
             if action.option_strings:
                 flags = action.option_strings
                 dest_flags = fmt._format_action_invocation(action)
-                nargs = action.nargs
-                help = (
-                    self._expand_help(action)
-                    if action.help and action.help != argparse.SUPPRESS
-                    else ""
-                )
-                help = help.split("\n")[0]
-
-                if action.choices is not None:
-                    dest = [str(choice) for choice in action.choices]
-                else:
-                    dest = [action.dest]
-
-                optionals.append((flags, dest, dest_flags, nargs, help))
+                help = self._expand_help(action) if action.help else ""
+                help = help.replace("\n", " ")
+                optionals.append((flags, dest_flags, help))
             elif isinstance(action, argparse._SubParsersAction):
                 for subaction in action._choices_actions:
                     subparser = action._name_parser_map[subaction.dest]
-                    help = (
-                        self._expand_help(subaction)
-                        if subaction.help and action.help != argparse.SUPPRESS
-                        else ""
-                    )
-                    help = help.split("\n")[0]
-                    subcommands.append((subparser, subaction.dest, help))
+                    subcommands.append((subparser, subaction.dest))
 
                     # Look for aliases of the form 'name (alias, ...)'
-                    if self.aliases and isinstance(subaction.metavar, str):
+                    if self.aliases:
                         match = re.match(r"(.*) \((.*)\)", subaction.metavar)
                         if match:
                             aliases = match.group(2).split(", ")
                             for alias in aliases:
                                 subparser = action._name_parser_map[alias]
-                                help = (
-                                    self._expand_help(subaction)
-                                    if subaction.help and action.help != argparse.SUPPRESS
-                                    else ""
-                                )
-                                help = help.split("\n")[0]
-                                subcommands.append((subparser, alias, help))
+                                subcommands.append((subparser, alias))
             else:
                 args = fmt._format_action_invocation(action)
-                help = (
-                    self._expand_help(action)
-                    if action.help and action.help != argparse.SUPPRESS
-                    else ""
-                )
-                help = help.split("\n")[0]
-                positionals.append((args, action.choices, action.nargs, help))
+                help = self._expand_help(action) if action.help else ""
+                help = help.replace("\n", " ")
+                positionals.append((args, help))
 
         return Command(prog, description, usage, positionals, optionals, subcommands)
 
-    @abc.abstractmethod
-    def format(self, cmd: Command) -> str:
-        """Return the string representation of a single node in the parser tree.
+    def format(self, cmd):
+        """Returns the string representation of a single node in the
+        parser tree.
 
-        Override this in subclasses to define how each subcommand should be displayed.
+        Override this in subclasses to define how each subcommand
+        should be displayed.
 
-        Args:
-            cmd: Parsed information about a command or subcommand.
+        Parameters:
+            (Command): parsed information about a command or subcommand
 
         Returns:
-            String representation of this subcommand.
+            str: the string representation of this subcommand
         """
+        raise NotImplementedError
 
-    def _write(self, parser: ArgumentParser, prog: str, level: int = 0) -> None:
-        """Recursively write a parser.
+    def _write(self, parser, prog, level=0):
+        """Recursively writes a parser.
 
-        Args:
-            parser: Command parser.
-            prog: Program name.
-            level: Current level.
+        Parameters:
+            parser (argparse.ArgumentParser): the parser
+            prog (str): the command name
+            level (int): the current level
         """
         self.level = level
 
         cmd = self.parse(parser, prog)
         self.out.write(self.format(cmd))
 
-        for subparser, prog, help in cmd.subcommands:
+        for subparser, prog in cmd.subcommands:
             self._write(subparser, prog, level=level + 1)
 
-    def write(self, parser: ArgumentParser) -> None:
+    def write(self, parser):
         """Write out details about an ArgumentParser.
 
         Args:
-            parser: Command parser.
+            parser (argparse.ArgumentParser): the parser
         """
         try:
             self._write(parser, self.prog)
-        except BrokenPipeError:
+        except IOError as e:
             # Swallow pipe errors
-            pass
+            # Raises IOError in Python 2 and BrokenPipeError in Python 3
+            if e.errno != errno.EPIPE:
+                raise
 
 
 _rst_levels = ["=", "-", "^", "~", ":", "`"]
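Either side of this hunk is driven the same way: parse() flattens an argparse parser into a Command record, and format() turns one record into text, so a concrete writer is just a subclass with a format() method. A sketch against the type-hinted (left-hand) variant; SimpleListWriter is a name invented for this example:

    import argparse

    from llnl.util.argparsewriter import ArgparseWriter, Command


    class SimpleListWriter(ArgparseWriter):
        # One line per (sub)command: its name and usage string.
        def format(self, cmd: Command) -> str:
            return "{0} -- {1}\n".format(cmd.prog, cmd.usage)


    parser = argparse.ArgumentParser(prog="demo")
    sub = parser.add_subparsers(metavar="COMMAND")
    sub.add_parser("build", help="build something")
    SimpleListWriter("demo").write(parser)  # visits 'demo', then 'demo build'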
@@ -195,33 +165,21 @@ def write(self, parser: ArgumentParser) -> None:
 class ArgparseRstWriter(ArgparseWriter):
     """Write argparse output as rst sections."""
 
-    def __init__(
-        self,
-        prog: str,
-        out: IO = sys.stdout,
-        aliases: bool = False,
-        rst_levels: Sequence[str] = _rst_levels,
-    ) -> None:
-        """Initialize a new ArgparseRstWriter instance.
+    def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
+        """Create a new ArgparseRstWriter.
 
-        Args:
-            prog: Program name.
-            out: File object to write to.
-            aliases: Whether or not to include subparsers for aliases.
-            rst_levels: List of characters for rst section headings.
+        Parameters:
+            prog (str): program name
+            out (file object): file to write to
+            aliases (bool): whether or not to include subparsers for aliases
+            rst_levels (list of str): list of characters
+                for rst section headings
         """
-        super().__init__(prog, out, aliases)
+        out = sys.stdout if out is None else out
+        super(ArgparseRstWriter, self).__init__(prog, out, aliases)
         self.rst_levels = rst_levels
 
-    def format(self, cmd: Command) -> str:
-        """Return the string representation of a single node in the parser tree.
-
-        Args:
-            cmd: Parsed information about a command or subcommand.
-
-        Returns:
-            String representation of a node.
-        """
+    def format(self, cmd):
         string = io.StringIO()
         string.write(self.begin_command(cmd.prog))
 
@@ -232,13 +190,13 @@ def format(self, cmd: Command) -> str:
 
         if cmd.positionals:
             string.write(self.begin_positionals())
-            for args, choices, nargs, help in cmd.positionals:
+            for args, help in cmd.positionals:
                 string.write(self.positional(args, help))
             string.write(self.end_positionals())
 
         if cmd.optionals:
             string.write(self.begin_optionals())
-            for flags, dest, dest_flags, nargs, help in cmd.optionals:
+            for flags, dest_flags, help in cmd.optionals:
                 string.write(self.optional(dest_flags, help))
             string.write(self.end_optionals())
 
@@ -247,15 +205,7 @@ def format(self, cmd: Command) -> str:
 
         return string.getvalue()
 
-    def begin_command(self, prog: str) -> str:
-        """Text to print before a command.
-
-        Args:
-            prog: Program name.
-
-        Returns:
-            Text before a command.
-        """
+    def begin_command(self, prog):
         return """
 ----
 
@@ -268,26 +218,10 @@ def begin_command(self, prog: str) -> str:
             prog.replace(" ", "-"), prog, self.rst_levels[self.level] * len(prog)
         )
 
-    def description(self, description: str) -> str:
-        """Description of a command.
-
-        Args:
-            description: Command description.
-
-        Returns:
-            Description of a command.
-        """
+    def description(self, description):
         return description + "\n\n"
 
-    def usage(self, usage: str) -> str:
-        """Example usage of a command.
-
-        Args:
-            usage: Command usage.
-
-        Returns:
-            Usage of a command.
-        """
+    def usage(self, usage):
         return """\
 .. code-block:: console
 
@@ -297,24 +231,10 @@ def usage(self, usage: str) -> str:
             usage
         )
 
-    def begin_positionals(self) -> str:
-        """Text to print before positional arguments.
-
-        Returns:
-            Positional arguments header.
-        """
+    def begin_positionals(self):
         return "\n**Positional arguments**\n\n"
 
-    def positional(self, name: str, help: str) -> str:
-        """Description of a positional argument.
-
-        Args:
-            name: Argument name.
-            help: Help text.
-
-        Returns:
-            Positional argument description.
-        """
+    def positional(self, name, help):
         return """\
 {0}
   {1}
@@ -323,32 +243,13 @@ def positional(self, name: str, help: str) -> str:
             name, help
         )
 
-    def end_positionals(self) -> str:
-        """Text to print after positional arguments.
-
-        Returns:
-            Positional arguments footer.
-        """
+    def end_positionals(self):
         return ""
 
-    def begin_optionals(self) -> str:
-        """Text to print before optional arguments.
-
-        Returns:
-            Optional arguments header.
-        """
+    def begin_optionals(self):
         return "\n**Optional arguments**\n\n"
 
-    def optional(self, opts: str, help: str) -> str:
-        """Description of an optional argument.
-
-        Args:
-            opts: Optional argument.
-            help: Help text.
-
-        Returns:
-            Optional argument description.
-        """
+    def optional(self, opts, help):
         return """\
 ``{0}``
   {1}
@@ -357,23 +258,10 @@ def optional(self, opts: str, help: str) -> str:
             opts, help
         )
 
-    def end_optionals(self) -> str:
-        """Text to print after optional arguments.
-
-        Returns:
-            Optional arguments footer.
-        """
+    def end_optionals(self):
         return ""
 
-    def begin_subcommands(self, subcommands: List[Tuple[ArgumentParser, str, str]]) -> str:
-        """Table with links to other subcommands.
-
-        Arguments:
-            subcommands: List of subcommands.
-
-        Returns:
-            Subcommand linking text.
-        """
+    def begin_subcommands(self, subcommands):
         string = """
 **Subcommands**
 
@@ -382,8 +270,116 @@ def begin_subcommands(self, subcommands: List[Tuple[ArgumentParser, str, str]])
 
 """
 
-        for cmd, _, _ in subcommands:
+        for cmd, _ in subcommands:
             prog = re.sub(r"^[^ ]* ", "", cmd.prog)
             string += "   * :ref:`{0} <{1}>`\n".format(prog, cmd.prog.replace(" ", "-"))
 
         return string + "\n"
+
+
+class ArgparseCompletionWriter(ArgparseWriter):
+    """Write argparse output as shell programmable tab completion functions."""
+
+    def format(self, cmd):
+        """Returns the string representation of a single node in the
+        parser tree.
+
+        Override this in subclasses to define how each subcommand
+        should be displayed.
+
+        Parameters:
+            (Command): parsed information about a command or subcommand
+
+        Returns:
+            str: the string representation of this subcommand
+        """
+
+        assert cmd.optionals  # we should always at least have -h, --help
+        assert not (cmd.positionals and cmd.subcommands)  # one or the other
+
+        # We only care about the arguments/flags, not the help messages
+        positionals = []
+        if cmd.positionals:
+            positionals, _ = zip(*cmd.positionals)
+        optionals, _, _ = zip(*cmd.optionals)
+        subcommands = []
+        if cmd.subcommands:
+            _, subcommands = zip(*cmd.subcommands)
+
+        # Flatten lists of lists
+        optionals = [x for xx in optionals for x in xx]
+
+        return (
+            self.start_function(cmd.prog)
+            + self.body(positionals, optionals, subcommands)
+            + self.end_function(cmd.prog)
+        )
+
+    def start_function(self, prog):
+        """Returns the syntax needed to begin a function definition.
+
+        Parameters:
+            prog (str): the command name
+
+        Returns:
+            str: the function definition beginning
+        """
+        name = prog.replace("-", "_").replace(" ", "_")
+        return "\n_{0}() {{".format(name)
+
+    def end_function(self, prog=None):
+        """Returns the syntax needed to end a function definition.
+
+        Parameters:
+            prog (str or None): the command name
+
+        Returns:
+            str: the function definition ending
+        """
+        return "}\n"
+
+    def body(self, positionals, optionals, subcommands):
+        """Returns the body of the function.
+
+        Parameters:
+            positionals (list): list of positional arguments
+            optionals (list): list of optional arguments
+            subcommands (list): list of subcommand parsers
+
+        Returns:
+            str: the function body
+        """
+        return ""
+
+    def positionals(self, positionals):
+        """Returns the syntax for reporting positional arguments.
+
+        Parameters:
+            positionals (list): list of positional arguments
+
+        Returns:
+            str: the syntax for positional arguments
+        """
+        return ""
+
+    def optionals(self, optionals):
+        """Returns the syntax for reporting optional flags.
+
+        Parameters:
+            optionals (list): list of optional arguments
+
+        Returns:
+            str: the syntax for optional flags
+        """
+        return ""
+
+    def subcommands(self, subcommands):
+        """Returns the syntax for reporting subcommands.
+
+        Parameters:
+            subcommands (list): list of subcommand parsers
+
+        Returns:
+            str: the syntax for subcommand parsers
+        """
+        return ""
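For reference, both versions of the rst writer above are used the same way: hand it a parser and it emits one rst section per command, built from the begin_/end_ hooks shown in the hunks. A short usage sketch:

    import argparse
    import io

    from llnl.util.argparsewriter import ArgparseRstWriter

    parser = argparse.ArgumentParser(prog="demo", description="demo CLI")
    parser.add_argument("target", help="what to operate on")

    out = io.StringIO()
    ArgparseRstWriter("demo", out).write(parser)
    print(out.getvalue())  # heading, usage code block, argument lists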
lib/spack/llnl/util/filesystem.py
@@ -11,7 +11,6 @@
 import itertools
 import numbers
 import os
-import pathlib
 import posixpath
 import re
 import shutil
@@ -19,17 +18,14 @@
 import sys
 import tempfile
 from contextlib import contextmanager
-from itertools import accumulate
 from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
 
-import llnl.util.symlink
 from llnl.util import tty
 from llnl.util.lang import dedupe, memoized
-from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
+from llnl.util.symlink import islink, symlink
 
 from spack.util.executable import Executable, which
-from ..path import path_to_os_path, system_path_filter
+from spack.util.path import path_to_os_path, system_path_filter
 
 if sys.platform != "win32":
     import grp
@@ -105,7 +101,7 @@ def _nop(args, ns=None, follow_symlinks=None):
         pass
 
     # follow symlinks (aka don't not follow symlinks)
-    follow = follow_symlinks or not (islink(src) and islink(dst))
+    follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
     if follow:
         # use the real function if it exists
         def lookup(name):
@@ -173,7 +169,7 @@ def rename(src, dst):
     if sys.platform == "win32":
         # Windows path existence checks will sometimes fail on junctions/links/symlinks
         # so check for that case
-        if os.path.exists(dst) or islink(dst):
+        if os.path.exists(dst) or os.path.islink(dst):
             os.remove(dst)
     os.rename(src, dst)
@@ -337,7 +333,8 @@ def groupid_to_group(x):
 
     if string:
         regex = re.escape(regex)
-    for filename in path_to_os_path(*filenames):
+    filenames = path_to_os_path(*filenames)
+    for filename in filenames:
         msg = 'FILTER FILE: {0} [replacing "{1}"]'
         tty.debug(msg.format(filename, regex))
 
@@ -405,7 +402,7 @@ def groupid_to_group(x):
     os.remove(backup_filename)
 
 
-class FileFilter:
+class FileFilter(object):
     """Convenience class for calling ``filter_file`` a lot."""
 
     def __init__(self, *filenames):
@@ -569,7 +566,7 @@ def set_install_permissions(path):
    # If this points to a file maintained in a Spack prefix, it is assumed that
    # this function will be invoked on the target. If the file is outside a
    # Spack-maintained prefix, the permissions should not be modified.
-    if islink(path):
+    if os.path.islink(path):
        return
    if os.path.isdir(path):
        os.chmod(path, 0o755)
@@ -613,8 +610,6 @@ def chgrp(path, group, follow_symlinks=True):
         gid = grp.getgrnam(group).gr_gid
     else:
         gid = group
-    if os.stat(path).st_gid == gid:
-        return
     if follow_symlinks:
         os.chown(path, -1, gid)
     else:
@@ -638,7 +633,7 @@ def chmod_x(entry, perms):
 @system_path_filter
 def copy_mode(src, dest):
     """Set the mode of dest to that of src unless it is a link."""
-    if islink(dest):
+    if os.path.islink(dest):
         return
     src_mode = os.stat(src).st_mode
     dest_mode = os.stat(dest).st_mode
@@ -724,12 +719,26 @@ def install(src, dest):
     copy(src, dest, _permissions=True)
 
 
+@system_path_filter
+def resolve_link_target_relative_to_the_link(link):
+    """
+    os.path.isdir uses os.path.exists, which for links will check
+    the existence of the link target. If the link target is relative to
+    the link, we need to construct a pathname that is valid from
+    our cwd (which may not be the same as the link's directory)
+    """
+    target = os.readlink(link)
+    if os.path.isabs(target):
+        return target
+    link_dir = os.path.dirname(os.path.abspath(link))
+    return os.path.join(link_dir, target)
+
+
 @system_path_filter
 def copy_tree(
     src: str,
     dest: str,
     symlinks: bool = True,
-    allow_broken_symlinks: bool = sys.platform != "win32",
     ignore: Optional[Callable[[str], bool]] = None,
     _permissions: bool = False,
 ):
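The helper reintroduced above exists because os.readlink returns the target text verbatim, and for a relative symlink that text is only meaningful from the link's own directory. A self-contained check of the same resolution rule (POSIX only, since it creates a symlink):

    import os
    import tempfile

    tmp = tempfile.mkdtemp()
    os.makedirs(os.path.join(tmp, "sub"))
    open(os.path.join(tmp, "sub", "real.txt"), "w").close()
    link = os.path.join(tmp, "sub", "alias.txt")
    os.symlink("real.txt", link)  # target is relative to the link's directory

    target = os.readlink(link)  # "real.txt", not resolvable from an arbitrary cwd
    if not os.path.isabs(target):
        target = os.path.join(os.path.dirname(os.path.abspath(link)), target)
    assert os.path.exists(target)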
@@ -752,8 +761,6 @@ copy_tree(
         src (str): the directory to copy
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
-        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
-            On Windows, setting this to True will raise an exception. Defaults to true on unix.
         ignore (typing.Callable): function indicating which files to ignore
         _permissions (bool): for internal use only
 
@@ -761,8 +768,6 @@ copy_tree(
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    if allow_broken_symlinks and sys.platform == "win32":
-        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
     if _permissions:
         tty.debug("Installing {0} to {1}".format(src, dest))
     else:
@@ -776,11 +781,6 @@ copy_tree(
     if not files:
         raise IOError("No such file or directory: '{0}'".format(src))
 
-    # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
-    # all symlinks to this list while traversing the tree, then when finished, make all
-    # symlinks at the end.
-    links = []
-
     for src in files:
         abs_src = os.path.abspath(src)
         if not abs_src.endswith(os.path.sep):
@@ -803,7 +803,7 @@ copy_tree(
             ignore=ignore,
             follow_nonexisting=True,
         ):
-            if islink(s):
+            if os.path.islink(s):
                 link_target = resolve_link_target_relative_to_the_link(s)
                 if symlinks:
                     target = os.readlink(s)
@@ -817,9 +817,7 @@ def escaped_path(path):
                         tty.debug("Redirecting link {0} to {1}".format(target, new_target))
                         target = new_target
 
-                    links.append((target, d, s))
-                    continue
-
+                    symlink(target, d)
                 elif os.path.isdir(link_target):
                     mkdirp(d)
                 else:
@@ -834,17 +832,9 @@ def escaped_path(path):
                 set_install_permissions(d)
                 copy_mode(s, d)
 
-    for target, d, s in links:
-        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
-        if _permissions:
-            set_install_permissions(d)
-            copy_mode(s, d)
-
 
 @system_path_filter
-def install_tree(
-    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
-):
+def install_tree(src, dest, symlinks=True, ignore=None):
     """Recursively install an entire directory tree rooted at *src*.
 
     Same as :py:func:`copy_tree` with the addition of setting proper
@@ -855,21 +845,12 @@ def install_tree(
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
         ignore (typing.Callable): function indicating which files to ignore
-        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
-            On Windows, setting this to True will raise an exception.
 
     Raises:
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    copy_tree(
-        src,
-        dest,
-        symlinks=symlinks,
-        allow_broken_symlinks=allow_broken_symlinks,
-        ignore=ignore,
-        _permissions=True,
-    )
+    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
 
 
 @system_path_filter
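On both sides install_tree stays a thin wrapper: it is copy_tree with _permissions=True, that is, the same traversal plus set_install_permissions and copy_mode on each copied entry. A typical call, with made-up paths:

    from llnl.util.filesystem import install_tree

    # Copy a staged tree into an install prefix, preserving symlinks and
    # fixing up permissions along the way.
    install_tree("/tmp/stage/demo", "/tmp/prefix/share/demo", symlinks=True)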
@@ -1273,12 +1254,7 @@ def traverse_tree(
     Keyword Arguments:
         order (str): Whether to do pre- or post-order traversal. Accepted
             values are 'pre' and 'post'
-        ignore (typing.Callable): function indicating which files to ignore. This will also
-            ignore symlinks if they point to an ignored file (regardless of whether the symlink
-            is explicitly ignored); note this only supports one layer of indirection (i.e. if
-            you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
-            but not x). To avoid this, make sure the ignore function also ignores the symlink
-            paths too.
+        ignore (typing.Callable): function indicating which files to ignore
         follow_nonexisting (bool): Whether to descend into directories in
             ``src`` that do not exit in ``dest``. Default is True
         follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1305,24 +1281,11 @@ def traverse_tree(
         dest_child = os.path.join(dest_path, f)
         rel_child = os.path.join(rel_path, f)
 
-        # If the source path is a link and the link's source is ignored, then ignore the link too,
-        # but only do this if the ignore is defined.
-        if ignore is not None:
-            if islink(source_child) and not follow_links:
-                target = readlink(source_child)
-                all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
-                if any(map(ignore, all_parents)):
-                    tty.warn(
-                        f"Skipping {source_path} because the source or a part of the source's "
-                        f"path is included in the ignores."
-                    )
-                    continue
-
         # Treat as a directory
         # TODO: for symlinks, os.path.isdir looks for the link target. If the
         # target is relative to the link, then that may not resolve properly
         # relative to our cwd - see resolve_link_target_relative_to_the_link
-        if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
+        if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
             # When follow_nonexisting isn't set, don't descend into dirs
             # in source that do not exist in dest
             if follow_nonexisting or os.path.exists(dest_child):
@@ -1348,11 +1311,7 @@ def traverse_tree(
 
 def lexists_islink_isdir(path):
     """Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
-    number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
-    if sys.platform == "win32":
-        if not os.path.lexists(path):
-            return False, False, False
-        return os.path.lexists(path), islink(path), os.path.isdir(path)
+    number of stat calls."""
     # First try to lstat, so we know if it's a link or not.
     try:
         lst = os.lstat(path)
@@ -1377,7 +1336,7 @@ def lexists_islink_isdir(path):
     return True, is_link, is_dir
 
 
-class BaseDirectoryVisitor:
+class BaseDirectoryVisitor(object):
     """Base class and interface for :py:func:`visit_directory_tree`."""
 
     def visit_file(self, root, rel_path, depth):
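The unix path kept in lexists_islink_isdir answers all three questions from a single lstat: success means the path lexists, S_ISLNK on the returned mode says whether it is a link, and only links need a second, following stat to classify their target. The same logic as a standalone sketch:

    import os
    import stat

    def lexists_islink_isdir(path):
        try:
            lst = os.lstat(path)  # one syscall: existence plus link-ness
        except OSError:
            return False, False, False
        if not stat.S_ISLNK(lst.st_mode):
            return True, False, stat.S_ISDIR(lst.st_mode)
        try:
            # Links need one more (following) stat to classify the target.
            return True, True, stat.S_ISDIR(os.stat(path).st_mode)
        except OSError:
            return True, True, False  # dangling link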
@@ -1567,7 +1526,7 @@ def remove_if_dead_link(path):
     Parameters:
         path (str): The potential dead link
     """
-    if islink(path) and not os.path.exists(path):
+    if os.path.islink(path) and not os.path.exists(path):
         os.unlink(path)
 
 
@@ -1626,7 +1585,7 @@ def remove_linked_tree(path):
         kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
 
     if os.path.exists(path):
-        if islink(path):
+        if os.path.islink(path):
             shutil.rmtree(os.path.realpath(path), **kwargs)
             os.unlink(path)
         else:
@@ -1793,14 +1752,9 @@ def find(root, files, recursive=True):
         files = [files]
 
     if recursive:
-        tty.debug(f"Find (recursive): {root} {str(files)}")
-        result = _find_recursive(root, files)
+        return _find_recursive(root, files)
     else:
-        tty.debug(f"Find (not recursive): {root} {str(files)}")
-        result = _find_non_recursive(root, files)
-
-    tty.debug(f"Find complete: {root} {str(files)}")
-    return result
+        return _find_non_recursive(root, files)
 
 
 @system_path_filter
@@ -1936,7 +1890,7 @@ class HeaderList(FileList):
     include_regex = re.compile(r"(.*?)(\binclude\b)(.*)")
 
     def __init__(self, files):
-        super().__init__(files)
+        super(HeaderList, self).__init__(files)
 
         self._macro_definitions = []
         self._directories = None
@@ -1962,7 +1916,7 @@ def _default_directories(self):
         """Default computation of directories based on the list of
         header files.
         """
-        dir_list = super().directories
+        dir_list = super(HeaderList, self).directories
         values = []
         for d in dir_list:
             # If the path contains a subdirectory named 'include' then stop
@@ -2398,7 +2352,7 @@ def find_all_libraries(root, recursive=False):
     )
 
 
-class WindowsSimulatedRPath:
+class WindowsSimulatedRPath(object):
     """Class representing Windows filesystem rpath analog
 
     One instance of this class is associated with a package (only on Windows)
@@ -2427,7 +2381,7 @@ def library_dependents(self):
         """
         Set of directories where package binaries/libraries are located.
         """
-        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
+        return set([self.pkg.prefix.bin]) | self._additional_library_dependents
 
     def add_library_dependent(self, *dest):
         """
@@ -2440,9 +2394,9 @@ def add_library_dependent(self, *dest):
         """
         for pth in dest:
             if os.path.isfile(pth):
-                self._additional_library_dependents.add(pathlib.Path(pth).parent)
+                self._additional_library_dependents.add(os.path.dirname)
             else:
-                self._additional_library_dependents.add(pathlib.Path(pth))
+                self._additional_library_dependents.add(pth)
 
     @property
     def rpaths(self):
@@ -2455,7 +2409,7 @@ def rpaths(self):
             dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
         for extra_path in self._addl_rpaths:
             dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
-        return set([pathlib.Path(x) for x in dependent_libs])
+        return set(dependent_libs)
 
     def add_rpath(self, *paths):
         """
@@ -2471,7 +2425,7 @@ def add_rpath(self, *paths):
         """
         self._addl_rpaths = self._addl_rpaths | set(paths)
 
-    def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
+    def _link(self, path, dest_dir):
         """Perform link step of simulated rpathing, installing
         simlinks of file in path to the dest_dir
         location. This method deliberately prevents
@@ -2479,35 +2433,27 @@ def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
         This is because it is both meaningless from an rpath
         perspective, and will cause an error when Developer
         mode is not enabled"""
-
-        def report_already_linked():
-            # We have either already symlinked or we are encoutering a naming clash
-            # either way, we don't want to overwrite existing libraries
-            already_linked = islink(str(dest_file))
-            tty.debug(
-                "Linking library %s to %s failed, " % (str(path), str(dest_file))
-                + "already linked."
-                if already_linked
-                else "library with name %s already exists at location %s."
-                % (str(file_name), str(dest_dir))
-            )
-
-        file_name = path.name
-        dest_file = dest_dir / file_name
-        if not dest_file.exists() and dest_dir.exists() and not dest_file == path:
+        file_name = os.path.basename(path)
+        dest_file = os.path.join(dest_dir, file_name)
+        if os.path.exists(dest_dir) and not dest_file == path:
             try:
-                symlink(str(path), str(dest_file))
+                symlink(path, dest_file)
             # For py2 compatibility, we have to catch the specific Windows error code
             # associate with trying to create a file that already exists (winerror 183)
-            # Catch OSErrors missed by the SymlinkError checks
             except OSError as e:
                 if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
-                    report_already_linked()
+                    # We have either already symlinked or we are encoutering a naming clash
+                    # either way, we don't want to overwrite existing libraries
+                    already_linked = islink(dest_file)
+                    tty.debug(
+                        "Linking library %s to %s failed, " % (path, dest_file) + "already linked."
+                        if already_linked
+                        else "library with name %s already exists at location %s."
+                        % (file_name, dest_dir)
+                    )
+                    pass
                 else:
                     raise e
-            # catch errors we raise ourselves from Spack
-            except llnl.util.symlink.AlreadyExistsError:
-                report_already_linked()
 
     def establish_link(self):
         """
@@ -2740,7 +2686,7 @@ def remove_directory_contents(dir):
     """Remove all contents of a directory."""
     if os.path.exists(dir):
         for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
-            if os.path.isfile(entry) or islink(entry):
+            if os.path.isfile(entry) or os.path.islink(entry):
                 os.unlink(entry)
             else:
                 shutil.rmtree(entry)
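The _link hunk above is the same guard in two dialects, pathlib on the left and os.path strings on the right: never overwrite a library that already sits in the destination directory, whether it was linked earlier or merely shares a name. Reduced to its core (link_into is a name invented for this sketch):

    import os

    from llnl.util.symlink import symlink

    def link_into(path, dest_dir):
        # Mirror one library into a dependent's directory, refusing to
        # clobber anything already present under the same name.
        dest_file = os.path.join(dest_dir, os.path.basename(path))
        if os.path.exists(dest_file) or dest_file == path:
            return
        symlink(path, dest_file)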
lib/spack/llnl/util/lang.py
@@ -3,6 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+from __future__ import division
+
 import collections.abc
 import contextlib
 import functools
@@ -766,10 +768,10 @@ def pretty_seconds(seconds):
 
 class RequiredAttributeError(ValueError):
     def __init__(self, message):
-        super().__init__(message)
+        super(RequiredAttributeError, self).__init__(message)
 
 
-class ObjectWrapper:
+class ObjectWrapper(object):
     """Base class that wraps an object. Derived classes can add new behavior
     while staying undercover.
 
@@ -796,7 +798,7 @@ def __init__(self, wrapped_object):
         self.__dict__ = wrapped_object.__dict__
 
 
-class Singleton:
+class Singleton(object):
     """Simple wrapper for lazily initialized singleton objects."""
 
     def __init__(self, factory):
@@ -821,7 +823,7 @@ def __getattr__(self, name):
         # 'instance'/'_instance' to be defined or it will enter an infinite
         # loop, so protect against that here.
         if name in ["_instance", "instance"]:
-            raise AttributeError(f"cannot create {name}")
+            raise AttributeError()
         return getattr(self.instance, name)
 
     def __getitem__(self, name):
@@ -843,6 +845,27 @@ def __repr__(self):
         return repr(self.instance)
 
 
+class LazyReference(object):
+    """Lazily evaluated reference to part of a singleton."""
+
+    def __init__(self, ref_function):
+        self.ref_function = ref_function
+
+    def __getattr__(self, name):
+        if name == "ref_function":
+            raise AttributeError()
+        return getattr(self.ref_function(), name)
+
+    def __getitem__(self, name):
+        return self.ref_function()[name]
+
+    def __str__(self):
+        return str(self.ref_function())
+
+    def __repr__(self):
+        return repr(self.ref_function())
+
+
 def load_module_from_file(module_name, module_path):
     """Loads a python module from the path of the corresponding file.
 
@@ -920,7 +943,7 @@ def _wrapper(args):
     return _wrapper
 
 
-class Devnull:
+class Devnull(object):
     """Null stream with less overhead than ``os.devnull``.
 
     See https://stackoverflow.com/a/2929954.
@@ -1037,7 +1060,7 @@ def __str__(self):
         return str(self.data)
 
 
-class GroupedExceptionHandler:
+class GroupedExceptionHandler(object):
     """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""
 
     def __init__(self):
@@ -1068,7 +1091,7 @@ def grouped_message(self, with_tracebacks: bool = True) -> str:
         return "due to the following failures:\n{0}".format("\n".join(each_exception_message))
 
 
-class GroupedExceptionForwarder:
+class GroupedExceptionForwarder(object):
     """A contextmanager to capture exceptions and forward them to a
     GroupedExceptionHandler."""
 
@@ -1088,7 +1111,7 @@ def __exit__(self, exc_type, exc_value, tb):
         return True
 
 
-class classproperty:
+class classproperty(object):
     """Non-data descriptor to evaluate a class-level property. The function that performs
     the evaluation is injected at creation time and take an instance (could be None) and
     an owner (i.e. the class that originated the instance)
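LazyReference, added on the older (right-hand) side above, composes with Singleton: the singleton defers building the object until first use, and a lazy reference defers even the lookup into it until each access. A small sketch against that right-hand module:

    from llnl.util.lang import LazyReference, Singleton

    config = Singleton(lambda: {"verbose": True})  # factory runs on first access
    verbose = LazyReference(lambda: config["verbose"])

    print(config["verbose"])  # builds the dict here
    print(str(verbose))       # re-evaluates the reference on every use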
lib/spack/llnl/util/link_tree.py
@@ -5,6 +5,8 @@
 
 """LinkTree class for setting up trees of symbolic links."""
 
+from __future__ import print_function
+
 import filecmp
 import os
 import shutil
@@ -285,7 +287,7 @@ def visit_symlinked_file(self, root, rel_path, depth):
         self.visit_file(root, rel_path, depth)
 
 
-class LinkTree:
+class LinkTree(object):
     """Class to create trees of symbolic links from a source directory.
 
     LinkTree objects are constructed with a source root. Their
@@ -430,12 +432,12 @@ class MergeConflictError(Exception):
 
 class ConflictingSpecsError(MergeConflictError):
     def __init__(self, spec_1, spec_2):
-        super().__init__(spec_1, spec_2)
+        super(MergeConflictError, self).__init__(spec_1, spec_2)
 
 
 class SingleMergeConflictError(MergeConflictError):
     def __init__(self, path):
-        super().__init__("Package merge blocked by file: %s" % path)
+        super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
 
 
 class MergeConflictSummary(MergeConflictError):
@@ -450,4 +452,4 @@ def __init__(self, conflicts):
         msg += "\n    `{0}` and `{1}` both project to `{2}`".format(
             conflict.src_a, conflict.src_b, conflict.dst
         )
-        super().__init__(msg)
+        super(MergeConflictSummary, self).__init__(msg)
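One behavioral detail hides in the exception hunk above: the zero-argument super() always continues the MRO from the current class, while the right-hand super(MergeConflictError, self) spellings inside the subclasses start the search above MergeConflictError, skipping its __init__. With this flat hierarchy the result happens to be the same, but the difference is easy to demonstrate:

    class Base(Exception):
        def __init__(self, msg):
            super().__init__("base: " + msg)

    class Normal(Base):
        def __init__(self, msg):
            super().__init__(msg)  # continues from Normal, so Base runs

    class SkipsBase(Base):
        def __init__(self, msg):
            super(Base, self).__init__(msg)  # starts the search above Base

    print(Normal("x").args)     # ('base: x',)
    print(SkipsBase("x").args)  # ('x',)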
lib/spack/llnl/util/lock.py
@@ -9,12 +9,11 @@
 import sys
 import time
 from datetime import datetime
-from types import TracebackType
-from typing import IO, Any, Callable, ContextManager, Dict, Generator, Optional, Tuple, Type, Union
 
-from llnl.util import lang, tty
+import llnl.util.tty as tty
+from llnl.util.lang import pretty_seconds
 
-from ..string import plural
+import spack.util.string
 
 if sys.platform != "win32":
     import fcntl
@@ -35,15 +34,12 @@
 ]
 
 
-ReleaseFnType = Optional[Callable[[], bool]]
-
-
-def true_fn() -> bool:
-    """A function that always returns True."""
-    return True
-
-
-class OpenFile:
+#: A useful replacement for functions that should return True when not provided
+#: for example.
+true_fn = lambda: True
+
+
+class OpenFile(object):
     """Record for keeping track of open lockfiles (with reference counting).
 
     There's really only one ``OpenFile`` per inode, per process, but we record the
@@ -52,12 +48,12 @@ class OpenFile:
     file descriptors as well in the future.
     """
 
-    def __init__(self, fh: IO) -> None:
+    def __init__(self, fh):
         self.fh = fh
         self.refs = 0
 
 
-class OpenFileTracker:
+class OpenFileTracker(object):
     """Track open lockfiles, to minimize number of open file descriptors.
 
     The ``fcntl`` locks that Spack uses are associated with an inode and a process.
@@ -82,11 +78,11 @@ class OpenFileTracker:
     work in Python and assume the GIL.
     """
 
-    def __init__(self) -> None:
+    def __init__(self):
         """Create a new ``OpenFileTracker``."""
-        self._descriptors: Dict[Any, OpenFile] = {}
+        self._descriptors = {}
 
-    def get_fh(self, path: str) -> IO:
+    def get_fh(self, path):
         """Get a filehandle for a lockfile.
 
         This routine will open writable files for read/write even if you're asking
@@ -94,7 +90,7 @@ def get_fh(self, path: str) -> IO:
         (write) lock later if requested.
 
         Arguments:
-            path: path to lock file we want a filehandle for
+            path (str): path to lock file we want a filehandle for
         """
         # Open writable files as 'r+' so we can upgrade to write later
         os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
@@ -143,7 +139,7 @@ def get_fh(self, path: str) -> IO:
     def release_by_stat(self, stat):
         key = (stat.st_dev, stat.st_ino, os.getpid())
         open_file = self._descriptors.get(key)
-        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino
+        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
 
         open_file.refs -= 1
         if not open_file.refs:
@@ -161,7 +157,7 @@ def purge(self):
 
 #: Open file descriptors for locks in this process. Used to prevent one process
 #: from opening the sam file many times for different byte range locks
-FILE_TRACKER = OpenFileTracker()
+file_tracker = OpenFileTracker()
 
 
 def _attempts_str(wait_time, nattempts):
@@ -169,11 +165,11 @@ def _attempts_str(wait_time, nattempts):
     if nattempts <= 1:
         return ""
 
-    attempts = plural(nattempts, "attempt")
-    return " after {} and {}".format(lang.pretty_seconds(wait_time), attempts)
+    attempts = spack.util.string.plural(nattempts, "attempt")
+    return " after {} and {}".format(pretty_seconds(wait_time), attempts)
 
 
-class LockType:
+class LockType(object):
     READ = 0
     WRITE = 1
 
@@ -192,11 +188,11 @@ def to_module(tid):
         return lock
 
     @staticmethod
-    def is_valid(op: int) -> bool:
+    def is_valid(op):
         return op == LockType.READ or op == LockType.WRITE
 
 
-class Lock:
+class Lock(object):
     """This is an implementation of a filesystem lock using Python's lockf.
 
     In Python, ``lockf`` actually calls ``fcntl``, so this should work with
@@ -211,16 +207,7 @@ class Lock:
|
|||||||
overlapping byte ranges in the same file).
|
overlapping byte ranges in the same file).
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(
|
def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
|
||||||
self,
|
|
||||||
path: str,
|
|
||||||
*,
|
|
||||||
start: int = 0,
|
|
||||||
length: int = 0,
|
|
||||||
default_timeout: Optional[float] = None,
|
|
||||||
debug: bool = False,
|
|
||||||
desc: str = "",
|
|
||||||
) -> None:
|
|
||||||
"""Construct a new lock on the file at ``path``.
|
"""Construct a new lock on the file at ``path``.
|
||||||
|
|
||||||
By default, the lock applies to the whole file. Optionally,
|
By default, the lock applies to the whole file. Optionally,
|
||||||
@@ -233,17 +220,17 @@ def __init__(
|
|||||||
beginning of the file.
|
beginning of the file.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
path: path to the lock
|
path (str): path to the lock
|
||||||
start: optional byte offset at which the lock starts
|
start (int): optional byte offset at which the lock starts
|
||||||
length: optional number of bytes to lock
|
length (int): optional number of bytes to lock
|
||||||
default_timeout: seconds to wait for lock attempts,
|
default_timeout (int): number of seconds to wait for lock attempts,
|
||||||
where None means to wait indefinitely
|
where None means to wait indefinitely
|
||||||
debug: debug mode specific to locking
|
debug (bool): debug mode specific to locking
|
||||||
desc: optional debug message lock description, which is
|
desc (str): optional debug message lock description, which is
|
||||||
helpful for distinguishing between different Spack locks.
|
helpful for distinguishing between different Spack locks.
|
||||||
"""
|
"""
|
||||||
self.path = path
|
self.path = path
|
||||||
self._file: Optional[IO] = None
|
self._file = None
|
||||||
self._reads = 0
|
self._reads = 0
|
||||||
self._writes = 0
|
self._writes = 0
|
||||||
|
|
||||||
@@ -255,7 +242,7 @@ def __init__(
|
|||||||
self.debug = debug
|
self.debug = debug
|
||||||
|
|
||||||
# optional debug description
|
# optional debug description
|
||||||
self.desc = f" ({desc})" if desc else ""
|
self.desc = " ({0})".format(desc) if desc else ""
|
||||||
|
|
||||||
# If the user doesn't set a default timeout, or if they choose
|
# If the user doesn't set a default timeout, or if they choose
|
||||||
# None, 0, etc. then lock attempts will not time out (unless the
|
# None, 0, etc. then lock attempts will not time out (unless the
|
||||||
@@ -263,15 +250,11 @@ def __init__(
|
|||||||
self.default_timeout = default_timeout or None
|
self.default_timeout = default_timeout or None
|
||||||
|
|
||||||
# PID and host of lock holder (only used in debug mode)
|
# PID and host of lock holder (only used in debug mode)
|
||||||
self.pid: Optional[int] = None
|
self.pid = self.old_pid = None
|
||||||
self.old_pid: Optional[int] = None
|
self.host = self.old_host = None
|
||||||
self.host: Optional[str] = None
|
|
||||||
self.old_host: Optional[str] = None
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _poll_interval_generator(
|
def _poll_interval_generator(_wait_times=None):
|
||||||
_wait_times: Optional[Tuple[float, float, float]] = None
|
|
||||||
) -> Generator[float, None, None]:
|
|
||||||
"""This implements a backoff scheme for polling a contended resource
|
"""This implements a backoff scheme for polling a contended resource
|
||||||
by suggesting a succession of wait times between polls.
|
by suggesting a succession of wait times between polls.
|
||||||
|
|
||||||
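Aside (illustration, not part of the diff): the ``_poll_interval_generator`` docstring above describes a backoff scheme; a hedged reimplementation of the idea, with made-up tier sizes, looks like this:

import itertools

def poll_intervals(wait_times=(0.01, 0.1, 0.5)):
    # Yield short waits first, then progressively longer ones.
    short, medium, long_ = wait_times
    tiers = itertools.chain(
        itertools.repeat(short, 100),   # spin quickly for fast handoffs
        itertools.repeat(medium, 100),  # poll more slowly under contention
        itertools.repeat(long_),        # settle into a long interval
    )
    for interval in tiers:
        yield interval

gen = poll_intervals()
print([next(gen) for _ in range(3)])  # [0.01, 0.01, 0.01]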
@@ -294,21 +277,21 @@ def _poll_interval_generator(
             num_requests += 1
             yield wait_time

-    def __repr__(self) -> str:
+    def __repr__(self):
         """Formal representation of the lock."""
         rep = "{0}(".format(self.__class__.__name__)
         for attr, value in self.__dict__.items():
             rep += "{0}={1}, ".format(attr, value.__repr__())
         return "{0})".format(rep.strip(", "))

-    def __str__(self) -> str:
+    def __str__(self):
         """Readable string (with key fields) of the lock."""
         location = "{0}[{1}:{2}]".format(self.path, self._start, self._length)
         timeout = "timeout={0}".format(self.default_timeout)
         activity = "#reads={0}, #writes={1}".format(self._reads, self._writes)
         return "({0}, {1}, {2})".format(location, timeout, activity)

-    def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
+    def _lock(self, op, timeout=None):
         """This takes a lock using POSIX locks (``fcntl.lockf``).

         The lock is implemented as a spin lock using a nonblocking call
@@ -327,7 +310,7 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
         # Create file and parent directories if they don't exist.
         if self._file is None:
             self._ensure_parent_directory()
-            self._file = FILE_TRACKER.get_fh(self.path)
+            self._file = file_tracker.get_fh(self.path)

         if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
             # Attempt to upgrade to write lock w/a read-only file.
@@ -336,7 +319,7 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:

         self._log_debug(
             "{} locking [{}:{}]: timeout {}".format(
-                op_str.lower(), self._start, self._length, lang.pretty_seconds(timeout or 0)
+                op_str.lower(), self._start, self._length, pretty_seconds(timeout or 0)
             )
         )

@@ -360,20 +343,15 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
         total_wait_time = time.time() - start_time
         raise LockTimeoutError(op_str.lower(), self.path, total_wait_time, num_attempts)

-    def _poll_lock(self, op: int) -> bool:
+    def _poll_lock(self, op):
         """Attempt to acquire the lock in a non-blocking manner. Return whether
         the locking attempt succeeds
         """
-        assert self._file is not None, "cannot poll a lock without the file being set"
         module_op = LockType.to_module(op)
         try:
             # Try to get the lock (will raise if not available.)
             fcntl.lockf(
-                self._file.fileno(),
-                module_op | fcntl.LOCK_NB,
-                self._length,
-                self._start,
-                os.SEEK_SET,
+                self._file, module_op | fcntl.LOCK_NB, self._length, self._start, os.SEEK_SET
             )

             # help for debugging distributed locking
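Aside (illustration, not part of the diff): the core of ``_poll_lock`` on both sides is a nonblocking byte-range request. ``fcntl.lockf`` with ``LOCK_NB`` raises ``OSError`` immediately instead of waiting when the range is contended, which is what makes the spin-with-backoff loop possible:

import fcntl
import os

def try_byte_range_lock(fh, start=0, length=1):
    # Return True if an exclusive lock on [start, start + length) was taken.
    try:
        fcntl.lockf(fh.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB, length, start, os.SEEK_SET)
        return True
    except OSError:  # EACCES/EAGAIN: another process holds an overlapping range
        return False

with open("/tmp/demo.lock", "w+") as fh:  # illustrative path
    print(try_byte_range_lock(fh))  # True on first acquisition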
@@ -399,7 +377,7 @@ def _poll_lock(self, op: int) -> bool:

         return False

-    def _ensure_parent_directory(self) -> str:
+    def _ensure_parent_directory(self):
         parent = os.path.dirname(self.path)

         # relative paths to lockfiles in the current directory have no parent
@@ -418,22 +396,20 @@ def _ensure_parent_directory(self) -> str:
                 raise
         return parent

-    def _read_log_debug_data(self) -> None:
+    def _read_log_debug_data(self):
         """Read PID and host data out of the file if it is there."""
-        assert self._file is not None, "cannot read debug log without the file being set"
         self.old_pid = self.pid
         self.old_host = self.host

         line = self._file.read()
         if line:
             pid, host = line.strip().split(",")
-            _, _, pid = pid.rpartition("=")
+            _, _, self.pid = pid.rpartition("=")
             _, _, self.host = host.rpartition("=")
-            self.pid = int(pid)
+            self.pid = int(self.pid)

-    def _write_log_debug_data(self) -> None:
+    def _write_log_debug_data(self):
         """Write PID and host data to the file, recording old values."""
-        assert self._file is not None, "cannot write debug log without the file being set"
         self.old_pid = self.pid
         self.old_host = self.host

@@ -447,21 +423,20 @@ def _write_log_debug_data(self) -> None:
         self._file.flush()
         os.fsync(self._file.fileno())

-    def _unlock(self) -> None:
+    def _unlock(self):
         """Releases a lock using POSIX locks (``fcntl.lockf``)

         Releases the lock regardless of mode. Note that read locks may
         be masquerading as write locks, but this removes either.

         """
-        assert self._file is not None, "cannot unlock without the file being set"
-        fcntl.lockf(self._file.fileno(), fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
-        FILE_TRACKER.release_by_fh(self._file)
+        fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
+        file_tracker.release_by_fh(self._file)
         self._file = None
         self._reads = 0
         self._writes = 0

-    def acquire_read(self, timeout: Optional[float] = None) -> bool:
+    def acquire_read(self, timeout=None):
         """Acquires a recursive, shared lock for reading.

         Read and write locks can be acquired and released in arbitrary
@@ -486,7 +461,7 @@ def acquire_read(self, timeout: Optional[float] = None) -> bool:
             self._reads += 1
             return False

-    def acquire_write(self, timeout: Optional[float] = None) -> bool:
+    def acquire_write(self, timeout=None):
         """Acquires a recursive, exclusive lock for writing.

         Read and write locks can be acquired and released in arbitrary
@@ -516,7 +491,7 @@ def acquire_write(self, timeout: Optional[float] = None) -> bool:
             self._writes += 1
             return False

-    def is_write_locked(self) -> bool:
+    def is_write_locked(self):
         """Check if the file is write locked

         Return:
@@ -533,7 +508,7 @@ def is_write_locked(self) -> bool:

         return False

-    def downgrade_write_to_read(self, timeout: Optional[float] = None) -> None:
+    def downgrade_write_to_read(self, timeout=None):
         """
         Downgrade from an exclusive write lock to a shared read.

@@ -552,7 +527,7 @@ def downgrade_write_to_read(self, timeout: Optional[float] = None) -> None:
         else:
             raise LockDowngradeError(self.path)

-    def upgrade_read_to_write(self, timeout: Optional[float] = None) -> None:
+    def upgrade_read_to_write(self, timeout=None):
         """
         Attempts to upgrade from a shared read lock to an exclusive write.

@@ -571,7 +546,7 @@ def upgrade_read_to_write(self, timeout: Optional[float] = None) -> None:
         else:
             raise LockUpgradeError(self.path)

-    def release_read(self, release_fn: ReleaseFnType = None) -> bool:
+    def release_read(self, release_fn=None):
         """Releases a read lock.

         Arguments:
@@ -607,7 +582,7 @@ def release_read(self, release_fn: ReleaseFnType = None) -> bool:
             self._reads -= 1
             return False

-    def release_write(self, release_fn: ReleaseFnType = None) -> bool:
+    def release_write(self, release_fn=None):
         """Releases a write lock.

         Arguments:
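Aside (hypothetical usage sketch, not part of the diff): the read/write lock API these hunks annotate is driven like this; the lock file path is invented, and ``upgrade_read_to_write`` raises ``LockUpgradeError`` when the upgrade cannot be performed:

from llnl.util.lock import Lock

lock = Lock("/tmp/spack-demo.lock")  # hypothetical lock file

lock.acquire_read()           # shared lock; further acquire_read() calls
                              # would only bump the recursion count
lock.upgrade_read_to_write()  # exclusive lock, or LockUpgradeError
lock.downgrade_write_to_read()
lock.release_read()           # returns True once the last holder releases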
@@ -648,65 +623,65 @@ def release_write(self, release_fn: ReleaseFnType = None) -> bool:
         else:
             return False

-    def cleanup(self) -> None:
+    def cleanup(self):
         if self._reads == 0 and self._writes == 0:
             os.unlink(self.path)
         else:
             raise LockError("Attempting to cleanup active lock.")

-    def _get_counts_desc(self) -> str:
+    def _get_counts_desc(self):
         return (
             "(reads {0}, writes {1})".format(self._reads, self._writes) if tty.is_verbose() else ""
         )

-    def _log_acquired(self, locktype, wait_time, nattempts) -> None:
+    def _log_acquired(self, locktype, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = "Acquired at %s" % now.strftime("%H:%M:%S.%f")
         self._log_debug(self._status_msg(locktype, "{0}{1}".format(desc, attempts_part)))

-    def _log_acquiring(self, locktype) -> None:
+    def _log_acquiring(self, locktype):
         self._log_debug(self._status_msg(locktype, "Acquiring"), level=3)

-    def _log_debug(self, *args, **kwargs) -> None:
+    def _log_debug(self, *args, **kwargs):
         """Output lock debug messages."""
         kwargs["level"] = kwargs.get("level", 2)
         tty.debug(*args, **kwargs)

-    def _log_downgraded(self, wait_time, nattempts) -> None:
+    def _log_downgraded(self, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = "Downgraded at %s" % now.strftime("%H:%M:%S.%f")
         self._log_debug(self._status_msg("READ LOCK", "{0}{1}".format(desc, attempts_part)))

-    def _log_downgrading(self) -> None:
+    def _log_downgrading(self):
         self._log_debug(self._status_msg("WRITE LOCK", "Downgrading"), level=3)

-    def _log_released(self, locktype) -> None:
+    def _log_released(self, locktype):
         now = datetime.now()
         desc = "Released at %s" % now.strftime("%H:%M:%S.%f")
         self._log_debug(self._status_msg(locktype, desc))

-    def _log_releasing(self, locktype) -> None:
+    def _log_releasing(self, locktype):
         self._log_debug(self._status_msg(locktype, "Releasing"), level=3)

-    def _log_upgraded(self, wait_time, nattempts) -> None:
+    def _log_upgraded(self, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = "Upgraded at %s" % now.strftime("%H:%M:%S.%f")
         self._log_debug(self._status_msg("WRITE LOCK", "{0}{1}".format(desc, attempts_part)))

-    def _log_upgrading(self) -> None:
+    def _log_upgrading(self):
         self._log_debug(self._status_msg("READ LOCK", "Upgrading"), level=3)

-    def _status_msg(self, locktype: str, status: str) -> str:
+    def _status_msg(self, locktype, status):
         status_desc = "[{0}] {1}".format(status, self._get_counts_desc())
         return "{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}".format(
             locktype, self, status_desc
         )


-class LockTransaction:
+class LockTransaction(object):
     """Simple nested transaction context manager that uses a file lock.

     Arguments:
@@ -734,13 +709,7 @@ class LockTransaction:

     """

-    def __init__(
-        self,
-        lock: Lock,
-        acquire: Union[ReleaseFnType, ContextManager] = None,
-        release: Union[ReleaseFnType, ContextManager] = None,
-        timeout: Optional[float] = None,
-    ) -> None:
+    def __init__(self, lock, acquire=None, release=None, timeout=None):
         self._lock = lock
         self._timeout = timeout
         self._acquire_fn = acquire
@@ -755,20 +724,15 @@ def __enter__(self):
         else:
             return self._as

-    def __exit__(
-        self,
-        exc_type: Optional[Type[BaseException]],
-        exc_value: Optional[BaseException],
-        traceback: Optional[TracebackType],
-    ) -> bool:
+    def __exit__(self, type, value, traceback):
         suppress = False

         def release_fn():
             if self._release_fn is not None:
-                return self._release_fn(exc_type, exc_value, traceback)
+                return self._release_fn(type, value, traceback)

         if self._as and hasattr(self._as, "__exit__"):
-            if self._as.__exit__(exc_type, exc_value, traceback):
+            if self._as.__exit__(type, value, traceback):
                 suppress = True

         if self._exit(release_fn):
@@ -776,12 +740,6 @@ def release_fn():

         return suppress

-    def _enter(self) -> bool:
-        return NotImplemented
-
-    def _exit(self, release_fn: ReleaseFnType) -> bool:
-        return NotImplemented
-

 class ReadTransaction(LockTransaction):
     """LockTransaction context manager that does a read and releases it."""
@@ -812,7 +770,7 @@ class LockDowngradeError(LockError):

     def __init__(self, path):
         msg = "Cannot downgrade lock from write to read on file: %s" % path
-        super().__init__(msg)
+        super(LockDowngradeError, self).__init__(msg)


 class LockLimitError(LockError):
@@ -824,10 +782,10 @@ class LockTimeoutError(LockError):

     def __init__(self, lock_type, path, time, attempts):
         fmt = "Timed out waiting for a {} lock after {}.\n Made {} {} on file: {}"
-        super().__init__(
+        super(LockTimeoutError, self).__init__(
             fmt.format(
                 lock_type,
-                lang.pretty_seconds(time),
+                pretty_seconds(time),
                 attempts,
                 "attempt" if attempts == 1 else "attempts",
                 path,
@@ -840,7 +798,7 @@ class LockUpgradeError(LockError):

     def __init__(self, path):
         msg = "Cannot upgrade lock from read to write on file: %s" % path
-        super().__init__(msg)
+        super(LockUpgradeError, self).__init__(msg)


 class LockPermissionError(LockError):
@@ -852,7 +810,7 @@ class LockROFileError(LockPermissionError):

     def __init__(self, path):
         msg = "Can't take write lock on read-only file: %s" % path
-        super().__init__(msg)
+        super(LockROFileError, self).__init__(msg)


 class CantCreateLockError(LockPermissionError):
@@ -861,4 +819,4 @@ class CantCreateLockError(LockPermissionError):
     def __init__(self, path):
         msg = "cannot create lock '%s': " % path
         msg += "file does not exist and location is not writable"
-        super().__init__(msg)
+        super(LockError, self).__init__(msg)
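Aside (hypothetical usage sketch, not part of the diff): the transaction classes wrap a ``Lock`` in a context manager; ``acquire`` is an optional callback run once the lock is held. The ``read_db`` name below is invented:

from llnl.util.lock import Lock, ReadTransaction

lock = Lock("/tmp/spack-demo.lock")  # hypothetical path

def read_db():
    print("reading under a shared lock")  # invented callback body

with ReadTransaction(lock, acquire=read_db):
    pass  # read lock held here; released (and release callback run) on exit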
lib/spack/llnl/util/symlink.py
@@ -2,189 +2,77 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import errno
 import os
-import re
 import shutil
-import subprocess
 import sys
 import tempfile
+from os.path import exists, join

-from llnl.util import lang, tty
-
-from ..path import system_path_filter
+from llnl.util import lang

 if sys.platform == "win32":
     from win32file import CreateHardLink

-is_windows = sys.platform == "win32"
-

-def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
+def symlink(real_path, link_path):
     """
-    Create a link.
+    Create a symbolic link.

-    On non-Windows and Windows with System Administrator
-    privleges this will be a normal symbolic link via
-    os.symlink.
-
-    On Windows without privledges the link will be a
-    junction for a directory and a hardlink for a file.
-    On Windows the various link types are:
-
-    Symbolic Link: A link to a file or directory on the
-    same or different volume (drive letter) or even to
-    a remote file or directory (using UNC in its path).
-    Need System Administrator privileges to make these.
-
-    Hard Link: A link to a file on the same volume (drive
-    letter) only. Every file (file's data) has at least 1
-    hard link (file's name). But when this method creates
-    a new hard link there will be 2. Deleting all hard
-    links effectively deletes the file. Don't need System
-    Administrator privileges.
-
-    Junction: A link to a directory on the same or different
-    volume (drive letter) but not to a remote directory. Don't
-    need System Administrator privileges.
-
-    Parameters:
-        source_path (str): The real file or directory that the link points to.
-            Must be absolute OR relative to the link.
-        link_path (str): The path where the link will exist.
-        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
-            doesn't exist. This will still raise an exception on Windows.
+    On Windows, use junctions if os.symlink fails.
     """
-    source_path = os.path.normpath(source_path)
-    win_source_path = source_path
-    link_path = os.path.normpath(link_path)
-
-    # Never allow broken links on Windows.
-    if sys.platform == "win32" and allow_broken_symlinks:
-        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
-
-    if not allow_broken_symlinks:
-        # Perform basic checks to make sure symlinking will succeed
-        if os.path.lexists(link_path):
-            raise AlreadyExistsError(
-                f"Link path ({link_path}) already exists. Cannot create link."
-            )
-
-        if not os.path.exists(source_path):
-            if os.path.isabs(source_path) and not allow_broken_symlinks:
-                # An absolute source path that does not exist will result in a broken link.
-                raise SymlinkError(
-                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
-                    f"link would be broken so not making link."
-                )
-            else:
-                # os.symlink can create a link when the given source path is relative to
-                # the link path. Emulate this behavior and check to see if the source exists
-                # relative to the link path ahead of link creation to prevent broken
-                # links from being made.
-                link_parent_dir = os.path.dirname(link_path)
-                relative_path = os.path.join(link_parent_dir, source_path)
-                if os.path.exists(relative_path):
-                    # In order to work on windows, the source path needs to be modified to be
-                    # relative because hardlink/junction dont resolve relative paths the same
-                    # way as os.symlink. This is ignored on other operating systems.
-                    win_source_path = relative_path
-                elif not allow_broken_symlinks:
-                    raise SymlinkError(
-                        f"The source path ({source_path}) is not relative to the link path "
-                        f"({link_path}). Resulting link would be broken so not making link."
-                    )
-
-    # Create the symlink
-    if sys.platform == "win32" and not _windows_can_symlink():
-        _windows_create_link(win_source_path, link_path)
+    if sys.platform != "win32":
+        os.symlink(real_path, link_path)
+    elif _win32_can_symlink():
+        # Windows requires target_is_directory=True when the target is a dir.
+        os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
     else:
-        os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
+        try:
+            # Try to use junctions
+            _win32_junction(real_path, link_path)
+        except OSError as e:
+            if e.errno == errno.EEXIST:
+                # EEXIST error indicates that file we're trying to "link"
+                # is already present, don't bother trying to copy which will also fail
+                # just raise
+                raise
+            else:
+                # If all else fails, fall back to copying files
+                shutil.copyfile(real_path, link_path)


-def islink(path: str) -> bool:
-    """Override os.islink to give correct answer for spack logic.
-
-    For Non-Windows: a link can be determined with the os.path.islink method.
-    Windows-only methods will return false for other operating systems.
-
-    For Windows: spack considers symlinks, hard links, and junctions to
-    all be links, so if any of those are True, return True.
-
-    Args:
-        path (str): path to check if it is a link.
-
-    Returns:
-        bool - whether the path is any kind link or not.
-    """
-    return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])
+def islink(path):
+    return os.path.islink(path) or _win32_is_junction(path)


-def _windows_is_hardlink(path: str) -> bool:
-    """Determines if a path is a windows hard link. This is accomplished
-    by looking at the number of links using os.stat. A non-hard-linked file
-    will have a st_nlink value of 1, whereas a hard link will have a value
-    larger than 1. Note that both the original and hard-linked file will
-    return True because they share the same inode.
-
-    Args:
-        path (str): Windows path to check for a hard link
-
-    Returns:
-        bool - Whether the path is a hard link or not.
-    """
-    if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
-        return False
-
-    return os.stat(path).st_nlink > 1
-
-
-def _windows_is_junction(path: str) -> bool:
-    """Determines if a path is a windows junction. A junction can be
-    determined using a bitwise AND operation between the file's
-    attribute bitmask and the known junction bitmask (0x400).
-
-    Args:
-        path (str): A non-file path
-
-    Returns:
-        bool - whether the path is a junction or not.
-    """
-    if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
-        return False
-
-    import ctypes.wintypes
-
-    get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW  # type: ignore[attr-defined]
-    get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
-    get_file_attributes.restype = ctypes.wintypes.DWORD
-
-    invalid_file_attributes = 0xFFFFFFFF
-    reparse_point = 0x400
-    file_attr = get_file_attributes(str(path))
-
-    if file_attr == invalid_file_attributes:
-        return False
-
-    return file_attr & reparse_point > 0
+# '_win32' functions based on
+# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
+def _win32_junction(path, link):
+    # junctions require absolute paths
+    if not os.path.isabs(link):
+        link = os.path.abspath(link)
+
+    # os.symlink will fail if link exists, emulate the behavior here
+    if exists(link):
+        raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
+
+    if not os.path.isabs(path):
+        parent = os.path.join(link, os.pardir)
+        path = os.path.join(parent, path)
+        path = os.path.abspath(path)
+
+    CreateHardLink(link, path)


 @lang.memoized
-def _windows_can_symlink() -> bool:
-    """
-    Determines if windows is able to make a symlink depending on
-    the system configuration and the level of the user's permissions.
-    """
-    if sys.platform != "win32":
-        tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
-        return False
-
+def _win32_can_symlink():
     tempdir = tempfile.mkdtemp()

-    dpath = os.path.join(tempdir, "dpath")
-    fpath = os.path.join(tempdir, "fpath.txt")
+    dpath = join(tempdir, "dpath")
+    fpath = join(tempdir, "fpath.txt")

-    dlink = os.path.join(tempdir, "dlink")
-    flink = os.path.join(tempdir, "flink.txt")
+    dlink = join(tempdir, "dlink")
+    flink = join(tempdir, "flink.txt")

     import llnl.util.filesystem as fs

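Aside (illustration, not part of the diff): the relative-path checks removed above guard a subtlety of ``os.symlink`` that is easy to trip over: a relative link target is resolved against the link's own directory, not the current working directory, so a link can be created dangling without any error:

import os
import tempfile

workdir = tempfile.mkdtemp()
os.makedirs(os.path.join(workdir, "sub"))
open(os.path.join(workdir, "sub", "real.txt"), "w").close()

link = os.path.join(workdir, "sub", "link.txt")
os.symlink("real.txt", link)  # resolved relative to sub/, not the cwd
print(os.path.exists(link))   # True: sub/real.txt sits next to the link

broken = os.path.join(workdir, "broken.txt")
os.symlink("real.txt", broken)  # no real.txt in workdir itself...
print(os.path.exists(broken))   # False: the link was created, but dangling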
@@ -208,140 +96,24 @@ def _windows_can_symlink() -> bool:
     return can_symlink_directories and can_symlink_files


-def _windows_create_link(source: str, link: str):
+def _win32_is_junction(path):
     """
-    Attempts to create a Hard Link or Junction as an alternative
-    to a symbolic link. This is called when symbolic links cannot
-    be created.
+    Determines if a path is a win32 junction
     """
-    if sys.platform != "win32":
-        raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
-    elif os.path.isdir(source):
-        _windows_create_junction(source=source, link=link)
-    elif os.path.isfile(source):
-        _windows_create_hard_link(path=source, link=link)
-    else:
-        raise SymlinkError(
-            f"Cannot create link from {source}. It is neither a file nor a directory."
-        )
-
-
-def _windows_create_junction(source: str, link: str):
-    """Duly verify that the path and link are eligible to create a junction,
-    then create the junction.
-    """
-    if sys.platform != "win32":
-        raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
-    elif not os.path.exists(source):
-        raise SymlinkError("Source path does not exist, cannot create a junction.")
-    elif os.path.lexists(link):
-        raise AlreadyExistsError("Link path already exists, cannot create a junction.")
-    elif not os.path.isdir(source):
-        raise SymlinkError("Source path is not a directory, cannot create a junction.")
-
-    import subprocess
-
-    cmd = ["cmd", "/C", "mklink", "/J", link, source]
-    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = proc.communicate()
-    tty.debug(out.decode())
-    if proc.returncode != 0:
-        err = err.decode()
-        tty.error(err)
-        raise SymlinkError("Make junction command returned a non-zero return code.", err)
-
-
-def _windows_create_hard_link(path: str, link: str):
-    """Duly verify that the path and link are eligible to create a hard
-    link, then create the hard link.
-    """
-    if sys.platform != "win32":
-        raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
-    elif not os.path.exists(path):
-        raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
-    elif os.path.lexists(link):
-        raise AlreadyExistsError(f"Link path ({link}) already exists. Cannot create hard link.")
-    elif not os.path.isfile(path):
-        raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
-    else:
-        tty.debug(f"Creating hard link {link} pointing to {path}")
-        CreateHardLink(link, path)
-
-
-def readlink(path: str):
-    """Spack utility to override of os.readlink method to work cross platform"""
-    if _windows_is_hardlink(path):
-        return _windows_read_hard_link(path)
-    elif _windows_is_junction(path):
-        return _windows_read_junction(path)
-    else:
-        return os.readlink(path)
-
-
-def _windows_read_hard_link(link: str) -> str:
-    """Find all of the files that point to the same inode as the link"""
-    if sys.platform != "win32":
-        raise SymlinkError("Can't read hard link on non-Windows OS.")
-    link = os.path.abspath(link)
-    fsutil_cmd = ["fsutil", "hardlink", "list", link]
-    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
-    out, err = proc.communicate()
-    if proc.returncode != 0:
-        raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")
-
-    # fsutil response does not include the drive name, so append it back to each linked file.
-    drive, link_tail = os.path.splitdrive(os.path.abspath(link))
-    links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
-    links.remove(link)
-    if len(links) == 1:
-        return links.pop()
-    elif len(links) > 1:
-        # TODO: How best to handle the case where 3 or more paths point to a single inode?
-        raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
-    else:
-        raise SymlinkError("Cannot determine hard link source path.")
-
-
-def _windows_read_junction(link: str):
-    """Find the path that a junction points to."""
-    if sys.platform != "win32":
-        raise SymlinkError("Can't read junction on non-Windows OS.")
-
-    link = os.path.abspath(link)
-    link_basename = os.path.basename(link)
-    link_parent = os.path.dirname(link)
-    fsutil_cmd = ["dir", "/a:l", link_parent]
-    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
-    out, err = proc.communicate()
-    if proc.returncode != 0:
-        raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
-    matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
-    if matches:
-        return matches.group(1)
-    else:
-        raise SymlinkError("Could not find junction path.")
-
-
-@system_path_filter
-def resolve_link_target_relative_to_the_link(link):
-    """
-    os.path.isdir uses os.path.exists, which for links will check
-    the existence of the link target. If the link target is relative to
-    the link, we need to construct a pathname that is valid from
-    our cwd (which may not be the same as the link's directory)
-    """
-    target = readlink(link)
-    if os.path.isabs(target):
-        return target
-    link_dir = os.path.dirname(os.path.abspath(link))
-    return os.path.join(link_dir, target)
-
-
-class SymlinkError(RuntimeError):
-    """Exception class for errors raised while creating symlinks,
-    junctions and hard links
-    """
-
-
-class AlreadyExistsError(SymlinkError):
-    """Link path already exists."""
+    if os.path.islink(path):
+        return False
+
+    if sys.platform == "win32":
+        import ctypes.wintypes
+
+        GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
+        GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
+        GetFileAttributes.restype = ctypes.wintypes.DWORD
+
+        INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
+        FILE_ATTRIBUTE_REPARSE_POINT = 0x400
+
+        res = GetFileAttributes(path)
+        return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
+
+    return False
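Aside (illustration, not part of the diff): both branches detect junctions by testing the reparse-point bit (0x400) returned by ``GetFileAttributesW``. A self-contained sketch of that check, which simply reports False on non-Windows hosts:

import os
import sys

INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
FILE_ATTRIBUTE_REPARSE_POINT = 0x400  # Win32 attribute bit for reparse points

def is_reparse_point(path):
    if sys.platform != "win32" or os.path.islink(path):
        return False
    import ctypes.wintypes

    get_attrs = ctypes.windll.kernel32.GetFileAttributesW
    get_attrs.argtypes = (ctypes.wintypes.LPWSTR,)
    get_attrs.restype = ctypes.wintypes.DWORD
    attrs = get_attrs(str(path))
    return attrs != INVALID_FILE_ATTRIBUTES and bool(attrs & FILE_ATTRIBUTE_REPARSE_POINT)

print(is_reparse_point("C:\\some\\junction"))  # False when run off Windows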
lib/spack/llnl/util/tty/__init__.py
@@ -3,6 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+from __future__ import unicode_literals
+
 import contextlib
 import io
 import os
@@ -12,7 +14,6 @@
 import traceback
 from datetime import datetime
 from sys import platform as _platform
-from typing import NoReturn

 if _platform != "win32":
     import fcntl
@@ -245,7 +246,7 @@ def warn(message, *args, **kwargs):
     info("Warning: " + str(message), *args, **kwargs)


-def die(message, *args, **kwargs) -> NoReturn:
+def die(message, *args, **kwargs):
     kwargs.setdefault("countback", 4)
     error(message, *args, **kwargs)
     sys.exit(1)
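Aside (illustration, not part of the diff): the ``-> NoReturn`` annotation dropped from ``die`` above changes nothing at runtime; it only tells type checkers that control never comes back, as in this sketch:

import sys
from typing import NoReturn

def die(message: str) -> NoReturn:
    print("Error: " + message, file=sys.stderr)
    sys.exit(1)

def handle(value: int) -> int:
    if value < 0:
        die("negative value")  # checkers know this branch never returns,
    return value * 2           # so the function is still seen as returning int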
lib/spack/llnl/util/tty/colify.py
@@ -6,6 +6,8 @@
 """
 Routines for printing columnar output. See ``colify()`` for more information.
 """
+from __future__ import division, unicode_literals
+
 import io
 import os
 import sys
lib/spack/llnl/util/tty/color.py
@@ -59,6 +59,8 @@

 To output an @, use '@@'. To output a } inside braces, use '}}'.
 """
+from __future__ import unicode_literals
+
 import re
 import sys
 from contextlib import contextmanager
@@ -68,7 +70,7 @@ class ColorParseError(Exception):
     """Raised when a color format fails to parse."""

     def __init__(self, message):
-        super().__init__(message)
+        super(ColorParseError, self).__init__(message)


 # Text styles for ansi codes
@@ -203,7 +205,7 @@ def color_when(value):
     set_color_when(old_value)


-class match_to_ansi:
+class match_to_ansi(object):
     def __init__(self, color=True, enclose=False):
         self.color = _color_when_value(color)
         self.enclose = enclose
@@ -319,7 +321,7 @@ def cescape(string):
     return string


-class ColorStream:
+class ColorStream(object):
     def __init__(self, stream, color=None):
         self._stream = stream
         self._color = color
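Aside (illustration, not part of the diff): the ``super()`` rewrites repeated throughout this compare are the Python 2/3 compatibility idiom; on Python 3 the two spellings are equivalent:

class Base(Exception):
    pass

class Py3Style(Base):
    def __init__(self, message):
        super().__init__(message)  # zero-argument form, Python 3 only

class Py2Compatible(Base):
    def __init__(self, message):
        # explicit class/instance form, also valid on Python 2
        super(Py2Compatible, self).__init__(message)

assert str(Py3Style("x")) == str(Py2Compatible("x"))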
lib/spack/llnl/util/tty/log.py
@@ -5,6 +5,8 @@

 """Utility classes for logging the output of blocks of code.
 """
+from __future__ import unicode_literals
+
 import atexit
 import ctypes
 import errno
@@ -65,7 +67,7 @@ def _strip(line):
     return _escape.sub("", line)


-class keyboard_input:
+class keyboard_input(object):
     """Context manager to disable line editing and echoing.

     Use this with ``sys.stdin`` for keyboard input, e.g.::
@@ -242,7 +244,7 @@ def __exit__(self, exc_type, exception, traceback):
             signal.signal(signum, old_handler)


-class Unbuffered:
+class Unbuffered(object):
     """Wrapper for Python streams that forces them to be unbuffered.

     This is implemented by forcing a flush after each write.
@@ -287,7 +289,7 @@ def _file_descriptors_work(*streams):
         return False


-class FileWrapper:
+class FileWrapper(object):
     """Represents a file. Can be an open stream, a path to a file (not opened
     yet), or neither. When unwrapped, it returns an open file (or file-like)
     object.
@@ -329,7 +331,7 @@ def close(self):
         self.file.close()


-class MultiProcessFd:
+class MultiProcessFd(object):
     """Return an object which stores a file descriptor and can be passed as an
     argument to a function run with ``multiprocessing.Process``, such that
     the file descriptor is available in the subprocess."""
@@ -429,7 +431,7 @@ def log_output(*args, **kwargs):
     return nixlog(*args, **kwargs)


-class nixlog:
+class nixlog(object):
     """
     Under the hood, we spawn a daemon and set up a pipe between this
     process and the daemon. The daemon writes our output to both the
@@ -750,7 +752,7 @@ def close(self):
             os.close(self.saved_stream)


-class winlog:
+class winlog(object):
     """
     Similar to nixlog, with underlying
     functionality ported to support Windows.
@@ -780,7 +782,7 @@ def __enter__(self):
             raise RuntimeError("file argument must be set by __init__ ")

         # Open both write and reading on logfile
-        if isinstance(self.logfile, io.StringIO):
+        if type(self.logfile) == io.StringIO:
             self._ioflag = True
             # cannot have two streams on tempfile, so we must make our own
             sys.stdout = self.logfile
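Aside (illustration, not part of the diff): the ``winlog.__enter__`` hunk swaps ``isinstance`` for an exact ``type(...) ==`` comparison; the two differ only when subclasses are involved:

import io

class LogBuffer(io.StringIO):  # hypothetical subclass
    pass

buf = LogBuffer()
print(isinstance(buf, io.StringIO))  # True: subclasses are accepted
print(type(buf) == io.StringIO)      # False: the exact-type check rejects them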
lib/spack/llnl/util/tty/pty.py
@@ -13,6 +13,8 @@

 Note: The functionality in this module is unsupported on Windows
 """
+from __future__ import print_function
+
 import multiprocessing
 import os
 import re
@@ -34,7 +36,7 @@
     pass


-class ProcessController:
+class ProcessController(object):
     """Wrapper around some fundamental process control operations.

     This allows one process (the controller) to drive another (the
@@ -155,7 +157,7 @@ def wait_running(self):
         self.wait(lambda: "T" not in self.proc_status())


-class PseudoShell:
+class PseudoShell(object):
     """Sets up controller and minion processes with a PTY.

     You can create a ``PseudoShell`` if you want to test how some
lib/spack/spack/abi.py
@@ -13,7 +13,7 @@
 from spack.util.executable import Executable, ProcessError


-class ABI:
+class ABI(object):
     """This class provides methods to test ABI compatibility between specs.
     The current implementation is rather rough and could be improved."""

@@ -38,13 +38,10 @@ def _search_duplicate_compilers(error_cls):
|
|||||||
import ast
|
import ast
|
||||||
import collections
|
import collections
|
||||||
import collections.abc
|
import collections.abc
|
||||||
import glob
|
|
||||||
import inspect
|
import inspect
|
||||||
import itertools
|
import itertools
|
||||||
import pathlib
|
|
||||||
import pickle
|
import pickle
|
||||||
import re
|
import re
|
||||||
import warnings
|
|
||||||
from urllib.request import urlopen
|
from urllib.request import urlopen
|
||||||
|
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
@@ -63,7 +60,7 @@ def _search_duplicate_compilers(error_cls):
|
|||||||
GROUPS = collections.defaultdict(list)
|
GROUPS = collections.defaultdict(list)
|
||||||
|
|
||||||
|
|
||||||
class Error:
|
class Error(object):
|
||||||
"""Information on an error reported in a test."""
|
"""Information on an error reported in a test."""
|
||||||
|
|
||||||
def __init__(self, summary, details):
|
def __init__(self, summary, details):
|
||||||
@@ -289,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
|
|||||||
"""Ensure stand-alone test method is not included in build-time callbacks"""
|
"""Ensure stand-alone test method is not included in build-time callbacks"""
|
||||||
errors = []
|
errors = []
|
||||||
for pkg_name in pkgs:
|
for pkg_name in pkgs:
|
||||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||||
test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
|
test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
|
||||||
|
|
||||||
# TODO (post-34236): "test*"->"test_*" once remove deprecated methods
|
# TODO (post-34236): "test*"->"test_*" once remove deprecated methods
|
||||||
@@ -315,7 +312,7 @@ def _check_patch_urls(pkgs, error_cls):
|
|||||||
|
|
||||||
errors = []
|
errors = []
|
||||||
for pkg_name in pkgs:
|
for pkg_name in pkgs:
|
||||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||||
for condition, patches in pkg_cls.patches.items():
|
for condition, patches in pkg_cls.patches.items():
|
||||||
for patch in patches:
|
for patch in patches:
|
||||||
if not isinstance(patch, spack.patch.UrlPatch):
|
if not isinstance(patch, spack.patch.UrlPatch):
|
||||||
@@ -345,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
     errors = []
     for pkg_name in pkgs:
         name_definitions = collections.defaultdict(list)
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

         for cls_item in inspect.getmro(pkg_cls):
             for name in RESERVED_NAMES:
@@ -386,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
     """Ensure that package objects are pickleable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
         pkg = pkg_cls(spack.spec.Spec(pkg_name))
         try:
             pickle.dumps(pkg)
@@ -427,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
     """Ensure all versions in a package can produce a fetcher"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
         pkg = pkg_cls(spack.spec.Spec(pkg_name))
         try:
             spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -452,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
     ]
     for pkg_name in pkgs:
         details = []
-        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
+        filename = spack.repo.path.filename_for_package_name(pkg_name)
         with open(filename, "r") as package_file:
             for i, line in enumerate(package_file):
                 pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -464,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
             error_msg = "Package '{}' contains boilerplate that need to be removed"
             errors.append(error_cls(error_msg.format(pkg_name), details))

-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
         if not pkg_cls.__doc__:
             error_msg = "Package '{}' miss a docstring"
             errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -477,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
     """Ensure no packages use md5 checksums"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
         if pkg_cls.manual_download:
             continue

@@ -514,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     """Ensure that methods modifying the build environment are ported to builder classes."""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
         buildsystem_variant, _ = pkg_cls.variants["build_system"]
         buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
         builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -541,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
     """Check for correctness of links"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

         # Does the homepage have http, and if so, does https work?
         if pkg_cls.homepage.startswith("http://"):
@@ -565,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
     """Report unknown or wrong variants in directives for this package"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

         # Check "conflicts" directive
         for conflict, triggers in pkg_cls.conflicts.items():
@@ -631,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
     """Report unknown dependencies and wrong variants for dependencies"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        filename = spack.repo.path.filename_for_package_name(pkg_name)
         for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # No need to analyze virtual packages
-            if spack.repo.PATH.is_virtual(dependency_name):
+            if spack.repo.path.is_virtual(dependency_name):
                 continue

             try:
-                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
+                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
             except spack.repo.UnknownPackageError:
                 # This dependency is completely missing, so report
                 # and continue the analysis
@@ -678,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     """Ensures that variant defaults are present and parsable from cli"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
         for variant_name, entry in pkg_cls.variants.items():
             variant, _ = entry
             default_is_parsable = (
@@ -712,53 +709,27 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     return errors


-@package_directives
-def _ensure_variants_have_descriptions(pkgs, error_cls):
-    """Ensures that all variants have a description."""
-    errors = []
-    for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        for variant_name, entry in pkg_cls.variants.items():
-            variant, _ = entry
-            if not variant.description:
-                error_msg = "Variant '{}' in package '{}' is missing a description"
-                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
-
-    return errors
-
-
 @package_directives
 def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
     """Report if version constraints used in directives are not satisfiable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
+        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        filename = spack.repo.path.filename_for_package_name(pkg_name)
         dependencies_to_check = []
         for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # Skip virtual dependencies for the time being, check on
             # their versions can be added later
-            if spack.repo.PATH.is_virtual(dependency_name):
+            if spack.repo.path.is_virtual(dependency_name):
                 continue

             dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])

-        host_architecture = spack.spec.ArchSpec.default_arch()
         for s in dependencies_to_check:
             dependency_pkg_cls = None
             try:
-                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
-                # Some packages have hacks that might cause failures on some platform
-                # Allow to explicitly set conditions to skip version checks in that case
-                skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
-                skip_version_check = False
-                for condition in skip_conditions:
-                    if host_architecture.satisfies(spack.spec.Spec(condition).architecture):
-                        skip_version_check = True
-                        break
-                assert skip_version_check or any(
-                    v.intersects(s.versions) for v in list(dependency_pkg_cls.versions)
-                )
+                dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
+                assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
             except Exception:
                 summary = (
                     "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
@@ -790,7 +761,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
     except variant_exceptions as e:
         summary = pkg.name + ': wrong variant in "{0}" directive'
         summary = summary.format(directive)
-        filename = spack.repo.PATH.filename_for_package_name(pkg.name)
+        filename = spack.repo.path.filename_for_package_name(pkg.name)

         error_msg = str(e).strip()
         if isinstance(e, KeyError):
@@ -801,76 +772,3 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
         errors.append(err)

     return errors
-
-
-#: Sanity checks on package directives
-external_detection = AuditClass(
-    group="externals",
-    tag="PKG-EXTERNALS",
-    description="Sanity checks for external software detection",
-    kwargs=("pkgs",),
-)
-
-
-def packages_with_detection_tests():
-    """Return the list of packages with a corresponding detection_test.yaml file."""
-    import spack.config
-    import spack.util.path
-
-    to_be_tested = []
-    for current_repo in spack.repo.PATH.repos:
-        namespace = current_repo.namespace
-        packages_dir = pathlib.PurePath(current_repo.packages_path)
-        pattern = packages_dir / "**" / "detection_test.yaml"
-        pkgs_with_tests = [
-            f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern))
-        ]
-        to_be_tested.extend(pkgs_with_tests)
-
-    return to_be_tested
-
-
-@external_detection
-def _test_detection_by_executable(pkgs, error_cls):
-    """Test drive external detection for packages"""
-    import spack.detection
-
-    errors = []
-
-    # Filter the packages and retain only the ones with detection tests
-    pkgs_with_tests = packages_with_detection_tests()
-    selected_pkgs = []
-    for current_package in pkgs_with_tests:
-        _, unqualified_name = spack.repo.partition_package_name(current_package)
-        # Check for both unqualified name and qualified name
-        if unqualified_name in pkgs or current_package in pkgs:
-            selected_pkgs.append(current_package)
-    selected_pkgs.sort()
-
-    if not selected_pkgs:
-        summary = "No detection test to run"
-        details = [f' "{p}" has no detection test' for p in pkgs]
-        warnings.warn("\n".join([summary] + details))
-        return errors
-
-    for pkg_name in selected_pkgs:
-        for idx, test_runner in enumerate(
-            spack.detection.detection_tests(pkg_name, spack.repo.PATH)
-        ):
-            specs = test_runner.execute()
-            expected_specs = test_runner.expected_specs
-
-            not_detected = set(expected_specs) - set(specs)
-            if not_detected:
-                summary = pkg_name + ": cannot detect some specs"
-                details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)]
-                errors.append(error_cls(summary=summary, details=details))
-
-            not_expected = set(specs) - set(expected_specs)
-            if not_expected:
-                summary = pkg_name + ": detected unexpected specs"
-                msg = '"{0}" was detected, but was not expected [test_id={1}]'
-                details = [msg.format(s, idx) for s in sorted(not_expected)]
-                errors.append(error_cls(summary=summary, details=details))
-
-    return errors
@@ -9,6 +9,7 @@
 import io
 import itertools
 import json
+import multiprocessing.pool
 import os
 import re
 import shutil
@@ -23,7 +24,7 @@
 import warnings
 from contextlib import closing, contextmanager
 from gzip import GzipFile
-from typing import Dict, List, NamedTuple, Optional, Tuple, Union
+from typing import List, NamedTuple, Optional, Union
 from urllib.error import HTTPError, URLError

 import llnl.util.filesystem as fsys
@@ -34,7 +35,6 @@
 import spack.cmd
 import spack.config as config
 import spack.database as spack_db
-import spack.error
 import spack.hooks
 import spack.hooks.sbang
 import spack.mirror
@@ -49,11 +49,9 @@
 import spack.util.gpg
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
-import spack.util.timer as timer
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.caches import misc_cache_location
-from spack.package_prefs import get_package_dir_permissions, get_package_group
 from spack.relocate_text import utf8_paths_to_single_binary_regex
 from spack.spec import Spec
 from spack.stage import Stage
@@ -63,22 +61,6 @@
 _build_cache_keys_relative_path = "_pgp"


-class BuildCacheDatabase(spack_db.Database):
-    """A database for binary buildcaches.
-
-    A database supports writing buildcache index files, in which case certain fields are not
-    needed in each install record, and no locking is required. To use this feature, it provides
-    ``lock_cfg=NO_LOCK``, and override the list of ``record_fields``.
-    """
-
-    record_fields = ("spec", "ref_count", "in_buildcache")
-
-    def __init__(self, root):
-        super().__init__(root, lock_cfg=spack_db.NO_LOCK)
-        self._write_transaction_impl = llnl.util.lang.nullcontext
-        self._read_transaction_impl = llnl.util.lang.nullcontext
-
-
 class FetchCacheError(Exception):
     """Error thrown when fetching the cache failed, usually a composite error list."""

@@ -98,14 +80,14 @@ def __init__(self, errors):
         else:
             err = errors[0]
             self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
-        super().__init__(self.message)
+        super(FetchCacheError, self).__init__(self.message)


 class ListMirrorSpecsError(spack.error.SpackError):
     """Raised when unable to retrieve list of specs from the mirror"""


-class BinaryCacheIndex:
+class BinaryCacheIndex(object):
     """
     The BinaryCacheIndex tracks what specs are available on (usually remote)
     binary caches.
@@ -208,7 +190,8 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
         tmpdir = tempfile.mkdtemp()

         try:
-            db = BuildCacheDatabase(tmpdir)
+            db_root_dir = os.path.join(tmpdir, "db_root")
+            db = spack_db.Database(None, db_dir=db_root_dir, enable_transaction_locking=False)

             try:
                 self._index_file_cache.init_entry(cache_key)
@@ -216,11 +199,11 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
                 with self._index_file_cache.read_transaction(cache_key):
                     db._read_from_file(cache_path)
             except spack_db.InvalidDatabaseVersionError as e:
-                tty.warn(
+                msg = (
                     f"you need a newer Spack version to read the buildcache index for the "
                     f"following mirror: '{mirror_url}'. {e.database_version_message}"
                 )
-                return
+                raise BuildcacheIndexError(msg) from e

             spec_list = db.query_local(installed=False, in_buildcache=True)

@@ -334,9 +317,9 @@ def update(self, with_cooldown=False):
         from each configured mirror and stored locally (both in memory and
         on disk under ``_index_cache_root``)."""
         self._init_local_index_cache()
-        configured_mirror_urls = [
-            m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
-        ]
+        mirrors = spack.mirror.MirrorCollection()
+        configured_mirror_urls = [m.fetch_url for m in mirrors.values()]
         items_to_remove = []
         spec_cache_clear_needed = False
         spec_cache_regenerate_needed = not self._mirrors_for_spec
@@ -534,7 +517,9 @@ class NoOverwriteException(spack.error.SpackError):
     """Raised when a file would be overwritten"""

     def __init__(self, file_path):
-        super().__init__(f"Refusing to overwrite the following file: {file_path}")
+        super(NoOverwriteException, self).__init__(
+            f"Refusing to overwrite the following file: {file_path}"
+        )


 class NoGpgException(spack.error.SpackError):
@@ -543,7 +528,7 @@ class NoGpgException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super().__init__(msg)
+        super(NoGpgException, self).__init__(msg)


 class NoKeyException(spack.error.SpackError):
@@ -552,7 +537,7 @@ class NoKeyException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super().__init__(msg)
+        super(NoKeyException, self).__init__(msg)


 class PickKeyException(spack.error.SpackError):
@@ -563,7 +548,7 @@ class PickKeyException(spack.error.SpackError):
     def __init__(self, keys):
         err_msg = "Multiple keys available for signing\n%s\n" % keys
         err_msg += "Use spack buildcache create -k <key hash> to pick a key."
-        super().__init__(err_msg)
+        super(PickKeyException, self).__init__(err_msg)


 class NoVerifyException(spack.error.SpackError):
@@ -580,7 +565,7 @@ class NoChecksumException(spack.error.SpackError):
     """

     def __init__(self, path, size, contents, algorithm, expected, computed):
-        super().__init__(
+        super(NoChecksumException, self).__init__(
             f"{algorithm} checksum failed for {path}",
             f"Expected {expected} but got {computed}. "
             f"File size = {size} bytes. Contents = {contents!r}",
@@ -593,7 +578,7 @@ class NewLayoutException(spack.error.SpackError):
     """

     def __init__(self, msg):
-        super().__init__(msg)
+        super(NewLayoutException, self).__init__(msg)


 class UnsignedPackageException(spack.error.SpackError):
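The exception hunks above swap Python 3's zero-argument `super()` for the explicit two-argument form. Both resolve the same method through the MRO; a minimal sketch (class names here are illustrative, not from this diff):

```python
class Base:
    def __init__(self, msg):
        self.msg = msg

class ZeroArg(Base):
    def __init__(self, msg):
        super().__init__(msg)  # Python 3 shorthand

class TwoArg(Base):
    def __init__(self, msg):
        # Explicit form; also valid on Python 2, which is why older code used it
        super(TwoArg, self).__init__(msg)

assert ZeroArg("x").msg == TwoArg("x").msg == "x"
```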
@@ -625,7 +610,8 @@ def buildinfo_file_name(prefix):
     """
     Filename of the binary package meta-data file
     """
-    return os.path.join(prefix, ".spack/binary_distribution")
+    name = os.path.join(prefix, ".spack/binary_distribution")
+    return name


 def read_buildinfo_file(prefix):
@@ -646,7 +632,8 @@ class BuildManifestVisitor(BaseDirectoryVisitor):
     directories."""

     def __init__(self):
-        # Save unique identifiers of hardlinks to avoid relocating them multiple times
+        # Save unique identifiers of files to avoid
+        # relocating hardlink files for each path.
         self.visited = set()

         # Lists of files we will check
@@ -655,8 +642,6 @@ def __init__(self):

     def seen_before(self, root, rel_path):
         stat_result = os.lstat(os.path.join(root, rel_path))
-        if stat_result.st_nlink == 1:
-            return False
         identifier = (stat_result.st_dev, stat_result.st_ino)
         if identifier in self.visited:
             return True
@@ -720,7 +705,7 @@ def get_buildfile_manifest(spec):
     # look for them to decide if text file needs to be relocated or not
     prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
     prefixes.append(spack.hooks.sbang.sbang_install_path())
-    prefixes.append(str(spack.store.STORE.layout.root))
+    prefixes.append(str(spack.store.layout.root))

     # Create a giant regex that matches all prefixes
     regex = utf8_paths_to_single_binary_regex(prefixes)
@@ -733,7 +718,7 @@ def get_buildfile_manifest(spec):
     for rel_path in visitor.symlinks:
         abs_path = os.path.join(root, rel_path)
         link = os.readlink(abs_path)
-        if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
+        if os.path.isabs(link) and link.startswith(spack.store.layout.root):
             data["link_to_relocate"].append(rel_path)

     # Non-symlinks.
@@ -775,15 +760,16 @@ def hashes_to_prefixes(spec):
     }


-def get_buildinfo_dict(spec):
+def get_buildinfo_dict(spec, rel=False):
     """Create metadata for a tarball"""
     manifest = get_buildfile_manifest(spec)

     return {
         "sbang_install_path": spack.hooks.sbang.sbang_install_path(),
-        "buildpath": spack.store.STORE.layout.root,
+        "relative_rpaths": rel,
+        "buildpath": spack.store.layout.root,
         "spackprefix": spack.paths.prefix,
-        "relative_prefix": os.path.relpath(spec.prefix, spack.store.STORE.layout.root),
+        "relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
         "relocate_textfiles": manifest["text_to_relocate"],
         "relocate_binaries": manifest["binary_to_relocate"],
         "relocate_links": manifest["link_to_relocate"],
@@ -877,18 +863,32 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
         db: A spack database used for adding specs and then writing the index.
         temp_dir (str): Location to write index.json and hash for pushing
         concurrency (int): Number of parallel processes to use when fetching
-    """
-    for file in file_list:
-        contents = read_method(file)
-        # Need full spec.json name or this gets confused with index.json.
-        if file.endswith(".json.sig"):
-            specfile_json = Spec.extract_json_from_clearsig(contents)
-            fetched_spec = Spec.from_dict(specfile_json)
-        elif file.endswith(".json"):
-            fetched_spec = Spec.from_json(contents)
-        else:
-            continue
-
+
+    Return:
+        None
+    """
+
+    def _fetch_spec_from_mirror(spec_url):
+        spec_file_contents = read_method(spec_url)
+
+        if spec_file_contents:
+            # Need full spec.json name or this gets confused with index.json.
+            if spec_url.endswith(".json.sig"):
+                specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
+                return Spec.from_dict(specfile_json)
+            if spec_url.endswith(".json"):
+                return Spec.from_json(spec_file_contents)
+
+    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
+    try:
+        fetched_specs = tp.map(
+            llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
+        )
+    finally:
+        tp.terminate()
+        tp.join()
+
+    for fetched_spec in fetched_specs:
         db.add(fetched_spec, None)
         db.mark(fetched_spec, "in_buildcache", True)

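The hunk above replaces a sequential loop over spec files with a thread pool. A minimal sketch of the same pattern, assuming `llnl.util.lang.star` simply adapts `f(a, b)` to `g((a, b))` (the `fetch` function and URLs below are placeholders):

```python
import multiprocessing.pool

def star(func):
    # Unpack a single tuple argument into positional arguments.
    return lambda args: func(*args)

def fetch(url):
    return f"contents of {url}"  # stand-in for read_method(spec_url)

urls = ["mirror/a.spec.json", "mirror/b.spec.json.sig"]
tp = multiprocessing.pool.ThreadPool(processes=8)
try:
    # map() preserves input order, so results line up with the input list.
    results = tp.map(star(fetch), [(u,) for u in urls])
finally:
    tp.terminate()
    tp.join()
print(results)
```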
@@ -1062,10 +1062,13 @@ def generate_package_index(cache_prefix, concurrency=32):
     tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))

     tmpdir = tempfile.mkdtemp()
-    db = BuildCacheDatabase(tmpdir)
-    db.root = None
-    db_root_dir = db.database_directory
+    db_root_dir = os.path.join(tmpdir, "db_root")
+    db = spack_db.Database(
+        None,
+        db_dir=db_root_dir,
+        enable_transaction_locking=False,
+        record_fields=["spec", "ref_count", "in_buildcache"],
+    )

     try:
         _read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
@@ -1157,105 +1160,61 @@ def gzip_compressed_tarfile(path):
         yield tar


-def _tarinfo_name(p: str):
-    return p.lstrip("/")
+def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
+    # We only add files, symlinks, hardlinks, and directories
+    # No character devices, block devices and FIFOs should ever enter a tarball.
+    if tarinfo.isdev():
+        return None
+
+    # For distribution, it makes no sense to user/group data; since (a) they don't exist
+    # on other machines, and (b) they lead to surprises as `tar x` run as root will change
+    # ownership if it can. We want to extract as the current user. By setting owner to root,
+    # root will extract as root, and non-privileged user will extract as themselves.
+    tarinfo.uid = 0
+    tarinfo.gid = 0
+    tarinfo.uname = ""
+    tarinfo.gname = ""
+
+    # Reset mtime to epoch time, our prefixes are not truly immutable, so files may get
+    # touched; as long as the content does not change, this ensures we get stable tarballs.
+    tarinfo.mtime = 0
+
+    # Normalize mode
+    if tarinfo.isfile() or tarinfo.islnk():
+        # If user can execute, use 0o755; else 0o644
+        # This is to avoid potentially unsafe world writable & exeutable files that may get
+        # extracted when Python or tar is run with privileges
+        tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
+    else:  # symbolic link and directories
+        tarinfo.mode = 0o755
+
+    return tarinfo


-def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
-    """Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.
-    Only adds regular files, symlinks and dirs. Skips devices, fifos. Preserves hardlinks.
-    Normalizes permissions like git. Tar entries are added in depth-first pre-order, with
-    dir entries partitioned by file | dir, and sorted alphabetically, for reproducibility.
-    Partitioning ensures only one dir is in memory at a time, and sorting improves compression.
-
-    Args:
-        tar: tarfile object to add files to
-        prefix: absolute install prefix of spec"""
-    if not os.path.isabs(prefix) or not os.path.isdir(prefix):
-        raise ValueError(f"prefix '{prefix}' must be an absolute path to a directory")
-    hardlink_to_tarinfo_name: Dict[Tuple[int, int], str] = dict()
-    stat_key = lambda stat: (stat.st_dev, stat.st_ino)
-
-    try:  # skip buildinfo file if it exists
-        files_to_skip = [stat_key(os.lstat(buildinfo_file_name(prefix)))]
-    except OSError:
-        files_to_skip = []
-
-    dir_stack = [prefix]
-    while dir_stack:
-        dir = dir_stack.pop()
-
-        # Add the dir before its contents
-        dir_info = tarfile.TarInfo(_tarinfo_name(dir))
-        dir_info.type = tarfile.DIRTYPE
-        dir_info.mode = 0o755
-        tar.addfile(dir_info)
-
-        # Sort by name: reproducible & improves compression
-        with os.scandir(dir) as it:
-            entries = sorted(it, key=lambda entry: entry.name)
-
-        new_dirs = []
-        for entry in entries:
-            if entry.is_dir(follow_symlinks=False):
-                new_dirs.append(entry.path)
-                continue
-
-            file_info = tarfile.TarInfo(_tarinfo_name(entry.path))
-
-            s = entry.stat(follow_symlinks=False)
-
-            # Skip existing binary distribution files.
-            id = stat_key(s)
-            if id in files_to_skip:
-                continue
-
-            # Normalize the mode
-            file_info.mode = 0o644 if s.st_mode & 0o100 == 0 else 0o755
-
-            if entry.is_symlink():
-                file_info.type = tarfile.SYMTYPE
-                file_info.linkname = os.readlink(entry.path)
-                tar.addfile(file_info)
-
-            elif entry.is_file(follow_symlinks=False):
-                # Deduplicate hardlinks
-                if s.st_nlink > 1:
-                    if id in hardlink_to_tarinfo_name:
-                        file_info.type = tarfile.LNKTYPE
-                        file_info.linkname = hardlink_to_tarinfo_name[id]
-                        tar.addfile(file_info)
-                        continue
-                    hardlink_to_tarinfo_name[id] = file_info.name
-
-                # If file not yet seen, copy it.
-                file_info.type = tarfile.REGTYPE
-                file_info.size = s.st_size
-
-                with open(entry.path, "rb") as f:
-                    tar.addfile(file_info, f)
-
-        dir_stack.extend(reversed(new_dirs))  # we pop, so reverse to stay alphabetical
+def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
+    # Serialize buildinfo for the tarball
+    bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
+    tarinfo = tarfile.TarInfo(name=path)
+    tarinfo.size = len(bstring)
+    tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))


-def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
+def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
     with gzip_compressed_tarfile(tarfile_path) as tar:
-        # Tarball the install prefix
-        tarfile_of_spec_prefix(tar, binaries_dir)
-
-        # Serialize buildinfo for the tarball
-        bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
-        tarinfo = tarfile.TarInfo(name=_tarinfo_name(buildinfo_file_name(binaries_dir)))
-        tarinfo.type = tarfile.REGTYPE
-        tarinfo.size = len(bstring)
-        tarinfo.mode = 0o644
-        tar.addfile(tarinfo, io.BytesIO(bstring))
+        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
+        tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)


 class PushOptions(NamedTuple):
     #: Overwrite existing tarball/metadata files in buildcache
     force: bool = False

+    #: Whether to use relative RPATHs
+    relative: bool = False
+
+    #: Allow absolute paths to package prefixes when creating a tarball
+    allow_root: bool = False
+
     #: Regenerated indices after pushing
     regenerate_index: bool = False

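Both sides of the hunk above aim at reproducible tarballs by normalizing per-entry metadata (owner, mtime, mode). A minimal sketch of that idea, assuming a plain uncompressed tar so no gzip timestamp leaks into the output; `some_dir` is a placeholder:

```python
import hashlib
import tarfile

def normalize(info: tarfile.TarInfo):
    if info.isdev():
        return None  # drop character/block devices and FIFOs
    info.uid = info.gid = 0
    info.uname = info.gname = ""
    info.mtime = 0
    if info.isfile() or info.islnk():
        info.mode = 0o755 if info.mode & 0o100 else 0o644
    else:
        info.mode = 0o755  # directories and symlinks
    return info

def tar_digest(tar_path: str, tree: str) -> str:
    with tarfile.open(tar_path, "w") as tar:
        tar.add(tree, filter=normalize)
    with open(tar_path, "rb") as f:
        return hashlib.sha256(f.read()).hexdigest()

# Two runs over unchanged content should print identical digests.
print(tar_digest("/tmp/a.tar", "some_dir"))
print(tar_digest("/tmp/b.tar", "some_dir"))
```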
@@ -1300,7 +1259,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     # without concretizing with the current spack packages
     # and preferences

-    spec_file = spack.store.STORE.layout.spec_file_path(spec)
+    spec_file = spack.store.layout.spec_file_path(spec)
     specfile_name = tarball_name(spec, ".spec.json")
     specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
     signed_specfile_path = "{0}.sig".format(specfile_path)
@@ -1321,12 +1280,41 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     ):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

-    binaries_dir = spec.prefix
+    pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
+    workdir = os.path.join(stage_dir, pkg_dir)
+
+    # TODO: We generally don't want to mutate any files, but when using relative
+    # mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
+    # For now, we only make a full copy of the spec prefix when in relative mode.
+
+    if options.relative:
+        # tarfile is used because it preserves hardlink etc best.
+        binaries_dir = workdir
+        temp_tarfile_name = tarball_name(spec, ".tar")
+        temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
+        with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
+            tar.add(name="%s" % spec.prefix, arcname=".")
+        with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
+            tar.extractall(workdir)
+        os.remove(temp_tarfile_path)
+    else:
+        binaries_dir = spec.prefix

     # create info for later relocation and create tar
-    buildinfo = get_buildinfo_dict(spec)
+    buildinfo = get_buildinfo_dict(spec, options.relative)

-    _do_create_tarball(tarfile_path, binaries_dir, buildinfo)
+    # optionally make the paths in the binaries relative to each other
+    # in the spack install tree before creating tarball
+    if options.relative:
+        make_package_relative(workdir, spec, buildinfo, options.allow_root)
+    elif not options.allow_root:
+        ensure_package_relocatable(buildinfo, binaries_dir)
+
+    _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
+
+    # remove copy of install directory
+    if options.relative:
+        shutil.rmtree(workdir)

     # get the sha256 checksum of the tarball
     checksum = checksum_tarball(tarfile_path)
@@ -1340,7 +1328,16 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     else:
         raise ValueError("{0} not a valid spec file type".format(spec_file))
     spec_dict["buildcache_layout_version"] = 1
-    spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}
+    bchecksum = {}
+    bchecksum["hash_algorithm"] = "sha256"
+    bchecksum["hash"] = checksum
+    spec_dict["binary_cache_checksum"] = bchecksum
+    # Add original install prefix relative to layout root to spec.json.
+    # This will be used to determine is the directory layout has changed.
+    buildinfo = {}
+    buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
+    buildinfo["relative_rpaths"] = options.relative
+    spec_dict["buildinfo"] = buildinfo

     with open(specfile_path, "w") as outfile:
         # Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
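`checksum_tarball` itself is not shown in this diff; the sha256 recorded under `binary_cache_checksum` is typically computed by streaming the file in blocks rather than reading it whole. A generic sketch:

```python
import hashlib

def sha256_of_file(path: str, blocksize: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            h.update(block)
    return h.hexdigest()
```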
@@ -1397,7 +1394,7 @@ def specs_to_be_packaged(
     packageable = lambda n: not n.external and n.installed

     # Mass install check
-    with spack.store.STORE.db.read_transaction():
+    with spack.store.db.read_transaction():
         return list(filter(packageable, nodes))


@@ -1458,7 +1455,7 @@ def try_fetch(url_to_fetch):

     try:
         stage.fetch()
-    except spack.error.FetchError:
+    except web_util.FetchError:
         stage.destroy()
         return None

@@ -1499,9 +1496,8 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
             "signature_verified": "true-if-binary-pkg-was-already-verified"
         }
     """
-    configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
-    if not configured_mirrors:
-        tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
+    if not spack.mirror.MirrorCollection():
+        tty.die("Please add a spack mirror to allow " + "download of pre-compiled packages.")

     tarball = tarball_path_name(spec, ".spack")
     specfile_prefix = tarball_name(spec, ".spec")
@@ -1518,7 +1514,11 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     # we need was in an un-indexed mirror. No need to check any
     # mirror for the spec twice though.
     try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
-    try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
+    try_next = [
+        i.fetch_url
+        for i in spack.mirror.MirrorCollection().values()
+        if i.fetch_url not in try_first
+    ]

     for url in try_first + try_next:
         mirrors_to_try.append(
@@ -1596,6 +1596,41 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     return None


+def make_package_relative(workdir, spec, buildinfo, allow_root):
+    """
+    Change paths in binaries to relative paths. Change absolute symlinks
+    to relative symlinks.
+    """
+    prefix = spec.prefix
+    old_layout_root = buildinfo["buildpath"]
+    orig_path_names = list()
+    cur_path_names = list()
+    for filename in buildinfo["relocate_binaries"]:
+        orig_path_names.append(os.path.join(prefix, filename))
+        cur_path_names.append(os.path.join(workdir, filename))
+
+    platform = spack.platforms.by_name(spec.platform)
+    if "macho" in platform.binary_formats:
+        relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
+
+    if "elf" in platform.binary_formats:
+        relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
+
+    allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
+    orig_path_names = list()
+    cur_path_names = list()
+    for linkname in buildinfo.get("relocate_links", []):
+        orig_path_names.append(os.path.join(prefix, linkname))
+        cur_path_names.append(os.path.join(workdir, linkname))
+    relocate.make_link_relative(cur_path_names, orig_path_names)
+
+
+def ensure_package_relocatable(buildinfo, binaries_dir):
+    """Check if package binaries are relocatable."""
+    binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
+    relocate.ensure_binaries_are_relocatable(binaries)
+
+
 def dedupe_hardlinks_if_necessary(root, buildinfo):
     """Updates a buildinfo dict for old archives that did
     not dedupe hardlinks. De-duping hardlinks is necessary
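`make_link_relative` (restored above) lives in `spack.relocate`, which is not part of this diff. The core operation it performs on each absolute in-prefix symlink is, roughly, this sketch:

```python
import os

def make_one_link_relative(link_path: str):
    # Rewrite an absolute symlink target relative to the link's directory,
    # so the link survives moving the whole prefix elsewhere.
    target = os.readlink(link_path)
    if os.path.isabs(target):
        relative = os.path.relpath(target, os.path.dirname(link_path))
        os.unlink(link_path)
        os.symlink(relative, link_path)
```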
@@ -1621,10 +1656,9 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
         for rel_path in buildinfo[key]:
             stat_result = os.lstat(os.path.join(root, rel_path))
             identifier = (stat_result.st_dev, stat_result.st_ino)
-            if stat_result.st_nlink > 1:
-                if identifier in visited:
-                    continue
-                visited.add(identifier)
+            if identifier in visited:
+                continue
+            visited.add(identifier)
             new_list.append(rel_path)
         buildinfo[key] = new_list

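The rewritten loop above drops the `st_nlink > 1` fast path: every entry is now keyed by `(st_dev, st_ino)`, the pair that all names of one hardlinked file share. A self-contained sketch of that dedup:

```python
import os

def dedupe_paths(root: str, rel_paths):
    """Keep one representative per hardlink group, in input order."""
    visited = set()
    unique = []
    for rel_path in rel_paths:
        st = os.lstat(os.path.join(root, rel_path))
        key = (st.st_dev, st.st_ino)
        if key in visited:
            continue  # another name for a file we already kept
        visited.add(key)
        unique.append(rel_path)
    return unique
```

The removed `st_nlink > 1` check kept singleton files out of the `visited` set, trading a little extra code for less memory on large prefixes.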
@@ -1635,7 +1669,7 @@ def relocate_package(spec):
     """
     workdir = str(spec.prefix)
     buildinfo = read_buildinfo_file(workdir)
-    new_layout_root = str(spack.store.STORE.layout.root)
+    new_layout_root = str(spack.store.layout.root)
     new_prefix = str(spec.prefix)
     new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
     new_spack_prefix = str(spack.paths.prefix)
@@ -1820,46 +1854,16 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
     return tarfile_path


-def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
-    """Strip the top-level directory `prefix` from the member names in a tarfile."""
-    # Including trailing /, otherwise we end up with absolute paths.
-    regex = re.compile(re.escape(prefix) + "/*")
-
-    # Remove the top-level directory from the member (link)names.
-    # Note: when a tarfile is created, relative in-prefix symlinks are
-    # expanded to matching member names of tarfile entries. So, we have
-    # to ensure that those are updated too.
-    # Absolute symlinks are copied verbatim -- relocation should take care of
-    # them.
-    for m in tar.getmembers():
-        result = regex.match(m.name)
-        assert result is not None
-        m.name = m.name[result.end() :]
-        if m.linkname:
-            result = regex.match(m.linkname)
-            if result:
-                m.linkname = m.linkname[result.end() :]
-
-
-def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
+def extract_tarball(spec, download_result, unsigned=False, force=False):
     """
     extract binary tarball for given package into install area
     """
-    timer.start("extract")
     if os.path.exists(spec.prefix):
         if force:
             shutil.rmtree(spec.prefix)
         else:
             raise NoOverwriteException(str(spec.prefix))

-    # Create the install prefix
-    fsys.mkdirp(
-        spec.prefix,
-        mode=get_package_dir_permissions(spec),
-        group=get_package_group(spec),
-        default_perms="parents",
-    )
-
     specfile_path = download_result["specfile_stage"].save_filename

     with open(specfile_path, "r") as inputfile:
@@ -1913,59 +1917,56 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
                 tarfile_path, size, contents, "sha256", expected, local_checksum
             )

-    try:
-        with closing(tarfile.open(tarfile_path, "r")) as tar:
-            # Remove install prefix from tarfil to extract directly into spec.prefix
-            _tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
-            tar.extractall(path=spec.prefix)
-    except Exception:
-        shutil.rmtree(spec.prefix, ignore_errors=True)
-        _delete_staged_downloads(download_result)
-        raise
+    new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
+    # if the original relative prefix is in the spec file use it
+    buildinfo = spec_dict.get("buildinfo", {})
+    old_relative_prefix = buildinfo.get("relative_prefix", new_relative_prefix)
+    rel = buildinfo.get("relative_rpaths")
+    info = "old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s"
+    tty.debug(info % (old_relative_prefix, new_relative_prefix, rel), level=2)
+
+    # Extract the tarball into the store root, presumably on the same filesystem.
+    # The directory created is the base directory name of the old prefix.
+    # Moving the old prefix name to the new prefix location should preserve
+    # hard links and symbolic links.
+    extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
+    mkdirp(extract_tmp)
+    extracted_dir = os.path.join(extract_tmp, old_relative_prefix.split(os.path.sep)[-1])
+
+    with closing(tarfile.open(tarfile_path, "r")) as tar:
+        try:
+            tar.extractall(path=extract_tmp)
+        except Exception as e:
+            _delete_staged_downloads(download_result)
+            shutil.rmtree(extracted_dir)
+            raise e
+    try:
+        shutil.move(extracted_dir, spec.prefix)
+    except Exception as e:
+        _delete_staged_downloads(download_result)
+        shutil.rmtree(extracted_dir)
+        raise e
     os.remove(tarfile_path)
     os.remove(specfile_path)
-    timer.stop("extract")

-    timer.start("relocate")
     try:
         relocate_package(spec)
     except Exception as e:
-        shutil.rmtree(spec.prefix, ignore_errors=True)
+        shutil.rmtree(spec.prefix)
         raise e
     else:
         manifest_file = os.path.join(
-            spec.prefix,
-            spack.store.STORE.layout.metadata_dir,
-            spack.store.STORE.layout.manifest_file_name,
+            spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
         )
         if not os.path.exists(manifest_file):
             spec_id = spec.format("{name}/{hash:7}")
             tty.warn("No manifest file in tarball for spec %s" % spec_id)
     finally:
         if tmpdir:
-            shutil.rmtree(tmpdir, ignore_errors=True)
+            shutil.rmtree(tmpdir)
         if os.path.exists(filename):
             os.remove(filename)
         _delete_staged_downloads(download_result)
-    timer.stop("relocate")
-
-
-def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
-    # Get the shortest length directory.
-    common_prefix = min((e.name for e in tar.getmembers() if e.isdir()), key=len, default=None)
-
-    if common_prefix is None:
-        raise ValueError("Tarball does not contain a common prefix")
-
-    # Validate that each file starts with the prefix
-    for member in tar.getmembers():
-        if not member.name.startswith(common_prefix):
-            raise ValueError(
-                f"Tarball contains file {member.name} outside of prefix {common_prefix}"
-            )
-
-    return common_prefix


 def install_root_node(spec, unsigned=False, force=False, sha256=None):
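The deleted `_tar_strip_component`/`_ensure_common_prefix` pair implements roughly what `tar --strip-components=1` does. A compact sketch of the same idea (the paths in the commented call are placeholders):

```python
import re
import tarfile

def extract_stripped(tar_path: str, dest: str) -> None:
    with tarfile.open(tar_path, "r") as tar:
        # Take the shortest directory name as the common top-level prefix.
        top = min((m.name for m in tar.getmembers() if m.isdir()), key=len)
        regex = re.compile(re.escape(top) + "/*")
        for m in tar.getmembers():
            match = regex.match(m.name)
            if match:
                m.name = m.name[match.end():]
            # Relative symlink targets are stored as member names, so they
            # need the same rewrite; absolute ones are left to relocation.
            if m.linkname:
                match = regex.match(m.linkname)
                if match:
                    m.linkname = m.linkname[match.end():]
        tar.extractall(path=dest)

# extract_stripped("/tmp/pkg.tar.gz", "/opt/prefix")  # illustrative paths
```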
@@ -2014,7 +2015,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, unsigned, force)
     spack.hooks.post_install(spec, False)
-    spack.store.STORE.db.add(spec, spack.store.STORE.layout)
+    spack.store.db.add(spec, spack.store.layout)


 def install_single_spec(spec, unsigned=False, force=False):
@@ -2039,9 +2040,7 @@ def try_direct_fetch(spec, mirrors=None):
     specfile_is_signed = False
     found_specs = []

-    binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
-
-    for mirror in binary_mirrors:
+    for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
         buildcache_fetch_url_json = url_util.join(
             mirror.fetch_url, _build_cache_relative_path, specfile_name
         )
@@ -2104,7 +2103,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
     if spec is None:
         return []

-    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
+    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check):
         tty.debug("No Spack mirrors are currently configured")
         return {}

@@ -2143,7 +2142,7 @@ def clear_spec_cache():

 def get_keys(install=False, trust=False, force=False, mirrors=None):
     """Get pgp public keys available on mirror with suffix .pub"""
-    mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)
+    mirror_collection = mirrors or spack.mirror.MirrorCollection()

     if not mirror_collection:
         tty.die("Please add a spack mirror to allow " + "download of build caches.")
@@ -2186,7 +2185,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
         if not os.path.exists(stage.save_filename):
             try:
                 stage.fetch()
-            except spack.error.FetchError:
+            except web_util.FetchError:
                 continue

         tty.debug("Found key {0}".format(fingerprint))
@@ -2304,7 +2303,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):

     """
     rebuilds = {}
-    for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
+    for mirror in spack.mirror.MirrorCollection(mirrors).values():
         tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))

         rebuild_list = []
@@ -2338,7 +2337,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
         try:
             stage.fetch()
             break
-        except spack.error.FetchError as e:
+        except web_util.FetchError as e:
             tty.debug(e)
     else:
         if fail_if_missing:
@@ -2348,7 +2347,7 @@ def _download_buildcache_entry(mirror_root, descriptions):


 def download_buildcache_entry(file_descriptions, mirror_url=None):
-    if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
+    if not mirror_url and not spack.mirror.MirrorCollection():
         tty.die(
             "Please provide or add a spack mirror to allow " + "download of buildcache entries."
         )
@@ -2357,7 +2356,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
         mirror_root = os.path.join(mirror_url, _build_cache_relative_path)
         return _download_buildcache_entry(mirror_root, file_descriptions)

-    for mirror in spack.mirror.MirrorCollection(binary=True).values():
+    for mirror in spack.mirror.MirrorCollection().values():
         mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path)

         if _download_buildcache_entry(mirror_root, file_descriptions):
@@ -2396,7 +2395,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
     return download_buildcache_entry(files_to_fetch, mirror_url)


-class BinaryCacheQuery:
+class BinaryCacheQuery(object):
     """Callable object to query if a spec is in a binary cache"""

     def __init__(self, all_architectures):
@@ -2415,12 +2414,22 @@ def __init__(self, all_architectures):
|
|||||||
|
|
||||||
self.possible_specs = specs
|
self.possible_specs = specs
|
||||||
|
|
||||||
def __call__(self, spec: Spec, **kwargs):
|
def __call__(self, spec, **kwargs):
|
||||||
"""
|
"""
|
||||||
Args:
|
Args:
|
||||||
spec: The spec being searched for
|
spec (str): The spec being searched for in its string representation or hash.
|
||||||
"""
|
"""
|
||||||
return [s for s in self.possible_specs if s.satisfies(spec)]
|
matches = []
|
||||||
|
if spec.startswith("/"):
|
||||||
|
# Matching a DAG hash
|
||||||
|
query_hash = spec.replace("/", "")
|
||||||
|
for candidate_spec in self.possible_specs:
|
||||||
|
if candidate_spec.dag_hash().startswith(query_hash):
|
||||||
|
matches.append(candidate_spec)
|
||||||
|
else:
|
||||||
|
# Matching a spec constraint
|
||||||
|
matches = [s for s in self.possible_specs if s.satisfies(spec)]
|
||||||
|
return matches
|
||||||
|
|
||||||
|
|
||||||
class FetchIndexError(Exception):
|
class FetchIndexError(Exception):
|
||||||
|
|||||||
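Aside: the `__call__` rework in the last hunk above dispatches on whether the query string is a `/`-prefixed DAG-hash prefix or an ordinary spec constraint. A minimal standalone sketch of that dispatch, using an illustrative stand-in class (DummySpec is not the real spack.spec.Spec, and satisfies() here is reduced to name equality):

# Sketch of BinaryCacheQuery.__call__'s hash-prefix vs. constraint dispatch.
class DummySpec:
    def __init__(self, name, hash_):
        self.name = name
        self._hash = hash_

    def dag_hash(self):
        return self._hash

    def satisfies(self, constraint):
        # Real Spack performs full constraint matching; name equality suffices here.
        return self.name == constraint


def query(possible_specs, spec):
    if spec.startswith("/"):
        # A leading slash means "match by DAG hash prefix"
        query_hash = spec.replace("/", "")
        return [s for s in possible_specs if s.dag_hash().startswith(query_hash)]
    # Otherwise treat the string as a spec constraint
    return [s for s in possible_specs if s.satisfies(spec)]


specs = [DummySpec("zlib", "abc123"), DummySpec("cmake", "def456")]
assert [s.name for s in query(specs, "/abc")] == ["zlib"]
assert [s.name for s in query(specs, "cmake")] == ["cmake"]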
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Function and classes needed to bootstrap Spack itself."""

-from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
+from .config import ensure_bootstrap_configuration, is_bootstrapping
 from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message

@@ -18,5 +18,4 @@
     "ensure_environment_dependencies",
     "BootstrapEnvironment",
     "status_message",
-    "store_path",
 ]
@@ -50,7 +50,7 @@ def _try_import_from_store(
         # We have to run as part of this python interpreter
         query_spec += " ^" + spec_for_current_python()

-    installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
+    installed_specs = spack.store.db.query(query_spec, installed=True)

     for candidate_spec in installed_specs:
         pkg = candidate_spec["python"].package

@@ -183,7 +183,7 @@ def _executables_in_store(
     executables_str = ", ".join(executables)
     msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
     tty.debug(msg.format(executables_str, query_spec))
-    installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
+    installed_specs = spack.store.db.query(query_spec, installed=True)
     if installed_specs:
         for concrete_spec in installed_specs:
             bin_dir = concrete_spec.prefix.bin
@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
 def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
     tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
     config_scopes: MutableSequence["spack.config.ConfigScope"] = [
-        spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
+        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
     ]
-    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
+    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
     for name, path in configuration_paths:
         platform = spack.platforms.host().name
         platform_scope = spack.config.ConfigScope(

@@ -150,19 +150,18 @@ def _add_compilers_if_missing() -> None:

 @contextlib.contextmanager
 def _ensure_bootstrap_configuration() -> Generator:
-    spack.store.ensure_singleton_created()
     bootstrap_store_path = store_path()
     user_configuration = _read_and_sanitize_configuration()
     with spack.environment.no_active_environment():
         with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
             spack.platforms.real_host()
-        ), spack.repo.use_repositories(spack.paths.packages_path):
+        ), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store(
+            bootstrap_store_path
+        ):
             # Default configuration scopes excluding command line
             # and builtin but accounting for platform specific scopes
             config_scopes = _bootstrap_config_scopes()
-            with spack.config.use_configuration(*config_scopes), spack.store.use_store(
-                bootstrap_store_path, extra_data={"padded_length": 0}
-            ):
+            with spack.config.use_configuration(*config_scopes):
                 # We may need to compile code from sources, so ensure we
                 # have compilers for the current platform
                 _add_compilers_if_missing()
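The hunk above only moves where `spack.store.use_store(...)` is entered: one side stacks it with the platform/repo contexts, the other with `use_configuration()`. A self-contained sketch of how such stacked context managers compose inside a generator-based contextmanager (all names below are illustrative stand-ins, not the real Spack API):

import contextlib

@contextlib.contextmanager
def use_store(path):
    print("enter store", path)
    yield
    print("exit store", path)

@contextlib.contextmanager
def use_configuration(*scopes):
    print("enter config", scopes)
    yield
    print("exit config", scopes)

@contextlib.contextmanager
def ensure_bootstrap_configuration():
    # Whichever `with` statement use_store() joins, the body below runs with
    # both contexts active; only the enter/exit ordering changes.
    with use_store("/bootstrap/store"), use_configuration("_builtin", "bootstrap"):
        yield

with ensure_bootstrap_configuration():
    print("bootstrapping")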
@@ -476,22 +476,15 @@ def ensure_executables_in_path_or_raise(
 def _add_externals_if_missing() -> None:
     search_list = [
         # clingo
-        "cmake",
-        "bison",
+        spack.repo.path.get_pkg_class("cmake"),
+        spack.repo.path.get_pkg_class("bison"),
         # GnuPG
-        "gawk",
-        # develop deps
-        "git",
+        spack.repo.path.get_pkg_class("gawk"),
     ]
     if IS_WINDOWS:
-        search_list.append("winbison")
-    externals = spack.detection.by_path(search_list)
-    # System git is typically deprecated, so mark as non-buildable to force it as external
-    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
-    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
-    spack.detection.update_configuration(
-        non_buildable_externals, scope="bootstrap", buildable=False
-    )
+        search_list.append(spack.repo.path.get_pkg_class("winbison"))
+    detected_packages = spack.detection.by_executable(search_list)
+    spack.detection.update_configuration(detected_packages, scope="bootstrap")


 def clingo_root_spec() -> str:
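A pure-Python illustration of the buildable/non-buildable split performed on the left-hand side of the hunk above: "git" is peeled out of the detected externals so it can be registered with buildable=False. The dict values are made-up placeholders:

externals = {"cmake": ["cmake@3.24"], "bison": ["bison@3.8"], "git": ["git@2.40"]}
# Pop "git" into its own bucket; everything left stays buildable.
non_buildable = {k: externals.pop(k) for k in ("git",) if k in externals}
assert "git" not in externals
assert non_buildable == {"git": ["git@2.40"]}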
@@ -15,15 +15,14 @@

 from llnl.util import tty

+import spack.build_environment
 import spack.environment
 import spack.tengine
-import spack.util.cpus
 import spack.util.executable
 from spack.environment import depfile

 from ._common import _root_spec
 from .config import root_path, spec_for_current_python, store_path
-from .core import _add_externals_if_missing


 class BootstrapEnvironment(spack.environment.Environment):

@@ -137,7 +136,7 @@ def _install_with_depfile(self) -> None:
             "-C",
             str(self.environment_root()),
             "-j",
-            str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
+            str(spack.build_environment.determine_number_of_jobs(parallel=True)),
             **kwargs,
         )

@@ -176,17 +175,16 @@ def black_root_spec() -> str:

 def flake8_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
-    return _root_spec("py-flake8@3.8.2:")
+    return _root_spec("py-flake8")


 def pytest_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
-    return _root_spec("py-pytest@6.2.4:")
+    return _root_spec("py-pytest")


 def ensure_environment_dependencies() -> None:
     """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
-    _add_externals_if_missing()
     with BootstrapEnvironment() as env:
         env.update_installations()
         env.update_syspath_and_environ()
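For orientation, the `_install_with_depfile` hunk above is assembling a plain `make` invocation against the environment's generated depfile. A sketch of the resulting argv, with placeholder values where the diff elides them:

env_root = "/path/to/bootstrap/env"  # placeholder for str(self.environment_root())
jobs = 4  # placeholder for determine_number_of_jobs(parallel=True)
argv = ["make", "-C", env_root, "-j", str(jobs)]
print(" ".join(argv))
# e.g. via the standard library instead of spack.util.executable:
# import subprocess; subprocess.run(argv, check=True)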
@@ -43,7 +43,6 @@
 from typing import List, Tuple

 import llnl.util.tty as tty
-from llnl.string import plural
 from llnl.util.filesystem import join_path
 from llnl.util.lang import dedupe
 from llnl.util.symlink import symlink

@@ -69,7 +68,7 @@
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import spack_install_test_log
 from spack.installer import InstallError
-from spack.util.cpus import determine_number_of_jobs
+from spack.util.cpus import cpus_available
 from spack.util.environment import (
     SYSTEM_DIRS,
     EnvironmentModifications,

@@ -83,6 +82,7 @@
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
 from spack.util.module_cmd import load_module, module, path_from_modules
+from spack.util.string import plural

 #
 # This can be set by the user to globally disable parallel builds.

@@ -148,7 +148,7 @@ class MakeExecutable(Executable):

     def __init__(self, name, jobs, **kwargs):
         supports_jobserver = kwargs.pop("supports_jobserver", True)
-        super().__init__(name, **kwargs)
+        super(MakeExecutable, self).__init__(name, **kwargs)
         self.supports_jobserver = supports_jobserver
         self.jobs = jobs

@@ -175,7 +175,7 @@ def __call__(self, *args, **kwargs):
         if jobs_env_jobs is not None:
             kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}

-        return super().__call__(*args, **kwargs)
+        return super(MakeExecutable, self).__call__(*args, **kwargs)


 def _on_cray():

@@ -537,6 +537,39 @@ def update_compiler_args_for_dep(dep):
     env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))


+def determine_number_of_jobs(
+    parallel=False, command_line=None, config_default=None, max_cpus=None
+):
+    """
+    Packages that require sequential builds need 1 job. Otherwise we use the
+    number of jobs set on the command line. If not set, then we use the config
+    defaults (which is usually set through the builtin config scope), but we
+    cap to the number of CPUs available to avoid oversubscription.
+
+    Parameters:
+        parallel (bool or None): true when package supports parallel builds
+        command_line (int or None): command line override
+        config_default (int or None): config default number of jobs
+        max_cpus (int or None): maximum number of CPUs available. When None, this
+            value is automatically determined.
+    """
+    if not parallel:
+        return 1
+
+    if command_line is None and "command_line" in spack.config.scopes():
+        command_line = spack.config.get("config:build_jobs", scope="command_line")
+
+    if command_line is not None:
+        return command_line
+
+    max_cpus = max_cpus or cpus_available()
+
+    # in some rare cases _builtin config may not be set, so default to max 16
+    config_default = config_default or spack.config.get("config:build_jobs", 16)
+
+    return min(max_cpus, config_default)
+
+
 def set_module_variables_for_package(pkg):
     """Populate the Python module of a package with some useful global names.
     This makes things easier for package writers.

@@ -994,7 +1027,7 @@ def get_cmake_prefix_path(pkg):


 def _setup_pkg_and_run(
-    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
+    serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
     context = kwargs.get("context", "build")

@@ -1015,12 +1048,12 @@ def _setup_pkg_and_run(
             pkg, dirty=kwargs.get("dirty", False), context=context
         )
         return_value = function(pkg, kwargs)
-        write_pipe.send(return_value)
+        child_pipe.send(return_value)

     except StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
-        write_pipe.send(e)
+        child_pipe.send(e)
     except BaseException:
         # catch ANYTHING that goes wrong in the child process
         exc_type, exc, tb = sys.exc_info()

@@ -1069,10 +1102,10 @@ def _setup_pkg_and_run(
             context,
             package_context,
         )
-        write_pipe.send(ce)
+        child_pipe.send(ce)

     finally:
-        write_pipe.close()
+        child_pipe.close()
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()

@@ -1116,7 +1149,7 @@ def child_fun():
     For more information on `multiprocessing` child process creation
     mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
     """
-    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
+    parent_pipe, child_pipe = multiprocessing.Pipe()
     input_multiprocess_fd = None
     jobserver_fd1 = None
     jobserver_fd2 = None

@@ -1141,7 +1174,7 @@ def child_fun():
                 serialized_pkg,
                 function,
                 kwargs,
-                write_pipe,
+                child_pipe,
                 input_multiprocess_fd,
                 jobserver_fd1,
                 jobserver_fd2,

@@ -1150,12 +1183,6 @@ def child_fun():

         p.start()

-        # We close the writable end of the pipe now to be sure that p is the
-        # only process which owns a handle for it. This ensures that when p
-        # closes its handle for the writable end, read_pipe.recv() will
-        # promptly report the readable end as being ready.
-        write_pipe.close()
-
     except InstallError as e:
         e.pkg = pkg
         raise

@@ -1165,16 +1192,7 @@ def child_fun():
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()

-    def exitcode_msg(p):
-        typ = "exit" if p.exitcode >= 0 else "signal"
-        return f"{typ} {abs(p.exitcode)}"
-
-    try:
-        child_result = read_pipe.recv()
-    except EOFError:
-        p.join()
-        raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
-
+    child_result = parent_pipe.recv()
     p.join()

     # If returns a StopPhase, raise it

@@ -1194,10 +1212,6 @@ def exitcode_msg(p):
         child_result.print_context()
         raise child_result

-    # Fallback. Usually caught beforehand in EOFError above.
-    if p.exitcode != 0:
-        raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
-
     return child_result


@@ -1242,8 +1256,9 @@ def make_stack(tb, stack=None):
         func = getattr(obj, tb.tb_frame.f_code.co_name, "")
         if func:
             typename, *_ = func.__qualname__.partition(".")
-            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
-                break
+
+        if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
+            break
     else:
         return None

@@ -1317,7 +1332,7 @@ class ChildError(InstallError):
     build_errors = [("spack.util.executable", "ProcessError")]

     def __init__(self, msg, module, classname, traceback_string, log_name, log_type, context):
-        super().__init__(msg)
+        super(ChildError, self).__init__(msg)
         self.module = module
         self.name = classname
         self.traceback = traceback_string

@@ -1358,7 +1373,7 @@ def long_message(self):
             test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
             if os.path.isfile(test_log):
                 out.write("\nSee test log for details:\n")
-                out.write("  {0}\n".format(test_log))
+                out.write("  {0}n".format(test_log))

         return out.getvalue()
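The pipe hunks above hinge on one detail the removed comment explains: with a one-way Pipe(duplex=False), the parent must close its copy of the writable end so that recv() reports EOF promptly if the child dies before sending. A self-contained demo of that pattern (plain multiprocessing, no Spack APIs):

import multiprocessing

def child(write_pipe):
    write_pipe.send("result")
    write_pipe.close()

if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    write_pipe.close()  # parent gives up its handle on the writable end
    try:
        print(read_pipe.recv())
    except EOFError:
        # Raised promptly if the child exits without sending anything.
        print("child exited without sending a result")
    p.join()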
@@ -39,7 +39,7 @@ def check_paths(path_list, filetype, predicate):
     check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
     check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)

-    ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
+    ignore_file = llnl.util.lang.match_predicate(spack.store.layout.hidden_file_regexes)
     if all(map(ignore_file, os.listdir(pkg.prefix))):
         msg = "Install failed for {0}. Nothing was installed!"
         raise spack.installer.InstallError(msg.format(pkg.name))
@@ -55,8 +55,7 @@ def flags_to_build_system_args(self, flags):
         setattr(self, "configure_flag_args", [])
         for flag, values in flags.items():
             if values:
-                var_name = "LIBS" if flag == "ldlibs" else flag.upper()
-                values_str = "{0}={1}".format(var_name, " ".join(values))
+                values_str = "{0}={1}".format(flag.upper(), " ".join(values))
                 self.configure_flag_args.append(values_str)
         # Spack's fflags are meant for both F77 and FC, therefore we
         # additionaly set FCFLAGS if required.
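A pure-Python rendering of the mapping in the hunk above: ldlibs is the one flag whose configure variable ("LIBS") does not match its upper-cased name. The flag values are made-up examples:

flags = {"cflags": ["-O2"], "ldlibs": ["-lm"]}
configure_flag_args = []
for flag, values in flags.items():
    if values:
        # Left-hand side of the hunk: special-case ldlibs -> LIBS.
        var_name = "LIBS" if flag == "ldlibs" else flag.upper()
        configure_flag_args.append("{0}={1}".format(var_name, " ".join(values)))
assert configure_flag_args == ["CFLAGS=-O2", "LIBS=-lm"]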
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import collections.abc
 import os
 from typing import Tuple

@@ -14,24 +13,21 @@
 from .cmake import CMakeBuilder, CMakePackage


-def cmake_cache_path(name, value, comment="", force=False):
+def cmake_cache_path(name, value, comment=""):
     """Generate a string for a cmake cache variable"""
-    force_str = " FORCE" if force else ""
-    return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)
+    return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)


-def cmake_cache_string(name, value, comment="", force=False):
+def cmake_cache_string(name, value, comment=""):
     """Generate a string for a cmake cache variable"""
-    force_str = " FORCE" if force else ""
-    return 'set({0} "{1}" CACHE STRING "{2}"{3})\n'.format(name, value, comment, force_str)
+    return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)


-def cmake_cache_option(name, boolean_value, comment="", force=False):
+def cmake_cache_option(name, boolean_value, comment=""):
     """Generate a string for a cmake configuration option"""

     value = "ON" if boolean_value else "OFF"
-    force_str = " FORCE" if force else ""
-    return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)
+    return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)


 class CachedCMakeBuilder(CMakeBuilder):

@@ -67,34 +63,6 @@ def cache_name(self):
     def cache_path(self):
         return os.path.join(self.pkg.stage.source_path, self.cache_name)

-    # Implement a version of the define_from_variant for Cached packages
-    def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
-        """Return a Cached CMake field from the given variant's value.
-        See define_from_variant in lib/spack/spack/build_systems/cmake.py package
-        """
-
-        if variant is None:
-            variant = cmake_var.lower()
-
-        if variant not in self.pkg.variants:
-            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
-
-        if variant not in self.pkg.spec.variants:
-            return ""
-
-        value = self.pkg.spec.variants[variant].value
-        field = None
-        if isinstance(value, bool):
-            field = cmake_cache_option(cmake_var, value, comment)
-        else:
-            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
-                value = ";".join(str(v) for v in value)
-            else:
-                value = str(value)
-            field = cmake_cache_string(cmake_var, value, comment)
-
-        return field
-
     def initconfig_compiler_entries(self):
         # This will tell cmake to use the Spack compiler wrappers when run
         # through Spack, but use the underlying compiler when run outside of

@@ -227,58 +195,26 @@ def initconfig_hardware_entries(self):
             "#------------------{0}\n".format("-" * 60),
         ]

-        # Provide standard CMake arguments for dependent CachedCMakePackages
         if spec.satisfies("^cuda"):
             entries.append("#------------------{0}".format("-" * 30))
             entries.append("# Cuda")
             entries.append("#------------------{0}\n".format("-" * 30))

             cudatoolkitdir = spec["cuda"].prefix
-            entries.append(cmake_cache_path("CUDAToolkit_ROOT", cudatoolkitdir))
-            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", "${CUDAToolkit_ROOT}/bin/nvcc"))
-            entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
-            # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
             entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
-
-            archs = spec.variants["cuda_arch"].value
-            if archs[0] != "none":
-                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
-                )
-
-        if "+rocm" in spec:
-            entries.append("#------------------{0}".format("-" * 30))
-            entries.append("# ROCm")
-            entries.append("#------------------{0}\n".format("-" * 30))
-
-            # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
-            entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-            entries.append(
-                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
-            )
-            archs = self.spec.variants["amdgpu_target"].value
-            if archs[0] != "none":
-                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
-                )
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
-                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
+            cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
+            entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
+            entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))

         return entries

     def std_initconfig_entries(self):
-        cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
-        cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
         return [
             "#------------------{0}".format("-" * 60),
             "# !!!! This is a generated file, edit at own risk !!!!",
             "#------------------{0}".format("-" * 60),
             "# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
             "#------------------{0}\n".format("-" * 60),
-            cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
-            self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]

     def initconfig_package_entries(self):

@@ -301,7 +237,7 @@ def initconfig(self, pkg, spec, prefix):

     @property
     def std_cmake_args(self):
-        args = super().std_cmake_args
+        args = super(CachedCMakeBuilder, self).std_cmake_args
         args.extend(["-C", self.cache_path])
         return args
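To see what the optional FORCE argument on the left-hand side of the first hunks above adds to the generated cache file, here are standalone copies of two of those helpers with sample output (the variable names and values are illustrative):

def cmake_cache_path(name, value, comment="", force=False):
    force_str = " FORCE" if force else ""
    return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)

def cmake_cache_option(name, boolean_value, comment="", force=False):
    value = "ON" if boolean_value else "OFF"
    force_str = " FORCE" if force else ""
    return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)

print(cmake_cache_path("HIP_ROOT_DIR", "/opt/rocm", force=True), end="")
# -> set(HIP_ROOT_DIR "/opt/rocm" CACHE PATH "" FORCE)
print(cmake_cache_option("ENABLE_TESTS", False), end="")
# -> set(ENABLE_TESTS OFF CACHE BOOL "")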
@@ -5,7 +5,6 @@
 import collections.abc
 import inspect
 import os
-import pathlib
 import platform
 import re
 import sys

@@ -16,6 +15,7 @@
 import spack.build_environment
 import spack.builder
 import spack.package_base
+import spack.util.path
 from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when

@@ -248,8 +248,7 @@ def std_cmake_args(self):
     @staticmethod
     def std_args(pkg, generator=None):
         """Computes the standard cmake arguments for a generic package"""
-        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
-        generator = generator or default_generator
+        generator = generator or "Unix Makefiles"
         valid_primary_generators = ["Unix Makefiles", "Ninja"]
         primary_generator = _extract_primary_generator(generator)
         if primary_generator not in valid_primary_generators:

@@ -272,8 +271,9 @@ def std_args(pkg, generator=None):
         args = [
             "-G",
             generator,
-            define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
+            define("CMAKE_INSTALL_PREFIX", pkg.prefix),
             define("CMAKE_BUILD_TYPE", build_type),
+            define("BUILD_TESTING", pkg.run_tests),
         ]

         # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9

@@ -296,46 +296,8 @@ def std_args(pkg, generator=None):
                 define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
             ]
         )

         return args

-    @staticmethod
-    def define_cuda_architectures(pkg):
-        """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
-
-        ``cuda_arch`` is variant composed of a list of target CUDA architectures and
-        it is declared in the cuda package.
-
-        This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
-
-        """
-        cmake_flag = str()
-        if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
-            cmake_flag = CMakeBuilder.define(
-                "CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
-            )
-
-        return cmake_flag
-
-    @staticmethod
-    def define_hip_architectures(pkg):
-        """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
-
-        ``amdgpu_target`` is variant composed of a list of the target HIP
-        architectures and it is declared in the rocm package.
-
-        This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
-        not set.
-
-        """
-        cmake_flag = str()
-        if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
-            cmake_flag = CMakeBuilder.define(
-                "CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
-            )
-
-        return cmake_flag
-
     @staticmethod
     def define(cmake_var, value):
         """Return a CMake command line argument that defines a variable.

@@ -450,6 +412,7 @@ def cmake_args(self):

         * CMAKE_INSTALL_PREFIX
         * CMAKE_BUILD_TYPE
+        * BUILD_TESTING

         which will be set automatically.
         """
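On the pathlib line in the std_args hunk above: wrapping pkg.prefix in pathlib.Path(...).as_posix() presumably normalizes the install prefix to forward slashes, which CMake expects even on Windows (that motivation is my reading, not stated in the diff). A portable one-liner showing the effect:

import pathlib

# PureWindowsPath makes the demo reproducible on any host OS.
print(pathlib.PureWindowsPath(r"C:\spack\opt\zlib").as_posix())  # C:/spack/opt/zlib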
@@ -102,10 +102,11 @@ def cuda_flags(arch_list):

     depends_on("cuda@11.0:", when="cuda_arch=80")
     depends_on("cuda@11.1:", when="cuda_arch=86")
-    depends_on("cuda@11.4:", when="cuda_arch=87")
-    depends_on("cuda@11.8:", when="cuda_arch=89")

-    depends_on("cuda@12.0:", when="cuda_arch=90")
+    depends_on("cuda@11.4:", when="cuda_arch=87")
+
+    depends_on("cuda@11.8:", when="cuda_arch=89")
+    depends_on("cuda@11.8:", when="cuda_arch=90")

     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers

@@ -154,7 +155,7 @@ def cuda_flags(arch_list):
     conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
     conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
     conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
-    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
+    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
     conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
     conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
     conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
@@ -10,7 +10,7 @@

 import spack.builder
 import spack.package_base
-from spack.directives import build_system, conflicts, depends_on, variant
+from spack.directives import build_system, depends_on, variant
 from spack.multimethod import when

 from ._checks import BaseBuilder, execute_build_time_tests

@@ -47,13 +47,6 @@ class MesonPackage(spack.package_base.PackageBase):
     variant("strip", default=False, description="Strip targets on install")
     depends_on("meson", type="build")
     depends_on("ninja", type="build")
-    # Python detection in meson requires distutils to be importable, but distutils no longer
-    # exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
-    # because the distutils-precedence.pth startup file that setuptools ships with is not run
-    # when setuptools is in PYTHONPATH; it has to be in system site-packages. In a future meson
-    # release, the distutils requirement will be dropped, so this conflict can be relaxed.
-    # We have patches to make it work with meson 1.1 and above.
-    conflicts("^python@3.12:", when="^meson@:1.0")

     def flags_to_build_system_args(self, flags):
         """Produces a list of all command line arguments to pass the specified

@@ -216,5 +209,5 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Search Meson-generated files for the target ``test`` and run it if found."""
         with fs.working_dir(self.build_directory):
-            self.pkg._if_ninja_target_execute("test")
-            self.pkg._if_ninja_target_execute("check")
+            self._if_ninja_target_execute("test")
+            self._if_ninja_target_execute("check")
@@ -95,7 +95,7 @@ def makefile_root(self):
         return self.stage.source_path

     @property
-    def makefile_name(self):
+    def nmakefile_name(self):
         """Name of the current makefile. This is currently an empty value.
         If a project defines this value, it will be used with the /f argument
         to provide nmake an explicit makefile. This is usefule in scenarios where

@@ -126,8 +126,8 @@ def build(self, pkg, spec, prefix):
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()
-        if self.makefile_name:
-            opts.append("/F{}".format(self.makefile_name))
+        if self.nmakefile_name:
+            opts.append("/f {}".format(self.nmakefile_name))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.build_targets, ignore_quotes=self.ignore_quotes

@@ -139,8 +139,8 @@ def install(self, pkg, spec, prefix):
         opts = self.std_nmake_args
         opts += self.nmake_args()
         opts += self.nmake_install_args()
-        if self.makefile_name:
-            opts.append("/F{}".format(self.makefile_name))
+        if self.nmakefile_name:
+            opts.append("/f {}".format(self.nmakefile_name))
         opts.append(self.define("PREFIX", prefix))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
@@ -61,11 +61,6 @@ def component_prefix(self):
         """Path to component <prefix>/<component>/<version>."""
         return self.prefix.join(join_path(self.component_dir, self.spec.version))

-    @property
-    def env_script_args(self):
-        """Additional arguments to pass to vars.sh script."""
-        return ()
-
     def install(self, spec, prefix):
         self.install_component(basename(self.url_for_version(spec.version)))

@@ -126,10 +121,10 @@ def setup_run_environment(self, env):
            $ source {prefix}/{component}/{version}/env/vars.sh
         """
         # Only if environment modifications are desired (default is +envmods)
-        if "~envmods" not in self.spec:
+        if "+envmods" in self.spec:
             env.extend(
                 EnvironmentModifications.from_sourcing_file(
-                    join_path(self.component_prefix, "env", "vars.sh"), *self.env_script_args
+                    join_path(self.component_prefix, "env", "vars.sh")
                 )
             )

@@ -180,7 +175,7 @@ def libs(self):
         return find_libraries("*", root=lib_path, shared=True, recursive=True)


-class IntelOneApiStaticLibraryList:
+class IntelOneApiStaticLibraryList(object):
     """Provides ld_flags when static linking is needed

     Oneapi puts static and dynamic libraries in the same directory, so
@@ -6,7 +6,6 @@
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
import stat
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
import archspec
|
import archspec
|
||||||
@@ -17,7 +16,6 @@
|
|||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.deptypes as dt
|
|
||||||
import spack.detection
|
import spack.detection
|
||||||
import spack.multimethod
|
import spack.multimethod
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
@@ -25,15 +23,13 @@
|
|||||||
import spack.store
|
import spack.store
|
||||||
from spack.directives import build_system, depends_on, extends, maintainers
|
from spack.directives import build_system, depends_on, extends, maintainers
|
||||||
from spack.error import NoHeadersError, NoLibrariesError, SpecError
|
from spack.error import NoHeadersError, NoLibrariesError, SpecError
|
||||||
from spack.install_test import test_part
|
|
||||||
from spack.util.executable import Executable
|
|
||||||
from spack.version import Version
|
from spack.version import Version
|
||||||
|
|
||||||
from ._checks import BaseBuilder, execute_install_time_tests
|
from ._checks import BaseBuilder, execute_install_time_tests
|
||||||
|
|
||||||
|
|
||||||
class PythonExtension(spack.package_base.PackageBase):
|
class PythonExtension(spack.package_base.PackageBase):
|
||||||
maintainers("adamjstewart")
|
maintainers("adamjstewart", "pradyunsg")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def import_modules(self):
|
def import_modules(self):
|
||||||
@@ -171,106 +167,18 @@ def remove_files_from_view(self, view, merge_map):
|
|||||||
|
|
||||||
view.remove_files(to_remove)
|
view.remove_files(to_remove)
|
||||||
|
|
||||||
def test_imports(self):
|
def test(self):
|
||||||
"""Attempts to import modules of the installed package."""
|
"""Attempts to import modules of the installed package."""
|
||||||
|
|
||||||
# Make sure we are importing the installed modules,
|
# Make sure we are importing the installed modules,
|
||||||
# not the ones in the source directory
|
# not the ones in the source directory
|
||||||
python = inspect.getmodule(self).python
|
|
||||||
for module in self.import_modules:
|
for module in self.import_modules:
|
||||||
with test_part(
|
self.run_test(
|
||||||
self,
|
inspect.getmodule(self).python.path,
|
||||||
f"test_imports_{module}",
|
["-c", "import {0}".format(module)],
|
||||||
purpose=f"checking import of {module}",
|
purpose="checking import of {0}".format(module),
|
||||||
work_dir="spack-test",
|
work_dir="spack-test",
|
||||||
):
|
)
|
||||||
python("-c", f"import {module}")
|
|
||||||
|
|
||||||
def update_external_dependencies(self, extendee_spec=None):
|
|
||||||
"""
|
|
||||||
Ensure all external python packages have a python dependency
|
|
||||||
|
|
||||||
If another package in the DAG depends on python, we use that
|
|
||||||
python for the dependency of the external. If not, we assume
|
|
||||||
that the external PythonPackage is installed into the same
|
|
||||||
directory as the python it depends on.
|
|
||||||
"""
|
|
||||||
# TODO: Include this in the solve, rather than instantiating post-concretization
|
|
||||||
if "python" not in self.spec:
|
|
||||||
if extendee_spec:
|
|
||||||
python = extendee_spec
|
|
||||||
elif "python" in self.spec.root:
|
|
||||||
python = self.spec.root["python"]
|
|
||||||
else:
|
|
||||||
python = self.get_external_python_for_prefix()
|
|
||||||
if not python.concrete:
|
|
||||||
repo = spack.repo.PATH.repo_for_pkg(python)
|
|
||||||
python.namespace = repo.namespace
|
|
||||||
|
|
||||||
# Ensure architecture information is present
|
|
||||||
if not python.architecture:
|
|
||||||
host_platform = spack.platforms.host()
|
|
||||||
host_os = host_platform.operating_system("default_os")
|
|
||||||
host_target = host_platform.target("default_target")
|
|
||||||
python.architecture = spack.spec.ArchSpec(
|
|
||||||
(str(host_platform), str(host_os), str(host_target))
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
if not python.architecture.platform:
|
|
||||||
python.architecture.platform = spack.platforms.host()
|
|
||||||
if not python.architecture.os:
|
|
||||||
python.architecture.os = "default_os"
|
|
||||||
if not python.architecture.target:
|
|
||||||
python.architecture.target = archspec.cpu.host().family.name
|
|
||||||
|
|
||||||
# Ensure compiler information is present
|
|
||||||
if not python.compiler:
|
|
||||||
python.compiler = self.spec.compiler
|
|
||||||
|
|
||||||
python.external_path = self.spec.external_path
|
|
||||||
python._mark_concrete()
|
|
||||||
self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
|
|
||||||
|
|
||||||
def get_external_python_for_prefix(self):
|
|
||||||
"""
|
|
||||||
For an external package that extends python, find the most likely spec for the python
|
|
||||||
it depends on.
|
|
||||||
|
|
||||||
First search: an "installed" external that shares a prefix with this package
|
|
||||||
Second search: a configured external that shares a prefix with this package
|
|
||||||
Third search: search this prefix for a python package
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
|
|
||||||
"""
|
|
||||||
python_externals_installed = [
|
|
||||||
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
|
|
||||||
]
|
|
||||||
if python_externals_installed:
|
|
||||||
return python_externals_installed[0]
|
|
||||||
|
|
||||||
python_external_config = spack.config.get("packages:python:externals", [])
|
|
||||||
python_externals_configured = [
|
|
||||||
spack.spec.parse_with_version_concrete(item["spec"])
|
|
||||||
for item in python_external_config
|
|
||||||
if item["prefix"] == self.spec.external_path
|
|
||||||
]
|
|
||||||
if python_externals_configured:
|
|
||||||
return python_externals_configured[0]
|
|
||||||
|
|
||||||
python_externals_detection = spack.detection.by_path(
|
|
||||||
["python"], path_hints=[self.spec.external_path]
|
|
||||||
)
|
|
||||||
|
|
||||||
python_externals_detected = [
|
|
||||||
d.spec
|
|
||||||
for d in python_externals_detection.get("python", [])
|
|
||||||
if d.prefix == self.spec.external_path
|
|
||||||
]
|
|
||||||
if python_externals_detected:
|
|
||||||
return python_externals_detected[0]
|
|
||||||
|
|
||||||
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
|
|
||||||
|
|
||||||
|
|
||||||
class PythonPackage(PythonExtension):
|
class PythonPackage(PythonExtension):
|
||||||
@@ -317,16 +225,99 @@ def list_url(cls):
         name = cls.pypi.split("/")[0]
         return "https://pypi.org/simple/" + name + "/"
 
+    def update_external_dependencies(self, extendee_spec=None):
+        """
+        Ensure all external python packages have a python dependency
+
+        If another package in the DAG depends on python, we use that
+        python for the dependency of the external. If not, we assume
+        that the external PythonPackage is installed into the same
+        directory as the python it depends on.
+        """
+        # TODO: Include this in the solve, rather than instantiating post-concretization
+        if "python" not in self.spec:
+            if extendee_spec:
+                python = extendee_spec
+            elif "python" in self.spec.root:
+                python = self.spec.root["python"]
+            else:
+                python = self.get_external_python_for_prefix()
+                if not python.concrete:
+                    repo = spack.repo.path.repo_for_pkg(python)
+                    python.namespace = repo.namespace
+
+                    # Ensure architecture information is present
+                    if not python.architecture:
+                        host_platform = spack.platforms.host()
+                        host_os = host_platform.operating_system("default_os")
+                        host_target = host_platform.target("default_target")
+                        python.architecture = spack.spec.ArchSpec(
+                            (str(host_platform), str(host_os), str(host_target))
+                        )
+                    else:
+                        if not python.architecture.platform:
+                            python.architecture.platform = spack.platforms.host()
+                        if not python.architecture.os:
+                            python.architecture.os = "default_os"
+                        if not python.architecture.target:
+                            python.architecture.target = archspec.cpu.host().family.name
+
+                    # Ensure compiler information is present
+                    if not python.compiler:
+                        python.compiler = self.spec.compiler
+
+                    python.external_path = self.spec.external_path
+                    python._mark_concrete()
+            self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
+
+    def get_external_python_for_prefix(self):
+        """
+        For an external package that extends python, find the most likely spec for the python
+        it depends on.
+
+        First search: an "installed" external that shares a prefix with this package
+        Second search: a configured external that shares a prefix with this package
+        Third search: search this prefix for a python package
+
+        Returns:
+            spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
+        """
+        python_externals_installed = [
+            s for s in spack.store.db.query("python") if s.prefix == self.spec.external_path
+        ]
+        if python_externals_installed:
+            return python_externals_installed[0]
+
+        python_external_config = spack.config.get("packages:python:externals", [])
+        python_externals_configured = [
+            spack.spec.parse_with_version_concrete(item["spec"])
+            for item in python_external_config
+            if item["prefix"] == self.spec.external_path
+        ]
+        if python_externals_configured:
+            return python_externals_configured[0]
+
+        python_externals_detection = spack.detection.by_executable(
+            [spack.repo.path.get_pkg_class("python")], path_hints=[self.spec.external_path]
+        )
+
+        python_externals_detected = [
+            d.spec
+            for d in python_externals_detection.get("python", [])
+            if d.prefix == self.spec.external_path
+        ]
+        if python_externals_detected:
+            return python_externals_detected[0]
+
+        raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
+
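Note: `get_external_python_for_prefix` is the same three-stage fallback on both sides — installed externals, then configured externals, then on-disk detection — each stage filtered by the external prefix; only the query APIs differ between the two branches. A minimal sketch of that fallback-chain shape, where the `stages` callables stand in for the three Spack queries (hypothetical helper, not Spack API):

    def first_spec_for_prefix(prefix, stages):
        # Try each candidate source in order and return the first spec
        # whose prefix matches, mirroring installed -> configured -> detected.
        for stage in stages:
            matches = [s for s in stage() if s.prefix == prefix]
            if matches:
                return matches[0]
        raise StopIteration("no external python found for %s" % prefix)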
     @property
     def headers(self):
         """Discover header files in platlib."""
 
-        # Remove py- prefix in package name
-        name = self.spec.name[3:]
-
         # Headers may be in either location
-        include = self.prefix.join(self.spec["python"].package.include).join(name)
-        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
+        include = self.prefix.join(self.spec["python"].package.include)
+        platlib = self.prefix.join(self.spec["python"].package.platlib)
         headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)
 
         if headers:
@@ -340,64 +331,18 @@ def libs(self):
         """Discover libraries in platlib."""
 
         # Remove py- prefix in package name
-        name = self.spec.name[3:]
-
-        root = self.prefix.join(self.spec["python"].package.platlib).join(name)
-
-        libs = fs.find_all_libraries(root, recursive=True)
-
-        if libs:
-            return libs
+        library = "lib" + self.spec.name[3:].replace("-", "?")
+        root = self.prefix.join(self.spec["python"].package.platlib)
+
+        for shared in [True, False]:
+            libs = fs.find_libraries(library, root, shared=shared, recursive=True)
+            if libs:
+                return libs
 
         msg = "Unable to recursively locate {} libraries in {}"
         raise NoLibrariesError(msg.format(self.spec.name, root))
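Note: both versions of `libs` keep the same contract (return a library list or raise `NoLibrariesError`), but the right-hand side makes two passes — shared libraries first, then static archives. A rough standalone equivalent of that two-pass search, assuming a POSIX-style layout (the function and its extension lists are illustrative, not Spack's):

    import glob
    import os

    def find_ext_libraries(platlib, name):
        # Prefer shared libraries; fall back to static archives, like the
        # shared=True/False loop above.
        for exts in (("so", "dylib"), ("a",)):
            hits = []
            for ext in exts:
                pattern = os.path.join(platlib, "**", "lib%s*.%s" % (name, ext))
                hits.extend(glob.glob(pattern, recursive=True))
            if hits:
                return sorted(hits)
        raise RuntimeError("no libraries for %s under %s" % (name, platlib))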
 
 
-def fixup_shebangs(path: str, old_interpreter: bytes, new_interpreter: bytes):
-    # Recurse into the install prefix and fixup shebangs
-    exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
-    dirs = [path]
-    hardlinks = set()
-
-    while dirs:
-        with os.scandir(dirs.pop()) as entries:
-            for entry in entries:
-                if entry.is_dir(follow_symlinks=False):
-                    dirs.append(entry.path)
-                    continue
-
-                # Only consider files, not symlinks
-                if not entry.is_file(follow_symlinks=False):
-                    continue
-
-                lstat = entry.stat(follow_symlinks=False)
-
-                # Skip over files that are not executable
-                if not (lstat.st_mode & exe):
-                    continue
-
-                # Don't modify hardlinks more than once
-                if lstat.st_nlink > 1:
-                    key = (lstat.st_ino, lstat.st_dev)
-                    if key in hardlinks:
-                        continue
-                    hardlinks.add(key)
-
-                # Finally replace shebangs if any.
-                with open(entry.path, "rb+") as f:
-                    contents = f.read(2)
-                    if contents != b"#!":
-                        continue
-                    contents += f.read()
-
-                    if old_interpreter not in contents:
-                        continue
-
-                    f.seek(0)
-                    f.write(contents.replace(old_interpreter, new_interpreter))
-                    f.truncate()
-
-
 @spack.builder.builder("python_pip")
 class PythonPipBuilder(BaseBuilder):
     phases = ("install",)
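Note: the removed `fixup_shebangs` walks the install prefix, skips symlinks and non-executables, visits each hardlink group only once, and rewrites `#!` lines in place. The core rewrite, reduced to a single-file sketch under the same assumption that interpreters are passed as bytes:

    def rewrite_shebang(path, old_interpreter, new_interpreter):
        # Rewrite the interpreter in place only if the file starts with
        # "#!" and actually references the old interpreter.
        with open(path, "rb+") as f:
            contents = f.read()
            if not contents.startswith(b"#!") or old_interpreter not in contents:
                return False
            f.seek(0)
            f.write(contents.replace(old_interpreter, new_interpreter))
            f.truncate()
            return True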
@@ -453,8 +398,7 @@ def build_directory(self):
 
     def config_settings(self, spec, prefix):
         """Configuration settings to be passed to the PEP 517 build backend.
 
-        Requires pip 22.1 or newer.
+        Requires pip 22.1+, which requires Python 3.7+.
 
         Args:
             spec (spack.spec.Spec): build spec
@@ -468,8 +412,6 @@ def config_settings(self, spec, prefix):
     def install_options(self, spec, prefix):
         """Extra arguments to be supplied to the setup.py install command.
 
-        Requires pip 23.0 or older.
-
         Args:
             spec (spack.spec.Spec): build spec
             prefix (spack.util.prefix.Prefix): installation prefix
@@ -483,8 +425,6 @@ def global_options(self, spec, prefix):
         """Extra global options to be supplied to the setup.py call before the install
         or bdist_wheel command.
 
-        Deprecated in pip 23.1.
-
         Args:
             spec (spack.spec.Spec): build spec
             prefix (spack.util.prefix.Prefix): installation prefix
@@ -494,36 +434,8 @@ def global_options(self, spec, prefix):
         """
         return []
 
-    @property
-    def _build_venv_path(self):
-        """Return the path to the virtual environment used for building when
-        python is external."""
-        return os.path.join(self.spec.package.stage.path, "build_env")
-
-    @property
-    def _build_venv_python(self) -> Executable:
-        """Return the Python executable in the build virtual environment when
-        python is external."""
-        return Executable(os.path.join(self._build_venv_path, "bin", "python"))
-
     def install(self, pkg, spec, prefix):
         """Install everything from build directory."""
-        python: Executable = spec["python"].command
-        # Since we invoke pip with --no-build-isolation, we have to make sure that pip cannot
-        # execute hooks from user and system site-packages.
-        if spec["python"].external:
-            # There are no environment variables to disable the system site-packages, so we use a
-            # virtual environment instead. The downside of this approach is that pip produces
-            # incorrect shebangs that refer to the virtual environment, which we have to fix up.
-            python("-m", "venv", "--without-pip", self._build_venv_path)
-            pip = self._build_venv_python
-        else:
-            # For a Spack managed Python, system site-packages is empty/unused by design, so it
-            # suffices to disable user site-packages, for which there is an environment variable.
-            pip = python
-        pip.add_default_env("PYTHONNOUSERSITE", "1")
-        pip.add_default_arg("-m")
-        pip.add_default_arg("pip")
-
         args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]
 
@@ -547,31 +459,8 @@ def install(self, pkg, spec, prefix):
         else:
             args.append(".")
 
+        pip = inspect.getmodule(pkg).pip
         with fs.working_dir(self.build_directory):
             pip(*args)
 
-    @spack.builder.run_after("install")
-    def fixup_shebangs_pointing_to_build(self):
-        """When installing a package using an external python, we use a temporary virtual
-        environment which improves build isolation. The downside is that pip produces shebangs
-        that point to the temporary virtual environment. This method fixes them up to point to the
-        underlying Python."""
-        # No need to fixup shebangs if no build venv was used. (this post install function also
-        # runs when install was overridden in another package, so check existence of the venv path)
-        if not os.path.exists(self._build_venv_path):
-            return
-
-        # Use sys.executable, since that's what pip uses.
-        interpreter = (
-            lambda python: python("-c", "import sys; print(sys.executable)", output=str)
-            .strip()
-            .encode("utf-8")
-        )
-
-        fixup_shebangs(
-            path=self.spec.prefix,
-            old_interpreter=interpreter(self._build_venv_python),
-            new_interpreter=interpreter(self.spec["python"].command),
-        )
-
     spack.builder.run_after("install")(execute_install_time_tests)
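Note: the removed branch solves a build-isolation problem: an external interpreter may have a populated system site-packages that pip could execute hooks from, and there is no environment variable to disable it, so a throwaway venv (`python -m venv --without-pip`) is used; a Spack-managed python only needs `PYTHONNOUSERSITE=1`. A hedged standalone sketch of that decision, using `subprocess` instead of Spack's `Executable` wrapper:

    import os
    import subprocess
    import sys

    def pip_invocation(python_exe, is_external, venv_dir):
        # External pythons get a scratch venv for isolation; Spack-managed
        # pythons only need user site-packages disabled.
        env = dict(os.environ, PYTHONNOUSERSITE="1")
        if is_external:
            subprocess.check_call([python_exe, "-m", "venv", "--without-pip", venv_dir])
            python_exe = os.path.join(venv_dir, "bin", "python")
        return [python_exe, "-m", "pip"], env

    cmd, env = pip_invocation(sys.executable, is_external=False, venv_dir="/tmp/build_env")
    subprocess.check_call(cmd + ["--version"], env=env)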
@@ -28,7 +28,7 @@ class QMakePackage(spack.package_base.PackageBase):
 
     build_system("qmake")
 
-    depends_on("qmake", type="build", when="build_system=qmake")
+    depends_on("qt", type="build", when="build_system=qmake")
 
 
 @spack.builder.builder("qmake")
@@ -7,7 +7,7 @@
 
 import llnl.util.lang as lang
 
-from spack.directives import extends
+from spack.directives import extends, maintainers
 
 from .generic import GenericBuilder, Package
 
@@ -71,6 +71,8 @@ class RPackage(Package):
 
     GenericBuilder = RBuilder
 
+    maintainers("glennpj")
+
     #: This attribute is used in UI queries that need to know the build
     #: system base class
     build_system_class = "RPackage"
@@ -10,10 +10,9 @@
 import llnl.util.tty as tty
 
 import spack.builder
-from spack.build_environment import SPACK_NO_PARALLEL_MAKE
+from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
 from spack.directives import build_system, extends, maintainers
 from spack.package_base import PackageBase
-from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError
 
@@ -64,7 +63,7 @@ class RacketBuilder(spack.builder.Builder):
 
     @property
     def subdirectory(self):
-        if self.pkg.racket_name:
+        if self.racket_name:
             return "pkgs/{0}".format(self.pkg.racket_name)
         return None
 
@@ -93,7 +92,7 @@ def install(self, pkg, spec, prefix):
             "--copy",
             "-i",
             "-j",
-            str(determine_number_of_jobs(parallel=parallel)),
+            str(determine_number_of_jobs(parallel)),
             "--",
             os.getcwd(),
         ]
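Note: the only substantive change in the `raco` argument list is where `determine_number_of_jobs` comes from and how it is called; the left side imports it from `spack.util.cpus` and passes the flag by keyword, the right side uses the older positional form re-exported from `spack.build_environment`:

    jobs = determine_number_of_jobs(parallel=parallel)  # left (newer signature)
    jobs = determine_number_of_jobs(parallel)           # right (older signature)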
@@ -140,6 +140,8 @@ class ROCmPackage(PackageBase):
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")
 
+    conflicts("^blt@:0.3.6", when="+rocm")
+
     # need amd gpu type for rocm builds
     conflicts("amdgpu_target=none", when="+rocm")
 
@@ -7,14 +7,12 @@
 import re
 
 import llnl.util.tty as tty
-from llnl.util.filesystem import find, working_dir
+from llnl.util.filesystem import find, join_path, working_dir
 
 import spack.builder
-import spack.install_test
 import spack.package_base
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
-from spack.util.executable import Executable
 
 from ._checks import BaseBuilder, execute_install_time_tests
 
@@ -32,16 +30,17 @@ class SIPPackage(spack.package_base.PackageBase):
     #: Name of private sip module to install alongside package
     sip_module = "sip"
 
-    #: Callback names for install-time testing
-    install_time_test_callbacks = ["test_imports"]
+    #: Callback names for install-time test
+    install_time_test_callbacks = ["test"]
     #: Legacy buildsystem attribute used to deserialize and install old specs
     legacy_buildsystem = "sip"
 
     build_system("sip")
 
     with when("build_system=sip"):
-        extends("python", type=("build", "link", "run"))
-        depends_on("py-sip", type="build")
+        extends("python")
+        depends_on("qt")
+        depends_on("py-sip")
 
     @property
     def import_modules(self):
@@ -88,20 +87,18 @@ def python(self, *args, **kwargs):
         """The python ``Executable``."""
         inspect.getmodule(self).python(*args, **kwargs)
 
-    def test_imports(self):
+    def test(self):
         """Attempts to import modules of the installed package."""
 
         # Make sure we are importing the installed modules,
         # not the ones in the source directory
-        python = inspect.getmodule(self).python
         for module in self.import_modules:
-            with spack.install_test.test_part(
-                self,
-                "test_imports_{0}".format(module),
+            self.run_test(
+                inspect.getmodule(self).python.path,
+                ["-c", "import {0}".format(module)],
                 purpose="checking import of {0}".format(module),
                 work_dir="spack-test",
-            ):
-                python("-c", "import {0}".format(module))
+            )
 
 
 @spack.builder.builder("sip")
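Note: install-time testing changes idiom here: the old `self.run_test(...)` call runs the interpreter by path with an argument list, while the newer `spack.install_test.test_part` context manager gives each import its own named, individually reported test part. The new shape, sketched with a hypothetical module list:

    # Hedged sketch; `pkg` stands for the package instance and `python` for
    # its interpreter Executable, as in the hunk above.
    for module in ["sip", "PyQt5"]:
        with spack.install_test.test_part(
            pkg,
            "test_imports_{0}".format(module),
            purpose="checking import of {0}".format(module),
            work_dir="spack-test",
        ):
            python("-c", "import {0}".format(module))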
@@ -113,13 +110,13 @@ class SIPBuilder(BaseBuilder):
     * install
 
     The configure phase already adds a set of default flags. To see more
-    options, run ``sip-build --help``.
+    options, run ``python configure.py --help``.
     """
 
     phases = ("configure", "build", "install")
 
     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("configure_args", "build_args", "install_args")
+    legacy_methods = ("configure_file", "configure_args", "build_args", "install_args")
 
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = (
@@ -130,17 +127,34 @@ class SIPBuilder(BaseBuilder):
         "build_directory",
     )
 
-    build_directory = "build"
+    def configure_file(self):
+        """Returns the name of the configure file to use."""
+        return "configure.py"
 
     def configure(self, pkg, spec, prefix):
         """Configure the package."""
+        configure = self.configure_file()
 
-        # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
-        args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
-        args.extend(self.configure_args())
-
-        sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
-        sip_build(*args)
+        args = self.configure_args()
+
+        args.extend(
+            [
+                "--verbose",
+                "--confirm-license",
+                "--qmake",
+                spec["qt"].prefix.bin.qmake,
+                "--sip",
+                spec["py-sip"].prefix.bin.sip,
+                "--sip-incdir",
+                join_path(spec["py-sip"].prefix, spec["python"].package.include),
+                "--bindir",
+                prefix.bin,
+                "--destdir",
+                inspect.getmodule(self.pkg).python_platlib,
+            ]
+        )
+
+        self.pkg.python(configure, *args)
 
     def configure_args(self):
         """Arguments to pass to configure."""
@@ -150,8 +164,7 @@ def build(self, pkg, spec, prefix):
         """Build the package."""
         args = self.build_args()
 
-        with working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make(*args)
+        inspect.getmodule(self.pkg).make(*args)
 
     def build_args(self):
         """Arguments to pass to build."""
@@ -161,11 +174,21 @@ def install(self, pkg, spec, prefix):
         """Install the package."""
         args = self.install_args()
 
-        with working_dir(self.build_directory):
-            inspect.getmodule(self.pkg).make("install", *args)
+        inspect.getmodule(self.pkg).make("install", parallel=False, *args)
 
     def install_args(self):
         """Arguments to pass to install."""
         return []
 
     spack.builder.run_after("install")(execute_install_time_tests)
 
+    @spack.builder.run_after("install")
+    def extend_path_setup(self):
+        # See github issue #14121 and PR #15297
+        module = self.pkg.spec["py-sip"].variants["module"].value
+        if module != "sip":
+            module = module.split(".")[0]
+            with working_dir(inspect.getmodule(self.pkg).python_platlib):
+                with open(os.path.join(module, "__init__.py"), "a") as f:
+                    f.write("from pkgutil import extend_path\n")
+                    f.write("__path__ = extend_path(__path__, __name__)\n")
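Note: the configure phase swaps tooling entirely: the left side invokes the standalone `sip-build` tool shipped by `py-sip` (SIP 5+), while the right side runs a per-package `configure.py` (SIP 4) through python with explicit `--qmake`/`--sip` paths. Reduced to its core, the left-hand invocation is:

    # Sketch of the sip-build call; `python_platlib` abbreviates
    # inspect.getmodule(self.pkg).python_platlib from the hunk above.
    sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
    sip_build("--verbose", "--target-dir", python_platlib, *self.configure_args())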
@@ -63,7 +63,7 @@ def create(pkg):
     return _BUILDERS[id(pkg)]
 
 
-class _PhaseAdapter:
+class _PhaseAdapter(object):
     def __init__(self, builder, phase_fn):
         self.builder = builder
         self.phase_fn = phase_fn
@@ -115,7 +115,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
 # package. The semantic should be the same as the method in the base builder were still
 # present in the base class of the package.
 
-class _ForwardToBaseBuilder:
+class _ForwardToBaseBuilder(object):
     def __init__(self, wrapped_pkg_object, root_builder):
         self.wrapped_package_object = wrapped_pkg_object
         self.root_builder = root_builder
@@ -188,7 +188,7 @@ def __init__(self, pkg):
                 # Attribute containing the package wrapped in dispatcher with a `__getattr__`
                 # method that will forward certain calls to the default builder.
                 self.pkg_with_dispatcher = _ForwardToBaseBuilder(pkg, root_builder=self)
-                super().__init__(pkg)
+                super(Adapter, self).__init__(pkg)
 
         # These two methods don't follow the (self, spec, prefix) signature of phases nor
         # the (self) signature of methods, so they are added explicitly to avoid using a
@@ -388,7 +388,7 @@ def __new__(mcs, name, bases, attr_dict):
         return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)
 
 
-class InstallationPhase:
+class InstallationPhase(object):
     """Manages a single phase of the installation.
 
     This descriptor stores at creation time the name of the method it should
@@ -530,9 +530,9 @@ def setup_build_environment(self, env):
         modifications to be applied when the package is built. Package authors
         can call methods on it to alter the build environment.
         """
-        if not hasattr(super(), "setup_build_environment"):
+        if not hasattr(super(Builder, self), "setup_build_environment"):
             return
-        super().setup_build_environment(env)
+        super(Builder, self).setup_build_environment(env)
 
     def setup_dependent_build_environment(self, env, dependent_spec):
         """Sets up the build environment of packages that depend on this one.
@@ -563,9 +563,9 @@ def setup_dependent_build_environment(self, env, dependent_spec):
         the dependent's state. Note that *this* package's spec is
         available as ``self.spec``
         """
-        if not hasattr(super(), "setup_dependent_build_environment"):
+        if not hasattr(super(Builder, self), "setup_dependent_build_environment"):
             return
-        super().setup_dependent_build_environment(env, dependent_spec)
+        super(Builder, self).setup_dependent_build_environment(env, dependent_spec)
 
     def __getitem__(self, idx):
         key = self.phases[idx]
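Note: the builder and cache changes in this stretch are largely mechanical Python 2 leftovers reappearing on the right: `class Foo(object)` bases and two-argument `super(Cls, self)` calls. On Python 3 the two spellings behave identically:

    class Base:
        def setup(self):
            print("base setup")

    class Child(Base):
        def setup(self):
            super().setup()             # zero-argument form (left)
            super(Child, self).setup()  # explicit form (right); same effect

    Child().setup()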
@@ -20,9 +20,9 @@
 
 
 def misc_cache_location():
-    """The ``MISC_CACHE`` is Spack's cache for small data.
+    """The ``misc_cache`` is Spack's cache for small data.
 
-    Currently the ``MISC_CACHE`` stores indexes for virtual dependency
+    Currently the ``misc_cache`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
     path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():
 
 
 #: Spack's cache for small data
-MISC_CACHE: Union[
+misc_cache: Union[
     spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)
 
@@ -58,7 +58,7 @@ def _fetch_cache():
     return spack.fetch_strategy.FsCache(path)
 
 
-class MirrorCache:
+class MirrorCache(object):
     def __init__(self, root, skip_unstable_versions):
         self.root = os.path.abspath(root)
         self.skip_unstable_versions = skip_unstable_versions
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
 
 
 #: Spack's local cache for downloaded source archives
-FETCH_CACHE: Union[
+fetch_cache: Union[
     spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)
(File diff suppressed because it is too large)
@@ -3,6 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+from __future__ import print_function
+
 import argparse
 import os
 import re
@@ -11,7 +13,6 @@
 from textwrap import dedent
 from typing import List, Match, Tuple
 
-import llnl.string
 import llnl.util.tty as tty
 from llnl.util.filesystem import join_path
 from llnl.util.lang import attr_setdefault, index_by
@@ -30,6 +31,7 @@
 import spack.user_environment as uenv
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml
+import spack.util.string
 
 # cmd has a submodule called "list" so preserve the python list module
 python_list = list
@@ -147,7 +149,7 @@ def get_command(cmd_name):
     return getattr(get_module(cmd_name), pname)
 
 
-class _UnquotedFlags:
+class _UnquotedFlags(object):
     """Use a heuristic in `.extract()` to detect whether the user is trying to set
     multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').
 
@@ -273,9 +275,9 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, fir
     See ``spack.database.Database._query`` for details.
     """
    if local:
-        matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
+        matching_specs = spack.store.db.query_local(spec, hashes=hashes, installed=installed)
     else:
-        matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
+        matching_specs = spack.store.db.query(spec, hashes=hashes, installed=installed)
     if not matching_specs:
         tty.die("Spec '%s' matches no installed packages." % spec)
 
@@ -291,7 +293,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
     if len(matching_specs) <= 1:
         return
 
-    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
+    format_string = "{name}{@version}{%compiler}{arch=architecture}"
     args = ["%s matches multiple packages." % spec, "Matching packages:"]
     args += [
         colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -342,9 +344,9 @@ def iter_groups(specs, indent, all_headers):
             print()
 
             header = "%s{%s} / %s{%s}" % (
-                spack.spec.ARCHITECTURE_COLOR,
+                spack.spec.architecture_color,
                 architecture if architecture else "no arch",
-                spack.spec.COMPILER_COLOR,
+                spack.spec.compiler_color,
                 f"{compiler.display_str}" if compiler else "no compiler",
             )
 
@@ -383,7 +385,7 @@ def display_specs(specs, args=None, **kwargs):
         deps (bool): Display dependencies with specs
         long (bool): Display short hashes with specs
         very_long (bool): Display full hashes with specs (supersedes ``long``)
-        namespaces (bool): Print namespaces along with names
+        namespace (bool): Print namespaces along with names
         show_flags (bool): Show compiler flags with specs
         variants (bool): Show variants with specs
         indent (int): indent each line this much
@@ -407,7 +409,7 @@ def get_arg(name, default=None):
     paths = get_arg("paths", False)
     deps = get_arg("deps", False)
     hashes = get_arg("long", False)
-    namespaces = get_arg("namespaces", False)
+    namespace = get_arg("namespace", False)
     flags = get_arg("show_flags", False)
     full_compiler = get_arg("show_full_compiler", False)
     variants = get_arg("variants", False)
@@ -428,7 +430,7 @@ def get_arg(name, default=None):
 
     format_string = get_arg("format", None)
     if format_string is None:
-        nfmt = "{fullname}" if namespaces else "{name}"
+        nfmt = "{fullname}" if namespace else "{name}"
         ffmt = ""
         if full_compiler or flags:
             ffmt += "{%compiler.name}"
@@ -473,7 +475,7 @@ def format_list(specs):
     out = ""
     # getting lots of prefixes requires DB lookups. Ensure
     # all spec.prefix calls are in one transaction.
-    with spack.store.STORE.db.read_transaction():
+    with spack.store.db.read_transaction():
         for string, spec in formatted:
             if not string:
                 # print newline from above
@@ -516,7 +518,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
         category, e.g. if pkg_type is "installed" then the message
         would be "3 installed packages"
     """
-    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
+    tty.msg("%s" % spack.util.string.plural(len(specs), pkg_type + " package"))
 
 
 def spack_is_git_repo():
@@ -545,7 +547,7 @@ class PythonNameError(spack.error.SpackError):
 
     def __init__(self, name):
         self.name = name
-        super().__init__("{0} is not a permissible Python name.".format(name))
+        super(PythonNameError, self).__init__("{0} is not a permissible Python name.".format(name))
 
 
 class CommandNameError(spack.error.SpackError):
@@ -553,7 +555,9 @@ class CommandNameError(spack.error.SpackError):
 
     def __init__(self, name):
         self.name = name
-        super().__init__("{0} is not a permissible Spack command name.".format(name))
+        super(CommandNameError, self).__init__(
+            "{0} is not a permissible Spack command name.".format(name)
+        )
 
 
 ########################################
@@ -584,14 +588,14 @@ def require_active_env(cmd_name):
 
     if env:
         return env
-
-    tty.die(
-        "`spack %s` requires an environment" % cmd_name,
-        "activate an environment first:",
-        "    spack env activate ENV",
-        "or use:",
-        "    spack -e ENV %s ..." % cmd_name,
-    )
+    else:
+        tty.die(
+            "`spack %s` requires an environment" % cmd_name,
+            "activate an environment first:",
+            "    spack env activate ENV",
+            "or use:",
+            "    spack -e ENV %s ..." % cmd_name,
+        )
 
 
 def find_environment(args):
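Note: this file shows the other half of the same rename sweep: module-level singletons are upper-case on the left (`spack.store.STORE.db`, `spack.config.CONFIG`, `MISC_CACHE`, `FETCH_CACHE`) and lower-case on the right. Either way, queries go through the same Database API, e.g. in the left-hand spelling:

    # Sketch: query installed specs inside a single read transaction,
    # as format_list does above to batch prefix lookups.
    with spack.store.STORE.db.read_transaction():
        installed = spack.store.STORE.db.query("python")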
@@ -3,6 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+from __future__ import print_function
+
 import collections
 
 import archspec.cpu
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import llnl.util.tty as tty
-import llnl.util.tty.colify
 import llnl.util.tty.color as cl
 
 import spack.audit
@@ -21,15 +20,6 @@ def setup_parser(subparser):
     # Audit configuration files
     sp.add_parser("configs", help="audit configuration files")
 
-    # Audit package recipes
-    external_parser = sp.add_parser("externals", help="check external detection in packages")
-    external_parser.add_argument(
-        "--list",
-        action="store_true",
-        dest="list_externals",
-        help="if passed, list which packages have detection tests",
-    )
-
     # Https and other linting
     https_parser = sp.add_parser("packages-https", help="check https in packages")
     https_parser.add_argument(
@@ -39,7 +29,7 @@ def setup_parser(subparser):
     # Audit package recipes
     pkg_parser = sp.add_parser("packages", help="audit package recipes")
 
-    for group in [pkg_parser, https_parser, external_parser]:
+    for group in [pkg_parser, https_parser]:
         group.add_argument(
             "name",
             metavar="PKG",
@@ -57,7 +47,7 @@ def configs(parser, args):
 
 
 def packages(parser, args):
-    pkgs = args.name or spack.repo.PATH.all_package_names()
+    pkgs = args.name or spack.repo.path.all_package_names()
     reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
     _process_reports(reports)
 
@@ -67,19 +57,7 @@ def packages_https(parser, args):
     if not args.check_all and not args.name:
         tty.die("Please specify one or more packages to audit, or --all.")
 
-    pkgs = args.name or spack.repo.PATH.all_package_names()
-    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
-    _process_reports(reports)
-
-
-def externals(parser, args):
-    if args.list_externals:
-        msg = "@*{The following packages have detection tests:}"
-        tty.msg(cl.colorize(msg))
-        llnl.util.tty.colify.colify(spack.audit.packages_with_detection_tests(), indent=2)
-        return
-
-    pkgs = args.name or spack.repo.PATH.all_package_names()
+    pkgs = args.name or spack.repo.path.all_package_names()
     reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
     _process_reports(reports)
 
@@ -100,7 +78,6 @@ def list(parser, args):
 def audit(parser, args):
     subcommands = {
         "configs": configs,
-        "externals": externals,
         "packages": packages,
        "packages-https": packages_https,
         "list": list,
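Note: the removed `externals` subcommand wires a third audit group into the CLI, with a `--list` flag that only prints which packages have detection tests. The argparse pattern it relies on is plain subparser dispatch, e.g.:

    import argparse

    # Standalone sketch of the wiring, not Spack's actual parser object.
    parser = argparse.ArgumentParser(prog="spack audit")
    sp = parser.add_subparsers(dest="subcommand")
    sp.add_parser("configs")
    ext = sp.add_parser("externals")
    ext.add_argument("--list", action="store_true", dest="list_externals")

    args = parser.parse_args(["externals", "--list"])
    assert args.subcommand == "externals" and args.list_externals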
@@ -59,7 +59,7 @@ def setup_parser(subparser):
 
     subparser.add_argument(
         "package_or_file",
-        help="name of package to show contributions for, or path to a file in the spack repo",
+        help="name of package to show contributions for, " "or path to a file in the spack repo",
     )
 
 
@@ -126,7 +126,7 @@ def blame(parser, args):
         blame_file = path
 
     if not blame_file:
-        pkg_cls = spack.repo.PATH.get_pkg_class(args.package_or_file)
+        pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
         blame_file = pkg_cls.module.__file__.rstrip("c")  # .pyc -> .py
 
     # get git blame for the package
@@ -2,9 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from __future__ import print_function
+
 import os.path
 import shutil
-import sys
 import tempfile
 
 import llnl.util.filesystem
@@ -69,10 +70,11 @@
 
 def _add_scope_option(parser):
     scopes = spack.config.scopes()
+    scopes_metavar = spack.config.scopes_metavar
     parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
+        metavar=scopes_metavar,
         help="configuration scope to read/modify",
     )
 
@@ -169,7 +171,7 @@ def _reset(args):
     if not ok_to_continue:
         raise RuntimeError("Aborting")
 
-    for scope in spack.config.CONFIG.file_scopes:
+    for scope in spack.config.config.file_scopes:
         # The default scope should stay untouched
         if scope.name == "defaults":
             continue
@@ -186,7 +188,7 @@ def _reset(args):
     if os.path.exists(bootstrap_yaml):
         shutil.move(bootstrap_yaml, backup_file)
 
-    spack.config.CONFIG.clear_caches()
+    spack.config.config.clear_caches()
 
 
 def _root(args):
@@ -326,7 +328,6 @@ def _status(args):
     if missing:
         print(llnl.util.tty.color.colorize(legend))
         print()
-        sys.exit(1)
 
 
 def _add(args):
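Note: `spack.config.SCOPES_METAVAR` (left) versus the `scopes_metavar` module attribute (right) is the constants rename again; both just feed argparse's `metavar` so `--scope` shows a friendly placeholder in `--help` while `choices` still validates input. The mechanism in isolation:

    import argparse

    # Sketch; the metavar string is illustrative, not Spack's exact value.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--scope",
        choices=["defaults", "system", "site", "user"],
        metavar="{defaults,system,site,user} or env:ENVIRONMENT",
        help="configuration scope to read/modify",
    )
    parser.parse_args(["--scope", "user"])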
|
|||||||
@@ -2,18 +2,15 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import argparse
|
|
||||||
import glob
|
import glob
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
import llnl.util.tty.color as clr
|
import llnl.util.tty.color as clr
|
||||||
from llnl.string import plural
|
|
||||||
from llnl.util.lang import elide_list
|
from llnl.util.lang import elide_list
|
||||||
|
|
||||||
import spack.binary_distribution as bindist
|
import spack.binary_distribution as bindist
|
||||||
@@ -21,7 +18,7 @@
|
|||||||
import spack.cmd.common.arguments as arguments
|
import spack.cmd.common.arguments as arguments
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.error
|
import spack.hash_types as ht
|
||||||
import spack.mirror
|
import spack.mirror
|
||||||
import spack.relocate
|
import spack.relocate
|
||||||
import spack.repo
|
import spack.repo
|
||||||
@@ -31,68 +28,111 @@
|
|||||||
import spack.util.url as url_util
|
import spack.util.url as url_util
|
||||||
import spack.util.web as web_util
|
import spack.util.web as web_util
|
||||||
from spack.cmd import display_specs
|
from spack.cmd import display_specs
|
||||||
|
from spack.error import SpecError
|
||||||
from spack.spec import Spec, save_dependency_specfiles
|
from spack.spec import Spec, save_dependency_specfiles
|
||||||
from spack.stage import Stage
|
from spack.stage import Stage
|
||||||
|
from spack.util.string import plural
|
||||||
|
|
||||||
description = "create, download and install binary packages"
|
description = "create, download and install binary packages"
|
||||||
section = "packaging"
|
section = "packaging"
|
||||||
level = "long"
|
level = "long"
|
||||||
|
|
||||||
|
|
||||||
def setup_parser(subparser: argparse.ArgumentParser):
|
def setup_parser(subparser):
|
||||||
setattr(setup_parser, "parser", subparser)
|
setup_parser.parser = subparser
|
||||||
subparsers = subparser.add_subparsers(help="buildcache sub-commands")
|
subparsers = subparser.add_subparsers(help="buildcache sub-commands")
|
||||||
|
|
||||||
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
|
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
|
||||||
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
|
# TODO: remove from Spack 0.21
|
||||||
|
push.add_argument(
|
||||||
|
"-r",
|
||||||
|
"--rel",
|
||||||
|
action="store_true",
|
||||||
|
help="make all rpaths relative before creating tarballs. (deprecated)",
|
||||||
|
)
|
||||||
|
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
|
||||||
|
push.add_argument(
|
||||||
|
"-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
|
||||||
|
)
|
||||||
push.add_argument(
|
push.add_argument(
|
||||||
"--allow-root",
|
|
||||||
"-a",
|
"-a",
|
||||||
|
"--allow-root",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
help="allow install root string in binary files after RPATH substitution",
|
help="allow install root string in binary files after RPATH substitution",
|
||||||
)
|
)
|
||||||
push_sign = push.add_mutually_exclusive_group(required=False)
|
push.add_argument(
|
||||||
push_sign.add_argument(
|
"-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
|
||||||
"--unsigned", "-u", action="store_true", help="push unsigned buildcache tarballs"
|
|
||||||
)
|
)
|
||||||
push_sign.add_argument(
|
output = push.add_mutually_exclusive_group(required=False)
|
||||||
"--key", "-k", metavar="key", type=str, default=None, help="key for signing"
|
# TODO: remove from Spack 0.21
|
||||||
|
output.add_argument(
|
||||||
|
"-d",
|
||||||
|
"--directory",
|
||||||
|
metavar="directory",
|
||||||
|
dest="mirror_flag",
|
||||||
|
type=arguments.mirror_directory,
|
||||||
|
help="local directory where buildcaches will be written. (deprecated)",
|
||||||
)
|
)
|
||||||
push.add_argument("mirror", type=str, help="mirror name, path, or URL")
|
# TODO: remove from Spack 0.21
|
||||||
|
output.add_argument(
|
||||||
|
"-m",
|
||||||
|
"--mirror-name",
|
||||||
|
metavar="mirror-name",
|
||||||
|
dest="mirror_flag",
|
||||||
|
type=arguments.mirror_name,
|
||||||
|
help="name of the mirror where buildcaches will be written. (deprecated)",
|
||||||
|
)
|
||||||
|
# TODO: remove from Spack 0.21
|
||||||
|
output.add_argument(
|
||||||
|
"--mirror-url",
|
||||||
|
metavar="mirror-url",
|
||||||
|
dest="mirror_flag",
|
||||||
|
type=arguments.mirror_url,
|
||||||
|
help="URL of the mirror where buildcaches will be written. (deprecated)",
|
||||||
|
)
|
||||||
|
# Unfortunately we cannot add this to the mutually exclusive group above,
|
||||||
|
# because we have further positional arguments.
|
||||||
|
# TODO: require from Spack 0.21
|
||||||
|
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
|
||||||
push.add_argument(
|
push.add_argument(
|
||||||
"--update-index",
|
"--update-index",
|
||||||
"--rebuild-index",
|
"--rebuild-index",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
default=False,
|
default=False,
|
||||||
help="regenerate buildcache index after building package(s)",
|
help="Regenerate buildcache index after building package(s)",
|
||||||
)
|
)
|
||||||
push.add_argument(
|
push.add_argument(
|
||||||
"--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
|
"--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
|
||||||
)
|
)
|
||||||
push.add_argument(
|
push.add_argument(
|
||||||
"--only",
|
"--only",
|
||||||
default="package,dependencies",
|
default="package,dependencies",
|
||||||
dest="things_to_install",
|
dest="things_to_install",
|
||||||
choices=["package", "dependencies"],
|
choices=["package", "dependencies"],
|
||||||
help="select the buildcache mode. "
|
help=(
|
||||||
"The default is to build a cache for the package along with all its dependencies. "
|
"Select the buildcache mode. the default is to"
|
||||||
"Alternatively, one can decide to build a cache for only the package or only the "
|
" build a cache for the package along with all"
|
||||||
"dependencies",
|
" its dependencies. Alternatively, one can"
|
||||||
)
|
" decide to build a cache for only the package"
|
||||||
push.add_argument(
|
" or only the dependencies"
|
||||||
"--fail-fast",
|
),
|
||||||
action="store_true",
|
|
||||||
help="stop pushing on first failure (default is best effort)",
|
|
||||||
)
|
)
|
||||||
arguments.add_common_arguments(push, ["specs"])
|
arguments.add_common_arguments(push, ["specs"])
|
||||||
push.set_defaults(func=push_fn)
|
push.set_defaults(func=push_fn)
|
||||||
|
|
||||||
install = subparsers.add_parser("install", help=install_fn.__doc__)
|
install = subparsers.add_parser("install", help=install_fn.__doc__)
|
||||||
install.add_argument(
|
install.add_argument(
|
||||||
"-f", "--force", action="store_true", help="overwrite install directory if it exists"
|
"-f", "--force", action="store_true", help="overwrite install directory if it exists."
|
||||||
)
|
)
|
||||||
install.add_argument(
|
install.add_argument(
|
||||||
"-m", "--multiple", action="store_true", help="allow all matching packages"
|
"-m", "--multiple", action="store_true", help="allow all matching packages "
|
||||||
|
)
|
||||||
|
# TODO: remove from Spack 0.21
|
||||||
|
install.add_argument(
|
||||||
|
"-a",
|
||||||
|
"--allow-root",
|
||||||
|
action="store_true",
|
||||||
|
help="allow install root string in binary files after RPATH substitution. (deprecated)",
|
||||||
)
|
)
|
||||||
install.add_argument(
|
install.add_argument(
|
||||||
"-u",
|
"-u",
|
||||||
@@ -111,7 +151,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
|||||||
install.set_defaults(func=install_fn)
|
install.set_defaults(func=install_fn)
|
||||||
|
|
||||||
listcache = subparsers.add_parser("list", help=list_fn.__doc__)
|
listcache = subparsers.add_parser("list", help=list_fn.__doc__)
|
||||||
-    arguments.add_common_arguments(listcache, ["long", "very_long", "namespaces"])
+    arguments.add_common_arguments(listcache, ["long", "very_long"])
     listcache.add_argument(
         "-v",
         "--variants",
@@ -146,49 +186,49 @@ def setup_parser(subparser: argparse.ArgumentParser):
         "-m",
         "--mirror-url",
         default=None,
-        help="override any configured mirrors with this mirror URL",
+        help="Override any configured mirrors with this mirror URL",
     )

     check.add_argument(
-        "-o", "--output-file", default=None, help="file where rebuild info should be written"
+        "-o", "--output-file", default=None, help="File where rebuild info should be written"
     )

     # used to construct scope arguments below
     scopes = spack.config.scopes()
+    scopes_metavar = spack.config.scopes_metavar

     check.add_argument(
         "--scope",
         choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
+        metavar=scopes_metavar,
         default=spack.config.default_modify_scope(),
         help="configuration scope containing mirrors to check",
     )
-    check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
-    check_spec_or_specfile.add_argument(
-        "-s", "--spec", help="check single spec instead of release specs file"
+    check.add_argument(
+        "-s", "--spec", default=None, help="Check single spec instead of release specs file"
     )
-    check_spec_or_specfile.add_argument(
+    check.add_argument(
         "--spec-file",
-        help="check single spec from json or yaml file instead of release specs file",
+        default=None,
+        help=("Check single spec from json or yaml file instead of release specs file"),
     )

     check.set_defaults(func=check_fn)

     # Download tarball and specfile
     download = subparsers.add_parser("download", help=download_fn.__doc__)
-    download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
-    download_spec_or_specfile.add_argument(
-        "-s", "--spec", help="download built tarball for spec from mirror"
-    )
-    download_spec_or_specfile.add_argument(
-        "--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
+    download.add_argument(
+        "-s", "--spec", default=None, help="Download built tarball for spec from mirror"
     )
     download.add_argument(
-        "-p",
-        "--path",
-        required=True,
+        "--spec-file",
         default=None,
-        help="path to directory where tarball should be downloaded",
+        help=("Download built tarball for spec (from json or yaml file) from mirror"),
+    )
+    download.add_argument(
+        "-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
     )
     download.set_defaults(func=download_fn)

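The hunk above trades the base branch's required, mutually exclusive `--spec`/`--spec-file` group for two independent optional flags (which is why the right-hand side re-adds `default=None` and manual checks elsewhere). A minimal, standalone sketch of the grouped pattern — not Spack code, names are illustrative:

    import argparse

    parser = argparse.ArgumentParser(prog="buildcache-check")
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-s", "--spec", help="check a single spec")
    group.add_argument("--spec-file", help="check a single spec from a json or yaml file")

    # Exactly one of the two options must be given; argparse enforces it:
    print(parser.parse_args(["-s", "zlib"]))  # Namespace(spec='zlib', spec_file=None)
    # parser.parse_args([])                   # error: one of -s/--spec --spec-file is required
    # parser.parse_args(["-s", "z", "--spec-file", "z.json"])  # error: not allowed together

With the group, the both-given and neither-given cases never reach the command body.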
@@ -196,53 +236,107 @@ def setup_parser(subparser: argparse.ArgumentParser):
     getbuildcachename = subparsers.add_parser(
         "get-buildcache-name", help=get_buildcache_name_fn.__doc__
     )
-    getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
-        required=True
+    getbuildcachename.add_argument(
+        "-s", "--spec", default=None, help="Spec string for which buildcache name is desired"
     )
-    getbuildcachename_spec_or_specfile.add_argument(
-        "-s", "--spec", help="spec string for which buildcache name is desired"
-    )
-    getbuildcachename_spec_or_specfile.add_argument(
-        "--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
+    getbuildcachename.add_argument(
+        "--spec-file",
+        default=None,
+        help=("Path to spec json or yaml file for which buildcache name is desired"),
     )
     getbuildcachename.set_defaults(func=get_buildcache_name_fn)

     # Given the root spec, save the yaml of the dependent spec to a file
     savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
-    savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
-    savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
-    savespecfile_spec_or_specfile.add_argument(
-        "--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
+    savespecfile.add_argument("--root-spec", default=None, help="Root spec of dependent spec")
+    savespecfile.add_argument(
+        "--root-specfile",
+        default=None,
+        help="Path to json or yaml file containing root spec of dependent spec",
     )
     savespecfile.add_argument(
         "-s",
         "--specs",
-        required=True,
-        help="list of dependent specs for which saved yaml is desired",
+        default=None,
+        help="List of dependent specs for which saved yaml is desired",
     )
     savespecfile.add_argument(
-        "--specfile-dir", required=True, help="path to directory where spec yamls should be saved"
+        "--specfile-dir", default=None, help="Path to directory where spec yamls should be saved"
     )
     savespecfile.set_defaults(func=save_specfile_fn)

     # Sync buildcache entries from one mirror to another
     sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
     sync.add_argument(
-        "--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
+        "--manifest-glob",
+        default=None,
+        help="A quoted glob pattern identifying copy manifest files",
     )
-    sync.add_argument(
+    source = sync.add_mutually_exclusive_group(required=False)
+    # TODO: remove in Spack 0.21
+    source.add_argument(
+        "--src-directory",
+        metavar="DIRECTORY",
+        dest="src_mirror_flag",
+        type=arguments.mirror_directory,
+        help="Source mirror as a local file path (deprecated)",
+    )
+    # TODO: remove in Spack 0.21
+    source.add_argument(
+        "--src-mirror-name",
+        metavar="MIRROR_NAME",
+        dest="src_mirror_flag",
+        type=arguments.mirror_name,
+        help="Name of the source mirror (deprecated)",
+    )
+    # TODO: remove in Spack 0.21
+    source.add_argument(
+        "--src-mirror-url",
+        metavar="MIRROR_URL",
+        dest="src_mirror_flag",
+        type=arguments.mirror_url,
+        help="URL of the source mirror (deprecated)",
+    )
+    # TODO: only support this in 0.21
+    source.add_argument(
         "src_mirror",
         metavar="source mirror",
         type=arguments.mirror_name_or_url,
+        help="Source mirror name, path, or URL",
         nargs="?",
-        help="source mirror name, path, or URL",
     )
-    sync.add_argument(
+    dest = sync.add_mutually_exclusive_group(required=False)
+    # TODO: remove in Spack 0.21
+    dest.add_argument(
+        "--dest-directory",
+        metavar="DIRECTORY",
+        dest="dest_mirror_flag",
+        type=arguments.mirror_directory,
+        help="Destination mirror as a local file path (deprecated)",
+    )
+    # TODO: remove in Spack 0.21
+    dest.add_argument(
+        "--dest-mirror-name",
+        metavar="MIRROR_NAME",
+        type=arguments.mirror_name,
+        dest="dest_mirror_flag",
+        help="Name of the destination mirror (deprecated)",
+    )
+    # TODO: remove in Spack 0.21
+    dest.add_argument(
+        "--dest-mirror-url",
+        metavar="MIRROR_URL",
+        dest="dest_mirror_flag",
+        type=arguments.mirror_url,
+        help="URL of the destination mirror (deprecated)",
+    )
+    # TODO: only support this in 0.21
+    dest.add_argument(
         "dest_mirror",
         metavar="destination mirror",
         type=arguments.mirror_name_or_url,
+        help="Destination mirror name, path, or URL",
         nargs="?",
-        help="destination mirror name, path, or URL",
     )
     sync.set_defaults(func=sync_fn)

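The right-hand side keeps each retired flag alive for one release by pointing it at a shared `dest` inside a non-required exclusive group, next to an optional positional. A self-contained sketch of that migration pattern (argument names mirror the diff; the `prog` name is made up):

    import argparse

    parser = argparse.ArgumentParser(prog="buildcache-sync")
    source = parser.add_mutually_exclusive_group(required=False)
    source.add_argument("--src-directory", dest="src_mirror_flag", metavar="DIRECTORY")
    source.add_argument("--src-mirror-name", dest="src_mirror_flag", metavar="MIRROR_NAME")
    source.add_argument("--src-mirror-url", dest="src_mirror_flag", metavar="MIRROR_URL")
    source.add_argument("src_mirror", nargs="?", help="source mirror name, path, or URL")

    old_style = parser.parse_args(["--src-mirror-name", "mymirror"])
    new_style = parser.parse_args(["s3://bucket/mirror"])
    print(old_style.src_mirror_flag, old_style.src_mirror)  # mymirror None
    print(new_style.src_mirror_flag, new_style.src_mirror)  # None s3://bucket/mirror

The command body then only has to consult `src_mirror_flag` (deprecated spelling) or `src_mirror` (new spelling), and the group stops users from mixing the two.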
@@ -250,48 +344,124 @@ def setup_parser(subparser: argparse.ArgumentParser):
     update_index = subparsers.add_parser(
         "update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
     )
-    update_index.add_argument(
-        "mirror", type=arguments.mirror_name_or_url, help="destination mirror name, path, or URL"
+    update_index_out = update_index.add_mutually_exclusive_group(required=True)
+    # TODO: remove in Spack 0.21
+    update_index_out.add_argument(
+        "-d",
+        "--directory",
+        metavar="directory",
+        dest="mirror_flag",
+        type=arguments.mirror_directory,
+        help="local directory where buildcaches will be written (deprecated)",
+    )
+    # TODO: remove in Spack 0.21
+    update_index_out.add_argument(
+        "-m",
+        "--mirror-name",
+        metavar="mirror-name",
+        dest="mirror_flag",
+        type=arguments.mirror_name,
+        help="name of the mirror where buildcaches will be written (deprecated)",
+    )
+    # TODO: remove in Spack 0.21
+    update_index_out.add_argument(
+        "--mirror-url",
+        metavar="mirror-url",
+        dest="mirror_flag",
+        type=arguments.mirror_url,
+        help="URL of the mirror where buildcaches will be written (deprecated)",
+    )
+    # TODO: require from Spack 0.21
+    update_index_out.add_argument(
+        "mirror",
+        type=arguments.mirror_name_or_url,
+        help="Destination mirror name, path, or URL",
+        nargs="?",
     )
     update_index.add_argument(
         "-k",
         "--keys",
         default=False,
         action="store_true",
-        help="if provided, key index will be updated as well as package index",
+        help="If provided, key index will be updated as well as package index",
     )
     update_index.set_defaults(func=update_index_fn)


-def _matching_specs(specs: List[Spec]) -> List[Spec]:
-    """Disambiguate specs and return a list of matching specs"""
-    return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
+def _matching_specs(specs, spec_file):
+    """Return a list of matching specs read from either a spec file (JSON or YAML),
+    a query over the store or a query over the active environment.
+    """
+    env = ev.active_environment()
+    hashes = env.all_hashes() if env else None
+    if spec_file:
+        return spack.store.specfile_matches(spec_file, hashes=hashes)
+
+    if specs:
+        constraints = spack.cmd.parse_specs(specs)
+        return spack.store.find(constraints, hashes=hashes)
+
+    if env:
+        return [concrete for _, concrete in env.concretized_specs()]
+
+    tty.die(
+        "build cache file creation requires at least one"
+        " installed package spec, an active environment,"
+        " or else a path to a json or yaml file containing a spec"
+        " to install"
+    )

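Both versions of `_matching_specs` resolve specs from the first source that can answer: the left delegates disambiguation per spec, while the right walks spec file → store query → active environment → hard error. An illustrative reduction of that fallback chain (plain Python, no Spack imports; the lambdas are stand-ins):

    from typing import Callable, List, Optional

    def first_match(sources: List[Callable[[], Optional[list]]]) -> list:
        # return the result of the first source that yields anything
        for source in sources:
            result = source()
            if result:
                return result
        raise SystemExit("no specs could be selected from any source")

    specs = first_match([
        lambda: None,             # stand-in for "no --spec-file was given"
        lambda: None,             # stand-in for an empty store query
        lambda: ["zlib@1.2.13"],  # stand-in for the active environment's specs
    ])
    print(specs)  # ['zlib@1.2.13']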
-def push_fn(args: argparse.Namespace):
+def _concrete_spec_from_args(args):
+    spec_str, specfile_path = args.spec, args.spec_file
+
+    if not spec_str and not specfile_path:
+        tty.error("must provide either spec string or path to YAML or JSON specfile")
+        sys.exit(1)
+
+    if spec_str:
+        try:
+            constraints = spack.cmd.parse_specs(spec_str)
+            spec = spack.store.find(constraints)[0]
+            spec.concretize()
+        except SpecError as spec_error:
+            tty.error("Unable to concretize spec {0}".format(spec_str))
+            tty.debug(spec_error)
+            sys.exit(1)
+
+        return spec
+
+    return Spec.from_specfile(specfile_path)
+
+
+def push_fn(args):
     """create a binary package and push it to a mirror"""
-    if args.spec_file:
-        tty.warn(
-            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
-            "Use positional arguments instead."
-        )
+    if args.mirror_flag:
+        mirror = args.mirror_flag
+    elif not args.mirror:
+        raise ValueError("No mirror provided")

-    if args.specs or args.spec_file:
-        specs = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
     else:
-        specs = spack.cmd.require_active_env("buildcache push").all_specs()
+        mirror = arguments.mirror_name_or_url(args.mirror)

-    mirror = arguments.mirror_name_or_url(args.mirror)
-
-    if args.allow_root:
+    if args.mirror_flag:
         tty.warn(
-            "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
+            "Using flags to specify mirrors is deprecated and will be removed in "
+            "Spack 0.21, use positional arguments instead."
         )

+    if args.rel:
+        tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")
+
+    # TODO: remove this in 0.21. If we have mirror_flag, the first
+    # spec is in the positional mirror arg due to argparse limitations.
+    input_specs = args.specs
+    if args.mirror_flag and args.mirror:
+        input_specs.insert(0, args.mirror)

     url = mirror.push_url

     specs = bindist.specs_to_be_packaged(
-        specs,
+        _matching_specs(input_specs, args.spec_file),
         root="package" in args.things_to_install,
         dependencies="dependencies" in args.things_to_install,
     )
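The `# TODO: remove this in 0.21` comment added above points at a real argparse quirk: when an optional positional (`mirror`, `nargs="?"`) precedes a variable-length spec list, the first bare word is assigned to the positional even if the mirror already arrived via a deprecated flag. A runnable demonstration of the quirk and of the exact workaround the diff applies (flag names shortened for the demo):

    import argparse

    parser = argparse.ArgumentParser(prog="buildcache-push")
    parser.add_argument("--mirror-name", dest="mirror_flag")
    parser.add_argument("mirror", nargs="?")
    parser.add_argument("specs", nargs="*")

    args = parser.parse_args(["--mirror-name", "m", "zlib", "openssl"])
    print(args.mirror, args.specs)  # zlib ['openssl'] -- the first spec landed in `mirror`

    if args.mirror_flag and args.mirror:
        args.specs.insert(0, args.mirror)  # same fix-up as push_fn's input_specs.insert(0, ...)
    print(args.specs)  # ['zlib', 'openssl']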
@@ -302,7 +472,6 @@ def push_fn(args: argparse.Namespace):
     tty.info(f"Selected {len(specs)} specs to push to {url}")

     skipped = []
-    failed = []

     # tty printing
     color = clr.get_color_when()
@@ -317,7 +486,9 @@ def push_fn(args: argparse.Namespace):
                 url,
                 bindist.PushOptions(
                     force=args.force,
+                    relative=args.rel,
                     unsigned=args.unsigned,
+                    allow_root=args.allow_root,
                     key=args.key,
                     regenerate_index=args.update_index,
                 ),
@@ -333,17 +504,11 @@ def push_fn(args: argparse.Namespace):
         except bindist.NoOverwriteException:
             skipped.append(format_spec(spec))

-        # Catch any other exception unless the fail fast option is set
-        except Exception as e:
-            if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
-                raise
-            failed.append((format_spec(spec), e))

     if skipped:
         if len(specs) == 1:
             tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
         elif len(skipped) == len(specs):
-            tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
+            tty.info("All specs are already in the buildcache. Use --force to overwite them.")
         else:
             tty.info(
                 "The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -353,23 +518,15 @@ def push_fn(args: argparse.Namespace):
                 )
             )

-    if failed:
-        if len(failed) == 1:
-            raise failed[0][1]
-
-        raise spack.error.SpackError(
-            f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
-            "\n".join(
-                elide_list([f"  {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
-            ),
-        )


 def install_fn(args):
     """install from a binary package"""
     if not args.specs:
         tty.die("a spec argument is required to install from a buildcache")

+    if args.allow_root:
+        tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")

     query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
     matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
     for match in matches:
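The block deleted a few hunks above is the left branch's collect-then-report error handling for `buildcache push`: keep pushing after a failure, remember `(spec, exception)` pairs, re-raise a lone failure verbatim, and summarize several. A generic, dependency-free sketch of the same idiom (package names invented):

    failed = []
    fail_fast = False

    def push(name: str) -> None:
        if "bad" in name:
            raise RuntimeError(f"could not push {name}")

    for name in ["pkg-a", "pkg-bad", "pkg-b"]:
        try:
            push(name)
        except Exception as e:
            if fail_fast:
                raise  # surface the first error immediately
            failed.append((name, e))

    if failed:
        # the original re-raises a single failure unchanged; this sketch only summarizes
        summary = "\n".join(f"  {n}: {type(e).__name__}: {e}" for n, e in failed)
        raise RuntimeError(f"{len(failed)} error(s) occurred while pushing:\n{summary}")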
@@ -407,31 +564,32 @@ def keys_fn(args):


 def preview_fn(args):
-    """analyze an installed spec and reports whether executables and libraries are relocatable"""
-    tty.warn(
-        "`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
-        "now the default. This command will be removed in Spack 0.22"
-    )
+    """analyze an installed spec and reports whether executables
+    and libraries are relocatable


-def check_fn(args: argparse.Namespace):
-    """check specs against remote binary mirror(s) to see if any need to be rebuilt

-    this command uses the process exit code to indicate its result, specifically, if the
-    exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
     """
-    if args.spec_file:
-        tty.warn(
-            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
-            "Use --spec instead."
-        )
+    constraints = spack.cmd.parse_specs(args.specs)
+    specs = spack.store.find(constraints, multiple=True)

-    specs = spack.cmd.parse_specs(args.spec or args.spec_file)
+    # Cycle over the specs that match
+    for spec in specs:
+        print("Relocatable nodes")
+        print("--------------------------------")
+        print(spec.tree(status_fn=spack.relocate.is_relocatable))

-    if specs:
-        specs = _matching_specs(specs)
+def check_fn(args):
+    """Check specs (either a single spec from --spec, or else the full set
+    of release specs) against remote binary mirror(s) to see if any need
+    to be rebuilt. This command uses the process exit code to indicate
+    its result, specifically, if the exit code is non-zero, then at least
+    one of the indicated specs needs to be rebuilt.
+    """
+    if args.spec or args.spec_file:
+        specs = [_concrete_spec_from_args(args)]
     else:
-        specs = spack.cmd.require_active_env("buildcache check").all_specs()
+        env = spack.cmd.require_active_env(cmd_name="buildcache")
+        env.concretize()
+        specs = env.all_specs()

     if not specs:
         tty.msg("No specs provided, exiting.")
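Both docstrings for `check_fn` promise the same contract: the process exit code, not the output, says whether a rebuild is needed. A caller consuming that contract might look like this (the spec name is arbitrary; any `spack buildcache check` invocation works the same way):

    import subprocess

    proc = subprocess.run(
        ["spack", "buildcache", "check", "--spec", "zlib"],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        print("at least one spec needs to be rebuilt")
    else:
        print("all checked specs are up to date on the mirror")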
@@ -455,61 +613,63 @@ def check_fn(args: argparse.Namespace):


 def download_fn(args):
-    """download buildcache entry from a remote mirror to local folder
+    """Download buildcache entry from a remote mirror to local folder. This
+    command uses the process exit code to indicate its result, specifically,
+    a non-zero exit code indicates that the command failed to download at
+    least one of the required buildcache components."""
+    if not args.spec and not args.spec_file:
+        tty.msg("No specs provided, exiting.")
+        return

-    this command uses the process exit code to indicate its result, specifically, a non-zero exit
-    code indicates that the command failed to download at least one of the required buildcache
-    components
-    """
-    if args.spec_file:
-        tty.warn(
-            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
-            "Use --spec instead."
-        )
+    if not args.path:
+        tty.msg("No download path provided, exiting")
+        return

-    specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
+    spec = _concrete_spec_from_args(args)
+    result = bindist.download_single_spec(spec, args.path)

-    if len(specs) != 1:
-        tty.die("a single spec argument is required to download from a buildcache")
-
-    if not bindist.download_single_spec(specs[0], args.path):
+    if not result:
         sys.exit(1)


 def get_buildcache_name_fn(args):
-    """get name (prefix) of buildcache entries for this spec"""
-    tty.warn("This command is deprecated and will be removed in Spack 0.22.")
-    specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
-    if len(specs) != 1:
-        tty.die("a single spec argument is required to get buildcache name")
-    print(bindist.tarball_name(specs[0], ""))
+    """Get name (prefix) of buildcache entries for this spec"""
+    spec = _concrete_spec_from_args(args)
+    buildcache_name = bindist.tarball_name(spec, "")
+    print("{0}".format(buildcache_name))


 def save_specfile_fn(args):
-    """get full spec for dependencies and write them to files in the specified output directory
-    uses exit code to signal success or failure. an exit code of zero means the command was likely
-    successful. if any errors or exceptions are encountered, or if expected command-line arguments
-    are not provided, then the exit code will be non-zero
+    """Get full spec for dependencies, relative to root spec, and write them
+    to files in the specified output directory. Uses exit code to signal
+    success or failure. An exit code of zero means the command was likely
+    successful. If any errors or exceptions are encountered, or if expected
+    command-line arguments are not provided, then the exit code will be
+    non-zero.
     """
+    if not args.root_spec and not args.root_specfile:
+        tty.msg("No root spec provided, exiting.")
+        sys.exit(1)

+    if not args.specs:
+        tty.msg("No dependent specs provided, exiting.")
+        sys.exit(1)

+    if not args.specfile_dir:
+        tty.msg("No yaml directory provided, exiting.")
+        sys.exit(1)

     if args.root_specfile:
-        tty.warn(
-            "The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
-            "Use --root-spec instead."
-        )
-
-    specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
-    if len(specs) != 1:
-        tty.die("a single spec argument is required to save specfile")
-
-    root = specs[0]
-
-    if not root.concrete:
-        root.concretize()
+        with open(args.root_specfile) as fd:
+            root_spec_as_json = fd.read()
+        spec_format = "yaml" if args.root_specfile.endswith("yaml") else "json"
+    else:
+        root_spec = Spec(args.root_spec)
+        root_spec.concretize()
+        root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
+        spec_format = "json"

     save_dependency_specfiles(
-        root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
+        root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
     )

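The left branch deletes three manual "No X provided, exiting" checks from `save_specfile_fn` by pushing the validation into argparse: `required=True` on `--specs`/`--specfile-dir` plus a required group for the root spec. A compressed sketch of the parser half of that trade (paths are placeholders):

    import argparse

    parser = argparse.ArgumentParser(prog="buildcache-save-specfile")
    root = parser.add_mutually_exclusive_group(required=True)
    root.add_argument("--root-spec")
    root.add_argument("--root-specfile")
    parser.add_argument("-s", "--specs", required=True)
    parser.add_argument("--specfile-dir", required=True)

    args = parser.parse_args(
        ["--root-spec", "zlib", "--specs", "zlib", "--specfile-dir", "/tmp/specs"]
    )
    print(args)  # omitting --specs or --specfile-dir would have exited with usage text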
@@ -527,7 +687,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
         temp_stage.create()
         temp_stage.fetch()
         web_util.push_to_url(local_path, dest_url, keep_original=True)
-    except spack.error.FetchError as e:
+    except web_util.FetchError as e:
         # Expected, since we have to try all the possible extensions
         tty.debug("no such file: {0}".format(src_url))
         tty.debug(e)
@@ -539,19 +699,32 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):


 def sync_fn(args):
-    """sync binaries (and associated metadata) from one mirror to another
+    """Syncs binaries (and associated metadata) from one mirror to another.
+    Requires an active environment in order to know which specs to sync.

-    requires an active environment in order to know which specs to sync
+    Args:
+        src (str): Source mirror URL
+        dest (str): Destination mirror URL
     """
     if args.manifest_glob:
         manifest_copy(glob.glob(args.manifest_glob))
         return 0

-    if args.src_mirror is None or args.dest_mirror is None:
-        tty.die("Provide mirrors to sync from and to.")
+    # If no manifest_glob, require a source and dest mirror.
+    # TODO: Simplify in Spack 0.21
+    if not (args.src_mirror_flag or args.src_mirror) or not (
+        args.dest_mirror_flag or args.dest_mirror
+    ):
+        raise ValueError("Source and destination mirror are required.")

-    src_mirror = args.src_mirror
-    dest_mirror = args.dest_mirror
+    if args.src_mirror_flag or args.dest_mirror_flag:
+        tty.warn(
+            "Using flags to specify mirrors is deprecated and will be removed in "
+            "Spack 0.21, use positional arguments instead."
+        )

+    src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
+    dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror

     src_mirror_url = src_mirror.fetch_url
     dest_mirror_url = dest_mirror.push_url
@@ -629,8 +802,14 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):


 def update_index_fn(args):
-    """update a buildcache index"""
-    update_index(args.mirror, update_keys=args.keys)
+    """Update a buildcache index."""
+    if args.mirror_flag:
+        tty.warn(
+            "Using flags to specify mirrors is deprecated and will be removed in "
+            "Spack 0.21, use positional arguments instead."
+        )
+    mirror = args.mirror_flag if args.mirror_flag else args.mirror
+    update_index(mirror, update_keys=args.keys)


 def buildcache(parser, args):
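Note that both versions of `sync_fn` converge on the same two lines: read from the source mirror's `fetch_url`, write to the destination's `push_url` — a mirror may expose different endpoints for each direction. A toy model of that asymmetry (this is not `spack.mirror.Mirror`, just an illustration):

    from dataclasses import dataclass

    @dataclass
    class Mirror:
        fetch_url: str
        push_url: str

    src = Mirror(fetch_url="https://cache.example.com", push_url="s3://cache-bucket")
    dest = Mirror(fetch_url="https://backup.example.com", push_url="s3://backup-bucket")

    src_mirror_url = src.fetch_url   # binaries are read from here
    dest_mirror_url = dest.push_url  # and written to here
    print(src_mirror_url, "->", dest_mirror_url)

The hunks that follow come from spack's `checksum` command (judging by the code, lib/spack/spack/cmd/checksum.py; the compare view omits the file header).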
@@ -3,22 +3,21 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+from __future__ import print_function

 import argparse
-import re
 import sys

-import llnl.util.lang
-from llnl.util import tty
+import llnl.util.tty as tty

 import spack.cmd
+import spack.cmd.common.arguments as arguments
 import spack.repo
 import spack.spec
 import spack.stage
 import spack.util.crypto
-from spack.cmd.common import arguments
-from spack.package_base import PackageBase, deprecated_version, preferred_version
+from spack.package_base import deprecated_version, preferred_version
 from spack.util.editor import editor
-from spack.util.format import get_version_lines
 from spack.util.naming import valid_fully_qualified_module_name
 from spack.version import Version

@@ -34,39 +33,36 @@ def setup_parser(subparser):
         default=False,
         help="don't clean up staging area when command completes",
     )
-    subparser.add_argument(
+    sp = subparser.add_mutually_exclusive_group()
+    sp.add_argument(
         "-b",
         "--batch",
         action="store_true",
         default=False,
         help="don't ask which versions to checksum",
     )
-    subparser.add_argument(
+    sp.add_argument(
         "-l",
         "--latest",
         action="store_true",
         default=False,
-        help="checksum the latest available version",
+        help="checksum the latest available version only",
     )
-    subparser.add_argument(
+    sp.add_argument(
         "-p",
         "--preferred",
         action="store_true",
         default=False,
-        help="checksum the known Spack preferred version",
+        help="checksum the preferred version only",
     )
-    modes_parser = subparser.add_mutually_exclusive_group()
-    modes_parser.add_argument(
+    subparser.add_argument(
         "-a",
         "--add-to-package",
         action="store_true",
         default=False,
         help="add new versions to package",
     )
-    modes_parser.add_argument(
-        "--verify", action="store_true", default=False, help="verify known package checksums"
-    )
-    arguments.add_common_arguments(subparser, ["package", "jobs"])
+    arguments.add_common_arguments(subparser, ["package"])
     subparser.add_argument(
         "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
     )
@@ -83,174 +79,89 @@ def checksum(parser, args):
         tty.die("`spack checksum` accepts package names, not URLs.")

     # Get the package we're going to generate checksums for
-    pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
+    pkg_cls = spack.repo.path.get_pkg_class(args.package)
     pkg = pkg_cls(spack.spec.Spec(args.package))

-    # Build a list of versions to checksum
-    versions = [Version(v) for v in args.versions]
-
-    # Define placeholder for remote versions.
-    # This'll help reduce redundant work if we need to check for the existance
-    # of remote versions more than once.
-    remote_versions = None
-
-    # Add latest version if requested
-    if args.latest:
-        remote_versions = pkg.fetch_remote_versions(args.jobs)
-        if len(remote_versions) > 0:
-            latest_version = sorted(remote_versions.keys(), reverse=True)[0]
-            versions.append(latest_version)
-
-    # Add preferred version if requested
-    if args.preferred:
-        versions.append(preferred_version(pkg))
-
-    # Store a dict of the form version -> URL
     url_dict = {}
+    if not args.versions and args.preferred:
+        versions = [preferred_version(pkg)]
+    else:
+        versions = [Version(v) for v in args.versions]

-    for version in versions:
-        if deprecated_version(pkg, version):
-            tty.warn(f"Version {version} is deprecated")
+    if versions:
+        remote_versions = None
+        for version in versions:
+            if deprecated_version(pkg, version):
+                tty.warn("Version {0} is deprecated".format(version))

         url = pkg.find_valid_url_for_version(version)
         if url is not None:
             url_dict[version] = url
             continue
         # if we get here, it's because no valid url was provided by the package
         # do expensive fallback to try to recover
         if remote_versions is None:
-            remote_versions = pkg.fetch_remote_versions(args.jobs)
+            remote_versions = pkg.fetch_remote_versions()
         if version in remote_versions:
             url_dict[version] = remote_versions[version]
-
-    if len(versions) <= 0:
-        if remote_versions is None:
-            remote_versions = pkg.fetch_remote_versions(args.jobs)
-        url_dict = remote_versions
+    else:
+        url_dict = pkg.fetch_remote_versions()

     if not url_dict:
-        tty.die(f"Could not find any remote versions for {pkg.name}")
+        tty.die("Could not find any remote versions for {0}".format(pkg.name))

-    # print an empty line to create a new output section block
-    print()
-
-    version_hashes = spack.stage.get_checksums_for_versions(
+    version_lines = spack.stage.get_checksums_for_versions(
         url_dict,
         pkg.name,
         keep_stage=args.keep_stage,
-        batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
+        batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
+        latest=args.latest,
         fetch_options=pkg.fetch_options,
     )

-    if args.verify:
-        print_checksum_status(pkg, version_hashes)
-        sys.exit(0)
-
-    # convert dict into package.py version statements
-    version_lines = get_version_lines(version_hashes, url_dict)
     print()
     print(version_lines)
     print()

     if args.add_to_package:
-        add_versions_to_package(pkg, version_lines)
+        filename = spack.repo.path.filename_for_package_name(pkg.name)
+        # Make sure we also have a newline after the last version
+        versions = [v + "\n" for v in version_lines.splitlines()]
+        versions.append("\n")
+        # We need to insert the versions in reversed order
+        versions.reverse()
+        versions.append("    # FIXME: Added by `spack checksum`\n")
+        version_line = None
+
+        with open(filename, "r") as f:
+            lines = f.readlines()
+            for i in range(len(lines)):
+                # Black is drunk, so this is what it looks like for now
+                # See https://github.com/psf/black/issues/2156 for more information
+                if lines[i].startswith("    # FIXME: Added by `spack checksum`") or lines[
+                    i
+                ].startswith("    version("):
+                    version_line = i
+                    break

-
-def print_checksum_status(pkg: PackageBase, version_hashes: dict):
-    """
-    Verify checksums present in version_hashes against those present
-    in the package's instructions.
-
-    Args:
-        pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
-        version_hashes (dict): A dictionary of the form: version -> checksum.
-
-    """
-    results = []
-    num_verified = 0
-    failed = False
-
-    max_len = max(len(str(v)) for v in version_hashes)
-    num_total = len(version_hashes)
-
-    for version, sha in version_hashes.items():
-        if version not in pkg.versions:
-            msg = "No previous checksum"
-            status = "-"
-
-        elif sha == pkg.versions[version]["sha256"]:
-            msg = "Correct"
-            status = "="
-            num_verified += 1
-
-        else:
-            msg = sha
-            status = "x"
-            failed = True
-
-        results.append("{0:{1}} {2} {3}".format(str(version), max_len, f"[{status}]", msg))
-
-    # Display table of checksum results.
-    tty.msg(f"Verified {num_verified} of {num_total}", "", *llnl.util.lang.elide_list(results), "")
-
-    # Terminate at the end of function to prevent additional output.
-    if failed:
-        print()
-        tty.die("Invalid checksums found.")
-
-
-def add_versions_to_package(pkg: PackageBase, version_lines: str):
-    """
-    Add checksumed versions to a package's instructions and open a user's
-    editor so they may double check the work of the function.
-
-    Args:
-        pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
-        version_lines (str): A string of rendered version lines.
-
-    """
-    # Get filename and path for package
-    filename = spack.repo.PATH.filename_for_package_name(pkg.name)
-    num_versions_added = 0
-
-    version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
-    version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
-
-    # Split rendered version lines into tuple of (version, version_line)
-    # We reverse sort here to make sure the versions match the version_lines
-    new_versions = []
-    for ver_line in version_lines.split("\n"):
-        match = version_re.match(ver_line)
-        if match:
-            new_versions.append((Version(match.group(1)), ver_line))
-
-    with open(filename, "r+") as f:
-        contents = f.read()
-        split_contents = version_statement_re.split(contents)
-
-        for i, subsection in enumerate(split_contents):
-            # If there are no more versions to add we should exit
-            if len(new_versions) <= 0:
-                break
-
-            # Check if the section contains a version
-            contents_version = version_re.match(subsection)
-            if contents_version is not None:
-                parsed_version = Version(contents_version.group(1))
-
-                if parsed_version < new_versions[0][0]:
-                    split_contents[i:i] = [new_versions.pop(0)[1], "  # FIXME", "\n"]
-                    num_versions_added += 1
-
-                elif parsed_version == new_versions[0][0]:
-                    new_versions.pop(0)
-
-        # Seek back to the start of the file so we can rewrite the file contents.
-        f.seek(0)
-        f.writelines("".join(split_contents))
-
-    tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
-    tty.msg(f"Open {filename} to review the additions.")
-
-    if sys.stdout.isatty():
-        editor(filename)
+        if version_line is not None:
+            for v in versions:
+                lines.insert(version_line, v)
+
+        with open(filename, "w") as f:
+            f.writelines(lines)
+
+        msg = "opening editor to verify"
+
+        if not sys.stdout.isatty():
+            msg = "please verify"
+
+        tty.info(
+            "Added {0} new versions to {1}, "
+            "{2}.".format(len(versions) - 2, args.package, msg)
+        )
+
+        if sys.stdout.isatty():
+            editor(filename)
+    else:
+        tty.warn("Could not add new versions to {0}.".format(args.package))
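The long deletion above removes the left branch's `add_versions_to_package`, which splices freshly rendered `version(...)` lines into a package recipe with two regexes. A reduced, self-contained demo of that splice — the package text and hashes are made up, and a string comparison stands in for Spack's `Version` ordering:

    import re

    version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
    version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

    contents = (
        '    version("1.2.13", sha256="aaaa")\n'
        '    version("1.2.11", sha256="bbbb")\n'
    )
    new_line = '    version("1.3.0", sha256="cccc")'

    parts = version_statement_re.split(contents)
    for i, part in enumerate(parts):
        existing = version_re.match(part)
        if existing and existing.group(1) < "1.3.0":  # toy ordering; real code parses Version
            parts[i:i] = [new_line, "  # FIXME", "\n"]  # insert before the first older version
            break
    print("".join(parts))

The remaining hunks belong to the `spack ci` command (apparently lib/spack/spack/cmd/ci.py; again the file header was lost in this view).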
@@ -18,8 +18,6 @@
 import spack.environment as ev
 import spack.hash_types as ht
 import spack.mirror
-import spack.util.gpg as gpg_util
-import spack.util.timer as timer
 import spack.util.url as url_util
 import spack.util.web as web_util

@@ -49,36 +47,40 @@ def setup_parser(subparser):
     generate.add_argument(
         "--output-file",
         default=None,
-        help="pathname for the generated gitlab ci yaml file\n\n"
-        "path to the file where generated jobs file should be written. "
-        "default is .gitlab-ci.yml in the root of the repository",
+        help="""pathname for the generated gitlab ci yaml file
Path to the file where generated jobs file should
be written. Default is .gitlab-ci.yml in the root of
the repository.""",
     )
     generate.add_argument(
         "--copy-to",
         default=None,
-        help="path to additional directory for job files\n\n"
-        "this option provides an absolute path to a directory where the generated "
-        "jobs yaml file should be copied. default is not to copy",
+        help="""path to additional directory for job files
This option provides an absolute path to a directory
where the generated jobs yaml file should be copied.
Default is not to copy.""",
     )
     generate.add_argument(
         "--optimize",
         action="store_true",
         default=False,
-        help="(experimental) optimize the gitlab yaml file for size\n\n"
-        "run the generated document through a series of optimization passes "
-        "designed to reduce the size of the generated file",
+        help="""(Experimental) optimize the gitlab yaml file for size
Run the generated document through a series of
optimization passes designed to reduce the size
of the generated file.""",
     )
     generate.add_argument(
         "--dependencies",
         action="store_true",
         default=False,
-        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
+        help="(Experimental) disable DAG scheduling; use " ' "plain" dependencies.',
     )
     generate.add_argument(
         "--buildcache-destination",
         default=None,
-        help="override the mirror configured in the environment\n\n"
-        "allows for pushing binaries from the generated pipeline to a different location",
+        help="Override the mirror configured in the environment (spack.yaml) "
+        + "in order to push binaries from the generated pipeline to a "
+        + "different location.",
     )
     prune_group = generate.add_mutually_exclusive_group()
     prune_group.add_argument(
@@ -86,37 +88,45 @@ def setup_parser(subparser):
         action="store_true",
         dest="prune_dag",
         default=True,
-        help="skip up-to-date specs\n\n"
-        "do not generate jobs for specs that are up-to-date on the mirror",
+        help="""skip up-to-date specs
Do not generate jobs for specs that are up-to-date
on the mirror.""",
     )
     prune_group.add_argument(
         "--no-prune-dag",
         action="store_false",
         dest="prune_dag",
         default=True,
-        help="process up-to-date specs\n\n"
-        "generate jobs for specs even when they are up-to-date on the mirror",
+        help="""process up-to-date specs
Generate jobs for specs even when they are up-to-date
on the mirror.""",
     )
     generate.add_argument(
         "--check-index-only",
         action="store_true",
         dest="index_only",
         default=False,
-        help="only check spec state from buildcache indices\n\n"
-        "Spack always checks specs against configured binary mirrors, regardless of the DAG "
-        "pruning option. if enabled, Spack will assume all remote buildcache indices are "
-        "up-to-date when assessing whether the spec on the mirror, if present, is up-to-date. "
-        "this has the benefit of reducing pipeline generation time but at the potential cost of "
-        "needlessly rebuilding specs when the indices are outdated. if not enabled, Spack will "
-        "fetch remote spec files directly to assess whether the spec on the mirror is up-to-date",
+        help="""only check spec state from buildcache indices
Spack always checks specs against configured binary
mirrors, regardless of the DAG pruning option.
If enabled, Spack will assume all remote buildcache
indices are up-to-date when assessing whether the spec
on the mirror, if present, is up-to-date. This has the
benefit of reducing pipeline generation time but at the
potential cost of needlessly rebuilding specs when the
indices are outdated.
If not enabled, Spack will fetch remote spec files
directly to assess whether the spec on the mirror is
up-to-date.""",
     )
     generate.add_argument(
         "--artifacts-root",
         default=None,
-        help="path to the root of the artifacts directory\n\n"
-        "if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
-        "this directory. their location will be passed to generated child jobs through the "
-        "SPACK_CONCRETE_ENVIRONMENT_PATH variable",
+        help="""path to the root of the artifacts directory
If provided, concrete environment files (spack.yaml,
spack.lock) will be generated under this directory.
Their location will be passed to generated child jobs
through the SPACK_CONCRETE_ENVIRONMENT_PATH variable.""",
     )
     generate.set_defaults(func=ci_generate)

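The help-string churn in this file is a formatting question, not a behavior change: the left side uses single-line strings with embedded "\n\n" (presumably relying on spack's own help formatter to keep the paragraph break), while the right side uses triple-quoted blocks whose hard line breaks stock argparse collapses anyway. A quick way to see the default collapsing (a sketch, not Spack's formatter):

    import argparse

    parser = argparse.ArgumentParser(prog="ci-generate")
    parser.add_argument(
        "--copy-to",
        help="""path to additional directory for job files
    This option provides an absolute path to a directory
    where the generated jobs yaml file should be copied.""",
    )
    parser.print_help()  # the default HelpFormatter re-wraps; the line breaks vanish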
@@ -140,13 +150,13 @@ def setup_parser(subparser):
         "--tests",
         action="store_true",
         default=False,
-        help="run stand-alone tests after the build",
+        help="""run stand-alone tests after the build""",
     )
     rebuild.add_argument(
         "--fail-fast",
         action="store_true",
         default=False,
-        help="stop stand-alone tests after the first failure",
+        help="""stop stand-alone tests after the first failure""",
     )
     rebuild.set_defaults(func=ci_rebuild)

@@ -156,39 +166,23 @@ def setup_parser(subparser):
         description=deindent(ci_reproduce.__doc__),
         help=spack.cmd.first_line(ci_reproduce.__doc__),
     )
-    reproduce.add_argument("job_url", help="URL of job artifacts bundle")
-    reproduce.add_argument(
-        "--runtime",
-        help="Container runtime to use.",
-        default="docker",
-        choices=["docker", "podman"],
-    )
+    reproduce.add_argument("job_url", help="Url of job artifacts bundle")
     reproduce.add_argument(
         "--working-dir",
-        help="where to unpack artifacts",
+        help="Where to unpack artifacts",
         default=os.path.join(os.getcwd(), "ci_reproduction"),
     )
-    reproduce.add_argument(
-        "-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
-    )
-    gpg_group = reproduce.add_mutually_exclusive_group(required=False)
-    gpg_group.add_argument(
-        "--gpg-file", help="Path to public GPG key for validating binary cache installs"
-    )
-    gpg_group.add_argument(
-        "--gpg-url", help="URL to public GPG key for validating binary cache installs"
-    )

     reproduce.set_defaults(func=ci_reproduce)


 def ci_generate(args):
-    """generate jobs file from a CI-aware spack file
+    """Generate jobs file from a CI-aware spack file.

-    if you want to report the results on CDash, you will need to set the SPACK_CDASH_AUTH_TOKEN
-    before invoking this command. the value must be the CDash authorization token needed to create
-    a build group and register all generated jobs under it
-    """
+    If you want to report the results on CDash, you will need to set
+    the SPACK_CDASH_AUTH_TOKEN before invoking this command. The
+    value must be the CDash authorization token needed to create a
+    build group and register all generated jobs under it."""
     env = spack.cmd.require_active_env(cmd_name="ci generate")

     output_file = args.output_file
@@ -229,13 +223,12 @@ def ci_generate(args):


 def ci_reindex(args):
-    """rebuild the buildcache index for the remote mirror
+    """Rebuild the buildcache index for the remote mirror.

-    use the active, gitlab-enabled environment to rebuild the buildcache index for the associated
-    mirror
-    """
+    Use the active, gitlab-enabled environment to rebuild the buildcache
+    index for the associated mirror."""
     env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
-    yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
+    yaml_root = ev.config_dict(env.manifest)

     if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
         tty.die("spack ci rebuild-index requires an env containing a mirror")
@@ -249,13 +242,10 @@ def ci_reindex(args):


 def ci_rebuild(args):
-    """rebuild a spec if it is not on the remote mirror
+    """Rebuild a spec if it is not on the remote mirror.

-    check a single spec against the remote mirror, and rebuild it from source if the mirror does
-    not contain the hash
-    """
-    rebuild_timer = timer.Timer()
-
+    Check a single spec against the remote mirror, and rebuild it from
+    source if the mirror does not contain the hash."""
     env = spack.cmd.require_active_env(cmd_name="ci rebuild")

     # Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -284,23 +274,13 @@ def ci_rebuild(args):
     signing_key = os.environ.get("SPACK_SIGNING_KEY")
     job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
     job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
+    compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
     spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
     remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
     remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
     spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
     shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
     rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
-    require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
-
-    # If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
-    if signing_key:
-        spack_ci.import_signing_key(signing_key)
-
-    # Fail early if signing is required but we don't have a signing key
-    sign_binaries = require_signing is not None and require_signing.lower() == "true"
-    if sign_binaries and not spack_ci.can_sign_binaries():
-        gpg_util.list(False, True)
-        tty.die("SPACK_REQUIRE_SIGNING=True => spack must have exactly one signing key")

     # Construct absolute paths relative to current $CI_PROJECT_DIR
     ci_project_dir = os.environ.get("CI_PROJECT_DIR")
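The left branch's deleted lines include a fail-early signing gate for `ci rebuild`: turn SPACK_REQUIRE_SIGNING into a boolean and abort before any build work if signing is demanded but no key is usable. A sketch of the guard with a stand-in for `spack_ci.can_sign_binaries()`:

    import os
    import sys

    def can_sign() -> bool:
        return False  # stand-in: the real check inspects the gpg keyring

    require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
    sign_binaries = require_signing is not None and require_signing.lower() == "true"
    if sign_binaries and not can_sign():
        sys.exit("SPACK_REQUIRE_SIGNING=True => spack must have exactly one signing key")
    print("signing key available" if sign_binaries else "signing not required; continuing")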
@@ -315,6 +295,7 @@ def ci_rebuild(args):
|
|||||||
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
|
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
|
||||||
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
|
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
|
||||||
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
|
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
|
||||||
|
tty.debug("compiler_action = {0}".format(compiler_action))
|
||||||
|
|
||||||
# Query the environment manifest to find out whether we're reporting to a
|
# Query the environment manifest to find out whether we're reporting to a
|
||||||
# CDash instance, and if so, gather some information from the manifest to
|
# CDash instance, and if so, gather some information from the manifest to
|
||||||
@@ -425,6 +406,19 @@ def ci_rebuild(args):
|
|||||||
dst_file = os.path.join(repro_dir, file_name)
|
dst_file = os.path.join(repro_dir, file_name)
|
||||||
shutil.copyfile(src_file, dst_file)
|
shutil.copyfile(src_file, dst_file)
|
||||||
|
|
||||||
|
# If signing key was provided via "SPACK_SIGNING_KEY", then try to
|
||||||
|
# import it.
|
||||||
|
if signing_key:
|
||||||
|
spack_ci.import_signing_key(signing_key)
|
||||||
|
|
||||||
|
# Depending on the specifics of this job, we might need to turn on the
|
||||||
|
# "config:install_missing compilers" option (to build this job spec
|
||||||
|
# with a bootstrapped compiler), or possibly run "spack compiler find"
|
||||||
|
# (to build a bootstrap compiler or one of its deps in a
|
||||||
|
# compiler-agnostic way), or maybe do nothing at all (to build a spec
|
||||||
|
# using a compiler already installed on the target system).
|
||||||
|
spack_ci.configure_compilers(compiler_action)
|
||||||
|
|
||||||
# Write this job's spec json into the reproduction directory, and it will
|
# Write this job's spec json into the reproduction directory, and it will
|
||||||
# also be used in the generated "spack install" command to install the spec
|
# also be used in the generated "spack install" command to install the spec
|
||||||
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
|
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
|
||||||
@@ -622,7 +616,7 @@ def ci_rebuild(args):
|
|||||||
)
|
)
|
||||||
reports_dir = fs.join_path(os.getcwd(), "cdash_report")
|
reports_dir = fs.join_path(os.getcwd(), "cdash_report")
|
||||||
if args.tests and broken_tests:
|
if args.tests and broken_tests:
|
||||||
tty.warn("Unable to run stand-alone tests since listed in ci's 'broken-tests-packages'")
|
tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
|
||||||
if cdash_handler:
|
if cdash_handler:
|
||||||
msg = "Package is listed in ci's broken-tests-packages"
|
msg = "Package is listed in ci's broken-tests-packages"
|
||||||
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
|
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
|
||||||
@@ -665,7 +659,7 @@ def ci_rebuild(args):
                 tty.warn("No recognized test results reporting option")

         else:
-            tty.warn("Unable to run stand-alone tests due to unsuccessful installation")
+            tty.warn("Unable to run stand-alone tests due to unsuccessful " "installation")
             if cdash_handler:
                 msg = "Failed to install the package"
                 cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
@@ -681,7 +675,7 @@ def ci_rebuild(args):
         input_spec=job_spec,
         buildcache_mirror_url=buildcache_mirror_url,
         pipeline_mirror_url=pipeline_mirror_url,
-        sign_binaries=spack_ci.can_sign_binaries(),
+        pr_pipeline=spack_is_pr_pipeline,
     ):
         msg = tty.msg if result.success else tty.warn
         msg(
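The changed keyword swaps one signing policy for another: the left side asks the keyring whether signing is possible at all, while the right side keys off whether this is a PR pipeline, since PR pipelines typically push to an untrusted scratch mirror whose binaries go unsigned. A sketch of both policies, with illustrative helper names:

    def sign_binaries_by_keyring(num_signing_keys):
        # Sign only when exactly one usable key is available.
        return num_signing_keys == 1

    def sign_binaries_by_pipeline(spack_is_pr_pipeline):
        # Sign everything except PR pipelines.
        return not spack_is_pr_pipeline

    print(sign_binaries_by_keyring(1))       # True
    print(sign_binaries_by_pipeline(True))   # False: PR builds go unsigned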
@@ -725,7 +719,7 @@ def ci_rebuild(args):

 \033[34mTo reproduce this build locally, run:

-    spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
+    spack ci reproduce-build {0} [--working-dir <dir>]

 If this project does not have public pipelines, you will need to first:

@@ -739,38 +733,19 @@ def ci_rebuild(args):

     print(reproduce_msg)

-    rebuild_timer.stop()
-    try:
-        with open("install_timers.json", "w") as timelog:
-            extra_attributes = {"name": ".ci-rebuild"}
-            rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
-    except Exception as e:
-        tty.debug(str(e))
-
     # Tie job success/failure to the success/failure of building the spec
     return install_exit_code


 def ci_reproduce(args):
-    """generate instructions for reproducing the spec rebuild job
+    """Generate instructions for reproducing the spec rebuild job.

-    artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
-    instructions for reproducing the build locally
-    """
+    Artifacts of the provided gitlab pipeline rebuild job's URL will be
+    used to derive instructions for reproducing the build locally."""
     job_url = args.job_url
     work_dir = args.working_dir
-    autostart = args.autostart
-    runtime = args.runtime

-    # Allow passing GPG key for reprocuding protected CI jobs
-    if args.gpg_file:
-        gpg_key_url = url_util.path_to_file_url(args.gpg_file)
-    elif args.gpg_url:
-        gpg_key_url = args.gpg_url
-    else:
-        gpg_key_url = None
-
-    return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)
+    return spack_ci.reproduce_ci_job(job_url, work_dir)


 def ci(parser, args):
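The removed ci_reproduce plumbing chooses a GPG key source from two mutually exclusive flags before handing off to reproduce_ci_job. A compact sketch of that selection logic, with a stand-in for url_util.path_to_file_url:

    import pathlib

    def path_to_file_url(path):
        # Stand-in for url_util.path_to_file_url: local path -> file:// URL.
        return pathlib.Path(path).resolve().as_uri()

    def pick_gpg_key_url(gpg_file, gpg_url):
        # A local key file wins; otherwise fall back to a remote URL, if any.
        if gpg_file:
            return path_to_file_url(gpg_file)
        if gpg_url:
            return gpg_url
        return None

    print(pick_gpg_key_url("/tmp/key.pub", None))  # file:///tmp/key.pub
    print(pick_gpg_key_url(None, None))            # None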
Some files were not shown because too many files have changed in this diff