Compare commits


1 commit

Author: Harmen Stoppels
SHA1: e8f03fa9dd
Message: modules/common.py: format_path instead of format
Date: 2024-10-21 09:44:12 +02:00
1827 changed files with 19803 additions and 28774 deletions

View File

@@ -28,8 +28,8 @@ jobs:
     run:
       shell: ${{ matrix.system.shell }}
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages

View File

@@ -1,7 +1,7 @@
 #!/bin/bash
 set -e
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap disable github-actions-v0.5
+$PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
 $PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store

View File

@@ -37,14 +37,14 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
     - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh
-        spack bootstrap disable github-actions-v0.6
         spack bootstrap disable github-actions-v0.5
+        spack bootstrap disable github-actions-v0.4
         spack external find cmake bison
         spack -d solve zlib
         tree ~/.spack/bootstrap/store/
@@ -60,17 +60,17 @@ jobs:
       run: |
         brew install cmake bison tree
     - name: Checkout
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: "3.12"
    - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh
-        spack bootstrap disable github-actions-v0.6
         spack bootstrap disable github-actions-v0.5
+        spack bootstrap disable github-actions-v0.4
         spack external find --not-buildable cmake bison
         spack -d solve zlib
         tree $HOME/.spack/bootstrap/store/
@@ -83,22 +83,22 @@ jobs:
     steps:
     - name: Setup macOS
       if: ${{ matrix.runner != 'ubuntu-latest' }}
-      run: brew install tree gawk
-    - name: Remove system executables
       run: |
-        while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
-          sudo rm $(command -v gpg gpg2 patchelf)
-        done
+        brew install tree gawk
+        sudo rm -rf $(command -v gpg gpg2)
+    - name: Setup Ubuntu
+      if: ${{ matrix.runner == 'ubuntu-latest' }}
+      run: sudo rm -rf $(command -v gpg gpg2 patchelf)
     - name: Checkout
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh
         spack solve zlib
-        spack bootstrap disable github-actions-v0.6
         spack bootstrap disable github-actions-v0.5
+        spack bootstrap disable github-actions-v0.4
         spack -d gpg list
         tree ~/.spack/bootstrap/store/
@@ -110,17 +110,19 @@ jobs:
     steps:
     - name: Setup macOS
       if: ${{ matrix.runner != 'ubuntu-latest' }}
-      run: brew install tree
-    - name: Remove system executables
       run: |
-        while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
-          sudo rm $(command -v gpg gpg2 patchelf)
-        done
+        brew install tree
+        # Remove GnuPG since we want to bootstrap it
+        sudo rm -rf /usr/local/bin/gpg
+    - name: Setup Ubuntu
+      if: ${{ matrix.runner == 'ubuntu-latest' }}
+      run: |
+        sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
     - name: Checkout
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: |
           3.8
@@ -128,16 +130,15 @@ jobs:
           3.10
           3.11
           3.12
-          3.13
     - name: Set bootstrap sources
       run: |
         source share/spack/setup-env.sh
-        spack bootstrap disable github-actions-v0.5
+        spack bootstrap disable github-actions-v0.4
         spack bootstrap disable spack-install
     - name: Bootstrap clingo
       run: |
         set -e
-        for ver in '3.8' '3.9' '3.10' '3.11' '3.12' '3.13'; do
+        for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
           not_found=1
           ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
           if [[ -d "$ver_dir" ]] ; then
@@ -171,10 +172,10 @@ jobs:
     runs-on: "windows-latest"
     steps:
     - name: Checkout
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: "3.12"
     - name: Setup Windows
@@ -184,8 +185,8 @@ jobs:
     - name: Bootstrap clingo
       run: |
         ./share/spack/setup-env.ps1
-        spack bootstrap disable github-actions-v0.6
         spack bootstrap disable github-actions-v0.5
+        spack bootstrap disable github-actions-v0.4
         spack external find --not-buildable cmake bison
         spack -d solve zlib
         ./share/spack/qa/validate_last_exit.ps1

View File

@@ -55,15 +55,9 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
     - name: Checkout
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - name: Determine latest release tag
-      id: latest
-      run: |
-        git fetch --quiet --tags
-        echo "tag=$(git tag --list --sort=-v:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)" | tee -a $GITHUB_OUTPUT
-    - uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
       id: docker_meta
       with:
         images: |
@@ -77,7 +71,6 @@ jobs:
           type=semver,pattern={{major}}
           type=ref,event=branch
           type=ref,event=pr
-          type=raw,value=latest,enable=${{ github.ref == format('refs/tags/{0}', steps.latest.outputs.tag) }}
     - name: Generate the Dockerfile
       env:
@@ -120,7 +113,7 @@ jobs:
         password: ${{ secrets.DOCKERHUB_TOKEN }}
     - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355
+      uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
       with:
         context: dockerfiles/${{ matrix.dockerfile[0] }}
         platforms: ${{ matrix.dockerfile[1] }}

View File

@@ -24,7 +24,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0
@@ -83,17 +83,10 @@ jobs:
   all-prechecks:
     needs: [ prechecks ]
-    if: ${{ always() }}
     runs-on: ubuntu-latest
     steps:
     - name: Success
-      run: |
-        if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
-          echo "Unit tests failed."
-          exit 1
-        else
-          exit 0
-        fi
+      run: "true"
   coverage:
     needs: [ unit-tests, prechecks ]
@@ -101,19 +94,8 @@ jobs:
     secrets: inherit
   all:
-    needs: [ unit-tests, coverage, bootstrap ]
-    if: ${{ always() }}
+    needs: [ coverage, bootstrap ]
     runs-on: ubuntu-latest
-    # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context
     steps:
-    - name: Status summary
-      run: |
-        if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
-          echo "Unit tests failed."
-          exit 1
-        elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
-          echo "Bootstrap tests failed."
-          exit 1
-        else
-          exit 0
-        fi
+    - name: Success
+      run: "true"

View File

@@ -8,8 +8,8 @@ jobs:
   upload:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -29,7 +29,6 @@ jobs:
     - run: coverage xml
     - name: "Upload coverage report to CodeCov"
-      uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
+      uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
       with:
         verbose: true
-        fail_ci_if_error: true

View File

@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: 3.9
     - name: Install Python packages

View File

@@ -1,7 +1,7 @@
-black==24.10.0
+black==24.8.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.20241105
+types-six==1.16.21.20240513
 vermin==1.6.0

View File

@@ -40,10 +40,10 @@ jobs:
         on_develop: false
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -55,7 +55,7 @@ jobs:
           cmake bison libbison-dev kcov
     - name: Install Python packages
       run: |
-        pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov clingo
+        pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
         pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
     - name: Setup git configuration
       run: |
@@ -89,10 +89,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -130,7 +130,7 @@ jobs:
         dnf install -y \
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch tcl unzip which xz
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
     - name: Setup repo and non-root user
       run: |
         git --version
@@ -149,10 +149,10 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: '3.13'
     - name: Install System packages
@@ -170,10 +170,11 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         spack bootstrap disable spack-install
+        spack bootstrap disable github-actions-v0.4
         spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.6
         spack bootstrap status
-        spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
+        spack solve zlib
+        spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
     - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:
         name: coverage-clingo-cffi
@@ -187,10 +188,10 @@ jobs:
         os: [macos-13, macos-14]
         python-version: ["3.11"]
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install Python packages
@@ -210,7 +211,7 @@ jobs:
         . share/spack/setup-env.sh
         $(which spack) bootstrap disable spack-install
         $(which spack) solve zlib
-        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python')
+        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
         $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
     - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:
@@ -225,10 +226,10 @@ jobs:
         powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -241,7 +242,7 @@ jobs:
       env:
         COVERAGE_FILE: coverage/.coverage-windows
       run: |
-        spack unit-test --verbose --cov --cov-config=pyproject.toml
+        spack unit-test -x --verbose --cov --cov-config=pyproject.toml
         ./share/spack/qa/validate_last_exit.ps1
     - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
       with:

View File

@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -70,7 +70,7 @@ jobs:
         dnf install -y \
           bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
           make patch tcl unzip which xz
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
     - name: Setup repo and non-root user
       run: |
         git --version
@@ -98,14 +98,14 @@ jobs:
     # PR: use the base of the PR as the old commit
     - name: Checkout PR base commit
       if: github.event_name == 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         ref: ${{ github.event.pull_request.base.sha }}
         path: old
     # not a PR: use the previous commit as the old commit
     - name: Checkout previous commit
       if: github.event_name != 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         fetch-depth: 2
         path: old
@@ -114,14 +114,14 @@ jobs:
       run: git -C old reset --hard HEAD^
     - name: Checkout new commit
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         path: new
     - name: Install circular import checker
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
       with:
         repository: haampie/circular-import-fighter
-        ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
+        ref: 555519c6fd5564fd2eb844e7b87e84f4d12602e2
         path: circular-import-fighter
     - name: Install dependencies
       working-directory: circular-import-fighter

View File

@@ -14,26 +14,3 @@ sphinx:
 python:
   install:
   - requirements: lib/spack/docs/requirements.txt
-search:
-  ranking:
-    spack.html: -10
-    spack.*.html: -10
-    llnl.html: -10
-    llnl.*.html: -10
-    _modules/*: -10
-    command_index.html: -9
-    basic_usage.html: 5
-    configuration.html: 5
-    config_yaml.html: 5
-    packages_yaml.html: 5
-    build_settings.html: 5
-    environments.html: 5
-    containers.html: 5
-    mirrors.html: 5
-    module_file_support.html: 5
-    repositories.html: 5
-    binary_caches.html: 5
-    chain.html: 5
-    pipelines.html: 5
-    packaging_guide.html: 5

View File

@@ -70,7 +70,7 @@ Tutorial
 ----------------
 We maintain a
-[**hands-on tutorial**](https://spack-tutorial.readthedocs.io/).
+[**hands-on tutorial**](https://spack.readthedocs.io/en/latest/tutorial.html).
 It covers basic to advanced usage, packaging, developer features, and large HPC
 deployments. You can do all of the exercises on your own laptop using a
 Docker container.

View File

@@ -1,11 +1,71 @@
@ECHO OFF @ECHO OFF
setlocal EnableDelayedExpansion
:: (c) 2021 Lawrence Livermore National Laboratory :: (c) 2021 Lawrence Livermore National Laboratory
:: To use this file independently of Spack's installer, execute this script in its directory, or add the :: To use this file independently of Spack's installer, execute this script in its directory, or add the
:: associated bin directory to your PATH. Invoke to launch Spack Shell. :: associated bin directory to your PATH. Invoke to launch Spack Shell.
:: ::
:: source_dir/spack/bin/spack_cmd.bat :: source_dir/spack/bin/spack_cmd.bat
:: ::
pushd %~dp0..
set SPACK_ROOT=%CD%
pushd %CD%\..
set spackinstdir=%CD%
popd
call "%~dp0..\share\spack\setup-env.bat"
pushd %SPACK_ROOT% :: Check if Python is on the PATH
%comspec% /K if not defined python_pf_ver (
(for /f "delims=" %%F in ('where python.exe') do (
set "python_pf_ver=%%F"
goto :found_python
) ) 2> NUL
)
:found_python
if not defined python_pf_ver (
:: If not, look for Python from the Spack installer
:get_builtin
(for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
set "python_ver=%%g")) 2> NUL
if not defined python_ver (
echo Python was not found on your system.
echo Please install Python or add Python to your PATH.
) else (
set "py_path=!spackinstdir!\!python_ver!"
set "py_exe=!py_path!\python.exe"
)
goto :exitpoint
) else (
:: Python is already on the path
set "py_exe=!python_pf_ver!"
(for /F "tokens=* USEBACKQ" %%F in (
`"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
goto :exitpoint
)
:exitpoint
set "PATH=%SPACK_ROOT%\bin\;%PATH%"
if defined py_path (
set "PATH=%py_path%;%PATH%"
)
if defined py_exe (
"%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
)
set "EDITOR=notepad"
DOSKEY spacktivate=spack env activate $*
@echo **********************************************************************
@echo ** Spack Package Manager
@echo **********************************************************************
IF "%1"=="" GOTO CONTINUE
set
GOTO:EOF
:continue
set PROMPT=[spack] %PROMPT%
%comspec% /k

View File

@@ -9,15 +9,15 @@ bootstrap:
   # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
-  - name: github-actions-v0.6
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.6
-  - name: github-actions-v0.5
+  - name: 'github-actions-v0.5'
     metadata: $spack/share/spack/bootstrap/github-actions-v0.5
-  - name: spack-install
+  - name: 'github-actions-v0.4'
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.4
+  - name: 'spack-install'
     metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
-    github-actions-v0.6: true
     github-actions-v0.5: true
+    github-actions-v0.4: true
     spack-install: true

View File

@@ -39,27 +39,11 @@ concretizer:
   # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
   duplicates:
     # "none": allows a single node for any package in the DAG.
-    # "minimal": allows the duplication of 'build-tools' nodes only
-    # (e.g. py-setuptools, cmake etc.)
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
     strategy: minimal
-  # Option to specify compatibility between operating systems for reuse of compilers and packages
+  # Option to specify compatiblity between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
   # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
   os_compatible: {}
-  # Option to specify whether to support splicing. Splicing allows for
-  # the relinking of concrete package dependencies in order to better
-  # reuse already built packages with ABI compatible dependencies
-  splice:
-    explicit: []
-    automatic: false
-  # Maximum time, in seconds, allowed for the 'solve' phase. If set to 0, there is no time limit.
-  timeout: 0
-  # If set to true, exceeding the timeout will always result in a concretization error. If false,
-  # the best (suboptimal) model computed before the timeout is used.
-  #
-  # Setting this to false yields unreproducible results, so we advise to use that value only
-  # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
-  error_on_timeout: true

View File

@@ -19,7 +19,7 @@ config:
   install_tree:
     root: $spack/opt/spack
     projections:
-      all: "{architecture.platform}/{architecture.target}/{name}-{version}-{hash}"
+      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
   # install_tree can include an optional padded length (int or boolean)
   # default is False (do not pad)
   # if padded_length is True, Spack will pad as close to the system max path

View File

@@ -15,11 +15,12 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler:
+    - apple-clang
+    - clang
+    - gcc
     providers:
-      c: [apple-clang, llvm, gcc]
-      cxx: [apple-clang, llvm, gcc]
       elf: [libelf]
-      fortran: [gcc]
       fuse: [macfuse]
       gl: [apple-gl]
       glu: [apple-glu]

View File

@@ -15,18 +15,19 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
       armci: [armcimpi]
       blas: [openblas, amdblis]
-      c: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
-      cxx: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
+      c: [gcc]
+      cxx: [gcc]
       D: [ldc]
       daal: [intel-oneapi-daal]
       elf: [elfutils]
       fftw-api: [fftw, amdfftw]
       flame: [libflame, amdlibflame]
-      fortran: [gcc, llvm]
+      fortran: [gcc]
       fortran-rt: [gcc-runtime, intel-oneapi-runtime]
       fuse: [libfuse]
       gl: [glx, osmesa]
@@ -39,9 +40,9 @@ packages:
       jpeg: [libjpeg-turbo, libjpeg]
       lapack: [openblas, amdlibflame]
       libc: [glibc, musl]
-      libgfortran: [gcc-runtime]
+      libgfortran: [ gcc-runtime ]
       libglx: [mesa+glx]
-      libifcore: [intel-oneapi-runtime]
+      libifcore: [ intel-oneapi-runtime ]
       libllvm: [llvm]
       lua-lang: [lua, lua-luajit-openresty, lua-luajit]
       luajit: [lua-luajit-openresty, lua-luajit]
@@ -75,8 +76,6 @@ packages:
     buildable: false
   cray-mvapich2:
     buildable: false
-  egl:
-    buildable: false
   fujitsu-mpi:
     buildable: false
   hpcx-mpi:

View File

@@ -15,8 +15,8 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler:
+    - msvc
     providers:
-      c : [msvc]
-      cxx: [msvc]
       mpi: [msmpi]
       gl: [wgl]

View File

@@ -1359,10 +1359,6 @@ For example, for the ``stackstart`` variant:
    mpileaks stackstart==4 # variant will be propagated to dependencies
    mpileaks stackstart=4 # only mpileaks will have this variant value
-Spack also allows variants to be propagated from a package that does
-not have that variant.
 ^^^^^^^^^^^^^^
 Compiler Flags
 ^^^^^^^^^^^^^^

View File

@@ -237,35 +237,3 @@ is optional -- by default, splices will be transitive.
 ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
 will warn the user in this case, but will not fail the
 concretization.
-.. _automatic_splicing:
-^^^^^^^^^^^^^^^^^^
-Automatic Splicing
-^^^^^^^^^^^^^^^^^^
-The Spack solver can be configured to do automatic splicing for
-ABI-compatible packages. Automatic splices are enabled in the concretizer
-config section
-.. code-block:: yaml
-   concretizer:
-     splice:
-       automatic: True
-Packages can include ABI-compatibility information using the
-``can_splice`` directive. See :ref:`the packaging
-guide<abi_compatibility>` for instructions on specifying ABI
-compatibility using the ``can_splice`` directive.
-.. note::
-   The ``can_splice`` directive is experimental and may be changed in
-   future versions.
-When automatic splicing is enabled, the concretizer will combine any
-number of ABI-compatible specs if possible to reuse installed packages
-and packages available from binary caches. The end result of these
-specs is equivalent to a series of transitive/intransitive splices,
-but the series may be non-obvious.

View File

@@ -210,18 +210,16 @@ def setup(sphinx):
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
-    ("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
+    ("py:class", "spack.build_systems._checks.BaseBuilder"),
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
-    ("py:class", "spack.spec.ArchSpec"),
     ("py:class", "spack.spec.InstallStatus"),
     ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
     ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
     ("py:class", "spack.traverse.EdgeAndDepth"),
     ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
-    ("py:class", "spack.compiler.CompilerCache"),
     # TypeVar that is not handled correctly
     ("py:class", "llnl.util.lang.T"),
 ]

View File

@@ -511,7 +511,6 @@ Spack understands over a dozen special variables. These are:
 * ``$target_family``. The target family for the current host, as
   detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
 * ``$date``: the current date in the format YYYY-MM-DD
-* ``$spack_short_version``: the Spack version truncated to the first components.
 Note that, as with shell variables, you can write these as ``$varname``

View File

@@ -184,7 +184,7 @@ Style Tests
 Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
 `PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
-`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
+`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
 a series of style guides for Python that provide suggestions for everything
 from variable naming to indentation. In order to limit the number of PRs that
 were mostly style changes, we decided to enforce PEP 8 conformance. Your PR

View File

@@ -333,9 +333,13 @@ inserting them at different places in the spack code base. Whenever a hook
 type triggers by way of a function call, we find all the hooks of that type,
 and run them.
-Spack defines hooks by way of a module in the ``lib/spack/spack/hooks`` directory.
-This module has to be registered in ``__init__.py`` so that Spack is aware of it.
-This section will cover the basic kind of hooks, and how to write them.
+Spack defines hooks by way of a module at ``lib/spack/spack/hooks`` where we can define
+types of hooks in the ``__init__.py``, and then python files in that folder
+can use hook functions. The files are automatically parsed, so if you write
+a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
+you can then write hook functions in that file that will be automatically detected,
+and run whenever your hook is called. This section will cover the basic kind
+of hooks, and how to write them.
 ^^^^^^^^^^^^^^
 Types of Hooks
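
A minimal sketch of such a hook module, assuming the layout described in the hunk above (the file name and the message are hypothetical; ``post_install`` is one of the hook types Spack triggers):

.. code-block:: python

   # lib/spack/spack/hooks/myintegration.py -- hypothetical integration hook
   import llnl.util.tty as tty

   def post_install(spec, explicit):
       # Runs after a package is installed; depending on the Spack version,
       # the module may also need to be registered in hooks/__init__.py.
       tty.msg("Installed {0}".format(spec.format("{name}{@version}")))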

View File

@@ -1042,7 +1042,7 @@ file snippet we define a view named ``mpis``, rooted at
 ``/path/to/view`` in which all projections use the package name,
 version, and compiler name to determine the path for a given
 package. This view selects all packages that depend on MPI, and
-excludes those built with the GCC compiler at version 18.5.
+excludes those built with the PGI compiler at version 18.5.
 The root specs with their (transitive) link and run type dependencies
 will be put in the view due to the ``link: all`` option,
 and the files in the view will be symlinks to the spack install
@@ -1056,7 +1056,7 @@ directories.
   mpis:
     root: /path/to/view
     select: [^mpi]
-    exclude: ['%gcc@18.5']
+    exclude: ['%pgi@18.5']
     projections:
       all: '{name}/{version}-{compiler.name}'
     link: all

View File

@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown below
 .. code-block:: console
    apt update
-   apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
+   apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
 .. tab-item:: RHEL
@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown below
    dnf install epel-release
    dnf group install "Development Tools"
-   dnf install gcc-gfortran redhat-lsb-core python3 unzip
+   dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
 .. tab-item:: macOS Brew
 .. code-block:: console
    brew update
-   brew install gcc git zip
+   brew install curl gcc git gnupg zip
 ------------
 Installation
@@ -283,6 +283,10 @@ compilers`` or ``spack compiler list``:
    intel@14.0.1  intel@13.0.1  intel@12.1.2  intel@10.1
    -- clang -------------------------------------------------------
    clang@3.4  clang@3.3  clang@3.2  clang@3.1
+   -- pgi ---------------------------------------------------------
+   pgi@14.3-0  pgi@13.2-0  pgi@12.1-0  pgi@10.9-0  pgi@8.0-1
+   pgi@13.10-0  pgi@13.1-1  pgi@11.10-0  pgi@10.2-0  pgi@7.1-3
+   pgi@13.6-0  pgi@12.8-0  pgi@11.1-0  pgi@9.0-4  pgi@7.0-6
 Any of these compilers can be used to build Spack packages. More on
 how this is done is in :ref:`sec-specs`.
@@ -802,6 +806,65 @@ flags to the ``icc`` command:
    spec: intel@15.0.24.4.9.3
+^^^
+PGI
+^^^
+PGI comes with two sets of compilers for C++ and Fortran,
+distinguishable by their names. "Old" compilers:
+.. code-block:: yaml
+   cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
+   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
+   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
+   fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90
+"New" compilers:
+.. code-block:: yaml
+   cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
+   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
+   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
+   fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
+Older installations of PGI contains just the old compilers; whereas
+newer installations contain the old and the new. The new compiler is
+considered preferable, as some packages
+(``hdf``) will not build with the old compiler.
+When auto-detecting a PGI compiler, there are cases where Spack will
+find the old compilers, when you really want it to find the new
+compilers. It is best to check this ``compilers.yaml``; and if the old
+compilers are being used, change ``pgf77`` and ``pgf90`` to
+``pgfortran``.
+Other issues:
+* There are reports that some packages will not build with PGI,
+  including ``libpciaccess`` and ``openssl``. A workaround is to
+  build these packages with another compiler and then use them as
+  dependencies for PGI-build packages. For example:
+.. code-block:: console
+   $ spack install openmpi%pgi ^libpciaccess%gcc
+* PGI requires a license to use; see :ref:`licensed-compilers` for more
+  information on installation.
+.. note::
+   It is believed the problem with HDF 4 is that everything is
+   compiled with the ``F77`` compiler, but at some point some Fortran
+   90 code slipped in there. So compilers that can handle both FORTRAN
+   77 and Fortran 90 (``gfortran``, ``pgfortran``, etc) are fine. But
+   compilers specific to one or the other (``pgf77``, ``pgf90``) won't
+   work.
 ^^^
 NAG
 ^^^
@@ -1326,7 +1389,6 @@ Required:
 * Microsoft Visual Studio
 * Python
 * Git
-* 7z
 Optional:
 * Intel Fortran (needed for some packages)
@@ -1392,13 +1454,6 @@ as the project providing Git support on Windows. This is additionally the recommended
 for installing Git on Windows, a link to which can be found above. Spack requires the
 utilities vendored by this project.
-"""
-7zip
-"""
-A tool for extracting ``.xz`` files is required for extracting source tarballs. The latest 7zip
-can be located at https://sourceforge.net/projects/sevenzip/.
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Step 2: Install and setup Spack
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -12,6 +12,10 @@
 Spack
 ===================
+.. epigraph::
+   `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
 Spack is a package management tool designed to support multiple
 versions and configurations of software on a wide variety of platforms
 and environments. It was designed for large supercomputing centers,

View File

@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
 This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
 large repositories that have separate portions that can be built independently.
 If paths provided are directories then all the subdirectories and associated files
 will also be cloned.
 Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1367,8 +1367,8 @@ Submodules
 git-submodule``.
 Sparse-Checkout
   You can supply ``git_sparse_paths`` at the package or version level to utilize git's
   sparse-checkout feature. This will only clone the paths that are specified in the
   ``git_sparse_paths`` attribute for the package along with the files in the top level directory.
   This feature allows you to only clone what you need from a large repository.
   Note that this is a newer feature in git and requries git ``2.25.0`` or greater.
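
As a sketch of the ``git_sparse_paths`` attribute described above (the package name, URL, and paths are hypothetical):

.. code-block:: python

   from spack.package import *

   class MyLargeRepoPkg(Package):
       """Hypothetical package that only needs two subtrees of a large repo."""

       git = "https://github.com/example/large-repo.git"

       # Clone only these paths, plus the files in the top-level directory
       git_sparse_paths = ["docs", "lib/core"]

       version("1.0", tag="v1.0")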
@@ -1928,29 +1928,71 @@ to the empty list.
 String. A URL pointing to license setup instructions for the software.
 Defaults to the empty string.
-For example, let's take a look at the Arm Forge package.
+For example, let's take a look at the package for the PGI compilers.
 .. code-block:: python
    # Licensing
    license_required = True
    license_comment = "#"
-   license_files = ["licences/Licence"]
-   license_vars = [
-       "ALLINEA_LICENSE_DIR",
-       "ALLINEA_LICENCE_DIR",
-       "ALLINEA_LICENSE_FILE",
-       "ALLINEA_LICENCE_FILE",
-   ]
-   license_url = "https://developer.arm.com/documentation/101169/latest/Use-Arm-Licence-Server"
-Arm Forge requires a license. Its license manager uses the ``#`` symbol to denote a comment.
-It expects the license file to be named ``License`` and to be located in a ``licenses`` directory
-in the installation prefix.
-If you would like the installation file to be located elsewhere, simply set ``ALLINEA_LICENSE_DIR`` or
-one of the other license variables after installation. For further instructions on installation and
-licensing, see the URL provided.
+   license_files = ["license.dat"]
+   license_vars = ["PGROUPD_LICENSE_FILE", "LM_LICENSE_FILE"]
+   license_url = "http://www.pgroup.com/doc/pgiinstall.pdf"
+As you can see, PGI requires a license. Its license manager, FlexNet, uses
+the ``#`` symbol to denote a comment. It expects the license file to be
+named ``license.dat`` and to be located directly in the installation prefix.
+If you would like the installation file to be located elsewhere, simply set
+``PGROUPD_LICENSE_FILE`` or ``LM_LICENSE_FILE`` after installation. For
+further instructions on installation and licensing, see the URL provided.
+Let's walk through a sample PGI installation to see exactly what Spack is
+and isn't capable of. Since PGI does not provide a download URL, it must
+be downloaded manually. It can either be added to a mirror or located in
+the current directory when ``spack install pgi`` is run. See :ref:`mirrors`
+for instructions on setting up a mirror.
+After running ``spack install pgi``, the first thing that will happen is
+Spack will create a global license file located at
+``$SPACK_ROOT/etc/spack/licenses/pgi/license.dat``. It will then open up the
+file using :ref:`your favorite editor <controlling-the-editor>`. It will look like
+this:
+.. code-block:: sh
+   # A license is required to use pgi.
+   #
+   # The recommended solution is to store your license key in this global
+   # license file. After installation, the following symlink(s) will be
+   # added to point to this file (relative to the installation prefix):
+   #
+   #   license.dat
+   #
+   # Alternatively, use one of the following environment variable(s):
+   #
+   #   PGROUPD_LICENSE_FILE
+   #   LM_LICENSE_FILE
+   #
+   # If you choose to store your license in a non-standard location, you may
+   # set one of these variable(s) to the full pathname to the license file, or
+   # port@host if you store your license keys on a dedicated license server.
+   # You will likely want to set this variable in a module file so that it
+   # gets loaded every time someone tries to use pgi.
+   #
+   # For further information on how to acquire a license, please refer to:
+   #
+   #   http://www.pgroup.com/doc/pgiinstall.pdf
+   #
+   # You may enter your license below.
+You can add your license directly to this file, or tell FlexNet to use a
+license stored on a separate license server. Here is an example that
+points to a license server called licman1:
+.. code-block:: none
+   SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
+   USE_SERVER
 If your package requires the license to install, you can reference the
 location of this global license using ``self.global_license_file``.
@@ -2350,7 +2392,7 @@ by the ``--jobs`` option:
 .. code-block:: python
    :emphasize-lines: 7, 11
    :linenos:
 class Xios(Package):
    ...
    def install(self, spec, prefix):
@@ -2461,14 +2503,15 @@ with. For example, suppose that in the ``libdwarf`` package you write:
    depends_on("libelf@0.8")
-Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
-includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
-restrictions, you can also specify variants if this package requires
-optional features of the dependency.
+Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
+You can also specify a requirement for a particular variant or for
+specific compiler flags:
 .. code-block:: python
-   depends_on("libelf@0.8 +parser +pic")
+   depends_on("libelf@0.8+debug")
+   depends_on("libelf debug=True")
+   depends_on("libelf cppflags='-fPIC'")
 Both users *and* package authors can use the same spec syntax to refer
 to different package configurations. Users use the spec syntax on the
@@ -2476,82 +2519,46 @@ command line to find installed packages or to install packages with
 particular constraints, and package authors can use specs to describe
 relationships between packages.
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Specifying backward and forward compatibility
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Packages are often compatible with a range of versions of their
-dependencies. This is typically referred to as backward and forward
-compatibility. Spack allows you to specify this in the ``depends_on``
-directive using version ranges.
-**Backwards compatibility** means that the package requires at least a
-certain version of its dependency:
-.. code-block:: python
-   depends_on("python@3.10:")
-In this case, the package requires Python 3.10 or newer.
-Commonly, packages drop support for older versions of a dependency as
-they release new versions. In Spack you can conveniently add every
-backward compatibility rule as a separate line:
-.. code-block:: python
-   # backward compatibility with Python
-   depends_on("python@3.8:")
-   depends_on("python@3.9:", when="@1.2:")
-   depends_on("python@3.10:", when="@1.4:")
-This means that in general we need Python 3.8 or newer; from version
-1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
-need Python 3.10 or newer. Notice that it's fine to have overlapping
-ranges in the ``when`` clauses.
-**Forward compatibility** means that the package requires at most a
-certain version of its dependency. Forward compatibility rules are
-necessary when there are breaking changes in the dependency that the
-package cannot handle. In Spack we often add forward compatibility
-bounds only at the time a new, breaking version of a dependency is
-released. As with backward compatibility, it is typical to see a list
-of forward compatibility bounds in a package file as seperate lines:
-.. code-block:: python
-   # forward compatibility with Python
-   depends_on("python@:3.12", when="@:1.10")
-   depends_on("python@:3.13", when="@:1.12")
-Notice how the ``:`` now appears before the version number both in the
-dependency and in the ``when`` clause. This tells Spack that in general
-we need Python 3.13 or older up to version ``1.12.x``, and up to version
-``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
-with Python 3.13 was added in version 1.11, while version 1.13 added forward
-compatibility with Python 3.14.
-Notice that a version range ``@:3.12`` includes *any* patch version
-number ``3.12.x``, which is often useful when specifying forward compatibility
-bounds.
-So far we have seen open-ended version ranges, which is by far the most
-common use case. It is also possible to specify both a lower and an upper bound
-on the version of a dependency, like this:
-.. code-block:: python
-   depends_on("python@3.10:3.12")
-There is short syntax to specify that a package is compatible with say any
-``3.x`` version:
-.. code-block:: python
-   depends_on("python@3")
-The above is equivalent to ``depends_on("python@3:3")``, which means at least
-Python version 3 and at most any version ``3.x.y``.
+^^^^^^^^^^^^^^
+Version ranges
+^^^^^^^^^^^^^^
+Although some packages require a specific version for their dependencies,
+most can be built with a range of versions. For example, if you are
+writing a package for a legacy Python module that only works with Python
+2.4 through 2.6, this would look like:
+.. code-block:: python
+   depends_on("python@2.4:2.6")
+Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
+greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
+you want to specify that a package works with any version of Python 3 (or
+higher), this would look like:
+.. code-block:: python
+   depends_on("python@3:")
+Here we leave out the upper bound. If you want to say that a package
+requires Python 2, you can similarly leave out the lower bound:
+.. code-block:: python
+   depends_on("python@:2")
+Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
+``@:3`` means "up to and including any 3.x version".
+You can also simply write
+.. code-block:: python
+   depends_on("python@2.7")
+to tell Spack that the package needs Python 2.7.x. This is equivalent to
+``@2.7:2.7``.
@@ -2925,9 +2932,9 @@ make sense during the build phase may not be needed at runtime, and vice versa.
it makes sense to let a dependency set the environment variables for its dependents. To allow all it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package: this, Spack provides four different methods that can be overridden in a package:
1. :meth:`setup_build_environment <spack.builder.BaseBuilder.setup_build_environment>` 1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>` 2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.BaseBuilder.setup_dependent_build_environment>` 3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>` 4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
The Qt package, for instance, uses this call:
@@ -5378,7 +5385,7 @@ by build recipes. Examples of checking :ref:`variant settings <variants>` and
determine whether it needs to also set up build dependencies (see
:ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming
@@ -5695,7 +5702,7 @@ subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*
@@ -7071,46 +7078,6 @@ might write:
CXXFLAGS += -I$DWARF_PREFIX/include
CXXFLAGS += -L$DWARF_PREFIX/lib
.. _abi_compatibility:
----------------------------
Specifying ABI Compatibility
----------------------------
Packages can include ABI-compatibility information using the
``can_splice`` directive. For example, if ``Foo`` version 1.1 can
always replace version 1.0, then the package could have:
.. code-block:: python
can_splice("foo@1.0", when="@1.1")
For virtual packages, packages can also specify ABI-compatibility with
other packages providing the same virtual. For example, ``zlib-ng``
could specify:
.. code-block:: python
can_splice("zlib@1.3.1", when="@2.2+compat")
Some packages have ABI-compatibility that is dependent on matching
variant values, either for all variants or for some set of
ABI-relevant variants. In those cases, it is not necessary to specify
the full combinatorial explosion. The ``match_variants`` keyword can
cover all single-value variants.
.. code-block:: python
can_splice("foo@1.1", when="@1.2", match_variants=["bar"])  # any value for bar, as long as it is the same on both
can_splice("foo@1.2", when="@1.3", match_variants="*") # any variant values if all single-value variants match
The concretizer will use ABI compatibility to determine automatic
splices when :ref:`automatic splicing<automatic_splicing>` is enabled.
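As a sketch of how this looks in a package file (names, versions, and the
variant below are illustrative only):

.. code-block:: python

   class Foo(Package):
       version("1.1")
       version("1.0")

       variant("bar", default=True, description="An ABI-relevant option")

       # 1.1 can replace 1.0, provided the value of ``bar`` matches
       can_splice("foo@1.0", when="@1.1", match_variants=["bar"])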
.. note::
The ``can_splice`` directive is experimental, and may be replaced
by a higher-level interface in future versions of Spack.
.. _package_class_structure:


@@ -59,7 +59,7 @@ Functional Example
------------------

The simplest fully functional standalone example of a working pipeline can be
examined live at this example `project <https://gitlab.com/spack/pipeline-quickstart>`_
on gitlab.com.

Here's the ``.gitlab-ci.yml`` file from that example that builds and runs the
@@ -67,46 +67,39 @@ pipeline:
.. code-block:: yaml

   stages: [ "generate", "build" ]

   variables:
     SPACK_REPOSITORY: "https://github.com/spack/spack.git"
     SPACK_REF: "develop-2024-10-06"
     SPACK_USER_CONFIG_PATH: ${CI_PROJECT_DIR}
     SPACK_BACKTRACE: 1

   generate-pipeline:
     tags:
       - saas-linux-small-amd64
     stage: generate
     image:
       name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
     script:
       - git clone ${SPACK_REPOSITORY}
       - cd spack && git checkout ${SPACK_REF} && cd ../
       - . "./spack/share/spack/setup-env.sh"
       - spack --version
       - spack env activate --without-view .
       - spack -d -v --color=always
           ci generate
           --check-index-only
           --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
           --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
     artifacts:
       paths:
         - "${CI_PROJECT_DIR}/jobs_scratch_dir"

   build-pipeline:
     stage: build
     trigger:
       include:
         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
           job: generate-pipeline
       strategy: depend
     needs:
       - artifacts: True
         job: generate-pipeline
The key thing to note above is that there are two jobs: The first job to run,
``generate-pipeline``, runs the ``spack ci generate`` command to generate a
@@ -121,93 +114,82 @@ And here's the spack environment built by the pipeline represented as a
spack:
  view: false
  concretizer:
    unify: true
    reuse: false

  definitions:
    - pkgs:
      - zlib
      - bzip2 ~debug
    - compiler:
      - '%gcc'

  specs:
    - matrix:
      - - $pkgs
      - - $compiler

  ci:
    target: gitlab

    pipeline-gen:
    - any-job:
        tags:
          - saas-linux-small-amd64
        image:
          name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
        before_script:
          - git clone ${SPACK_REPOSITORY}
          - cd spack && git checkout ${SPACK_REF} && cd ../
          - . "./spack/share/spack/setup-env.sh"
          - spack --version
          - export SPACK_USER_CONFIG_PATH=${CI_PROJECT_DIR}
          - spack config blame mirrors

The elements of this file important to spack ci pipelines are described in more
detail below, but there are a couple of things to note about the above working
example:
.. note::

   The use of ``reuse: false`` in spack environments used for pipelines is
   almost always what you want, as without it your pipelines will not rebuild
   packages even if package hashes have changed. This is due to the concretizer
   strongly preferring known hashes when ``reuse: true``.

The ``ci`` section in the above environment file contains the bare minimum
configuration required for ``spack ci generate`` to create a working pipeline.
The ``target: gitlab`` tells spack that the desired pipeline output is for
gitlab. However, this isn't strictly required, as currently gitlab is the
only possible output format for pipelines. The ``pipeline-gen`` section
contains the key information needed to specify attributes for the generated
jobs. Notice that it contains a list which has only a single element in
this case. In real pipelines it will almost certainly have more elements,
and in those cases, order is important: spack starts at the bottom of the
list and works upwards when applying attributes.

But in this simple case, we use only the special key ``any-job`` to
indicate that spack should apply the specified attributes (``tags``, ``image``,
and ``before_script``) to any job it generates. This includes jobs for
building/pushing all packages, a ``rebuild-index`` job at the end of the
pipeline, as well as any ``noop`` jobs that might be needed by gitlab when
no rebuilds are required.

Something to note is that in this simple case, we rely on spack to
generate a reasonable script for the package build jobs (it just creates
a script that invokes ``spack ci rebuild``).

Another thing to note is the use of the ``SPACK_USER_CONFIG_PATH`` environment
variable in any generated jobs. The purpose of this is to make spack
aware of one final file in the example, the one that contains the mirror
configuration. This file, ``mirrors.yaml``, looks like this:

.. code-block:: yaml

   mirrors:
     buildcache-destination:
       url: oci://registry.gitlab.com/spack/pipeline-quickstart
       binary: true
       access_pair:
         id_variable: CI_REGISTRY_USER
         secret_variable: CI_REGISTRY_PASSWORD

Note the name of the mirror is ``buildcache-destination``, which is required
as of Spack 0.23 (see below for more information). The mirror url simply
points to the container registry associated with the project, while
``id_variable`` and ``secret_variable`` refer to environment variables
containing the access credentials for the mirror.

When spack builds packages for this example project, they will be pushed to
the project container registry, where they will be available for subsequent
jobs to install as dependencies, or for other pipelines to use to build runnable
container images.
-----------------------------------
Spack commands supporting pipelines
-----------------------------------
@@ -435,6 +417,15 @@ configuration with a ``script`` attribute. Specifying a signing job without a sc
does not create a signing job and the job configuration attributes will be ignored.
Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.
^^^^^^^^^^^^^^^^^
Cleanup (cleanup)
^^^^^^^^^^^^^^^^^
When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
script, but do expect that the spack command is in the path and require a
``before_script`` to be specified that sources the ``setup-env.sh`` script.
.. _noop_jobs:

^^^^^^^^^^^^
@@ -750,6 +741,15 @@ environment/stack file, and in that case no bootstrapping will be done (only the
specs will be staged for building) and the runners will be expected to already
have all needed compilers installed and configured for spack to use.
^^^^^^^^^^^^^^^^^^^
Pipeline Buildcache
^^^^^^^^^^^^^^^^^^^
The ``enable-artifacts-buildcache`` key
takes a boolean and determines whether the pipeline uses artifacts to store and
pass along the buildcaches from one stage to the next (the default if you don't
provide this option is ``False``).
^^^^^^^^^^^^^^^^
Broken Specs URL
^^^^^^^^^^^^^^^^


@@ -1,13 +1,13 @@
sphinx==8.1.3
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.4
isort==5.13.2
black==24.10.0
flake8==7.1.1
mypy==1.11.1

lib/spack/env/cc vendored

@@ -40,6 +40,11 @@ readonly params="\
SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID
SPACK_COMPILER_SPEC
SPACK_CC_RPATH_ARG
SPACK_CXX_RPATH_ARG
SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS
@@ -96,9 +101,10 @@ setsep() {
    esac
}

# prepend LISTNAME ELEMENT
#
# Prepend ELEMENT to the list stored in the variable LISTNAME.
# Handles empty lists and single-element lists.
prepend() {
    varname="$1"
@@ -218,7 +224,6 @@ for param in $params; do
    if eval "test -z \"\${${param}:-}\""; then
        die "Spack compiler must be run from Spack! Input '$param' is missing."
    fi
    # FIXME (compiler as nodes) add checks on whether `SPACK_XX_RPATH` is set if `SPACK_XX` is set
done

# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
@@ -233,36 +238,6 @@ esac
}
"
# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
# which are used to prioritize paths when assembling the final command line.
# init_path_lists LISTNAME
# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
init_path_lists() {
eval "spack_store_$1=\"\""
eval "$1=\"\""
eval "system_$1=\"\""
}
# assign_path_lists LISTNAME1 LISTNAME2
# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
assign_path_lists() {
eval "spack_store_$1=\"\${spack_store_$2}\""
eval "$1=\"\${$2}\""
eval "system_$1=\"\${system_$2}\""
}
# append_path_lists LISTNAME ELT
# Append the provided ELT to the appropriate list, based on the result of path_order().
append_path_lists() {
path_order "$2"
case $? in
0) eval "append spack_store_$1 \"\$2\"" ;;
1) eval "append $1 \"\$2\"" ;;
2) eval "append system_$1 \"\$2\"" ;;
esac
}
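# Example (hypothetical, for illustration only): after `init_path_lists include_dirs_list`,
# a call like `append_path_lists include_dirs_list /usr/include` would land the path in
# system_include_dirs_list, assuming path_order classifies /usr/include as a system dir.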
# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -342,9 +317,6 @@ case "$command" in
        ;;
    ld|ld.gold|ld.lld)
        mode=ld
        if [ -z "$SPACK_CC_RPATH_ARG" ]; then
            comp="CXX"
        fi
        ;;
    *)
        die "Unknown compiler: $command"
@@ -402,7 +374,7 @@ dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
linker_arg="${SPACK_LINKER_ARG}"

# Set up rpath variable according to language.
rpath="ERROR: RPATH ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"

# Dump the mode and exit if the command is dump-mode.
@@ -411,6 +383,13 @@ if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
    exit
fi
# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
# *have* to set up Fortran executables, so we need to tell the user when a build is
# about to attempt to use them unsuccessfully.
if [ -z "$command" ]; then
die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
fi
#
# Filter '.' and Spack environment directories out of PATH so that
# this script doesn't just call itself
@@ -491,7 +470,12 @@ input_command="$*"
parse_Wl() {
    while [ $# -ne 0 ]; do
        if [ "$wl_expect_rpath" = yes ]; then
            append_path_lists return_rpath_dirs_list "$1"
            wl_expect_rpath=no
        else
            case "$1" in
@@ -500,14 +484,24 @@ parse_Wl() {
                    if [ -z "$arg" ]; then
                        shift; continue
                    fi
                    append_path_lists return_rpath_dirs_list "$arg"
                    ;;
                --rpath=*)
                    arg="${1#--rpath=}"
                    if [ -z "$arg" ]; then
                        shift; continue
                    fi
                    append_path_lists return_rpath_dirs_list "$arg"
                    ;;
                -rpath|--rpath)
                    wl_expect_rpath=yes
@@ -515,7 +509,8 @@ parse_Wl() {
"$dtags_to_strip") "$dtags_to_strip")
;; ;;
-Wl) -Wl)
# Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it. # Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
# it.
return 1 return 1
;; ;;
*) *)
@@ -534,10 +529,21 @@ categorize_arguments() {
    return_other_args_list=""
    return_isystem_was_used=""

    init_path_lists return_isystem_include_dirs_list
    init_path_lists return_include_dirs_list
    init_path_lists return_lib_dirs_list
    init_path_lists return_rpath_dirs_list

    # Global state for keeping track of -Wl,-rpath -Wl,/path
    wl_expect_rpath=no
@@ -603,17 +609,32 @@ categorize_arguments() {
                arg="${1#-isystem}"
                return_isystem_was_used=true
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append_path_lists return_isystem_include_dirs_list "$arg"
                ;;
            -I*)
                arg="${1#-I}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append_path_lists return_include_dirs_list "$arg"
                ;;
            -L*)
                arg="${1#-L}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append_path_lists return_lib_dirs_list "$arg"
                ;;
            -l*)
                # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -646,17 +667,32 @@ categorize_arguments() {
                break
            elif [ "$xlinker_expect_rpath" = yes ]; then
                # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
                append_path_lists return_rpath_dirs_list "$1"
                xlinker_expect_rpath=no
            else
                case "$1" in
                    -rpath=*)
                        arg="${1#-rpath=}"
                        append_path_lists return_rpath_dirs_list "$arg"
                        ;;
                    --rpath=*)
                        arg="${1#--rpath=}"
                        append_path_lists return_rpath_dirs_list "$arg"
                        ;;
                    -rpath|--rpath)
                        xlinker_expect_rpath=yes
@@ -673,36 +709,7 @@ categorize_arguments() {
"$dtags_to_strip") "$dtags_to_strip")
;; ;;
*) *)
# if mode is not ld, we can just add to other args append return_other_args_list "$1"
if [ "$mode" != "ld" ]; then
append return_other_args_list "$1"
shift
continue
fi
# if we're in linker mode, we need to parse raw RPATH args
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
append_path_lists return_rpath_dirs_list "$arg"
;;
--rpath=*)
arg="${1#--rpath=}"
append_path_lists return_rpath_dirs_list "$arg"
;;
-rpath|--rpath)
if [ $# -eq 1 ]; then
# -rpath without value: let the linker raise an error.
append return_other_args_list "$1"
break
fi
shift
append_path_lists return_rpath_dirs_list "$1"
;;
*)
append return_other_args_list "$1"
;;
esac
;; ;;
esac esac
shift shift
@@ -724,10 +731,21 @@ categorize_arguments() {
categorize_arguments "$@"

assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
assign_path_lists include_dirs_list return_include_dirs_list
assign_path_lists lib_dirs_list return_lib_dirs_list
assign_path_lists rpath_dirs_list return_rpath_dirs_list

isystem_was_used="$return_isystem_was_used"
other_args_list="$return_other_args_list"
@@ -780,17 +798,15 @@ case "$mode" in
        C)
            extend spack_flags_list SPACK_ALWAYS_CFLAGS
            extend spack_flags_list SPACK_CFLAGS
            preextend flags_list SPACK_TARGET_ARGS_CC
            ;;
        CXX)
            extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
            extend spack_flags_list SPACK_CXXFLAGS
            preextend flags_list SPACK_TARGET_ARGS_CXX
            ;;
        F)
            preextend flags_list SPACK_TARGET_ARGS_FORTRAN
            ;;
    esac
    ;;
esac
@@ -805,10 +821,21 @@ IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS

assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list

spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_other_args_list="$return_other_args_list"
@@ -867,7 +894,7 @@ esac
case "$mode" in
    cpp|cc|as|ccld)
        if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
            extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
            extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
        else
            extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
@@ -883,63 +910,64 @@ args_list="$flags_list"
# Include search paths partitioned by (in store, non-system, system)
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_store_spack_flags_include_dirs_list -I
extend args_list spack_store_include_dirs_list -I
extend args_list spack_flags_include_dirs_list -I
extend args_list include_dirs_list -I
extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"
extend args_list system_spack_flags_include_dirs_list -I
extend args_list system_include_dirs_list -I
extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list system_isystem_include_dirs_list "-isystem${lsep}"

# Library search paths partitioned by (in store, non-system, system)
extend args_list spack_store_spack_flags_lib_dirs_list "-L"
extend args_list spack_store_lib_dirs_list "-L"
extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"
extend args_list system_spack_flags_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

# RPATHs arguments
rpath_prefix=""
case "$mode" in
    ccld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$linker_arg$dtags_to_add"
        fi
        rpath_prefix="$rpath"
        ;;
    ld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$dtags_to_add"
        fi
        rpath_prefix="-rpath${lsep}"
        ;;
esac
# if mode is ccld or ld, extend RPATH lists with the prefix determined above
if [ -n "$rpath_prefix" ]; then
extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
extend args_list spack_store_rpath_dirs_list "$rpath_prefix"
extend args_list spack_flags_rpath_dirs_list "$rpath_prefix"
extend args_list rpath_dirs_list "$rpath_prefix"
extend args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
extend args_list system_rpath_dirs_list "$rpath_prefix"
fi
# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list


@@ -20,24 +20,11 @@
import tempfile
from contextlib import contextmanager
from itertools import accumulate
from typing import (
    Callable,
    Deque,
    Dict,
    Generator,
    Iterable,
    List,
    Match,
    Optional,
    Sequence,
    Set,
    Tuple,
    Union,
)

import llnl.util.symlink
from llnl.util import tty
from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

from ..path import path_to_os_path, system_path_filter
@@ -98,8 +85,6 @@
"visit_directory_tree", "visit_directory_tree",
] ]
Path = Union[str, pathlib.Path]
if sys.version_info < (3, 7, 4):
    # monkeypatch shutil.copystat to fix PermissionError when copying read-only
    # files on Lustre when using Python < 3.7.4
@@ -1688,203 +1673,105 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2)
    return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()
def find(
    root: Union[Path, Sequence[Path]],
    files: Union[str, Sequence[str]],
    recursive: bool = True,
    max_depth: Optional[int] = None,
) -> List[str]:
    """Finds all files matching the patterns from ``files`` starting from ``root``. This function
    returns a deterministic result for the same input and directory structure when run multiple
    times. Symlinked directories are followed, and unique directories are searched only once. Each
    matching file is returned only once at lowest depth in case multiple paths exist due to
    symlinked directories.

    Accepts any glob characters accepted by fnmatch:

    ==========  ====================================
    Pattern     Meaning
    ==========  ====================================
    ``*``       matches one or more characters
    ``?``       matches any single character
    ``[seq]``   matches any character in ``seq``
    ``[!seq]``  matches any character not in ``seq``
    ==========  ====================================

    Examples:

    >>> find("/usr", "*.txt", recursive=True, max_depth=2)

    finds all files with the extension ``.txt`` in the directory ``/usr`` and subdirectories up to
    depth 2.

    >>> find(["/usr", "/var"], ["*.txt", "*.log"], recursive=True)

    finds all files with the extension ``.txt`` or ``.log`` in the directories ``/usr`` and
    ``/var`` at any depth.

    >>> find("/usr", "GL/*.h", recursive=True)

    finds all header files in a directory GL at any depth in the directory ``/usr``.

    Parameters:
        root: One or more root directories to start searching from
        files: One or more filename patterns to search for
        recursive: if False search only root, if True descends from roots. Defaults to True.
        max_depth: if set, don't search below this depth. Cannot be set if recursive is False

    Returns a list of absolute, matching file paths.
    """
    if isinstance(root, (str, pathlib.Path)):
        root = [root]
    elif not isinstance(root, collections.abc.Sequence):
        raise TypeError(f"'root' arg must be a path or a sequence of paths, not '{type(root)}']")

    if isinstance(files, str):
        files = [files]
    elif not isinstance(files, collections.abc.Sequence):
        raise TypeError(f"'files' arg must be str or a sequence of str, not '{type(files)}']")

    # If recursive is false, max_depth can only be None or 0
    if max_depth and not recursive:
        raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")

    tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
    if not recursive:
        max_depth = 0
    elif max_depth is None:
        max_depth = sys.maxsize
    result = _find_max_depth(root, files, max_depth)
    tty.debug(f"Find complete: {root} {files}")
    return result
def _log_file_access_issue(e: OSError, path: str) -> None:
    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
    tty.debug(f"find must skip {path}: {errno_name} {e}")


def _file_id(s: os.stat_result) -> Tuple[int, int]:
    # Note: on windows, st_ino is the file index and st_dev is the volume serial number. See
    # https://github.com/python/cpython/blob/3.9/Python/fileutils.c
    return (s.st_ino, s.st_dev)


def _dedupe_files(paths: List[str]) -> List[str]:
    """Deduplicate files by inode and device, dropping files that cannot be accessed."""
    unique_files: List[str] = []
    # tuple of (inode, device) for each file without following symlinks
    visited: Set[Tuple[int, int]] = set()
    for path in paths:
        try:
            stat_info = os.lstat(path)
        except OSError as e:
            _log_file_access_issue(e, path)
            continue
        file_id = _file_id(stat_info)
        if file_id not in visited:
            unique_files.append(path)
            visited.add(file_id)
    return unique_files


def _find_max_depth(
    roots: Sequence[Path], globs: Sequence[str], max_depth: int = sys.maxsize
) -> List[str]:
    """See ``find`` for the public API."""
    # We optimize for the common case of simple filename only patterns: a single, combined regex
    # is used. For complex patterns that include path components, we use a slower glob call from
    # every directory we visit within max_depth.
    filename_only_patterns = {
        f"pattern_{i}": os.path.normcase(x) for i, x in enumerate(globs) if "/" not in x
    }
    complex_patterns = {f"pattern_{i}": x for i, x in enumerate(globs) if "/" in x}
    regex = re.compile(fnmatch_translate_multiple(filename_only_patterns))
    # Ordered dictionary that keeps track of what pattern found which files
    matched_paths: Dict[str, List[str]] = {f"pattern_{i}": [] for i, _ in enumerate(globs)}

    # Ensure returned paths are always absolute
    roots = [os.path.abspath(r) for r in roots]

    # Breadth-first search queue. Each element is a tuple of (depth, dir)
    dir_queue: Deque[Tuple[int, str]] = collections.deque()
    # Set of visited directories. Each element is a tuple of (inode, device)
    visited_dirs: Set[Tuple[int, int]] = set()

    for root in roots:
        try:
            stat_root = os.stat(root)
        except OSError as e:
            _log_file_access_issue(e, root)
            continue
        dir_id = _file_id(stat_root)
        if dir_id not in visited_dirs:
            dir_queue.appendleft((0, root))
            visited_dirs.add(dir_id)

    while dir_queue:
        depth, curr_dir = dir_queue.pop()
        try:
            dir_iter = os.scandir(curr_dir)
        except OSError as e:
            _log_file_access_issue(e, curr_dir)
            continue

        # Use glob.glob for complex patterns.
        for pattern_name, pattern in complex_patterns.items():
            matched_paths[pattern_name].extend(
                path for path in glob.glob(os.path.join(curr_dir, pattern))
            )

        # List of subdirectories by path and (inode, device) tuple
        subdirs: List[Tuple[str, Tuple[int, int]]] = []

        with dir_iter:
            for dir_entry in dir_iter:
                # Match filename only patterns
                if filename_only_patterns:
                    m = regex.match(os.path.normcase(dir_entry.name))
                    if m:
                        for pattern_name in filename_only_patterns:
                            if m.group(pattern_name):
                                matched_paths[pattern_name].append(dir_entry.path)
                                break

                # Collect subdirectories
                if depth >= max_depth:
                    continue

                try:
                    if not dir_entry.is_dir(follow_symlinks=True):
                        continue
                    if sys.platform == "win32":
                        # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we have
                        # to call os.stat
                        stat_info = os.stat(dir_entry.path, follow_symlinks=True)
                    else:
                        stat_info = dir_entry.stat(follow_symlinks=True)
                except OSError as e:
                    # Possible permission issue, or a symlink that cannot be resolved (ELOOP).
                    _log_file_access_issue(e, dir_entry.path)
                    continue

                subdirs.append((dir_entry.path, _file_id(stat_info)))

        # Enqueue subdirectories in a deterministic order
        if subdirs:
            subdirs.sort(key=lambda s: os.path.basename(s[0]))
            for subdir, subdir_id in subdirs:
                if subdir_id not in visited_dirs:
                    dir_queue.appendleft((depth + 1, subdir))
                    visited_dirs.add(subdir_id)

    # Sort the matched paths for deterministic output
    for paths in matched_paths.values():
        paths.sort()
    all_matching_paths = [path for paths in matched_paths.values() for path in paths]

    # We only dedupe files if we have any complex patterns, since only they can match the same
    # file multiple times
    return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths
# Utilities for libraries and headers
@@ -2323,9 +2210,7 @@ def find_system_libraries(libraries, shared=True):
    return libraries_found


def find_libraries(
    libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None
):
    """Returns an iterable of full paths to libraries found in a root dir.

    Accepts any glob characters accepted by fnmatch:
@@ -2346,8 +2231,6 @@ def find_libraries(
            otherwise for static. Defaults to True.
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to False.
        max_depth (int): if set, don't search below this depth. Cannot be set
            if recursive is False
        runtime (bool): Windows only option, no-op elsewhere. If true,
            search for runtime shared libs (.DLL), otherwise, search
            for .Lib files. If shared is false, this has no meaning.
@@ -2356,7 +2239,6 @@ def find_libraries(
    Returns:
        LibraryList: The libraries that have been found
    """
    if isinstance(libraries, str):
        libraries = [libraries]
    elif not isinstance(libraries, collections.abc.Sequence):
@@ -2389,10 +2271,8 @@ def find_libraries(
    libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]

    if not recursive:
        if max_depth:
            raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
        # If not recursive, look for the libraries directly in root
        return LibraryList(find(root, libraries, recursive=False))

    # To speedup the search for external packages configured e.g. in /usr,
    # perform first non-recursive search in root/lib then in root/lib64 and
@@ -2410,7 +2290,7 @@ def find_libraries(
        if found_libs:
            break
    else:
        found_libs = find(root, libraries, recursive=True, max_depth=max_depth)

    return LibraryList(found_libs)
@@ -2773,6 +2653,22 @@ def prefixes(path):
    return paths
@system_path_filter
def md5sum(file):
"""Compute the MD5 sum of a file.
Args:
file (str): file to be checksummed
Returns:
MD5 sum of the file's content
"""
md5 = hashlib.md5()
with open(file, "rb") as f:
md5.update(f.read())
return md5.digest()
@system_path_filter
def remove_directory_contents(dir):
    """Remove all contents of a directory."""
@@ -2823,25 +2719,6 @@ def temporary_dir(
        remove_directory_contents(tmp_dir)
@contextmanager
def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
"""Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
for functions or external tools that do not support in-place editing. Notice that this function
is unsafe in that it works with paths instead of a file descriptors, but this is by design,
since we assume the call site will create a new inode at the same path."""
tmp_fd, tmp_path = tempfile.mkstemp(
dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
)
# windows cannot replace a file with open fds, so close since the call site needs to replace.
os.close(tmp_fd)
try:
shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
yield tmp_path
shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
finally:
os.unlink(tmp_path)
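# A hypothetical usage sketch (not part of the original module): let an external
# tool rewrite the file while the original inode, and thus any hardlinks, are
# preserved:
#
#     with edit_in_place_through_temporary_file("/path/to/config") as tmp:
#         subprocess.run(["clang-format", "-i", tmp], check=True)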
def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
    """Create a small summary of the given file. Does not error
    when file does not exist.


@@ -5,17 +5,14 @@
import collections.abc
import contextlib
import fnmatch
import functools
import itertools
import os
import re
import sys
import traceback
import typing
import warnings
from datetime import datetime, timedelta
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar

# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
@@ -73,7 +70,7 @@ def index_by(objects, *funcs):
        if isinstance(f, str):
            f = lambda x: getattr(x, funcs[0])
        elif isinstance(f, tuple):
            f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])

    result = {}
    for o in objects:
@@ -861,19 +858,6 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
    return line_list
if sys.version_info >= (3, 9):
PatternStr = re.Pattern[str]
else:
PatternStr = typing.Pattern[str]
def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
"""Similar to ``fnmatch.translate``, but takes an ordered dictionary where keys are pattern
names, and values are filename patterns. The output is a regex that matches any of the
patterns in order, and named capture groups are used to identify which pattern matched."""
return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items())
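# A hypothetical usage sketch: compile the combined pattern once, then use the
# named capture groups to tell which pattern matched:
#
#     regex = re.compile(fnmatch_translate_multiple({"lib": "*.so", "hdr": "*.h"}))
#     m = regex.match("libz.so")
#     if m and m.group("lib"):
#         ...  # the "lib" pattern matched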
@contextlib.contextmanager
def nullcontext(*args, **kwargs):
    """Empty context manager.
@@ -886,6 +870,15 @@ class UnhashableArguments(TypeError):
"""Raise when an @memoized function receives unhashable arg or kwarg values.""" """Raise when an @memoized function receives unhashable arg or kwarg values."""
def enum(**kwargs):
"""Return an enum-like class.
Args:
**kwargs: explicit dictionary of enums
"""
return type("Enum", (object,), kwargs)
T = TypeVar("T")
@@ -921,21 +914,6 @@ def ensure_last(lst, *elements):
        lst.append(lst.pop(lst.index(elt)))
class Const:
"""Class level constant, raises when trying to set the attribute"""
__slots__ = ["value"]
def __init__(self, value):
self.value = value
def __get__(self, instance, owner):
return self.value
def __set__(self, instance, value):
raise TypeError(f"Const value does not support assignment [value={self.value}]")
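# A hypothetical usage sketch: the value reads like a normal class attribute,
# but assignment on an instance raises a TypeError:
#
#     class Schema:
#         fields = Const(("name", "version"))
#
#     Schema.fields        # -> ("name", "version")
#     Schema().fields = 1  # -> TypeError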
class TypedMutableSequence(collections.abc.MutableSequence):
    """Base class that behaves like a list, just with a different type.
@@ -995,8 +973,11 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
    def grouped_message(self, with_tracebacks: bool = True) -> str:
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            "\n\t{0} raised {1}: {2}\n{3}".format(
                context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
            )
            for context, exc, tb in self.exceptions
        ]
@@ -1037,42 +1018,3 @@ def __init__(self, callback):
    def __get__(self, instance, owner):
        return self.callback(owner)
class DeprecatedProperty:
"""Data descriptor to error or warn when a deprecated property is accessed.
Derived classes must define a factory method to return an adaptor for the deprecated
property, if the descriptor is not set to error.
"""
__slots__ = ["name"]
#: 0 - Nothing
#: 1 - Warning
#: 2 - Error
error_lvl = 0
def __init__(self, name: str) -> None:
self.name = name
def __get__(self, instance, owner):
if instance is None:
return self
if self.error_lvl == 1:
warnings.warn(
f"accessing the '{self.name}' property of '{instance}', which is deprecated"
)
elif self.error_lvl == 2:
raise AttributeError(f"cannot access the '{self.name}' attribute of '{instance}'")
return self.factory(instance, owner)
def __set__(self, instance, value):
raise TypeError(
f"the deprecated property '{self.name}' of '{instance}' does not support assignment"
)
def factory(self, instance, owner):
raise NotImplementedError("must be implemented by derived classes")
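# A hypothetical usage sketch: a derived descriptor returns an adaptor for the
# deprecated attribute via ``factory``:
#
#     class DeprecatedThing(DeprecatedProperty):
#         def factory(self, instance, owner):
#             return instance._modern_thing
#
#     class Holder:
#         thing = DeprecatedThing("thing")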


@@ -263,9 +263,7 @@ def match_to_ansi(match):
f"Incomplete color format: '{match.group(0)}' in '{match.string}'" f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
) )
color_number = colors.get(color_code, "") ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
semi = ";" if color_number else ""
ansi_code = _escape(f"{styles[style]}{semi}{color_number}", color, enclose, zsh)
if text: if text:
return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}" return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
else: else:


@@ -10,6 +10,7 @@
import errno
import io
import multiprocessing
import multiprocessing.connection
import os
import re
import select
@@ -18,10 +19,9 @@
import threading
import traceback
from contextlib import contextmanager
from multiprocessing.connection import Connection
from threading import Thread
from types import ModuleType
from typing import Callable, Optional

import llnl.util.tty as tty
@@ -345,6 +345,49 @@ def close(self):
        self.file.close()
class MultiProcessFd:
"""Return an object which stores a file descriptor and can be passed as an
argument to a function run with ``multiprocessing.Process``, such that
the file descriptor is available in the subprocess."""
def __init__(self, fd):
self._connection = None
self._fd = None
if sys.version_info >= (3, 8):
self._connection = multiprocessing.connection.Connection(fd)
else:
self._fd = fd
@property
def fd(self):
if self._connection:
return self._connection._handle
else:
return self._fd
def close(self):
if self._connection:
self._connection.close()
else:
os.close(self._fd)
def close_connection_and_file(multiprocess_fd, file):
# MultiprocessFd is intended to transmit a FD
# to a child process, this FD is then opened to a Python File object
# (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
# multiprocessing.connection.Connection; Connection closes the FD
# when it is deleted, and prints a warning about duplicate closure if
# it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
# simple FD; closing the FD here appears to conflict with
# closure of the File object (in < 3.8 that is). Therefore this needs
# to choose whether to close the File or the Connection.
if sys.version_info >= (3, 8):
multiprocess_fd.close()
else:
file.close()
@contextmanager
def replace_environment(env):
    """Replace the current environment (`os.environ`) with `env`.
@@ -502,20 +545,22 @@ def __enter__(self):
        # forcing debug output.
        self._saved_debug = tty._debug

        # Pipe for redirecting output to logger
        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)

        # Pipe for communication back from the daemon
        # Currently only used to save echo value between uses
        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)

        # Sets a daemon that writes to file what it reads from a pipe
        try:
            # need to pass this b/c multiprocessing closes stdin in child.
            input_fd = None
            try:
                if sys.stdin.isatty():
                    input_fd = Connection(os.dup(sys.stdin.fileno()))
            except BaseException:
                # just don't forward input if this fails
                pass
@@ -524,9 +569,9 @@ def __enter__(self):
self.process = multiprocessing.Process( self.process = multiprocessing.Process(
target=_writer_daemon, target=_writer_daemon,
args=( args=(
input_fd, input_multiprocess_fd,
read_fd, read_multiprocess_fd,
self.write_fd, write_fd,
self.echo, self.echo,
self.log_file, self.log_file,
child_pipe, child_pipe,
@@ -537,9 +582,9 @@ def __enter__(self):
self.process.start() self.process.start()
finally: finally:
if input_fd: if input_multiprocess_fd:
input_fd.close() input_multiprocess_fd.close()
read_fd.close() read_multiprocess_fd.close()
# Flush immediately before redirecting so that anything buffered # Flush immediately before redirecting so that anything buffered
# goes to the original stream # goes to the original stream
@@ -557,9 +602,9 @@ def __enter__(self):
self._saved_stderr = os.dup(sys.stderr.fileno()) self._saved_stderr = os.dup(sys.stderr.fileno())
# redirect to the pipe we created above # redirect to the pipe we created above
os.dup2(self.write_fd.fileno(), sys.stdout.fileno()) os.dup2(write_fd, sys.stdout.fileno())
os.dup2(self.write_fd.fileno(), sys.stderr.fileno()) os.dup2(write_fd, sys.stderr.fileno())
self.write_fd.close() os.close(write_fd)
else: else:
# Handle I/O the Python way. This won't redirect lower-level # Handle I/O the Python way. This won't redirect lower-level
@@ -572,7 +617,7 @@ def __enter__(self):
self._saved_stderr = sys.stderr self._saved_stderr = sys.stderr
# create a file object for the pipe; redirect to it. # create a file object for the pipe; redirect to it.
pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False) pipe_fd_out = os.fdopen(write_fd, "w")
sys.stdout = pipe_fd_out sys.stdout = pipe_fd_out
sys.stderr = pipe_fd_out sys.stderr = pipe_fd_out
@@ -608,7 +653,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
else: else:
sys.stdout = self._saved_stdout sys.stdout = self._saved_stdout
sys.stderr = self._saved_stderr sys.stderr = self._saved_stderr
self.write_fd.close()
# print log contents in parent if needed. # print log contents in parent if needed.
if self.log_file.write_in_parent: if self.log_file.write_in_parent:
@@ -822,14 +866,14 @@ def force_echo(self):
def _writer_daemon( def _writer_daemon(
stdin_fd: Optional[Connection], stdin_multiprocess_fd,
read_fd: Connection, read_multiprocess_fd,
write_fd: Connection, write_fd,
echo: bool, echo,
log_file_wrapper: FileWrapper, log_file_wrapper,
control_fd: Connection, control_pipe,
filter_fn: Optional[Callable[[str], str]], filter_fn,
) -> None: ):
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``. """Daemon used by ``log_output`` to write to a log file and to ``stdout``.
The daemon receives output from the parent process and writes it both The daemon receives output from the parent process and writes it both
@@ -866,37 +910,43 @@ def _writer_daemon(
``StringIO`` in the parent. This is mainly for testing. ``StringIO`` in the parent. This is mainly for testing.
Arguments: Arguments:
stdin_fd: optional input from the terminal stdin_multiprocess_fd (int): input from the terminal
read_fd: pipe for reading from parent's redirected stdout read_multiprocess_fd (int): pipe for reading from parent's redirected
echo: initial echo setting -- controlled by user and preserved across multiple writer stdout
daemons echo (bool): initial echo setting -- controlled by user and
log_file_wrapper: file to log all output preserved across multiple writer daemons
control_pipe: multiprocessing pipe on which to send control information to the parent log_file_wrapper (FileWrapper): file to log all output
filter_fn: optional function to filter each line of output control_pipe (Pipe): multiprocessing pipe on which to send control
information to the parent
filter_fn (callable, optional): function to filter each line of output
""" """
# This process depends on closing all instances of write_pipe to terminate the reading loop # If this process was forked, then it will inherit file descriptors from
write_fd.close() # the parent process. This process depends on closing all instances of
# write_fd to terminate the reading loop, so we close the file descriptor
# here. Forking is the process spawning method everywhere except Mac OS
# for Python >= 3.8 and on Windows
if sys.version_info < (3, 8) or sys.platform != "darwin":
os.close(write_fd)
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O. # that prevents unbuffered text I/O.
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
# 3. closefd=False because Connection has "ownership" in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
if stdin_fd: if stdin_multiprocess_fd:
stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False) stdin = os.fdopen(stdin_multiprocess_fd.fd)
else: else:
stdin_file = None stdin = None
# list of streams to select from # list of streams to select from
istreams = [read_file, stdin_file] if stdin_file else [read_file] istreams = [in_pipe, stdin] if stdin else [in_pipe]
force_echo = False # parent can force echo for certain output force_echo = False # parent can force echo for certain output
log_file = log_file_wrapper.unwrap() log_file = log_file_wrapper.unwrap()
try: try:
with keyboard_input(stdin_file) as kb: with keyboard_input(stdin) as kb:
while True: while True:
# fix the terminal settings if we recently came to # fix the terminal settings if we recently came to
# the foreground # the foreground
@@ -909,12 +959,12 @@ def _writer_daemon(
# Allow user to toggle echo with 'v' key. # Allow user to toggle echo with 'v' key.
# Currently ignores other chars. # Currently ignores other chars.
# only read stdin if we're in the foreground # only read stdin if we're in the foreground
if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file): if stdin in rlist and not _is_background_tty(stdin):
# it's possible to be backgrounded between the above # it's possible to be backgrounded between the above
# check and the read, so we ignore SIGTTIN here. # check and the read, so we ignore SIGTTIN here.
with ignore_signal(signal.SIGTTIN): with ignore_signal(signal.SIGTTIN):
try: try:
if stdin_file.read(1) == "v": if stdin.read(1) == "v":
echo = not echo echo = not echo
except IOError as e: except IOError as e:
# If SIGTTIN is ignored, the system gives EIO # If SIGTTIN is ignored, the system gives EIO
@@ -923,13 +973,13 @@ def _writer_daemon(
if e.errno != errno.EIO: if e.errno != errno.EIO:
raise raise
if read_file in rlist: if in_pipe in rlist:
line_count = 0 line_count = 0
try: try:
while line_count < 100: while line_count < 100:
# Handle output from the calling process. # Handle output from the calling process.
try: try:
line = _retry(read_file.readline)() line = _retry(in_pipe.readline)()
except UnicodeDecodeError: except UnicodeDecodeError:
# installs like --test=root gpgme produce non-UTF8 logs # installs like --test=root gpgme produce non-UTF8 logs
line = "<line lost: output was not encoded as UTF-8>\n" line = "<line lost: output was not encoded as UTF-8>\n"
@@ -958,7 +1008,7 @@ def _writer_daemon(
if xoff in controls: if xoff in controls:
force_echo = False force_echo = False
if not _input_available(read_file): if not _input_available(in_pipe):
break break
finally: finally:
if line_count > 0: if line_count > 0:
@@ -973,14 +1023,14 @@ def _writer_daemon(
finally: finally:
# send written data back to parent if we used a StringIO # send written data back to parent if we used a StringIO
if isinstance(log_file, io.StringIO): if isinstance(log_file, io.StringIO):
control_fd.send(log_file.getvalue()) control_pipe.send(log_file.getvalue())
log_file_wrapper.close() log_file_wrapper.close()
read_fd.close() close_connection_and_file(read_multiprocess_fd, in_pipe)
if stdin_fd: if stdin_multiprocess_fd:
stdin_fd.close() close_connection_and_file(stdin_multiprocess_fd, stdin)
# send echo value back to the parent so it can be preserved. # send echo value back to the parent so it can be preserved.
control_fd.send(echo) control_pipe.send(echo)
def _retry(function): def _retry(function):
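
To make the daemon's control flow concrete, here is a hedged, simplified sketch (not Spack's code) of the same select() multiplexing pattern: watch the parent's pipe and, optionally, stdin; echo pipe lines; toggle echoing when the user types "v":

    import select
    import sys

    def pump(in_pipe, stdin=None, echo=True):
        # Multiplex the output pipe and (optionally) the user's keyboard.
        istreams = [in_pipe, stdin] if stdin else [in_pipe]
        while True:
            rlist, _, _ = select.select(istreams, [], [])
            if stdin and stdin in rlist and stdin.read(1) == "v":
                echo = not echo  # the 'v' key toggles live output
            if in_pipe in rlist:
                line = in_pipe.readline()
                if not line:
                    return  # all writers closed the pipe: we're done
                if echo:
                    sys.stdout.write(line)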


@@ -11,7 +11,7 @@
 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.24.0.dev0"
+__version__ = "0.23.0.dev0"
 spack_version = __version__
@@ -69,15 +69,4 @@ def get_version() -> str:
     return spack_version


-def get_short_version() -> str:
-    """Short Spack version."""
-    return f"{spack_version_info[0]}.{spack_version_info[1]}"
-
-
-__all__ = [
-    "spack_version_info",
-    "spack_version",
-    "get_version",
-    "get_spack_commit",
-    "get_short_version",
-]
+__all__ = ["spack_version_info", "spack_version", "get_version", "get_spack_commit"]


@@ -55,7 +55,6 @@ def _search_duplicate_compilers(error_cls):
 import spack.builder
 import spack.config
-import spack.deptypes
 import spack.fetch_strategy
 import spack.patch
 import spack.repo
@@ -572,13 +571,8 @@ def _search_for_deprecated_package_methods(pkgs, error_cls):
 @package_properties
 def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
     """Ensure package names are lowercase and consistent"""
-    reserved_names = ("all",)
     badname_regex, errors = re.compile(r"[_A-Z]"), []
     for pkg_name in pkgs:
-        if pkg_name in reserved_names:
-            error_msg = f"The name '{pkg_name}' is reserved, and cannot be used for packages"
-            errors.append(error_cls(error_msg, []))
         if badname_regex.search(pkg_name):
             error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
             errors.append(error_cls(error_msg, []))
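
A quick, hedged illustration of what this audit catches, standalone and without the audit's error plumbing:

    import re

    badname = re.compile(r"[_A-Z]")  # capitals and underscores are both flagged
    for name in ["zlib", "py-Numpy", "my_pkg", "openmpi"]:
        if badname.search(name):
            print(f"Package name '{name}' should be lowercase and must not contain '_'")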
@@ -694,19 +688,19 @@ def invalid_sha256_digest(fetcher):
                 return h, True
         return None, False

-    error_msg = f"Package '{pkg_name}' does not use sha256 checksum"
+    error_msg = "Package '{}' does not use sha256 checksum".format(pkg_name)
     details = []
     for v, args in pkg.versions.items():
         fetcher = spack.fetch_strategy.for_package_version(pkg, v)
         digest, is_bad = invalid_sha256_digest(fetcher)
         if is_bad:
-            details.append(f"{pkg_name}@{v} uses {digest}")
+            details.append("{}@{} uses {}".format(pkg_name, v, digest))

     for _, resources in pkg.resources.items():
         for resource in resources:
             digest, is_bad = invalid_sha256_digest(resource.fetcher)
             if is_bad:
-                details.append(f"Resource in '{pkg_name}' uses {digest}")
+                details.append("Resource in '{}' uses {}".format(pkg_name, digest))
     if details:
         errors.append(error_cls(error_msg, details))
@@ -720,16 +714,17 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

-        # values are either ConditionalValue objects or the values themselves
+        # values are either Value objects (for conditional values) or the values themselves
         build_system_names = set(
-            v.value if isinstance(v, spack.variant.ConditionalValue) else v
+            v.value if isinstance(v, spack.variant.Value) else v
             for _, variant in pkg_cls.variant_definitions("build_system")
             for v in variant.values
         )

         builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]
+        module = pkg_cls.module
         has_builders_in_package_py = any(
-            spack.builder.get_builder_class(pkg_cls, name) for name in builder_cls_names
+            getattr(module, name, False) for name in builder_cls_names
         )
         if not has_builders_in_package_py:
             continue
@@ -811,7 +806,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
         file = spack.repo.PATH.filename_for_package_name(pkg_name)
         tree = ast.parse(open(file).read())
-        visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
+        visitor = DeprecatedMagicGlobals(("std_cmake_args",))
         visitor.visit(tree)
         if visitor.references_to_globals:
             errors.append(
@@ -1015,14 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
             #     depends_on('foo+bar ^fee+baz')
             #
             # but we'd like to have two dependencies listed instead.
-            nested_dependencies = dep.spec.edges_to_dependencies()
-            # Filter out pure build dependencies, like:
-            #
-            #     depends_on('foo+bar%gcc')
-            #
-            nested_dependencies = [
-                x for x in nested_dependencies if x.depflag != spack.deptypes.BUILD
-            ]
+            nested_dependencies = dep.spec.dependencies()
             if nested_dependencies:
                 summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
                 ndir = len(nested_dependencies) + 1
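
For context, the pattern this audit flags, and the fix it suggests, look roughly like this in a package recipe (a hedged sketch; Foo and the dependency names are made up, and it is not meant to run outside a Spack repository):

    from spack.package import *

    class Foo(Package):
        # Flagged: a single directive declaring a nested dependency.
        # depends_on("foo+bar ^fee+baz")

        # Preferred: each dependency declared on its own.
        depends_on("foo+bar")
        depends_on("fee+baz")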


@@ -40,7 +40,7 @@
 import spack.hash_types as ht
 import spack.hooks
 import spack.hooks.sbang
-import spack.mirrors.mirror
+import spack.mirror
 import spack.oci.image
 import spack.oci.oci
 import spack.oci.opener
@@ -87,8 +87,6 @@
 from spack.stage import Stage
 from spack.util.executable import which

-from .enums import InstallRecordStatus
-
 BUILD_CACHE_RELATIVE_PATH = "build_cache"
 BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"
@@ -254,7 +252,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
             spec_list = [
                 s
-                for s in db.query_local(installed=InstallRecordStatus.ANY)
+                for s in db.query_local(installed=any, in_buildcache=any)
                 if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
             ]
@@ -369,7 +367,7 @@ def update(self, with_cooldown=False):
         on disk under ``_index_cache_root``)."""
         self._init_local_index_cache()
         configured_mirror_urls = [
-            m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values()
+            m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
         ]
         items_to_remove = []
         spec_cache_clear_needed = False
@@ -765,14 +763,7 @@ def tarball_directory_name(spec):
     Return name of the tarball directory according to the convention
         <os>-<architecture>/<compiler>/<package>-<version>/
     """
-    if spec.original_spec_format() < 5:
-        compiler = spec.annotations.compiler_node_attribute
-        assert compiler is not None, "a compiler spec is expected"
-        return spec.format_path(
-            f"{spec.architecture}/{compiler.name}-{compiler.version}/{spec.name}-{spec.version}"
-        )
-    return spec.format_path(f"{spec.architecture.platform}/{spec.name}-{spec.version}")
+    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")


 def tarball_name(spec, ext):
@@ -780,17 +771,9 @@ def tarball_name(spec, ext):
     Return the name of the tarfile according to the convention
         <os>-<architecture>-<package>-<dag_hash><ext>
     """
-    if spec.original_spec_format() < 5:
-        compiler = spec.annotations.compiler_node_attribute
-        assert compiler is not None, "a compiler spec is expected"
-        spec_formatted = (
-            f"{spec.architecture}-{compiler.name}-{compiler.version}-{spec.name}"
-            f"-{spec.version}-{spec.dag_hash()}"
-        )
-    else:
-        spec_formatted = (
-            f"{spec.architecture.platform}-{spec.name}-{spec.version}-{spec.dag_hash()}"
-        )
+    spec_formatted = spec.format_path(
+        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
+    )
     return f"{spec_formatted}{ext}"
@@ -1191,7 +1174,7 @@ def _url_upload_tarball_and_specfile(
 class Uploader:
-    def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
+    def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
         self.mirror = mirror
         self.force = force
         self.update_index = update_index
@@ -1199,9 +1182,6 @@ def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
         self.tmpdir: str
         self.executor: concurrent.futures.Executor

-        # Verify if the mirror meets the requirements to push
-        self.mirror.ensure_mirror_usable("push")
-
     def __enter__(self):
         self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
         self._executor = spack.util.parallel.make_concurrent_executor()
@@ -1239,7 +1219,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
 class OCIUploader(Uploader):
     def __init__(
         self,
-        mirror: spack.mirrors.mirror.Mirror,
+        mirror: spack.mirror.Mirror,
         force: bool,
         update_index: bool,
         base_image: Optional[str],
@@ -1288,7 +1268,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
 class URLUploader(Uploader):
     def __init__(
         self,
-        mirror: spack.mirrors.mirror.Mirror,
+        mirror: spack.mirror.Mirror,
         force: bool,
         update_index: bool,
         signing_key: Optional[str],
@@ -1312,7 +1292,7 @@ def push(
 def make_uploader(
-    mirror: spack.mirrors.mirror.Mirror,
+    mirror: spack.mirror.Mirror,
     force: bool = False,
     update_index: bool = False,
     signing_key: Optional[str] = None,
@@ -1968,9 +1948,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None):
             "signature_verified": "true-if-binary-pkg-was-already-verified"
         }
     """
-    configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = (
-        spack.mirrors.mirror.MirrorCollection(binary=True).values()
-    )
+    configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
+        binary=True
+    ).values()
     if not configured_mirrors:
         tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
@@ -1995,7 +1975,7 @@ def fetch_url_to_mirror(url):
         for mirror in configured_mirrors:
             if mirror.fetch_url == url:
                 return mirror
-        return spack.mirrors.mirror.Mirror(url)
+        return spack.mirror.Mirror(url)

     mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
@@ -2349,9 +2329,7 @@ def is_backup_file(file):
     if not codesign:
         return
     for binary in changed_files:
-        # preserve the original inode by running codesign on a copy
-        with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
-            codesign("-fs-", tmp_binary)
+        codesign("-fs-", binary)

     # If we are installing back to the same location
     # relocate the sbang location if the spack directory changed
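
One side of the codesign hunk re-signs through a temporary copy so the binary keeps its original inode. A hedged standalone sketch of that "edit in place through a temporary file" idea (not Spack's fsys helper):

    import os
    import shutil
    import tempfile

    def edit_in_place(path, edit):
        # Run `edit` on a copy, then copy the bytes back over the original.
        # copyfile() truncates and rewrites the existing file, so the inode
        # that hard links and other references point at is preserved.
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
        os.close(fd)
        try:
            shutil.copy2(path, tmp)
            edit(tmp)
            shutil.copyfile(tmp, path)
        finally:
            os.unlink(tmp)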
@@ -2584,13 +2562,7 @@ def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
     return pkg_prefix


-def install_root_node(
-    spec: spack.spec.Spec,
-    unsigned=False,
-    force: bool = False,
-    sha256: Optional[str] = None,
-    allow_missing: bool = False,
-) -> None:
+def install_root_node(spec, unsigned=False, force=False, sha256=None):
     """Install the root node of a concrete spec from a buildcache.

     Checking the sha256 sum of a node before installation is usually needed only
@@ -2599,10 +2571,11 @@ def install_root_node(
     Args:
         spec: spec to be installed (note that only the root node will be installed)
-        unsigned: if True allows installing unsigned binaries
-        force: force installation if the spec is already present in the local store
-        sha256: optional sha256 of the binary package, to be checked before installation
-        allow_missing: when true, allows installing a node with missing dependencies
+        unsigned (bool): if True allows installing unsigned binaries
+        force (bool): force installation if the spec is already present in the
+            local store
+        sha256 (str): optional sha256 of the binary package, to be checked
+            before installation
     """
     # Early termination
     if spec.external or spec.virtual:
@@ -2640,7 +2613,7 @@ def install_root_node(
             spec, spack.store.STORE.layout.spec_file_path(spec)
         )
         spack.hooks.post_install(spec, False)
-        spack.store.STORE.db.add(spec, allow_missing=allow_missing)
+        spack.store.STORE.db.add(spec)


 def install_single_spec(spec, unsigned=False, force=False):
@@ -2665,7 +2638,7 @@ def try_direct_fetch(spec, mirrors=None):
     specfile_is_signed = False
     found_specs = []

-    binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
+    binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()

     for mirror in binary_mirrors:
         buildcache_fetch_url_json = url_util.join(
@@ -2726,7 +2699,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
     if spec is None:
         return []

-    if not spack.mirrors.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
+    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
         tty.debug("No Spack mirrors are currently configured")
         return {}
@@ -2765,7 +2738,7 @@ def clear_spec_cache():
 def get_keys(install=False, trust=False, force=False, mirrors=None):
     """Get pgp public keys available on mirror with suffix .pub"""
-    mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True)
+    mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)

     if not mirror_collection:
         tty.die("Please add a spack mirror to allow " + "download of build caches.")
@@ -2820,7 +2793,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
 def _url_push_keys(
-    *mirrors: Union[spack.mirrors.mirror.Mirror, str],
+    *mirrors: Union[spack.mirror.Mirror, str],
     keys: List[str],
     tmpdir: str,
     update_index: bool = False,
@@ -2887,7 +2860,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
     """
     rebuilds = {}
-    for mirror in spack.mirrors.mirror.MirrorCollection(mirrors, binary=True).values():
+    for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
         tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))

         rebuild_list = []
@@ -2931,7 +2904,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
 def download_buildcache_entry(file_descriptions, mirror_url=None):
-    if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True):
+    if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
         tty.die(
             "Please provide or add a spack mirror to allow " + "download of buildcache entries."
         )
@@ -2940,7 +2913,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
         mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
         return _download_buildcache_entry(mirror_root, file_descriptions)

-    for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values():
+    for mirror in spack.mirror.MirrorCollection(binary=True).values():
         mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)
         if _download_buildcache_entry(mirror_root, file_descriptions):


@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Common basic functions used through the spack.bootstrap package"""
 import fnmatch
-import glob
 import importlib
 import os.path
 import re
@@ -61,19 +60,10 @@ def _try_import_from_store(
         python, *_ = candidate_spec.dependencies("python-venv")
     else:
         python, *_ = candidate_spec.dependencies("python")

-    # if python is installed, ask it for the layout
-    if python.installed:
-        module_paths = [
-            os.path.join(candidate_spec.prefix, python.package.purelib),
-            os.path.join(candidate_spec.prefix, python.package.platlib),
-        ]
-    # otherwise search for the site-packages directory
-    # (clingo from binaries with truncated python-venv runtime)
-    else:
-        module_paths = glob.glob(
-            os.path.join(candidate_spec.prefix, "lib", "python*", "site-packages")
-        )
+    module_paths = [
+        os.path.join(candidate_spec.prefix, python.package.purelib),
+        os.path.join(candidate_spec.prefix, python.package.platlib),
+    ]
     path_before = list(sys.path)

     # NOTE: try module_paths first and last, last allows an existing version in path
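
A hedged sketch of the "try to import from a candidate store path" pattern this function implements (simplified: per the NOTE above, the real code also retries with the paths appended last):

    import importlib
    import sys

    def try_import_from(module_name, module_paths):
        saved = list(sys.path)
        sys.path = module_paths + saved  # candidate paths take precedence
        try:
            importlib.import_module(module_name)
            return True
        except ImportError:
            return False
        finally:
            sys.path = saved  # always restore the interpreter's search path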
@@ -227,13 +217,12 @@ def _root_spec(spec_str: str) -> str:
     # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())

-    # FIXME (compiler as nodes): recover the compiler for source bootstrapping
-    # if platform == "darwin":
-    #     spec_str += " %apple-clang"
-    if platform == "windows":
+    if platform == "darwin":
+        spec_str += " %apple-clang"
+    elif platform == "windows":
         spec_str += " %msvc"
-    # elif platform == "linux":
-    #     spec_str += " %gcc"
+    elif platform == "linux":
+        spec_str += " %gcc"
     elif platform == "freebsd":
         spec_str += " %clang"
     spec_str += f" platform={platform}"


@@ -16,9 +16,8 @@
 import archspec.cpu

-import spack.compilers.config
-import spack.compilers.libraries
-import spack.config
+import spack.compiler
+import spack.compilers
 import spack.platforms
 import spack.spec
 import spack.traverse
@@ -40,7 +39,7 @@ def __init__(self, configuration):
         self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)

-    def _valid_compiler_or_raise(self):
+    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
         if str(self.host_platform) == "linux":
             compiler_name = "gcc"
         elif str(self.host_platform) == "darwin":
@@ -48,30 +47,17 @@
         elif str(self.host_platform) == "windows":
             compiler_name = "msvc"
         elif str(self.host_platform) == "freebsd":
-            compiler_name = "llvm"
+            compiler_name = "clang"
         else:
             raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
-
-        candidates = [
-            x
-            for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
-            if x.name == compiler_name
-        ]
+        candidates = spack.compilers.compilers_for_spec(
+            compiler_name, arch_spec=self.host_architecture
+        )
         if not candidates:
             raise RuntimeError(
                 f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
             )
-        candidates.sort(key=lambda x: x.version, reverse=True)
-        best = candidates[0]
-        # Get compilers for bootstrapping from the 'builtin' repository
-        best.namespace = "builtin"
-        # If the compiler does not support C++ 14, fail with a legible error message
-        try:
-            _ = best.package.standard_flag(language="cxx", standard="14")
-        except RuntimeError as e:
-            raise RuntimeError(
-                "cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
-            ) from e
+        candidates.sort(key=lambda x: x.spec.version, reverse=True)
         return candidates[0]

     def _externals_from_yaml(
@@ -90,6 +76,9 @@ def _externals_from_yaml(
             if not s.satisfies(requirements[pkg_name]):
                 continue

+            if not s.intersects(f"%{self.host_compiler.spec}"):
+                continue
+
             if not s.intersects(f"arch={self.host_architecture}"):
                 continue
@@ -122,10 +111,11 @@ def concretize(self) -> "spack.spec.Spec":
         # Tweak it to conform to the host architecture
         for node in s.traverse():
             node.architecture.os = str(self.host_os)
+            node.compiler = self.host_compiler.spec
             node.architecture = self.host_architecture

             if node.name == "gcc-runtime":
-                node.versions = self.host_compiler.versions
+                node.versions = self.host_compiler.spec.versions

         for edge in spack.traverse.traverse_edges([s], cover="edges"):
             if edge.spec.name == "python":
@@ -137,9 +127,6 @@ def concretize(self) -> "spack.spec.Spec":
             if edge.spec.name == "cmake" and self.external_cmake:
                 edge.spec = self.external_cmake

-            if edge.spec.name == self.host_compiler.name:
-                edge.spec = self.host_compiler
-
             if "libc" in edge.virtuals:
                 edge.spec = self.host_libc
@@ -155,12 +142,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
         return self._external_spec(result)

     def libc_external_spec(self) -> "spack.spec.Spec":
-        detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
-        result = detector.default_libc()
+        result = self.host_compiler.default_libc
         return self._external_spec(result)

     def _external_spec(self, initial_spec) -> "spack.spec.Spec":
         initial_spec.namespace = "builtin"
+        initial_spec.compiler = self.host_compiler.spec
         initial_spec.architecture = self.host_architecture
         for flag_type in spack.spec.FlagMap.valid_compiler_flags():
             initial_spec.compiler_flags[flag_type] = []


@@ -11,7 +11,7 @@
 from llnl.util import tty

-import spack.compilers.config
+import spack.compilers
 import spack.config
 import spack.environment
 import spack.modules
@@ -143,8 +143,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
 def _add_compilers_if_missing() -> None:
     arch = spack.spec.ArchSpec.frontend_arch()
-    if not spack.compilers.config.compilers_for_arch(arch):
-        spack.compilers.config.find_compilers()
+    if not spack.compilers.compilers_for_arch(arch):
+        spack.compilers.find_compilers()


 @contextlib.contextmanager


@@ -37,7 +37,6 @@
 import spack.binary_distribution
 import spack.config
 import spack.detection
-import spack.mirrors.mirror
 import spack.platforms
 import spack.spec
 import spack.store
@@ -45,6 +44,7 @@
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
+import spack.util.url
 import spack.version
 from spack.installer import PackageInstaller
@@ -91,7 +91,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
         self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

         # Promote (relative) paths to file urls
-        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
+        url = conf["info"]["url"]
+        if spack.util.url.is_path_instead_of_url(url):
+            if not os.path.isabs(url):
+                url = os.path.join(self.metadata_dir, url)
+            url = spack.util.url.path_to_file_url(url)
+        self.url = url

     @property
     def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -170,15 +175,7 @@ def _install_by_hash(
         query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
         for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
             spack.binary_distribution.install_root_node(
-                # allow_missing is true since when bootstrapping clingo we truncate runtime
-                # deps such as gcc-runtime, since we link libstdc++ statically, and the other
-                # further runtime deps are loaded by the Python interpreter. This just silences
-                # warnings about missing dependencies.
-                match,
-                unsigned=True,
-                force=True,
-                sha256=pkg_sha256,
-                allow_missing=True,
+                match, unsigned=True, force=True, sha256=pkg_sha256
             )

 def _install_and_test(
@@ -281,12 +278,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
             # Install the spec that should make the module importable
             with spack.config.override(self.mirror_scope):
-                PackageInstaller(
-                    [concrete_spec.package],
-                    fail_fast=True,
-                    package_use_cache=False,
-                    dependencies_use_cache=False,
-                ).install()
+                PackageInstaller([concrete_spec.package], fail_fast=True).install()

             if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
                 self.last_search = info
@@ -322,10 +314,11 @@ def create_bootstrapper(conf: ConfigDictionary):
     return _bootstrap_methods[btype](conf)


-def source_is_enabled(conf: ConfigDictionary):
+def source_is_enabled_or_raise(conf: ConfigDictionary):
     """Raise ValueError if the source is not enabled for bootstrapping"""
     trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
-    return trusted.get(name, False)
+    if not trusted.get(name, False):
+        raise ValueError("source is not trusted")


 def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
@@ -355,10 +348,8 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
     exception_handler = GroupedExceptionHandler()

     for current_config in bootstrapping_sources():
-        if not source_is_enabled(current_config):
-            continue
         with exception_handler.forward(current_config["name"], Exception):
+            source_is_enabled_or_raise(current_config)
             current_bootstrapper = create_bootstrapper(current_config)
             if current_bootstrapper.try_import(module, abstract_spec):
                 return
@@ -370,7 +361,11 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
     msg = f'cannot bootstrap the "{module}" Python module '
     if abstract_spec:
         msg += f'from spec "{abstract_spec}" '
-    msg += exception_handler.grouped_message(with_tracebacks=tty.is_debug())
+    if tty.is_debug():
+        msg += exception_handler.grouped_message(with_tracebacks=True)
+    else:
+        msg += exception_handler.grouped_message(with_tracebacks=False)
+        msg += "\nRun `spack --debug ...` for more detailed errors"
     raise ImportError(msg)
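
A hedged sketch (not Spack's GroupedExceptionHandler) of the pattern used here: try each source in order, collect per-source failures, and raise one combined error only if nothing worked. bootstrap_from is a hypothetical stand-in:

    def bootstrap_from(source):
        # Hypothetical stand-in for a real bootstrapper; always fails here.
        raise RuntimeError(f"{source} unavailable")

    errors = {}
    for source in ("github-actions", "spack-install"):
        try:
            bootstrap_from(source)
            break
        except Exception as exc:
            errors[source] = exc
    else:
        details = "; ".join(f"{name}: {err}" for name, err in errors.items())
        raise ImportError(f"cannot bootstrap the module ({details})")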
@@ -408,9 +403,8 @@ def ensure_executables_in_path_or_raise(
     exception_handler = GroupedExceptionHandler()

     for current_config in bootstrapping_sources():
-        if not source_is_enabled(current_config):
-            continue
         with exception_handler.forward(current_config["name"], Exception):
+            source_is_enabled_or_raise(current_config)
             current_bootstrapper = create_bootstrapper(current_config)
             if current_bootstrapper.try_search_path(executables, abstract_spec):
                 # Additional environment variables needed
@@ -605,10 +599,7 @@ def bootstrapping_sources(scope: Optional[str] = None):
         current = copy.copy(entry)
         metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
         metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
-        try:
-            with open(metadata_yaml, encoding="utf-8") as stream:
-                current.update(spack.util.spack_yaml.load(stream))
-            list_of_sources.append(current)
-        except OSError:
-            pass
+        with open(metadata_yaml, encoding="utf-8") as stream:
+            current.update(spack.util.spack_yaml.load(stream))
+        list_of_sources.append(current)
     return list_of_sources

File diff suppressed because one or more lines are too long (seven files omitted).

@@ -37,13 +37,13 @@
 import multiprocessing
 import os
 import re
+import stat
 import sys
 import traceback
 import types
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from multiprocessing.connection import Connection
 from typing import Callable, Dict, List, Optional, Set, Tuple

 import archspec.cpu
@@ -54,12 +54,14 @@
 from llnl.util.lang import dedupe, stable_partition
 from llnl.util.symlink import symlink
 from llnl.util.tty.color import cescape, colorize
+from llnl.util.tty.log import MultiProcessFd

-import spack.build_systems._checks
 import spack.build_systems.cmake
 import spack.build_systems.meson
 import spack.build_systems.python
 import spack.builder
-import spack.compilers.libraries
+import spack.compilers
 import spack.config
 import spack.deptypes as dt
 import spack.error
@@ -73,6 +75,7 @@
 import spack.store
 import spack.subprocess_context
 import spack.util.executable
+import spack.util.libc
 from spack import traverse
 from spack.context import Context
 from spack.error import InstallError, NoHeadersError, NoLibrariesError
@@ -80,7 +83,6 @@
 from spack.util.environment import (
     SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,
-    PrependPath,
     env_flag,
     filter_system_paths,
     get_path,
@@ -296,10 +298,62 @@ def _add_werror_handling(keep_werror, env):
     env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))


-def set_wrapper_environment_variables_for_flags(pkg, env):
+def set_compiler_environment_variables(pkg, env):
     assert pkg.spec.concrete
+    compiler = pkg.compiler
     spec = pkg.spec

+    # Make sure the executables for this compiler exist
+    compiler.verify_executables()
+
+    # Set compiler variables used by CMake and autotools
+    assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))
+
+    # Populate an object with the list of environment modifications
+    # and return it
+    # TODO : add additional kwargs for better diagnostics, like requestor,
+    # ttyout, ttyerr, etc.
+    link_dir = spack.paths.build_env_path
+
+    # Set SPACK compiler variables so that our wrapper knows what to
+    # call. If there is no compiler configured then use a default
+    # wrapper which will emit an error if it is used.
+    if compiler.cc:
+        env.set("SPACK_CC", compiler.cc)
+        env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
+    else:
+        env.set("CC", os.path.join(link_dir, "cc"))
+    if compiler.cxx:
+        env.set("SPACK_CXX", compiler.cxx)
+        env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
+    else:
+        env.set("CC", os.path.join(link_dir, "c++"))
+    if compiler.f77:
+        env.set("SPACK_F77", compiler.f77)
+        env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
+    else:
+        env.set("F77", os.path.join(link_dir, "f77"))
+    if compiler.fc:
+        env.set("SPACK_FC", compiler.fc)
+        env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
+    else:
+        env.set("FC", os.path.join(link_dir, "fc"))
+
+    # Set SPACK compiler rpath flags so that our wrapper knows what to use
+    env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
+    env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
+    env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
+    env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
+
+    env.set("SPACK_LINKER_ARG", compiler.linker_arg)
+
+    # Check whether we want to force RPATH or RUNPATH
+    if spack.config.get("config:shared_linking:type") == "rpath":
+        env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
+        env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
+    else:
+        env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
+        env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
+
     if pkg.keep_werror is not None:
         keep_werror = pkg.keep_werror
     else:
@@ -307,6 +361,10 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
     _add_werror_handling(keep_werror, env)

+    # Set the target parameters that the compiler will add
+    isa_arg = optimization_flags(compiler, spec.target)
+    env.set("SPACK_TARGET_ARGS", isa_arg)
+
     # Trap spack-tracked compiler flags as appropriate.
     # env_flags are easy to accidentally override.
     inject_flags = {}
@@ -340,27 +398,74 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
         env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
     pkg.flags_to_build_system_args(build_system_flags)

+    env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
+
     env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)

-    # FIXME (compiler as nodes): recover this one in the correct packages
-    # compiler.setup_custom_environment(pkg, env)
+    compiler.setup_custom_environment(pkg, env)

     return env


 def optimization_flags(compiler, target):
+    if spack.compilers.is_mixed_toolchain(compiler):
+        msg = (
+            "microarchitecture specific optimizations are not "
+            "supported yet on mixed compiler toolchains [check"
+            f" {compiler.name}@{compiler.version} for further details]"
+        )
+        tty.debug(msg)
+        return ""
+
     # Try to check if the current compiler comes with a version number or
     # has an unexpected suffix. If so, treat it as a compiler with a
     # custom spec.
-    version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string)
+    compiler_version = compiler.version
+    version_number, suffix = archspec.cpu.version_components(compiler.version)
+    if not version_number or suffix:
+        try:
+            compiler_version = compiler.real_version
+        except spack.util.executable.ProcessError as e:
+            # log this and just return compiler.version instead
+            tty.debug(str(e))

     try:
-        result = target.optimization_flags(compiler.name, version_number)
+        result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
     except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
         result = ""

     return result
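
The archspec calls above are part of archspec's public API; a hedged minimal example of asking a target for compiler-specific optimization flags:

    import archspec.cpu

    target = archspec.cpu.TARGETS["haswell"]
    try:
        flags = target.optimization_flags("gcc", "12.3.0")
    except archspec.cpu.UnsupportedMicroarchitecture:
        flags = ""
    print(flags)  # e.g. "-march=haswell -mtune=haswell"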
+class FilterDefaultDynamicLinkerSearchPaths:
+    """Remove rpaths to directories that are default search paths of the dynamic linker."""
+
+    def __init__(self, dynamic_linker: Optional[str]) -> None:
+        # Identify directories by (inode, device) tuple, which handles symlinks too.
+        self.default_path_identifiers: Set[Tuple[int, int]] = set()
+        if not dynamic_linker:
+            return
+        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
+            try:
+                s = os.stat(path)
+                if stat.S_ISDIR(s.st_mode):
+                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
+            except OSError:
+                continue
+
+    def is_dynamic_loader_default_path(self, p: str) -> bool:
+        try:
+            s = os.stat(p)
+            return (s.st_ino, s.st_dev) in self.default_path_identifiers
+        except OSError:
+            return False
+
+    def __call__(self, dirs: List[str]) -> List[str]:
+        if not self.default_path_identifiers:
+            return dirs
+        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
+
+
 def set_wrapper_variables(pkg, env):
     """Set environment variables used by the Spack compiler wrapper (which have the prefix
     `SPACK_`) and also add the compiler wrappers to PATH.
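
The (st_ino, st_dev) trick in the class above is worth calling out: two paths name the same directory exactly when they stat to the same inode/device pair, which makes the filter robust to symlinks. A hedged demo (POSIX paths assumed):

    import os

    def same_directory(a: str, b: str) -> bool:
        sa, sb = os.stat(a), os.stat(b)  # os.stat() follows symlinks
        return (sa.st_ino, sa.st_dev) == (sb.st_ino, sb.st_dev)

    print(same_directory("/usr", "/usr/."))  # True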
@@ -369,8 +474,39 @@ def set_wrapper_variables(pkg, env):
this function computes these options in a manner that is intended to match the DAG traversal this function computes these options in a manner that is intended to match the DAG traversal
order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
is using topo order.""" is using topo order."""
# Set compiler flags injected from the spec # Set environment variables if specified for
set_wrapper_environment_variables_for_flags(pkg, env) # the given compiler
compiler = pkg.compiler
env.extend(spack.schema.environment.parse(compiler.environment))
if compiler.extra_rpaths:
extra_rpaths = ":".join(compiler.extra_rpaths)
env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
# Add spack build environment path with compiler wrappers first in
# the path. We add the compiler wrapper path, which includes default
# wrappers (cc, c++, f77, f90), AND a subdirectory containing
# compiler-specific symlinks. The latter ensures that builds that
# are sensitive to the *name* of the compiler see the right name when
# we're building with the wrappers.
#
# Conflicts on case-insensitive systems (like "CC" and "cc") are
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
compiler_specific = os.path.join(
spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
)
for item in [spack.paths.build_env_path, compiler_specific]:
env_paths.append(item)
ci = os.path.join(item, "case-insensitive")
if os.path.isdir(ci):
env_paths.append(ci)
tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
for item in env_paths:
env.prepend_path("PATH", item)
env.set_path(SPACK_ENV_PATH, env_paths)
# Working directory for the spack command itself, for debug logs. # Working directory for the spack command itself, for debug logs.
if spack.config.get("config:debug"): if spack.config.get("config:debug"):
@@ -436,15 +572,22 @@ def set_wrapper_variables(pkg, env):
lib_path = os.path.join(pkg.prefix, libdir) lib_path = os.path.join(pkg.prefix, libdir)
rpath_dirs.insert(0, lib_path) rpath_dirs.insert(0, lib_path)
filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
pkg.compiler.default_dynamic_linker
)
# TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
# branch above). link_dirs should be filtered with entries from _parse_link_paths. # branch above). link_dirs should be filtered with entries from _parse_link_paths.
link_dirs = list(dedupe(filter_system_paths(link_dirs))) link_dirs = list(dedupe(filter_system_paths(link_dirs)))
include_dirs = list(dedupe(filter_system_paths(include_dirs))) include_dirs = list(dedupe(filter_system_paths(include_dirs)))
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs))) rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec) # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
if default_dynamic_linker_filter: # just this filter.
rpath_dirs = default_dynamic_linker_filter(rpath_dirs) implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
if implicit_rpaths:
env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
# Spack managed directories include the stage, store and upstream stores. We extend this with # Spack managed directories include the stage, store and upstream stores. We extend this with
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS). # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
@@ -500,19 +643,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
# Put spack compiler paths in module scope. (Some packages use it # Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc, so don't put it context == build) # in setup_run_environment etc, so don't put it context == build)
link_dir = spack.paths.build_env_path link_dir = spack.paths.build_env_path
pkg_compiler = None
try:
pkg_compiler = pkg.compiler
except spack.compilers.NoCompilerForSpecError as e:
tty.debug(f"cannot set 'spack_cc': {str(e)}")
# FIXME (compiler as nodes): make this more general, and not tied to three languages if pkg_compiler is not None:
# Maybe add a callback? module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
global_names = { module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
"c": ("spack_cc",), module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
"cxx": ("spack_cxx",), module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
"fortran": ("spack_fc", "spack_f77"), else:
} module.spack_cc = None
for language in ("c", "cxx", "fortran"): module.spack_cxx = None
spec = pkg.spec.dependencies(virtuals=[language]) module.spack_f77 = None
value = None if not spec else os.path.join(link_dir, spec[0].package.link_paths[language]) module.spack_fc = None
for name in global_names[language]:
setattr(module, name, value)
# Useful directories within the prefix are encapsulated in # Useful directories within the prefix are encapsulated in
# a Prefix object. # a Prefix object.
@@ -679,6 +825,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
context == Context.TEST and pkg.test_requires_compiler context == Context.TEST and pkg.test_requires_compiler
) )
if need_compiler: if need_compiler:
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods) set_wrapper_variables(pkg, env_mods)
# Platform specific setup goes before package specific setup. This is for setting # Platform specific setup goes before package specific setup. This is for setting
@@ -690,11 +837,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    env_mods.extend(setup_context.get_env_modifications())
    tty.debug("setup_package: collected all modifications from dependencies")

-   tty.debug("setup_package: adding compiler wrappers paths")
-   for x in env_mods.group_by_name()["SPACK_ENV_PATH"]:
-       assert isinstance(x, PrependPath), "unexpected setting used for SPACK_ENV_PATH"
-       env_mods.prepend_path("PATH", x.value)

    if context == Context.TEST:
        env_mods.prepend_path("PATH", ".")
    elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
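A hedged toy version of what the removed loop does: replay every recorded SPACK_ENV_PATH prepend as a PATH prepend so the compiler wrappers win executable lookup (plain lists here, not Spack's EnvironmentModifications):

env_path_prepends = ["/opt/spack/lib/spack/env/case-insensitive", "/opt/spack/lib/spack/env"]

path = ["/usr/bin", "/bin"]
for wrapper_dir in env_path_prepends:
    path.insert(0, wrapper_dir)  # mirrors env_mods.prepend_path("PATH", x.value)

print(":".join(path))  # wrapper dirs come first, so 'cc' resolves to the wrapper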
@@ -708,6 +850,11 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    # Load modules on an already clean environment, just before applying Spack's
    # own environment modifications. This ensures Spack controls CC/CXX/... variables.
+   if need_compiler:
+       tty.debug("setup_package: loading compiler modules")
+       for mod in pkg.compiler.modules:
+           load_module(mod)

    load_external_modules(pkg)

    # Make sure nothing's strange about the Spack environment.
@@ -736,9 +883,6 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
        elif context == Context.RUN:
            self.root_depflag = dt.RUN | dt.LINK

-   def accept(self, item):
-       return True

    def neighbors(self, item):
        spec = item.edge.spec
        if spec.dag_hash() in self.root_hashes:
@@ -776,19 +920,19 @@ def effective_deptypes(
    a flag specifying in what way they do so. The list is ordered topologically
    from root to leaf, meaning that environment modifications should be applied
    in reverse so that dependents override dependencies, not the other way around."""
-   topo_sorted_edges = traverse.traverse_topo_edges_generator(
-       traverse.with_artificial_edges(specs),
-       visitor=EnvironmentVisitor(*specs, context=context),
-       key=traverse.by_dag_hash,
-       root=True,
-       all_edges=True,
-   )
+   visitor = traverse.TopoVisitor(
+       EnvironmentVisitor(*specs, context=context),
+       key=lambda x: x.dag_hash(),
+       root=True,
+       all_edges=True,
+   )
+   traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)

    # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
    use_modes = defaultdict(lambda: UseMode(0))
    nodes_with_type = []
-   for edge in topo_sorted_edges:
+   for edge in visitor.edges:
        parent, child, depflag = edge.parent, edge.spec, edge.depflag

        # Mark the starting point
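A hedged sketch of the ordering contract described in the docstring: nodes come out root-to-leaf, and environment modifications are applied in reverse (leaf-to-root) so that dependents override dependencies. Toy tree-shaped DAG, not Spack's traverse API:

from collections import defaultdict

deps = {"root": ["libA", "libB"], "libA": ["zlib"], "libB": [], "zlib": []}

def topo(node, seen=None, out=None):
    seen = set() if seen is None else seen
    out = [] if out is None else out
    if node in seen:
        return out
    seen.add(node)
    out.append(node)  # pre-order: a node before its dependencies
    for dep in deps[node]:
        topo(dep, seen, out)
    return out

order = topo("root")          # ['root', 'libA', 'zlib', 'libB']
env = {}
for node in reversed(order):  # leaves first, root last
    env["CPATH"] = node       # later (more dependent) nodes win
assert env["CPATH"] == "root"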
@@ -917,8 +1061,8 @@ def set_all_package_py_globals(self):
            pkg.setup_dependent_package(dependent_module, spec)
            dependent_module.propagate_changes_to_mro()

-       pkg = self.specs[0].package
        if self.context == Context.BUILD:
+           pkg = self.specs[0].package
            module = ModuleChangePropagator(pkg)
            # std_cmake_args is not sufficiently static to be defined
            # in set_package_py_globals and is deprecated so its handled
@@ -999,10 +1143,10 @@ def _setup_pkg_and_run(
serialized_pkg: "spack.subprocess_context.PackageInstallContext", serialized_pkg: "spack.subprocess_context.PackageInstallContext",
function: Callable, function: Callable,
kwargs: Dict, kwargs: Dict,
write_pipe: Connection, write_pipe: multiprocessing.connection.Connection,
input_pipe: Optional[Connection], input_multiprocess_fd: Optional[MultiProcessFd],
jsfd1: Optional[Connection], jsfd1: Optional[MultiProcessFd],
jsfd2: Optional[Connection], jsfd2: Optional[MultiProcessFd],
): ):
"""Main entry point in the child process for Spack builds. """Main entry point in the child process for Spack builds.
@@ -1044,12 +1188,13 @@ def _setup_pkg_and_run(
    context: str = kwargs.get("context", "build")

    try:
-       # We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
-       # process and its parent from simultaneously reading from the original stdin. But, we
-       # assume that the parent process is not going to read from it till we are done with the
-       # child, so we undo Python's precaution. closefd=False since Connection has ownership.
-       if input_pipe is not None:
-           sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)
+       # We are in the child process. Python sets sys.stdin to
+       # open(os.devnull) to prevent our process and its parent from
+       # simultaneously reading from the original stdin. But, we assume
+       # that the parent process is not going to read from it till we
+       # are done with the child, so we undo Python's precaution.
+       if input_multiprocess_fd is not None:
+           sys.stdin = os.fdopen(input_multiprocess_fd.fd)

        pkg = serialized_pkg.restore()
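A hedged, self-contained sketch of this stdin hand-off: the parent passes a duplicated file descriptor and the child reopens it as its own sys.stdin. It uses a pipe instead of a TTY so it runs anywhere, and assumes the POSIX "fork" start method so the fd is inherited:

import multiprocessing
import os
import sys

def child(fd: int) -> None:
    sys.stdin = os.fdopen(fd, closefd=False)  # closefd=False: parent keeps ownership
    print("child read:", sys.stdin.readline().strip())

if __name__ == "__main__":
    ctx = multiprocessing.get_context("fork")  # assumption: POSIX platform
    r, w = os.pipe()
    os.write(w, b"hello\n")
    p = ctx.Process(target=child, args=(r,))
    p.start()
    p.join()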
@@ -1118,8 +1263,8 @@ def _setup_pkg_and_run(
    finally:
        write_pipe.close()
-       if input_pipe is not None:
-           input_pipe.close()
+       if input_multiprocess_fd is not None:
+           input_multiprocess_fd.close()

def start_build_process(pkg, function, kwargs):
@@ -1146,9 +1291,23 @@ def child_fun():
    If something goes wrong, the child process catches the error and
    passes it to the parent wrapped in a ChildError. The parent is
    expected to handle (or re-raise) the ChildError.

+   This uses `multiprocessing.Process` to create the child process. The
+   mechanism used to create the process differs on different operating
+   systems and for different versions of Python. In some cases "fork"
+   is used (i.e. the "fork" system call) and some cases it starts an
+   entirely new Python interpreter process (in the docs this is referred
+   to as the "spawn" start method). Breaking it down by OS:
+
+   - Linux always uses fork.
+   - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
+   - Windows always uses the "spawn" start method.
+
+   For more information on `multiprocessing` child process creation
+   mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
-   input_fd = None
+   input_multiprocess_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None
@@ -1157,13 +1316,14 @@ def child_fun():
    try:
        # Forward sys.stdin when appropriate, to allow toggling verbosity
        if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
-           input_fd = Connection(os.dup(sys.stdin.fileno()))
+           input_fd = os.dup(sys.stdin.fileno())
+           input_multiprocess_fd = MultiProcessFd(input_fd)

        mflags = os.environ.get("MAKEFLAGS", False)
        if mflags:
            m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
            if m:
-               jobserver_fd1 = Connection(int(m.group(1)))
-               jobserver_fd2 = Connection(int(m.group(2)))
+               jobserver_fd1 = MultiProcessFd(int(m.group(1)))
+               jobserver_fd2 = MultiProcessFd(int(m.group(2)))

        p = multiprocessing.Process(
            target=_setup_pkg_and_run,
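A hedged sketch of the MAKEFLAGS parsing above: GNU make advertises its jobserver pipe as "--jobserver-auth=R,W" (older releases: --jobserver-fds=R,W), where R and W are file descriptor numbers inherited by child processes; the example value below is hypothetical:

import re

mflags = "-j8 --jobserver-auth=3,4"  # normally os.environ["MAKEFLAGS"]
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
if m:
    read_fd, write_fd = int(m.group(1)), int(m.group(2))
    assert (read_fd, write_fd) == (3, 4)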
@@ -1172,7 +1332,7 @@ def child_fun():
                function,
                kwargs,
                write_pipe,
-               input_fd,
+               input_multiprocess_fd,
                jobserver_fd1,
                jobserver_fd2,
            ),
@@ -1192,8 +1352,8 @@ def child_fun():
    finally:
        # Close the input stream in the parent process
-       if input_fd is not None:
-           input_fd.close()
+       if input_multiprocess_fd is not None:
+           input_multiprocess_fd.close()

    def exitcode_msg(p):
        typ = "exit" if p.exitcode >= 0 else "signal"
@@ -1231,7 +1391,7 @@ def exitcode_msg(p):
    return child_result
- CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
+ CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
def get_package_context(traceback, context=3):
@@ -1280,20 +1440,27 @@ def make_stack(tb, stack=None):
    # We found obj, the Package implementation we care about.
    # Point out the location in the install method where we failed.
    filename = inspect.getfile(frame.f_code)
-   lines = [f"{filename}:{frame.f_lineno}, in {frame.f_code.co_name}:"]
+   lineno = frame.f_lineno
+   if os.path.basename(filename) == "package.py":
+       # subtract 1 because we inject a magic import at the top of package files.
+       # TODO: get rid of the magic import.
+       lineno -= 1
+   lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]

    # Build a message showing context in the install method.
    sourcelines, start = inspect.getsourcelines(frame)

    # Calculate lineno of the error relative to the start of the function.
-   fun_lineno = frame.f_lineno - start
+   fun_lineno = lineno - start
    start_ctx = max(0, fun_lineno - context)
    sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]

    for i, line in enumerate(sourcelines):
        is_error = start_ctx + i == fun_lineno
+       mark = ">> " if is_error else "   "
        # Add start to get lineno relative to start of file, not function.
-       marked = f"  {'>> ' if is_error else '   '}{start + start_ctx + i:-6d}{line.rstrip()}"
+       marked = "  {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip())
        if is_error:
            marked = colorize("@R{%s}" % cescape(marked))
        lines.append(marked)
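A hedged standalone version of the context-window logic above: mark the failing line with ">>" and show `context` lines on either side, numbered relative to the start of the file. The sample source lines are hypothetical:

def make_context(sourcelines, start, err_lineno, context=3):
    fun_lineno = err_lineno - start
    start_ctx = max(0, fun_lineno - context)
    window = sourcelines[start_ctx : fun_lineno + context + 1]
    out = []
    for i, line in enumerate(window):
        mark = ">> " if start_ctx + i == fun_lineno else "   "
        out.append("  {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip()))
    return out

src = ["def install(self):\n", "    make()\n", "    make('install')\n"]
for row in make_context(src, start=10, err_lineno=11, context=1):
    print(row)  # the middle line is prefixed with '>>'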
@@ -9,7 +9,6 @@
import spack.builder
import spack.error
- import spack.phase_callbacks
import spack.relocate
import spack.spec
import spack.store
@@ -64,7 +63,7 @@ def apply_macos_rpath_fixups(builder: spack.builder.Builder):
def ensure_build_dependencies_or_raise(
-     spec: spack.spec.Spec, dependencies: List[str], error_msg: str
+     spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
):
    """Ensure that some build dependencies are present in the concrete spec.
@@ -72,7 +71,7 @@ def ensure_build_dependencies_or_raise(
    Args:
        spec: concrete spec to be checked.
-       dependencies: list of package names of required build dependencies
+       dependencies: list of abstract specs to be satisfied
        error_msg: brief error message to be prepended to a longer description

    Raises:
@@ -128,8 +127,8 @@ def execute_install_time_tests(builder: spack.builder.Builder):
    builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)

- class BuilderWithDefaults(spack.builder.Builder):
-     """Base class for all specific builders with common callbacks registered."""
+ class BaseBuilder(spack.builder.Builder):
+     """Base class for builders to register common checks"""

    # Check that self.prefix is there after installation
-   spack.phase_callbacks.run_after("install")(sanity_check_prefix)
+   spack.builder.run_after("install")(sanity_check_prefix)
@@ -6,19 +6,15 @@
import os.path
import stat
import subprocess
- from typing import Callable, List, Optional, Set, Tuple, Union
+ from typing import List

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.build_environment
import spack.builder
- import spack.compilers.libraries
import spack.error
import spack.package_base
- import spack.phase_callbacks
- import spack.spec
- import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.operating_systems.mac_os import macos_version
@@ -26,7 +22,7 @@
from spack.version import Version

from ._checks import (
-     BuilderWithDefaults,
+     BaseBuilder,
    apply_macos_rpath_fixups,
    ensure_build_dependencies_or_raise,
    execute_build_time_tests,
@@ -73,14 +69,14 @@ def flags_to_build_system_args(self, flags):
    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
    def enable_or_disable(self, *args, **kwargs):
-       return spack.builder.create(self).enable_or_disable(*args, **kwargs)
+       return self.builder.enable_or_disable(*args, **kwargs)

    def with_or_without(self, *args, **kwargs):
-       return spack.builder.create(self).with_or_without(*args, **kwargs)
+       return self.builder.with_or_without(*args, **kwargs)

@spack.builder.builder("autotools")
- class AutotoolsBuilder(BuilderWithDefaults):
+ class AutotoolsBuilder(BaseBuilder):
    """The autotools builder encodes the default way of installing software built
    with autotools. It has four phases that can be overridden, if need be:
@@ -161,7 +157,7 @@ class AutotoolsBuilder(BuilderWithDefaults):
    install_libtool_archives = False

    @property
-   def patch_config_files(self) -> bool:
+   def patch_config_files(self):
        """Whether to update old ``config.guess`` and ``config.sub`` files
        distributed with the tarball.
@@ -181,7 +177,7 @@ def patch_config_files(self) -> bool:
        )

    @property
-   def _removed_la_files_log(self) -> str:
+   def _removed_la_files_log(self):
        """File containing the list of removed libtool archives"""
        build_dir = self.build_directory
        if not os.path.isabs(self.build_directory):
@@ -189,15 +185,15 @@ def _removed_la_files_log(self) -> str:
        return os.path.join(build_dir, "removed_la_files.txt")

    @property
-   def archive_files(self) -> List[str]:
+   def archive_files(self):
        """Files to archive for packages based on autotools"""
        files = [os.path.join(self.build_directory, "config.log")]
        if not self.install_libtool_archives:
            files.append(self._removed_la_files_log)
        return files
-   @spack.phase_callbacks.run_after("autoreconf")
-   def _do_patch_config_files(self) -> None:
+   @spack.builder.run_after("autoreconf")
+   def _do_patch_config_files(self):
        """Some packages ship with older config.guess/config.sub files and need to
        have these updated when installed on a newer architecture.
@@ -298,7 +294,7 @@ def runs_ok(script_abs_path):
            and set the prefix to the directory containing the `config.guess` and
            `config.sub` files.
            """
-           raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.pkg.name))
+           raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))

        # Copy the good files over the bad ones
        for abs_path in to_be_patched:
@@ -308,8 +304,8 @@ def runs_ok(script_abs_path):
            fs.copy(substitutes[name], abs_path)
            os.chmod(abs_path, mode)
-   @spack.phase_callbacks.run_before("configure")
-   def _patch_usr_bin_file(self) -> None:
+   @spack.builder.run_before("configure")
+   def _patch_usr_bin_file(self):
        """On NixOS file is not available in /usr/bin/file. Patch configure
        scripts to use file from path."""
@@ -320,8 +316,8 @@ def _patch_usr_bin_file(self) -> None:
            with fs.keep_modification_time(*x.filenames):
                x.filter(regex="/usr/bin/file", repl="file", string=True)

-   @spack.phase_callbacks.run_before("configure")
-   def _set_autotools_environment_variables(self) -> None:
+   @spack.builder.run_before("configure")
+   def _set_autotools_environment_variables(self):
        """Many autotools builds use a version of mknod.m4 that fails when
        running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.
@@ -334,8 +330,8 @@ def _set_autotools_environment_variables(self) -> None:
""" """
os.environ["FORCE_UNSAFE_CONFIGURE"] = "1" os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"
@spack.phase_callbacks.run_before("configure") @spack.builder.run_before("configure")
def _do_patch_libtool_configure(self) -> None: def _do_patch_libtool_configure(self):
"""Patch bugs that propagate from libtool macros into "configure" and """Patch bugs that propagate from libtool macros into "configure" and
further into "libtool". Note that patches that can be fixed by patching further into "libtool". Note that patches that can be fixed by patching
"libtool" directly should be implemented in the _do_patch_libtool method "libtool" directly should be implemented in the _do_patch_libtool method
@@ -362,8 +358,8 @@ def _do_patch_libtool_configure(self) -> None:
            # Support Libtool 2.4.2 and older:
            x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')

-   @spack.phase_callbacks.run_after("configure")
-   def _do_patch_libtool(self) -> None:
+   @spack.builder.run_after("configure")
+   def _do_patch_libtool(self):
        """If configure generates a "libtool" script that does not correctly
        detect the compiler (and patch_libtool is set), patch in the correct
        values for libtool variables.
@@ -397,44 +393,33 @@ def _do_patch_libtool(self) -> None:
markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper()) markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())
# Replace empty linker flag prefixes: # Replace empty linker flag prefixes:
if self.spec.satisfies("%nag"): if self.pkg.compiler.name == "nag":
# Nag is mixed with gcc and g++, which are recognized correctly. # Nag is mixed with gcc and g++, which are recognized correctly.
# Therefore, we change only Fortran values: # Therefore, we change only Fortran values:
nag_pkg = self.spec["fortran"].package
for tag in ["fc", "f77"]: for tag in ["fc", "f77"]:
marker = markers[tag] marker = markers[tag]
x.filter( x.filter(
regex='^wl=""$', regex='^wl=""$',
repl=f'wl="{nag_pkg.linker_arg}"', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
start_at=f"# ### BEGIN {marker}", start_at="# ### BEGIN {0}".format(marker),
stop_at=f"# ### END {marker}", stop_at="# ### END {0}".format(marker),
) )
else: else:
compiler_spec = spack.compilers.libraries.compiler_spec(self.spec) x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))
if compiler_spec:
x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))
# Replace empty PIC flag values: # Replace empty PIC flag values:
for compiler, marker in markers.items(): for cc, marker in markers.items():
if compiler == "cc":
language = "c"
elif compiler == "cxx":
language = "cxx"
else:
language = "fortran"
if language not in self.spec:
continue
x.filter( x.filter(
regex='^pic_flag=""$', regex='^pic_flag=""$',
repl=f'pic_flag="{self.spec[language].package.pic_flag}"', repl='pic_flag="{0}"'.format(
start_at=f"# ### BEGIN {marker}", getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
stop_at=f"# ### END {marker}", ),
start_at="# ### BEGIN {0}".format(marker),
stop_at="# ### END {0}".format(marker),
) )
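A hedged toy equivalent of the marker-delimited filtering used here (Spack's real x.filter takes start_at/stop_at arguments); plain Python that rewrites wl="" only inside one LIBTOOL TAG CONFIG section:

import re

text = """# ### BEGIN LIBTOOL TAG CONFIG: FC
wl=""
# ### END LIBTOOL TAG CONFIG: FC
wl=""
"""

def filter_between(text, regex, repl, start_at, stop_at):
    out, active = [], False
    for line in text.splitlines():
        if line == start_at:
            active = True
        elif line == stop_at:
            active = False
        elif active:
            line = re.sub(regex, repl, line)
        out.append(line)
    return "\n".join(out) + "\n"

patched = filter_between(text, r'^wl=""$', 'wl="-Wl,"',
                         "# ### BEGIN LIBTOOL TAG CONFIG: FC",
                         "# ### END LIBTOOL TAG CONFIG: FC")
assert patched.count('wl="-Wl,"') == 1  # the wl="" outside the section is untouched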
        # Other compiler-specific patches:
-       if self.spec.satisfies("%fj"):
+       if self.pkg.compiler.name == "fj":
            x.filter(regex="-nostdlib", repl="", string=True)
            rehead = r"/\S*/"
            for o in [
@@ -447,7 +432,7 @@ def _do_patch_libtool(self) -> None:
r"crtendS\.o", r"crtendS\.o",
]: ]:
x.filter(regex=(rehead + o), repl="") x.filter(regex=(rehead + o), repl="")
elif self.spec.satisfies("%nag"): elif self.pkg.compiler.name == "nag":
for tag in ["fc", "f77"]: for tag in ["fc", "f77"]:
marker = markers[tag] marker = markers[tag]
start_at = "# ### BEGIN {0}".format(marker) start_at = "# ### BEGIN {0}".format(marker)
@@ -522,64 +507,27 @@ def _do_patch_libtool(self) -> None:
        )

    @property
-   def configure_directory(self) -> str:
+   def configure_directory(self):
        """Return the directory where 'configure' resides."""
        return self.pkg.stage.source_path

    @property
-   def configure_abs_path(self) -> str:
+   def configure_abs_path(self):
        # Absolute path to configure
        configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
        return configure_abs_path

    @property
-   def build_directory(self) -> str:
+   def build_directory(self):
        """Override to provide another place to build the package"""
        return self.configure_directory

-   @spack.phase_callbacks.run_before("autoreconf")
-   def delete_configure_to_force_update(self) -> None:
+   @spack.builder.run_before("autoreconf")
+   def delete_configure_to_force_update(self):
        if self.force_autoreconf:
            fs.force_remove(self.configure_abs_path)

-   @property
-   def autoreconf_search_path_args(self) -> List[str]:
-       """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
-       of build deps, skips the default path of automake, move external include
-       flags to the back, since they might pull in unrelated m4 files shadowing
-       spack dependencies."""
-       return _autoreconf_search_path_args(self.spec)
-
-   @spack.phase_callbacks.run_after("autoreconf")
-   def set_configure_or_die(self) -> None:
-       """Ensure the presence of a "configure" script, or raise. If the "configure"
-       is found, a module level attribute is set.
-
-       Raises:
-           RuntimeError: if the "configure" script is not found
-       """
-       # Check if the "configure" script is there. If not raise a RuntimeError.
-       if not os.path.exists(self.configure_abs_path):
-           msg = "configure script not found in {0}"
-           raise RuntimeError(msg.format(self.configure_directory))
-
-       # Monkey-patch the configure script in the corresponding module
-       globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
-       globals_for_pkg.configure = Executable(self.configure_abs_path)
-       globals_for_pkg.propagate_changes_to_mro()
-
-   def configure_args(self) -> List[str]:
-       """Return the list of all the arguments that must be passed to configure,
-       except ``--prefix`` which will be pre-pended to the list.
-       """
-       return []
-
-   def autoreconf(
-       self,
-       pkg: spack.package_base.PackageBase,
-       spec: spack.spec.Spec,
-       prefix: spack.util.prefix.Prefix,
-   ) -> None:
+   def autoreconf(self, pkg, spec, prefix):
        """Not needed usually, configure should be already there"""
        # If configure exists nothing needs to be done
@@ -606,12 +554,39 @@ def autoreconf(
        autoreconf_args += self.autoreconf_extra_args
        self.pkg.module.autoreconf(*autoreconf_args)

-   def configure(
-       self,
-       pkg: spack.package_base.PackageBase,
-       spec: spack.spec.Spec,
-       prefix: spack.util.prefix.Prefix,
-   ) -> None:
+   @property
+   def autoreconf_search_path_args(self):
+       """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
+       of build deps, skips the default path of automake, move external include
+       flags to the back, since they might pull in unrelated m4 files shadowing
+       spack dependencies."""
+       return _autoreconf_search_path_args(self.spec)
+
+   @spack.builder.run_after("autoreconf")
+   def set_configure_or_die(self):
+       """Ensure the presence of a "configure" script, or raise. If the "configure"
+       is found, a module level attribute is set.
+
+       Raises:
+           RuntimeError: if the "configure" script is not found
+       """
+       # Check if the "configure" script is there. If not raise a RuntimeError.
+       if not os.path.exists(self.configure_abs_path):
+           msg = "configure script not found in {0}"
+           raise RuntimeError(msg.format(self.configure_directory))
+
+       # Monkey-patch the configure script in the corresponding module
+       globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
+       globals_for_pkg.configure = Executable(self.configure_abs_path)
+       globals_for_pkg.propagate_changes_to_mro()
+
+   def configure_args(self):
+       """Return the list of all the arguments that must be passed to configure,
+       except ``--prefix`` which will be pre-pended to the list.
+       """
+       return []
+
+   def configure(self, pkg, spec, prefix):
        """Run "configure", with the arguments specified by the builder and an
        appropriately set prefix.
        """
@@ -622,12 +597,7 @@ def configure(
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.configure(*options)
-   def build(
-       self,
-       pkg: spack.package_base.PackageBase,
-       spec: spack.spec.Spec,
-       prefix: spack.util.prefix.Prefix,
-   ) -> None:
+   def build(self, pkg, spec, prefix):
        """Run "make" on the build targets specified by the builder."""
        # See https://autotools.io/automake/silent.html
        params = ["V=1"]
@@ -635,49 +605,41 @@ def build(
        with fs.working_dir(self.build_directory):
            pkg.module.make(*params)

-   def install(
-       self,
-       pkg: spack.package_base.PackageBase,
-       spec: spack.spec.Spec,
-       prefix: spack.util.prefix.Prefix,
-   ) -> None:
+   def install(self, pkg, spec, prefix):
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)

-   spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+   spack.builder.run_after("build")(execute_build_time_tests)

-   def check(self) -> None:
+   def check(self):
        """Run "make" on the ``test`` and ``check`` targets, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("test")
            self.pkg._if_make_target_execute("check")
    def _activate_or_not(
-       self,
-       name: str,
-       activation_word: str,
-       deactivation_word: str,
-       activation_value: Optional[Union[Callable, str]] = None,
-       variant=None,
-   ) -> List[str]:
+       self, name, activation_word, deactivation_word, activation_value=None, variant=None
+   ):
        """This function contain the current implementation details of
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.

        Args:
-           name: name of the option that is being activated or not
-           activation_word: the default activation word ('with' in the case of
-               ``with_or_without``)
-           deactivation_word: the default deactivation word ('without' in the case of
-               ``with_or_without``)
-           activation_value: callable that accepts a single value. This value is either one of the
-               allowed values for a multi-valued variant or the name of a bool-valued variant.
+           name (str): name of the option that is being activated or not
+           activation_word (str): the default activation word ('with' in the
+               case of ``with_or_without``)
+           deactivation_word (str): the default deactivation word ('without'
+               in the case of ``with_or_without``)
+           activation_value (typing.Callable): callable that accepts a single
+               value. This value is either one of the allowed values for a
+               multi-valued variant or the name of a bool-valued variant.
                Returns the parameter to be used when the value is activated.
-               The special value "prefix" can also be assigned and will return
+               The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.
-           variant: name of the variant that is being processed (if different from option name)
+           variant (str): name of the variant that is being processed
+               (if different from option name)

        Examples:
@@ -685,19 +647,19 @@ def _activate_or_not(
            .. code-block:: python

-               variant("foo", values=("x", "y"), description="")
-               variant("bar", default=True, description="")
-               variant("ba_z", default=True, description="")
+               variant('foo', values=('x', 'y'), description='')
+               variant('bar', default=True, description='')
+               variant('ba_z', default=True, description='')

            calling this function like:

            .. code-block:: python

-               _activate_or_not(
-                   "foo", "with", "without", activation_value="prefix"
-               )
-               _activate_or_not("bar", "with", "without")
-               _activate_or_not("ba-z", "with", "without", variant="ba_z")
+               _activate_or_not(
+                   'foo', 'with', 'without', activation_value='prefix'
+               )
+               _activate_or_not('bar', 'with', 'without')
+               _activate_or_not('ba-z', 'with', 'without', variant='ba_z')

            will generate the following configuration options:
@@ -717,8 +679,8 @@ def _activate_or_not(
        Raises:
            KeyError: if name is not among known variants
        """
-       spec: spack.spec.Spec = self.pkg.spec
-       args: List[str] = []
+       spec = self.pkg.spec
+       args = []

        if activation_value == "prefix":
            activation_value = lambda x: spec[x].prefix
@@ -736,7 +698,7 @@ def _activate_or_not(
        # Create a list of pairs. Each pair includes a configuration
        # option and whether or not that option is activated
        vdef = self.pkg.get_variant(variant)
-       if set(vdef.values) == set((True, False)):  # type: ignore
+       if set(vdef.values) == set((True, False)):
            # BoolValuedVariant carry information about a single option.
            # Nonetheless, for uniformity of treatment we'll package them
            # in an iterable of one element.
@@ -747,12 +709,14 @@ def _activate_or_not(
            # package's build system. It excludes values which have special
            # meanings and do not correspond to features (e.g. "none")
            feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
-           options = [(v, f"{variant}={v}" in spec) for v in feature_values]  # type: ignore
+           options = [(value, f"{variant}={value}" in spec) for value in feature_values]

        # For each allowed value in the list of values
        for option_value, activated in options:
            # Search for an override in the package for this value
-           override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
+           override_name = "{0}_or_{1}_{2}".format(
+               activation_word, deactivation_word, option_value
+           )
            line_generator = getattr(self, override_name, None) or getattr(
                self.pkg, override_name, None
            )
@@ -761,24 +725,19 @@ def _activate_or_not(
                def _default_generator(is_activated):
                    if is_activated:
-                       line = f"--{activation_word}-{option_value}"
+                       line = "--{0}-{1}".format(activation_word, option_value)
                        if activation_value is not None and activation_value(
                            option_value
                        ):  # NOQA=ignore=E501
-                           line = f"{line}={activation_value(option_value)}"
+                           line += "={0}".format(activation_value(option_value))
                        return line
-                   return f"--{deactivation_word}-{option_value}"
+                   return "--{0}-{1}".format(deactivation_word, option_value)

                line_generator = _default_generator
            args.append(line_generator(activated))
        return args
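A hedged standalone sketch of the flag generation above: given whether a feature is active and an optional value callback, emit the corresponding configure flag (the prefixes dict stands in for spec[name].prefix lookups):

def flag_for(option_value, activated, activation_word="with",
             deactivation_word="without", activation_value=None):
    if activated:
        line = "--{0}-{1}".format(activation_word, option_value)
        if activation_value is not None and activation_value(option_value):
            line += "={0}".format(activation_value(option_value))
        return line
    return "--{0}-{1}".format(deactivation_word, option_value)

prefixes = {"foo": "/opt/foo"}  # hypothetical prefix lookup
assert flag_for("foo", True, activation_value=prefixes.get) == "--with-foo=/opt/foo"
assert flag_for("bar", False) == "--without-bar"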
-   def with_or_without(
-       self,
-       name: str,
-       activation_value: Optional[Union[Callable, str]] = None,
-       variant: Optional[str] = None,
-   ) -> List[str]:
+   def with_or_without(self, name, activation_value=None, variant=None):
        """Inspects a variant and returns the arguments that activate
        or deactivate the selected feature(s) for the configure options.
@@ -793,11 +752,12 @@ def with_or_without(
        ``variant=value`` is in the spec.

        Args:
-           name: name of a valid multi-valued variant
-           activation_value: callable that accepts a single value and returns the parameter to be
-               used leading to an entry of the type ``--with-{name}={parameter}``.
+           name (str): name of a valid multi-valued variant
+           activation_value (typing.Callable): callable that accepts a single
+               value and returns the parameter to be used leading to an entry
+               of the type ``--with-{name}={parameter}``.

-               The special value "prefix" can also be assigned and will return
+               The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.

        Returns:
@@ -805,22 +765,18 @@ def with_or_without(
""" """
return self._activate_or_not(name, "with", "without", activation_value, variant) return self._activate_or_not(name, "with", "without", activation_value, variant)
def enable_or_disable( def enable_or_disable(self, name, activation_value=None, variant=None):
self,
name: str,
activation_value: Optional[Union[Callable, str]] = None,
variant: Optional[str] = None,
) -> List[str]:
"""Same as """Same as
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``. but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
Args: Args:
name: name of a valid multi-valued variant name (str): name of a valid multi-valued variant
activation_value: if present accepts a single value and returns the parameter to be activation_value (typing.Callable): if present accepts a single value
used leading to an entry of the type ``--enable-{name}={parameter}`` and returns the parameter to be used leading to an entry of the
type ``--enable-{name}={parameter}``
The special value "prefix" can also be assigned and will return The special value 'prefix' can also be assigned and will return
``spec[name].prefix`` as activation parameter. ``spec[name].prefix`` as activation parameter.
Returns: Returns:
@@ -828,15 +784,15 @@ def enable_or_disable(
""" """
return self._activate_or_not(name, "enable", "disable", activation_value, variant) return self._activate_or_not(name, "enable", "disable", activation_value, variant)
spack.phase_callbacks.run_after("install")(execute_install_time_tests) spack.builder.run_after("install")(execute_install_time_tests)
def installcheck(self) -> None: def installcheck(self):
"""Run "make" on the ``installcheck`` target, if found.""" """Run "make" on the ``installcheck`` target, if found."""
with fs.working_dir(self.build_directory): with fs.working_dir(self.build_directory):
self.pkg._if_make_target_execute("installcheck") self.pkg._if_make_target_execute("installcheck")
@spack.phase_callbacks.run_after("install") @spack.builder.run_after("install")
def remove_libtool_archives(self) -> None: def remove_libtool_archives(self):
"""Remove all .la files in prefix sub-folders if the package sets """Remove all .la files in prefix sub-folders if the package sets
``install_libtool_archives`` to be False. ``install_libtool_archives`` to be False.
""" """
@@ -858,13 +814,12 @@ def setup_build_environment(self, env):
env.set("MACOSX_DEPLOYMENT_TARGET", "10.16") env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups) spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]: def _autoreconf_search_path_args(spec):
dirs_seen: Set[Tuple[int, int]] = set() dirs_seen = set()
flags_spack: List[str] = [] flags_spack, flags_external = [], []
flags_external: List[str] = []
# We don't want to add an include flag for automake's default search path. # We don't want to add an include flag for automake's default search path.
for automake in spec.dependencies(name="automake", deptype="build"): for automake in spec.dependencies(name="automake", deptype="build"):
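A hedged sketch of what such a search-path helper can produce: one -I flag per share/aclocal directory of build deps, with Spack-owned directories ahead of externals so external m4 files cannot shadow Spack's. All names and paths below are illustrative, not Spack's real API:

import os

def aclocal_include_flags(prefixes, externals=()):
    flags_spack, flags_external = [], []
    for prefix in prefixes:
        d = os.path.join(prefix, "share", "aclocal")
        (flags_external if prefix in externals else flags_spack).extend(["-I", d])
    return flags_spack + flags_external  # externals last

print(aclocal_include_flags(["/spack/store/libtool-2.4.7", "/usr"], externals={"/usr"}))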
@@ -10,8 +10,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

- import spack.phase_callbacks
- from spack.directives import depends_on
+ import spack.builder

from .cmake import CMakeBuilder, CMakePackage
@@ -69,7 +68,12 @@ class CachedCMakeBuilder(CMakeBuilder):
    @property
    def cache_name(self):
-       return f"{self.pkg.name}-{self.spec.architecture.platform}-{self.spec.dag_hash()}.cmake"
+       return "{0}-{1}-{2}@{3}.cmake".format(
+           self.pkg.name,
+           self.pkg.spec.architecture,
+           self.pkg.spec.compiler.name,
+           self.pkg.spec.compiler.version,
+       )
    @property
    def cache_path(self):
@@ -112,9 +116,7 @@ def initconfig_compiler_entries(self):
        # Fortran compiler is optional
        if "FC" in os.environ:
            spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
-           system_fc_entry = cmake_cache_path(
-               "CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
-           )
+           system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
        else:
            spack_fc_entry = "# No Fortran compiler defined in spec"
            system_fc_entry = "# No Fortran compiler defined in spec"
@@ -130,8 +132,8 @@ def initconfig_compiler_entries(self):
" " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]), " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
" " + spack_fc_entry, " " + spack_fc_entry,
"else()\n", "else()\n",
" " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc), " " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
" " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx), " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
" " + system_fc_entry, " " + system_fc_entry,
"endif()\n", "endif()\n",
] ]
@@ -190,10 +192,7 @@ def initconfig_mpi_entries(self):
entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc)) entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc))
entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx)) entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx))
entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
# not all MPIs have Fortran wrappers
if hasattr(spec["mpi"], "mpifc"):
entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
        # Check for slurm
        using_slurm = False
@@ -333,7 +332,7 @@ def std_cmake_args(self):
args.extend(["-C", self.cache_path]) args.extend(["-C", self.cache_path])
return args return args
@spack.phase_callbacks.run_after("install") @spack.builder.run_after("install")
def install_cmake_cache(self): def install_cmake_cache(self):
fs.mkdirp(self.pkg.spec.prefix.share.cmake) fs.mkdirp(self.pkg.spec.prefix.share.cmake)
fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake) fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
@@ -350,10 +349,6 @@ class CachedCMakePackage(CMakePackage):
    CMakeBuilder = CachedCMakeBuilder

-   # These dependencies are assumed in the builder
-   depends_on("c", type="build")
-   depends_on("cxx", type="build")

    def flag_handler(self, name, flags):
        if name in ("cflags", "cxxflags", "cppflags", "fflags"):
            return None, None, None  # handled in the cmake cache
@@ -7,11 +7,10 @@
import spack.builder
import spack.package_base
- import spack.phase_callbacks
from spack.directives import build_system, depends_on
from spack.multimethod import when

- from ._checks import BuilderWithDefaults, execute_install_time_tests
+ from ._checks import BaseBuilder, execute_install_time_tests

class CargoPackage(spack.package_base.PackageBase):
@@ -28,7 +27,7 @@ class CargoPackage(spack.package_base.PackageBase):
@spack.builder.builder("cargo") @spack.builder.builder("cargo")
class CargoBuilder(BuilderWithDefaults): class CargoBuilder(BaseBuilder):
"""The Cargo builder encodes the most common way of building software with """The Cargo builder encodes the most common way of building software with
a rust Cargo.toml file. It has two phases that can be overridden, if need be: a rust Cargo.toml file. It has two phases that can be overridden, if need be:
@@ -78,7 +77,7 @@ def install(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory):
            fs.install_tree("out", prefix)

-   spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+   spack.builder.run_after("install")(execute_install_time_tests)

    def check(self):
        """Run "cargo test"."""
@@ -9,7 +9,7 @@
import re
import sys
from itertools import chain
- from typing import Any, List, Optional, Tuple
+ from typing import List, Optional, Set, Tuple

import llnl.util.filesystem as fs
from llnl.util.lang import stable_partition
@@ -18,15 +18,11 @@
import spack.deptypes as dt
import spack.error
import spack.package_base
- import spack.phase_callbacks
- import spack.spec
- import spack.util.prefix
- from spack import traverse
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from spack.util.environment import filter_system_paths

- from ._checks import BuilderWithDefaults, execute_build_time_tests
+ from ._checks import BaseBuilder, execute_build_time_tests

# Regex to extract the primary generator from the CMake generator
# string.
@@ -52,9 +48,9 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
    python_executable = pkg.spec["python"].command.path
    args.extend(
        [
-           define("PYTHON_EXECUTABLE", python_executable),
-           define("Python_EXECUTABLE", python_executable),
-           define("Python3_EXECUTABLE", python_executable),
+           CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
+           CMakeBuilder.define("Python_EXECUTABLE", python_executable),
+           CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
        ]
    )
@@ -89,7 +85,7 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
        ipo = False

    if cmake.satisfies("@3.9:"):
-       args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
+       args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

    # Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
    # find_package may search there. This is not what we want.
@@ -97,36 +93,30 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
    # Do not populate CMake User Package Registry
    if cmake.satisfies("@3.15:"):
        # see https://cmake.org/cmake/help/latest/policy/CMP0090.html
-       args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
+       args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
    elif cmake.satisfies("@3.1:"):
        # see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
-       args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
+       args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))

    # Do not use CMake User/System Package Registry
    # https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
    if cmake.satisfies("@3.16:"):
-       args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
+       args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
    elif cmake.satisfies("@3.1:3.15"):
-       args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
-       args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
+       args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
+       args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))

    # Export a compilation database if supported.
    if _supports_compilation_databases(pkg):
-       args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
+       args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

    # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
    # https://cmake.org/cmake/help/latest/policy/CMP0042.html
    if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
-       args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
+       args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))

-   # Disable find package's config mode for versions of Boost that
-   # didn't provide it. See https://github.com/spack/spack/issues/20169
-   # and https://cmake.org/cmake/help/latest/module/FindBoost.html
-   if pkg.spec.satisfies("^boost@:1.69.0"):
-       args.append(define("Boost_NO_BOOST_CMAKE", True))
- def generator(*names: str, default: Optional[str] = None) -> None:
+ def generator(*names: str, default: Optional[str] = None):
    """The build system generator to use.

    See ``cmake --help`` for a list of valid generators.
@@ -167,18 +157,15 @@ def _values(x):
def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
    """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
    attribute of direct build/test and transitive link dependencies."""
-   edges = traverse.traverse_topo_edges_generator(
-       traverse.with_artificial_edges([pkg.spec]),
-       visitor=traverse.MixedDepthVisitor(
-           direct=dt.BUILD | dt.TEST, transitive=dt.LINK, key=traverse.by_dag_hash
-       ),
-       key=traverse.by_dag_hash,
-       root=False,
-       all_edges=False,  # cover all nodes, not all edges
-   )
-   ordered_specs = [edge.spec for edge in edges]
+   # Add direct build/test deps
+   selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
+   # Add transitive link deps
+   selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
    # Separate out externals so they do not shadow Spack prefixes
-   externals, spack_built = stable_partition((s for s in ordered_specs), lambda x: x.external)
+   externals, spack_built = stable_partition(
+       (s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
+       lambda x: x.external,
+   )

    return filter_system_paths(
        path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
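A hedged sketch of the partition-then-concatenate step above: split while keeping relative order (a "stable partition"), then put Spack-built prefixes ahead of externals so externals cannot shadow them. The prefixes below are made up:

from itertools import chain

def stable_partition(items, predicate):
    yes, no = [], []
    for item in items:
        (yes if predicate(item) else no).append(item)
    return yes, no

prefixes = [("/spack/store/hdf5", False), ("/usr", True), ("/spack/store/zlib", False)]
externals, spack_built = stable_partition(prefixes, lambda p: p[1])
print(";".join(p for p, _ in chain(spack_built, externals)))
# -> /spack/store/hdf5;/spack/store/zlib;/usr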
@@ -276,15 +263,15 @@ def flags_to_build_system_args(self, flags):
    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
-   def define(self, cmake_var: str, value: Any) -> str:
-       return define(cmake_var, value)
+   def define(self, *args, **kwargs):
+       return self.builder.define(*args, **kwargs)

-   def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
-       return define_from_variant(self, cmake_var, variant)
+   def define_from_variant(self, *args, **kwargs):
+       return self.builder.define_from_variant(*args, **kwargs)

@spack.builder.builder("cmake")
- class CMakeBuilder(BuilderWithDefaults):
+ class CMakeBuilder(BaseBuilder):
    """The cmake builder encodes the default way of building software with CMake. IT
    has three phases that can be overridden:
@@ -334,15 +321,15 @@ class CMakeBuilder(BuilderWithDefaults):
    build_time_test_callbacks = ["check"]

    @property
-   def archive_files(self) -> List[str]:
+   def archive_files(self):
        """Files to archive for packages based on CMake"""
        files = [os.path.join(self.build_directory, "CMakeCache.txt")]
-       if _supports_compilation_databases(self.pkg):
+       if _supports_compilation_databases(self):
            files.append(os.path.join(self.build_directory, "compile_commands.json"))
        return files

    @property
-   def root_cmakelists_dir(self) -> str:
+   def root_cmakelists_dir(self):
        """The relative path to the directory containing CMakeLists.txt

        This path is relative to the root of the extracted tarball,
@@ -351,17 +338,16 @@ def root_cmakelists_dir(self) -> str:
        return self.pkg.stage.source_path

    @property
-   def generator(self) -> str:
+   def generator(self):
        if self.spec.satisfies("generator=make"):
            return "Unix Makefiles"
        if self.spec.satisfies("generator=ninja"):
            return "Ninja"
-       raise ValueError(
-           f'{self.spec.format()} has an unsupported value for the "generator" variant'
-       )
+       msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
+       raise ValueError(msg)

    @property
-   def std_cmake_args(self) -> List[str]:
+   def std_cmake_args(self):
        """Standard cmake arguments provided as a property for
        convenience of package writers
        """
@@ -370,9 +356,7 @@ def std_cmake_args(self) -> List[str]:
return args return args
@staticmethod @staticmethod
def std_args( def std_args(pkg, generator=None):
pkg: spack.package_base.PackageBase, generator: Optional[str] = None
) -> List[str]:
"""Computes the standard cmake arguments for a generic package""" """Computes the standard cmake arguments for a generic package"""
default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles" default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
generator = generator or default_generator generator = generator or default_generator
@@ -389,6 +373,7 @@ def std_args(
except KeyError: except KeyError:
build_type = "RelWithDebInfo" build_type = "RelWithDebInfo"
define = CMakeBuilder.define
args = [ args = [
"-G", "-G",
generator, generator,
@@ -420,31 +405,152 @@ def std_args(
         return args

     @staticmethod
-    def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
-        return define_cuda_architectures(pkg)
+    def define_cuda_architectures(pkg):
+        """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
+
+        ``cuda_arch`` is variant composed of a list of target CUDA architectures and
+        it is declared in the cuda package.
+
+        This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
+        """
+        cmake_flag = str()
+        if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
+            cmake_flag = CMakeBuilder.define(
+                "CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
+            )
+        return cmake_flag

     @staticmethod
-    def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
-        return define_hip_architectures(pkg)
+    def define_hip_architectures(pkg):
+        """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
+
+        ``amdgpu_target`` is variant composed of a list of the target HIP
+        architectures and it is declared in the rocm package.
+
+        This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
+        not set.
+        """
+        cmake_flag = str()
+        if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
+            cmake_flag = CMakeBuilder.define(
+                "CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
+            )
+        return cmake_flag

     @staticmethod
-    def define(cmake_var: str, value: Any) -> str:
-        return define(cmake_var, value)
-
-    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
-        return define_from_variant(self.pkg, cmake_var, variant)
+    def define(cmake_var, value):
+        """Return a CMake command line argument that defines a variable.
+
+        The resulting argument will convert boolean values to OFF/ON
+        and lists/tuples to CMake semicolon-separated string lists. All other
+        values will be interpreted as strings.
+
+        Examples:
+
+            .. code-block:: python
+
+                [define('BUILD_SHARED_LIBS', True),
+                 define('CMAKE_CXX_STANDARD', 14),
+                 define('swr', ['avx', 'avx2'])]
+
+            will generate the following configuration options:
+
+            .. code-block:: console
+
+                ["-DBUILD_SHARED_LIBS:BOOL=ON",
+                 "-DCMAKE_CXX_STANDARD:STRING=14",
+                 "-DSWR:STRING=avx;avx2"]
+        """
+        # Create a list of pairs. Each pair includes a configuration
+        # option and whether or not that option is activated
+        if isinstance(value, bool):
+            kind = "BOOL"
+            value = "ON" if value else "OFF"
+        else:
+            kind = "STRING"
+            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
+                value = ";".join(str(v) for v in value)
+            else:
+                value = str(value)
+
+        return "".join(["-D", cmake_var, ":", kind, "=", value])
+
+    def define_from_variant(self, cmake_var, variant=None):
+        """Return a CMake command line argument from the given variant's value.
+
+        The optional ``variant`` argument defaults to the lower-case transform
+        of ``cmake_var``.
+
+        This utility function is similar to
+        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.
+
+        Examples:
+
+            Given a package with:
+
+            .. code-block:: python
+
+                variant('cxxstd', default='11', values=('11', '14'),
+                        multi=False, description='')
+                variant('shared', default=True, description='')
+                variant('swr', values=any_combination_of('avx', 'avx2'),
+                        description='')
+
+            calling this function like:
+
+            .. code-block:: python
+
+                [self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+                 self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
+                 self.define_from_variant('SWR')]
+
+            will generate the following configuration options:
+
+            .. code-block:: console
+
+                ["-DBUILD_SHARED_LIBS:BOOL=ON",
+                 "-DCMAKE_CXX_STANDARD:STRING=14",
+                 "-DSWR:STRING=avx;avx2"]
+
+            for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
+
+        Note: if the provided variant is conditional, and the condition is not met,
+        this function returns an empty string. CMake discards empty strings
+        provided on the command line.
+        """
+        if variant is None:
+            variant = cmake_var.lower()
+
+        if not self.pkg.has_variant(variant):
+            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
+
+        if variant not in self.pkg.spec.variants:
+            return ""
+
+        value = self.pkg.spec.variants[variant].value
+        if isinstance(value, (tuple, list)):
+            # Sort multi-valued variants for reproducibility
+            value = sorted(value)
+
+        return self.define(cmake_var, value)
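A typical call site for ``define_from_variant`` is a package's ``cmake_args``. A hedged sketch with a hypothetical ``Example`` package (the variant names and values are illustrative, not from this diff):

.. code-block:: python

    class Example(CMakePackage):
        variant("shared", default=True, description="Build shared libraries")
        variant("cxxstd", default="14", values=("14", "17"), multi=False, description="")

        def cmake_args(self):
            # For `example cxxstd=17 ~shared` this evaluates to
            # ["-DBUILD_SHARED_LIBS:BOOL=OFF", "-DCMAKE_CXX_STANDARD:STRING=17"]
            return [
                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
                self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
            ]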
     @property
-    def build_dirname(self) -> str:
+    def build_dirname(self):
         """Directory name to use when building the package."""
-        return f"spack-build-{self.pkg.spec.dag_hash(7)}"
+        return "spack-build-%s" % self.pkg.spec.dag_hash(7)

     @property
-    def build_directory(self) -> str:
+    def build_directory(self):
         """Full-path to the directory to use when building the package."""
         return os.path.join(self.pkg.stage.path, self.build_dirname)

-    def cmake_args(self) -> List[str]:
+    def cmake_args(self):
         """List of all the arguments that must be passed to cmake, except:

             * CMAKE_INSTALL_PREFIX
@@ -454,12 +560,7 @@ def cmake_args(self) -> List[str]:
         """
         return []

-    def cmake(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def cmake(self, pkg, spec, prefix):
         """Runs ``cmake`` in the build directory"""

         # skip cmake phase if it is an incremental develop build
@@ -474,12 +575,7 @@ def cmake(
         with fs.working_dir(self.build_directory, create=True):
             pkg.module.cmake(*options)

-    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
@@ -488,12 +584,7 @@ def build(
                 self.build_targets.append("-v")
                 pkg.module.ninja(*self.build_targets)

-    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
@@ -501,9 +592,9 @@ def install(
             elif self.generator == "Ninja":
                 pkg.module.ninja(*self.install_targets)

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

-    def check(self) -> None:
+    def check(self):
         """Search the CMake-generated files for the targets ``test`` and ``check``,
         and runs them if found.
         """
@@ -514,133 +605,3 @@ def check(self) -> None:
         elif self.generator == "Ninja":
             self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
             self.pkg._if_ninja_target_execute("check")
-
-
-def define(cmake_var: str, value: Any) -> str:
-    """Return a CMake command line argument that defines a variable.
-
-    The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
-    semicolon-separated string lists. All other values will be interpreted as strings.
-
-    Examples:
-
-        .. code-block:: python
-
-            [define("BUILD_SHARED_LIBS", True),
-             define("CMAKE_CXX_STANDARD", 14),
-             define("swr", ["avx", "avx2"])]
-
-        will generate the following configuration options:
-
-        .. code-block:: console
-
-            ["-DBUILD_SHARED_LIBS:BOOL=ON",
-             "-DCMAKE_CXX_STANDARD:STRING=14",
-             "-DSWR:STRING=avx;avx2]
-    """
-    # Create a list of pairs. Each pair includes a configuration
-    # option and whether or not that option is activated
-    if isinstance(value, bool):
-        kind = "BOOL"
-        value = "ON" if value else "OFF"
-    else:
-        kind = "STRING"
-        if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
-            value = ";".join(str(v) for v in value)
-        else:
-            value = str(value)
-
-    return "".join(["-D", cmake_var, ":", kind, "=", value])
-
-
-def define_from_variant(
-    pkg: spack.package_base.PackageBase, cmake_var: str, variant: Optional[str] = None
-) -> str:
-    """Return a CMake command line argument from the given variant's value.
-
-    The optional ``variant`` argument defaults to the lower-case transform
-    of ``cmake_var``.
-
-    Examples:
-
-        Given a package with:
-
-        .. code-block:: python
-
-            variant("cxxstd", default="11", values=("11", "14"),
-                    multi=False, description="")
-            variant("shared", default=True, description="")
-            variant("swr", values=any_combination_of("avx", "avx2"),
-                    description="")
-
-        calling this function like:
-
-        .. code-block:: python
-
-            [
-                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
-                self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
-                self.define_from_variant("SWR"),
-            ]
-
-        will generate the following configuration options:
-
-        .. code-block:: console
-
-            [
-                "-DBUILD_SHARED_LIBS:BOOL=ON",
-                "-DCMAKE_CXX_STANDARD:STRING=14",
-                "-DSWR:STRING=avx;avx2",
-            ]
-
-        for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
-
-    Note: if the provided variant is conditional, and the condition is not met, this function
-    returns an empty string. CMake discards empty strings provided on the command line.
-    """
-    if variant is None:
-        variant = cmake_var.lower()
-
-    if not pkg.has_variant(variant):
-        raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))
-
-    if variant not in pkg.spec.variants:
-        return ""
-
-    value = pkg.spec.variants[variant].value
-    if isinstance(value, (tuple, list)):
-        # Sort multi-valued variants for reproducibility
-        value = sorted(value)
-
-    return define(cmake_var, value)
-
-
-def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
-    """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
-
-    ``amdgpu_target`` is variant composed of a list of the target HIP
-    architectures and it is declared in the rocm package.
-
-    This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
-    not set.
-    """
-    if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
-        return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)
-    return ""
-
-
-def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
-    """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
-
-    ``cuda_arch`` is variant composed of a list of target CUDA architectures and
-    it is declared in the cuda package.
-
-    This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
-    """
-    if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
-        return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
-    return ""

View File

@@ -5,22 +5,15 @@
 import itertools
 import os
 import pathlib
-import platform
 import re
 import sys
-from typing import Dict, List, Optional, Sequence, Tuple, Union
+from typing import Dict, List, Sequence, Tuple, Union

-import archspec.cpu
 import llnl.util.tty as tty
-from llnl.util.lang import classproperty, memoized
+from llnl.util.lang import classproperty

-import spack
-import spack.compilers.error
-import spack.compilers.libraries
-import spack.config
+import spack.compiler
 import spack.package_base
-import spack.paths
 import spack.util.executable

 # Local "type" for type hints
@@ -51,9 +44,6 @@ class CompilerPackage(spack.package_base.PackageBase):
     #: Static definition of languages supported by this class
     compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]

-    #: Relative path to compiler wrappers
-    link_paths: Dict[str, str] = {}
-
     def __init__(self, spec: "spack.spec.Spec"):
         super().__init__(spec)
         msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -88,14 +78,14 @@ def executables(cls) -> Sequence[str]:
         ]

     @classmethod
-    def determine_version(cls, exe: Path) -> str:
+    def determine_version(cls, exe: Path):
         version_argument = cls.compiler_version_argument
         if isinstance(version_argument, str):
             version_argument = (version_argument,)

         for va in version_argument:
             try:
-                output = compiler_output(exe, version_argument=va)
+                output = spack.compiler.get_compiler_version_output(exe, va)
                 match = re.search(cls.compiler_version_regex, output)
                 if match:
                     return ".".join(match.groups())
@@ -106,7 +96,6 @@ def determine_version(cls, exe: Path) -> str:
                     f"[{__file__}] Cannot detect a valid version for the executable "
                     f"{str(exe)}, for package '{cls.name}': {e}"
                 )
-        return ""
     @classmethod
     def compiler_bindir(cls, prefix: Path) -> Path:
@@ -154,184 +143,3 @@ def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
     def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
         # path determination is separated so it can be reused in subclasses
         return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
-
-    #: Returns the argument needed to set the RPATH, or None if it does not exist
-    rpath_arg: Optional[str] = "-Wl,-rpath,"
-    #: Flag that needs to be used to pass an argument to the linker
-    linker_arg: str = "-Wl,"
-    #: Flag used to produce Position Independent Code
-    pic_flag: str = "-fPIC"
-    #: Flag used to get verbose output
-    verbose_flags: str = "-v"
-    #: Flag to activate OpenMP support
-    openmp_flag: str = "-fopenmp"
-
-    def standard_flag(self, *, language: str, standard: str) -> str:
-        """Returns the flag used to enforce a given standard for a language"""
-        if language not in self.supported_languages:
-            raise spack.compilers.error.UnsupportedCompilerFlag(
-                f"{self.spec} does not provide the '{language}' language"
-            )
-        try:
-            return self._standard_flag(language=language, standard=standard)
-        except (KeyError, RuntimeError) as e:
-            raise spack.compilers.error.UnsupportedCompilerFlag(
-                f"{self.spec} does not provide the '{language}' standard {standard}"
-            ) from e
-
-    def _standard_flag(self, *, language: str, standard: str) -> str:
-        raise NotImplementedError("Must be implemented by derived classes")
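``standard_flag``/``_standard_flag`` form a template-method pair: the public method owns the error handling, and a subclass only supplies the lookup. A hedged sketch of a hypothetical derived class (the flag table is illustrative):

.. code-block:: python

    class HypotheticalGcc(CompilerPackage):
        compiler_languages = ["c", "cxx"]

        def _standard_flag(self, *, language: str, standard: str) -> str:
            # A KeyError raised here is translated by standard_flag() into
            # an UnsupportedCompilerFlag with a readable message.
            flags = {
                ("c", "11"): "-std=c11",
                ("cxx", "14"): "-std=c++14",
                ("cxx", "17"): "-std=c++17",
            }
            return flags[(language, standard)]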
-    @property
-    def disable_new_dtags(self) -> str:
-        if platform.system() == "Darwin":
-            return ""
-        return "--disable-new-dtags"
-
-    @property
-    def enable_new_dtags(self) -> str:
-        if platform.system() == "Darwin":
-            return ""
-        return "--enable-new-dtags"
-
-    def setup_dependent_build_environment(self, env, dependent_spec):
-        # FIXME (compiler as nodes): check if this is good enough or should be made more general
-        # The package is not used as a compiler, so skip this setup
-        if not any(
-            lang in dependent_spec and dependent_spec[lang].name == self.spec.name
-            for lang in ("c", "cxx", "fortran")
-        ):
-            return
-
-        # Populate an object with the list of environment modifications and return it
-        link_dir = pathlib.Path(spack.paths.build_env_path)
-
-        env_paths = []
-        for language, attr_name, wrapper_var_name, spack_var_name in [
-            ("c", "cc", "CC", "SPACK_CC"),
-            ("cxx", "cxx", "CXX", "SPACK_CXX"),
-            ("fortran", "fortran", "F77", "SPACK_F77"),
-            ("fortran", "fortran", "FC", "SPACK_FC"),
-        ]:
-            if language not in dependent_spec or dependent_spec[language].name != self.spec.name:
-                continue
-
-            if not hasattr(self, attr_name):
-                continue
-
-            compiler = getattr(self, attr_name)
-            env.set(spack_var_name, compiler)
-
-            if language not in self.link_paths:
-                continue
-
-            wrapper_path = link_dir / self.link_paths.get(language)
-            env.set(wrapper_var_name, str(wrapper_path))
-            env.set(f"SPACK_{wrapper_var_name}_RPATH_ARG", self.rpath_arg)
-
-            uarch = dependent_spec.architecture.target
-            version_number, _ = archspec.cpu.version_components(
-                self.spec.version.dotted_numeric_string
-            )
-            try:
-                isa_arg = uarch.optimization_flags(self.archspec_name(), version_number)
-            except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
-                isa_arg = ""
-
-            if isa_arg:
-                env.set(f"SPACK_TARGET_ARGS_{attr_name.upper()}", isa_arg)
-
-            # Add spack build environment path with compiler wrappers first in
-            # the path. We add the compiler wrapper path, which includes default
-            # wrappers (cc, c++, f77, f90), AND a subdirectory containing
-            # compiler-specific symlinks. The latter ensures that builds that
-            # are sensitive to the *name* of the compiler see the right name when
-            # we're building with the wrappers.
-            #
-            # Conflicts on case-insensitive systems (like "CC" and "cc") are
-            # handled by putting one in the <build_env_path>/case-insensitive
-            # directory. Add that to the path too.
-            compiler_specific = os.path.join(
-                spack.paths.build_env_path, os.path.dirname(self.link_paths[language])
-            )
-            for item in [spack.paths.build_env_path, compiler_specific]:
-                env_paths.append(item)
-                ci = os.path.join(item, "case-insensitive")
-                if os.path.isdir(ci):
-                    env_paths.append(ci)
-
-        # FIXME (compiler as nodes): make these paths language specific
-        env.set("SPACK_LINKER_ARG", self.linker_arg)
-
-        paths = _implicit_rpaths(pkg=self)
-        if paths:
-            env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(paths))
-
-        # Check whether we want to force RPATH or RUNPATH
-        if spack.config.CONFIG.get("config:shared_linking:type") == "rpath":
-            env.set("SPACK_DTAGS_TO_STRIP", self.enable_new_dtags)
-            env.set("SPACK_DTAGS_TO_ADD", self.disable_new_dtags)
-        else:
-            env.set("SPACK_DTAGS_TO_STRIP", self.disable_new_dtags)
-            env.set("SPACK_DTAGS_TO_ADD", self.enable_new_dtags)
-
-        spec = self.spec
-        if spec.extra_attributes:
-            extra_rpaths = spec.extra_attributes.get("extra_rpaths")
-            if extra_rpaths:
-                extra_rpaths = ":".join(compiler.extra_rpaths)
-                env.append_path("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
-
-        for item in env_paths:
-            env.prepend_path("SPACK_ENV_PATH", item)
-
-    def archspec_name(self) -> str:
-        """Name that archspec uses to refer to this compiler"""
-        return self.spec.name
-
-
-def _implicit_rpaths(pkg: spack.package_base.PackageBase) -> List[str]:
-    detector = spack.compilers.libraries.CompilerPropertyDetector(pkg.spec)
-    paths = detector.implicit_rpaths()
-    return paths
-
-
-@memoized
-def _compiler_output(
-    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
-) -> str:
-    """Returns the output from the compiler invoked with the given version argument.
-
-    Args:
-        compiler_path: path of the compiler to be invoked
-        version_argument: the argument used to extract version information
-    """
-    compiler = spack.util.executable.Executable(compiler_path)
-    compiler_invocation_args = {
-        "output": str,
-        "error": str,
-        "ignore_errors": ignore_errors,
-        "timeout": 120,
-        "fail_on_error": True,
-    }
-    if version_argument:
-        output = compiler(version_argument, **compiler_invocation_args)
-    else:
-        output = compiler(**compiler_invocation_args)
-    return output
-
-
-def compiler_output(
-    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
-) -> str:
-    """Wrapper for _get_compiler_version_output()."""
-    # This ensures that we memoize compiler output by *absolute path*,
-    # not just executable name. If we don't do this, and the path changes
-    # (e.g., during testing), we can get incorrect results.
-    if not os.path.isabs(compiler_path):
-        compiler_path = spack.util.executable.which_string(compiler_path, required=True)
-    return _compiler_output(
-        compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
-    )
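The two-layer wrapper exists solely so that memoization keys on the absolute path of the executable. The same pattern, re-created with ``functools.lru_cache`` standing in for llnl's ``memoized`` (a sketch, not Spack code):

.. code-block:: python

    import functools
    import os
    import shutil

    @functools.lru_cache(maxsize=None)
    def _tool_output(abs_path: str, arg: str) -> str:
        # Stand-in for an expensive compiler invocation; results are cached
        # per (absolute path, argument) pair.
        return f"would run: {abs_path} {arg}"

    def tool_output(path: str, arg: str) -> str:
        # Resolve relative names through PATH first, so that "gcc" found at
        # two different locations over time cannot alias one cache entry.
        abs_path = path if os.path.isabs(path) else shutil.which(path)
        if abs_path is None:
            raise FileNotFoundError(path)
        return _tool_output(abs_path, arg)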

View File

@@ -180,6 +180,13 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@7:", when="+cuda ^cuda@:9.1 target=x86_64:")
     conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=x86_64:")
     conflicts("%gcc@9:", when="+cuda ^cuda@:10.2.89 target=x86_64:")
+    conflicts("%pgi@:14.8", when="+cuda ^cuda@:7.0.27 target=x86_64:")
+    conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
+    conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
+    conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
+    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
+    conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
+    conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
     conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
     conflicts("%clang@:3.7,4:", when="+cuda ^cuda@8.0:9.0 target=x86_64:")
     conflicts("%clang@:3.7,4.1:", when="+cuda ^cuda@9.1 target=x86_64:")
@@ -205,6 +212,9 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=ppc64le:")
     conflicts("%gcc@9:", when="+cuda ^cuda@:10.1.243 target=ppc64le:")
     # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
+    conflicts("%pgi", when="+cuda ^cuda@:8 target=ppc64le:")
+    conflicts("%pgi@:16", when="+cuda ^cuda@:9.1.185 target=ppc64le:")
+    conflicts("%pgi@:17", when="+cuda ^cuda@:10 target=ppc64le:")
     conflicts("%clang@4:", when="+cuda ^cuda@:9.0.176 target=ppc64le:")
     conflicts("%clang@5:", when="+cuda ^cuda@:9.1 target=ppc64le:")
     conflicts("%clang@6:", when="+cuda ^cuda@:9.2 target=ppc64le:")

View File

@@ -7,9 +7,8 @@
 import spack.builder
 import spack.directives
 import spack.package_base
-import spack.phase_callbacks

-from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
+from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests


 class Package(spack.package_base.PackageBase):
@@ -27,7 +26,7 @@ class Package(spack.package_base.PackageBase):

 @spack.builder.builder("generic")
-class GenericBuilder(BuilderWithDefaults):
+class GenericBuilder(BaseBuilder):
     """A builder for a generic build system, that requires packagers
     to implement an "install" phase.
     """
@@ -45,7 +44,7 @@ class GenericBuilder(BuilderWithDefaults):
     install_time_test_callbacks = []

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

     # unconditionally perform any post-install phase tests
-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

View File

@@ -7,11 +7,10 @@
 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, extends
 from spack.multimethod import when

-from ._checks import BuilderWithDefaults, execute_install_time_tests
+from ._checks import BaseBuilder, execute_install_time_tests


 class GoPackage(spack.package_base.PackageBase):
@@ -33,7 +32,7 @@ class GoPackage(spack.package_base.PackageBase):

 @spack.builder.builder("go")
-class GoBuilder(BuilderWithDefaults):
+class GoBuilder(BaseBuilder):
     """The Go builder encodes the most common way of building software with
     a golang go.mod file. It has two phases that can be overridden, if need be:
@@ -100,7 +99,7 @@ def install(self, pkg, spec, prefix):
         fs.mkdirp(prefix.bin)
         fs.install(pkg.name, prefix.bin)

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

     def check(self):
         """Run ``go test .`` in the source directory"""

View File

@@ -22,8 +22,8 @@
     install,
 )

+import spack.builder
 import spack.error
-import spack.phase_callbacks
 from spack.build_environment import dso_suffix
 from spack.error import InstallError
 from spack.util.environment import EnvironmentModifications
@@ -1163,7 +1163,7 @@ def _determine_license_type(self):
         debug_print(license_type)
         return license_type

-    @spack.phase_callbacks.run_before("install")
+    @spack.builder.run_before("install")
     def configure(self):
         """Generates the silent.cfg file to pass to installer.sh.
@@ -1250,7 +1250,7 @@ def install(self, spec, prefix):
             for f in glob.glob("%s/intel*log" % tmpdir):
                 install(f, dst)

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def validate_install(self):
         # Sometimes the installer exits with an error but doesn't pass a
         # non-zero exit code to spack. Check for the existence of a 'bin'
@@ -1258,7 +1258,7 @@ def validate_install(self):
         if not os.path.exists(self.prefix.bin):
             raise InstallError("The installer has failed to install anything.")

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def configure_rpath(self):
         if "+rpath" not in self.spec:
             return
@@ -1276,7 +1276,7 @@ def configure_rpath(self):
         with open(compiler_cfg, "w") as fh:
             fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def configure_auto_dispatch(self):
         if self._has_compilers:
             if "auto_dispatch=none" in self.spec:
@@ -1300,7 +1300,7 @@ def configure_auto_dispatch(self):
         with open(compiler_cfg, "a") as fh:
             fh.write("-ax{0}\n".format(",".join(ad)))

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def filter_compiler_wrappers(self):
         if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
             bin_dir = self.component_bin_dir("mpi")
@@ -1308,7 +1308,7 @@ def filter_compiler_wrappers(self):
                 f = os.path.join(bin_dir, f)
                 filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def uninstall_ism(self):
         # The "Intel(R) Software Improvement Program" [ahem] gets installed,
         # apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@@ -1340,7 +1340,7 @@ def base_lib_dir(self):
         debug_print(d)
         return d

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def modify_LLVMgold_rpath(self):
         """Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.

View File

@@ -8,14 +8,11 @@
 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when

 from ._checks import (
-    BuilderWithDefaults,
+    BaseBuilder,
     apply_macos_rpath_fixups,
     execute_build_time_tests,
     execute_install_time_tests,
@@ -39,7 +36,7 @@ class MakefilePackage(spack.package_base.PackageBase):

 @spack.builder.builder("makefile")
-class MakefileBuilder(BuilderWithDefaults):
+class MakefileBuilder(BaseBuilder):
     """The Makefile builder encodes the most common way of building software with
     Makefiles. It has three phases that can be overridden, if need be:
@@ -94,50 +91,35 @@ class MakefileBuilder(BuilderWithDefaults):
     install_time_test_callbacks = ["installcheck"]

     @property
-    def build_directory(self) -> str:
+    def build_directory(self):
         """Return the directory containing the main Makefile."""
         return self.pkg.stage.source_path

-    def edit(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def edit(self, pkg, spec, prefix):
         """Edit the Makefile before calling make. The default is a no-op."""
         pass

-    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.build_targets)

-    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.install_targets)

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

-    def check(self) -> None:
+    def check(self):
         """Run "make" on the ``test`` and ``check`` targets, if found."""
         with fs.working_dir(self.build_directory):
             self.pkg._if_make_target_execute("test")
             self.pkg._if_make_target_execute("check")

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

-    def installcheck(self) -> None:
+    def installcheck(self):
         """Searches the Makefile for an ``installcheck`` target
         and runs it if found.
         """
@@ -145,4 +127,4 @@ def installcheck(self) -> None:
         self.pkg._if_make_target_execute("installcheck")

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
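For orientation, a minimal hypothetical package that rides on these phases: ``edit`` patches the Makefile, then the inherited ``build``/``install`` run make. All names, URLs, and the digest below are placeholders:

.. code-block:: python

    from spack.package import *


    class Hello(MakefilePackage):
        """Hypothetical package built from a plain Makefile."""

        homepage = "https://example.com/hello"
        url = "https://example.com/hello-1.0.tar.gz"

        version("1.0", sha256="...")  # placeholder digest

        def edit(self, spec, prefix):
            # edit is a no-op by default; here it points the Makefile's
            # PREFIX at the Spack install prefix before build/install run.
            makefile = FileFilter("Makefile")
            makefile.filter(r"^PREFIX\s*=.*", f"PREFIX = {prefix}")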

View File

@@ -10,7 +10,7 @@
 from spack.multimethod import when
 from spack.util.executable import which

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class MavenPackage(spack.package_base.PackageBase):
@@ -34,7 +34,7 @@ class MavenPackage(spack.package_base.PackageBase):

 @spack.builder.builder("maven")
-class MavenBuilder(BuilderWithDefaults):
+class MavenBuilder(BaseBuilder):
     """The Maven builder encodes the default way to build software with Maven.
     It has two phases that can be overridden, if need be:

View File

@@ -9,13 +9,10 @@
 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when

-from ._checks import BuilderWithDefaults, execute_build_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests


 class MesonPackage(spack.package_base.PackageBase):
@@ -65,7 +62,7 @@ def flags_to_build_system_args(self, flags):

 @spack.builder.builder("meson")
-class MesonBuilder(BuilderWithDefaults):
+class MesonBuilder(BaseBuilder):
     """The Meson builder encodes the default way to build software with Meson.

     The builder has three phases that can be overridden, if need be:
@@ -115,7 +112,7 @@ def archive_files(self):
         return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]

     @property
-    def root_mesonlists_dir(self) -> str:
+    def root_mesonlists_dir(self):
         """Relative path to the directory containing meson.build

         This path is relative to the root of the extracted tarball,
@@ -124,7 +121,7 @@ def root_mesonlists_dir(self) -> str:
         return self.pkg.stage.source_path

     @property
-    def std_meson_args(self) -> List[str]:
+    def std_meson_args(self):
         """Standard meson arguments provided as a property for convenience
         of package writers.
         """
@@ -135,7 +132,7 @@ def std_meson_args(self) -> List[str]:
         return std_meson_args

     @staticmethod
-    def std_args(pkg) -> List[str]:
+    def std_args(pkg):
         """Standard meson arguments for a generic package."""
         try:
             build_type = pkg.spec.variants["buildtype"].value
@@ -175,7 +172,7 @@ def build_directory(self):
         """Directory to use when building the package."""
         return os.path.join(self.pkg.stage.path, self.build_dirname)

-    def meson_args(self) -> List[str]:
+    def meson_args(self):
         """List of arguments that must be passed to meson, except:

         * ``--prefix``
@@ -188,12 +185,7 @@ def meson_args(self) -> List[str]:
         """
         return []

-    def meson(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def meson(self, pkg, spec, prefix):
         """Run ``meson`` in the build directory"""
         options = []
         if self.spec["meson"].satisfies("@0.64:"):
@@ -204,31 +196,21 @@ def meson(
         with fs.working_dir(self.build_directory, create=True):
             pkg.module.meson(*options)

-    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Make the build targets"""
         options = ["-v"]
         options += self.build_targets
         with fs.working_dir(self.build_directory):
             pkg.module.ninja(*options)

-    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
             pkg.module.ninja(*self.install_targets)

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

-    def check(self) -> None:
+    def check(self):
         """Search Meson-generated files for the target ``test`` and run it if found."""
         with fs.working_dir(self.build_directory):
             self.pkg._if_ninja_target_execute("test")

View File

@@ -10,7 +10,7 @@
 import spack.package_base
 from spack.directives import build_system, conflicts

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class MSBuildPackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class MSBuildPackage(spack.package_base.PackageBase):

 @spack.builder.builder("msbuild")
-class MSBuildBuilder(BuilderWithDefaults):
+class MSBuildBuilder(BaseBuilder):
     """The MSBuild builder encodes the most common way of building software with
     Microsoft's MSBuild tool. It has two phases that can be overridden, if need be:
@@ -75,7 +75,7 @@ def toolchain_version(self):
         Override this method to select a specific version of the toolchain or change
         selection heuristics.
         Default is whatever version of msvc has been selected by concretization"""
-        return "v" + self.spec["msvc"].package.platform_toolset_ver
+        return "v" + self.pkg.compiler.platform_toolset_ver

     @property
     def std_msbuild_args(self):

View File

@@ -10,7 +10,7 @@
 import spack.package_base
 from spack.directives import build_system, conflicts

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class NMakePackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class NMakePackage(spack.package_base.PackageBase):

 @spack.builder.builder("nmake")
-class NMakeBuilder(BuilderWithDefaults):
+class NMakeBuilder(BaseBuilder):
     """The NMake builder encodes the most common way of building software with
     Microsoft's NMake tool. It has two phases that can be overridden, if need be:

View File

@@ -7,7 +7,7 @@
 from spack.directives import build_system, extends
 from spack.multimethod import when

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class OctavePackage(spack.package_base.PackageBase):
@@ -29,7 +29,7 @@ class OctavePackage(spack.package_base.PackageBase):

 @spack.builder.builder("octave")
-class OctaveBuilder(BuilderWithDefaults):
+class OctaveBuilder(BaseBuilder):
     """The octave builder provides the following phases that can be overridden:

     1. :py:meth:`~.OctaveBuilder.install`

View File

@@ -140,7 +140,7 @@ def setup_run_environment(self, env):
             $ source {prefix}/{component}/{version}/env/vars.sh
         """
         # Only if environment modifications are desired (default is +envmods)
-        if "+envmods" in self.spec:
+        if "~envmods" not in self.spec:
             env.extend(
                 EnvironmentModifications.from_sourcing_file(
                     self.component_prefix.env.join("vars.sh"), *self.env_script_args
@@ -255,7 +255,7 @@ def libs(self):
         return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)


-class IntelOneApiLibraryPackageWithSdk(IntelOneApiLibraryPackage):
+class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
     """Base class for Intel oneAPI library packages with SDK components.

     Contains some convenient default implementations for libraries
View File

@@ -10,12 +10,11 @@
import spack.builder import spack.builder
import spack.package_base import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends from spack.directives import build_system, extends
from spack.install_test import SkipTest, test_part from spack.install_test import SkipTest, test_part
from spack.util.executable import Executable from spack.util.executable import Executable
from ._checks import BuilderWithDefaults, execute_build_time_tests from ._checks import BaseBuilder, execute_build_time_tests
class PerlPackage(spack.package_base.PackageBase): class PerlPackage(spack.package_base.PackageBase):
@@ -85,7 +84,7 @@ def test_use(self):
@spack.builder.builder("perl") @spack.builder.builder("perl")
class PerlBuilder(BuilderWithDefaults): class PerlBuilder(BaseBuilder):
"""The perl builder provides four phases that can be overridden, if required: """The perl builder provides four phases that can be overridden, if required:
1. :py:meth:`~.PerlBuilder.configure` 1. :py:meth:`~.PerlBuilder.configure`
@@ -164,7 +163,7 @@ def configure(self, pkg, spec, prefix):
# Build.PL may be too long causing the build to fail. Patching the shebang # Build.PL may be too long causing the build to fail. Patching the shebang
# does not happen until after install so set '/usr/bin/env perl' here in # does not happen until after install so set '/usr/bin/env perl' here in
# the Build script. # the Build script.
@spack.phase_callbacks.run_after("configure") @spack.builder.run_after("configure")
def fix_shebang(self): def fix_shebang(self):
if self.build_method == "Build.PL": if self.build_method == "Build.PL":
pattern = "#!{0}".format(self.spec["perl"].command.path) pattern = "#!{0}".format(self.spec["perl"].command.path)
@@ -176,7 +175,7 @@ def build(self, pkg, spec, prefix):
self.build_executable() self.build_executable()
# Ensure that tests run after build (if requested): # Ensure that tests run after build (if requested):
spack.phase_callbacks.run_after("build")(execute_build_time_tests) spack.builder.run_after("build")(execute_build_time_tests)
def check(self): def check(self):
"""Runs built-in tests of a Perl package.""" """Runs built-in tests of a Perl package."""

View File

@@ -24,7 +24,6 @@
import spack.detection import spack.detection
import spack.multimethod import spack.multimethod
import spack.package_base import spack.package_base
import spack.phase_callbacks
import spack.platforms import spack.platforms
import spack.repo import spack.repo
import spack.spec import spack.spec
@@ -35,7 +34,7 @@
from spack.spec import Spec from spack.spec import Spec
from spack.util.prefix import Prefix from spack.util.prefix import Prefix
from ._checks import BuilderWithDefaults, execute_install_time_tests from ._checks import BaseBuilder, execute_install_time_tests
def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]: def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
@@ -277,6 +276,10 @@ def update_external_dependencies(self, extendee_spec=None):
if not python.architecture.target: if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name python.architecture.target = archspec.cpu.host().family.name
# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler
python.external_path = self.spec.external_path python.external_path = self.spec.external_path
python._mark_concrete() python._mark_concrete()
self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=()) self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
@@ -371,7 +374,7 @@ def list_url(cls) -> Optional[str]: # type: ignore[override]
return None return None
@property @property
def python_spec(self) -> Spec: def python_spec(self):
"""Get python-venv if it exists or python otherwise.""" """Get python-venv if it exists or python otherwise."""
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python") python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
return python return python
@@ -422,7 +425,7 @@ def libs(self) -> LibraryList:
@spack.builder.builder("python_pip") @spack.builder.builder("python_pip")
class PythonPipBuilder(BuilderWithDefaults): class PythonPipBuilder(BaseBuilder):
phases = ("install",) phases = ("install",)
#: Names associated with package methods in the old build-system format #: Names associated with package methods in the old build-system format
@@ -540,4 +543,4 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
with fs.working_dir(self.build_directory): with fs.working_dir(self.build_directory):
pip(*args) pip(*args)
spack.phase_callbacks.run_after("install")(execute_install_time_tests) spack.builder.run_after("install")(execute_install_time_tests)

View File

@@ -6,10 +6,9 @@
 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BuilderWithDefaults, execute_build_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests


 class QMakePackage(spack.package_base.PackageBase):
@@ -31,7 +30,7 @@ class QMakePackage(spack.package_base.PackageBase):

 @spack.builder.builder("qmake")
-class QMakeBuilder(BuilderWithDefaults):
+class QMakeBuilder(BaseBuilder):
     """The qmake builder provides three phases that can be overridden:

     1. :py:meth:`~.QMakeBuilder.qmake`
@@ -82,4 +81,4 @@ def check(self):
         with working_dir(self.build_directory):
             self.pkg._if_make_target_execute("check")

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

View File

@@ -8,7 +8,7 @@
 import spack.package_base
 from spack.directives import build_system, extends, maintainers

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class RubyPackage(spack.package_base.PackageBase):
@@ -28,7 +28,7 @@ class RubyPackage(spack.package_base.PackageBase):

 @spack.builder.builder("ruby")
-class RubyBuilder(BuilderWithDefaults):
+class RubyBuilder(BaseBuilder):
     """The Ruby builder provides two phases that can be overridden if required:

     #. :py:meth:`~.RubyBuilder.build`

View File

@@ -4,10 +4,9 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BuilderWithDefaults, execute_build_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests


 class SConsPackage(spack.package_base.PackageBase):
@@ -29,7 +28,7 @@ class SConsPackage(spack.package_base.PackageBase):

 @spack.builder.builder("scons")
-class SConsBuilder(BuilderWithDefaults):
+class SConsBuilder(BaseBuilder):
     """The Scons builder provides the following phases that can be overridden:

     1. :py:meth:`~.SConsBuilder.build`
@@ -80,4 +79,4 @@ def build_test(self):
         """
         pass

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

View File

@@ -11,12 +11,11 @@
 import spack.builder
 import spack.install_test
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable

-from ._checks import BuilderWithDefaults, execute_install_time_tests
+from ._checks import BaseBuilder, execute_install_time_tests


 class SIPPackage(spack.package_base.PackageBase):
@@ -104,7 +103,7 @@ def test_imports(self):

 @spack.builder.builder("sip")
-class SIPBuilder(BuilderWithDefaults):
+class SIPBuilder(BaseBuilder):
     """The SIP builder provides the following phases that can be overridden:

     * configure
@@ -171,4 +170,4 @@ def install_args(self):
         """Arguments to pass to install."""
         return []

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

View File

@@ -6,10 +6,9 @@
 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests


 class WafPackage(spack.package_base.PackageBase):
@@ -31,7 +30,7 @@ class WafPackage(spack.package_base.PackageBase):

 @spack.builder.builder("waf")
-class WafBuilder(BuilderWithDefaults):
+class WafBuilder(BaseBuilder):
     """The WAF builder provides the following phases that can be overridden:

     * configure
@@ -137,7 +136,7 @@ def build_test(self):
         """
         pass

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

     def install_test(self):
         """Run unit tests after install.
@@ -147,4 +146,4 @@ def install_test(self):
         """
         pass

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

View File

@@ -6,30 +6,43 @@
 import collections.abc
 import copy
 import functools
-from typing import Dict, List, Optional, Tuple, Type
+from typing import List, Optional, Tuple
+
+from llnl.util import lang

 import spack.error
 import spack.multimethod
-import spack.package_base
-import spack.phase_callbacks
-import spack.repo
-import spack.spec
-import spack.util.environment

 #: Builder classes, as registered by the "builder" decorator
-BUILDER_CLS: Dict[str, Type["Builder"]] = {}
+BUILDER_CLS = {}
+
+#: An object of this kind is a shared global state used to collect callbacks during
+#: class definition time, and is flushed when the class object is created at the end
+#: of the class definition
+#:
+#: Args:
+#:     attribute_name (str): name of the attribute that will be attached to the builder
+#:     callbacks (list): container used to temporarily aggregate the callbacks
+CallbackTemporaryStage = collections.namedtuple(
+    "CallbackTemporaryStage", ["attribute_name", "callbacks"]
+)
+
+#: Shared global state to aggregate "@run_before" callbacks
+_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
+#: Shared global state to aggregate "@run_after" callbacks
+_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])
#: Map id(pkg) to a builder, to avoid creating multiple #: Map id(pkg) to a builder, to avoid creating multiple
#: builders for the same package object. #: builders for the same package object.
_BUILDERS: Dict[int, "Builder"] = {} _BUILDERS = {}
def builder(build_system_name: str): def builder(build_system_name):
"""Class decorator used to register the default builder """Class decorator used to register the default builder
for a given build-system. for a given build-system.
Args: Args:
build_system_name: name of the build-system build_system_name (str): name of the build-system
""" """
def _decorator(cls): def _decorator(cls):
@@ -40,9 +53,13 @@ def _decorator(cls):
return _decorator return _decorator
def create(pkg: spack.package_base.PackageBase) -> "Builder": def create(pkg):
"""Given a package object with an associated concrete spec, return the builder object that can """Given a package object with an associated concrete spec,
install it.""" return the builder object that can install it.
Args:
pkg (spack.package_base.PackageBase): package for which we want the builder
"""
if id(pkg) not in _BUILDERS: if id(pkg) not in _BUILDERS:
_BUILDERS[id(pkg)] = _create(pkg) _BUILDERS[id(pkg)] = _create(pkg)
return _BUILDERS[id(pkg)] return _BUILDERS[id(pkg)]
@@ -57,15 +74,7 @@ def __call__(self, spec, prefix):
return self.phase_fn(self.builder.pkg, spec, prefix) return self.phase_fn(self.builder.pkg, spec, prefix)
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]: def _create(pkg):
"""Return the builder class if a package module defines it."""
cls = getattr(pkg.module, name, None)
if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
return cls
return None
def _create(pkg: spack.package_base.PackageBase) -> "Builder":
"""Return a new builder object for the package object being passed as argument. """Return a new builder object for the package object being passed as argument.
The function inspects the build-system used by the package object and tries to: The function inspects the build-system used by the package object and tries to:
@@ -85,15 +94,14 @@ class hierarchy (look at AspellDictPackage for an example of that)
to look for build-related methods in the ``*Package``. to look for build-related methods in the ``*Package``.
Args: Args:
pkg: package object for which we need a builder pkg (spack.package_base.PackageBase): package object for which we need a builder
""" """
package_buildsystem = buildsystem_name(pkg) package_buildsystem = buildsystem_name(pkg)
default_builder_cls = BUILDER_CLS[package_buildsystem] default_builder_cls = BUILDER_CLS[package_buildsystem]
builder_cls_name = default_builder_cls.__name__ builder_cls_name = default_builder_cls.__name__
builder_class = get_builder_class(pkg, builder_cls_name) builder_cls = getattr(pkg.module, builder_cls_name, None)
if builder_class: if builder_cls:
return builder_class(pkg) return builder_cls(pkg)
# Specialized version of a given buildsystem can subclass some # Specialized version of a given buildsystem can subclass some
# base classes and specialize certain phases or methods or attributes. # base classes and specialize certain phases or methods or attributes.
@@ -150,8 +158,8 @@ def __forward(self, *args, **kwargs):
# with the same name is defined in the Package, it will override this definition # with the same name is defined in the Package, it will override this definition
# (when _ForwardToBaseBuilder is initialized) # (when _ForwardToBaseBuilder is initialized)
for method_name in ( for method_name in (
base_cls.phases # type: ignore base_cls.phases
+ base_cls.legacy_methods # type: ignore + base_cls.legacy_methods
+ getattr(base_cls, "legacy_long_methods", tuple()) + getattr(base_cls, "legacy_long_methods", tuple())
+ ("setup_build_environment", "setup_dependent_build_environment") + ("setup_build_environment", "setup_dependent_build_environment")
): ):
@@ -163,14 +171,14 @@ def __forward(self):
return __forward return __forward
for attribute_name in base_cls.legacy_attributes: # type: ignore for attribute_name in base_cls.legacy_attributes:
setattr( setattr(
_ForwardToBaseBuilder, _ForwardToBaseBuilder,
attribute_name, attribute_name,
property(forward_property_to_getattr(attribute_name)), property(forward_property_to_getattr(attribute_name)),
) )
class Adapter(base_cls, metaclass=_PackageAdapterMeta): # type: ignore class Adapter(base_cls, metaclass=_PackageAdapterMeta):
def __init__(self, pkg): def __init__(self, pkg):
# Deal with custom phases in packages here # Deal with custom phases in packages here
if hasattr(pkg, "phases"): if hasattr(pkg, "phases"):
@@ -195,18 +203,99 @@ def setup_dependent_build_environment(self, env, dependent_spec):
return Adapter(pkg) return Adapter(pkg)
def buildsystem_name(pkg: spack.package_base.PackageBase) -> str: def buildsystem_name(pkg):
"""Given a package object with an associated concrete spec, """Given a package object with an associated concrete spec,
return the name of its build system.""" return the name of its build system.
Args:
pkg (spack.package_base.PackageBase): package for which we want
the build system name
"""
try: try:
return pkg.spec.variants["build_system"].value return pkg.spec.variants["build_system"].value
except KeyError: except KeyError:
# We are reading an old spec without the build_system variant # We are reading an old spec without the build_system variant
return pkg.legacy_buildsystem # type: ignore return pkg.legacy_buildsystem
class PhaseCallbacksMeta(type):
"""Permit to register arbitrary functions during class definition and run them
later, before or after a given install phase.
Each method decorated with ``run_before`` or ``run_after`` gets temporarily
stored in a global shared state when a class being defined is parsed by the Python
interpreter. At class definition time that temporary storage gets flushed and a list
of callbacks is attached to the class being defined.
"""
def __new__(mcs, name, bases, attr_dict):
for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
staged_callbacks = temporary_stage.callbacks
# Here we have an adapter from an old-style package. This means there is no
# hierarchy of builders, and every callback that had to be combined between
# *Package and *Builder has been combined already by _PackageAdapterMeta
if name == "Adapter":
continue
# If we are here we have callbacks. To get a complete list, we accumulate all the
# callbacks from base classes, we deduplicate them, then prepend what we have
# registered here.
#
# The order should be:
# 1. Callbacks are registered in order within the same class
# 2. Callbacks defined in derived classes precede those defined in base
# classes
callbacks_from_base = []
for base in bases:
current_callbacks = getattr(base, temporary_stage.attribute_name, None)
if not current_callbacks:
continue
callbacks_from_base.extend(current_callbacks)
callbacks_from_base = list(lang.dedupe(callbacks_from_base))
# Set the callbacks in this class and flush the temporary stage
attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
del temporary_stage.callbacks[:]
return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)
@staticmethod
def run_after(phase, when=None):
"""Decorator to register a function for running after a given phase.
Args:
phase (str): phase after which the function must run.
when (str): condition under which the function is run (if None, it is always run).
"""
def _decorator(fn):
key = (phase, when)
item = (key, fn)
_RUN_AFTER.callbacks.append(item)
return fn
return _decorator
@staticmethod
def run_before(phase, when=None):
"""Decorator to register a function for running before a given phase.
Args:
phase (str): phase before which the function must run.
when (str): condition under which the function is run (if None, it is always run).
"""
def _decorator(fn):
key = (phase, when)
item = (key, fn)
_RUN_BEFORE.callbacks.append(item)
return fn
return _decorator
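[Note] A sketch of how these decorators are used in practice; `MyBuilder` is hypothetical, and `run_before`/`run_after` are the module-level aliases exported at the end of this file:

class MyBuilder(metaclass=PhaseCallbacksMeta):  # hypothetical builder class
    @run_before("install")
    def check_manifest(self):
        print("verifying sources before install")

    @run_after("install", when="+tests")
    def smoke_test(self):
        print("running a smoke test after install")

# PhaseCallbacksMeta.__new__ has flushed the staged callbacks onto the class:
assert [key for key, _ in MyBuilder.run_before_callbacks] == [("install", None)]
assert [key for key, _ in MyBuilder.run_after_callbacks] == [("install", "+tests")]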
class BuilderMeta( class BuilderMeta(
spack.phase_callbacks.PhaseCallbacksMeta, PhaseCallbacksMeta,
spack.multimethod.MultiMethodMeta, spack.multimethod.MultiMethodMeta,
type(collections.abc.Sequence), # type: ignore type(collections.abc.Sequence), # type: ignore
): ):
@@ -301,12 +390,8 @@ def __new__(mcs, name, bases, attr_dict):
) )
combine_callbacks = _PackageAdapterMeta.combine_callbacks combine_callbacks = _PackageAdapterMeta.combine_callbacks
attr_dict[spack.phase_callbacks._RUN_BEFORE.attribute_name] = combine_callbacks( attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
spack.phase_callbacks._RUN_BEFORE.attribute_name attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)
)
attr_dict[spack.phase_callbacks._RUN_AFTER.attribute_name] = combine_callbacks(
spack.phase_callbacks._RUN_AFTER.attribute_name
)
return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict) return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)
@@ -326,8 +411,8 @@ def __init__(self, name, builder):
self.name = name self.name = name
self.builder = builder self.builder = builder
self.phase_fn = self._select_phase_fn() self.phase_fn = self._select_phase_fn()
self.run_before = self._make_callbacks(spack.phase_callbacks._RUN_BEFORE.attribute_name) self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
self.run_after = self._make_callbacks(spack.phase_callbacks._RUN_AFTER.attribute_name) self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)
def _make_callbacks(self, callbacks_attribute): def _make_callbacks(self, callbacks_attribute):
result = [] result = []
@@ -388,103 +473,15 @@ def copy(self):
return copy.deepcopy(self) return copy.deepcopy(self)
class BaseBuilder(metaclass=BuilderMeta): class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
"""An interface for builders, without any phases defined. This class is exposed in the package """A builder is a class that, given a package object (i.e. associated with
API, so that packagers can create a single class to define ``setup_build_environment`` and concrete spec), knows how to install it.
``@run_before`` and ``@run_after`` callbacks that can be shared among different builders.
Example: The builder behaves like a sequence, and when iterated over return the
"phases" of the installation in the correct order.
.. code-block:: python Args:
pkg (spack.package_base.PackageBase): package object to be built
class AnyBuilder(BaseBuilder):
@run_after("install")
def fixup_install(self):
# do something after the package is installed
pass
def setup_build_environment(self, env):
env.set("MY_ENV_VAR", "my_value")
class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
pass
class AutotoolsBuilder(autotools.AutotoolsBuilder, AnyBuilder):
pass
"""
def __init__(self, pkg: spack.package_base.PackageBase) -> None:
self.pkg = pkg
@property
def spec(self) -> spack.spec.Spec:
return self.pkg.spec
@property
def stage(self):
return self.pkg.stage
@property
def prefix(self):
return self.pkg.prefix
def setup_build_environment(
self, env: spack.util.environment.EnvironmentModifications
) -> None:
"""Sets up the build environment for a package.
This method will be called before the current package prefix exists in
Spack's store.
Args:
env: environment modifications to be applied when the package is built. Package authors
can call methods on it to alter the build environment.
"""
if not hasattr(super(), "setup_build_environment"):
return
super().setup_build_environment(env) # type: ignore
def setup_dependent_build_environment(
self, env: spack.util.environment.EnvironmentModifications, dependent_spec: spack.spec.Spec
) -> None:
"""Sets up the build environment of a package that depends on this one.
This is similar to ``setup_build_environment``, but it is used to modify the build
environment of a package that *depends* on this one.
This gives packages the ability to set environment variables for the build of the
dependent, which can be useful to provide search hints for headers or libraries if they are
not in standard locations.
This method will be called before the dependent package prefix exists in Spack's store.
Args:
env: environment modifications to be applied when the dependent package is built.
Package authors can call methods on it to alter the build environment.
dependent_spec: the spec of the dependent package about to be built. This allows the
extendee (self) to query the dependent's state. Note that *this* package's spec is
available as ``self.spec``
"""
if not hasattr(super(), "setup_dependent_build_environment"):
return
super().setup_dependent_build_environment(env, dependent_spec) # type: ignore
def __repr__(self):
fmt = "{name}{/hash:7}"
return f"{self.__class__.__name__}({self.spec.format(fmt)})"
def __str__(self):
fmt = "{name}{/hash:7}"
return f'"{self.__class__.__name__}" builder for "{self.spec.format(fmt)}"'
class Builder(BaseBuilder, collections.abc.Sequence):
"""A builder is a class that, given a package object (i.e. associated with concrete spec),
knows how to install it.
The builder behaves like a sequence, and when iterated over return the "phases" of the
installation in the correct order.
""" """
#: Sequence of phases. Must be defined in derived classes #: Sequence of phases. Must be defined in derived classes
@@ -499,22 +496,95 @@ class Builder(BaseBuilder, collections.abc.Sequence):
build_time_test_callbacks: List[str] build_time_test_callbacks: List[str]
install_time_test_callbacks: List[str] install_time_test_callbacks: List[str]
#: List of glob expressions. Each expression must either be absolute or relative to the package #: List of glob expressions. Each expression must either be
#: source path. Matching artifacts found at the end of the build process will be copied in the #: absolute or relative to the package source path.
#: same directory tree as _spack_build_logfile and _spack_build_envfile. #: Matching artifacts found at the end of the build process will be
@property #: copied in the same directory tree as _spack_build_logfile and
def archive_files(self) -> List[str]: #: _spack_build_envfile.
return [] archive_files: List[str] = []
def __init__(self, pkg: spack.package_base.PackageBase) -> None: def __init__(self, pkg):
super().__init__(pkg) self.pkg = pkg
self.callbacks = {} self.callbacks = {}
for phase in self.phases: for phase in self.phases:
self.callbacks[phase] = InstallationPhase(phase, self) self.callbacks[phase] = InstallationPhase(phase, self)
@property
def spec(self):
return self.pkg.spec
@property
def stage(self):
return self.pkg.stage
@property
def prefix(self):
return self.pkg.prefix
def setup_build_environment(self, env):
"""Sets up the build environment for a package.
This method will be called before the current package prefix exists in
Spack's store.
Args:
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the package is built. Package authors
can call methods on it to alter the build environment.
"""
if not hasattr(super(), "setup_build_environment"):
return
super().setup_build_environment(env)
def setup_dependent_build_environment(self, env, dependent_spec):
"""Sets up the build environment of packages that depend on this one.
This is similar to ``setup_build_environment``, but it is used to
modify the build environments of packages that *depend* on this one.
This gives packages like Python and others that follow the extension
model a way to implement common environment or compile-time settings
for dependencies.
This method will be called before the dependent package prefix exists
in Spack's store.
Examples:
1. Installing python modules generally requires ``PYTHONPATH``
to point to the ``lib/pythonX.Y/site-packages`` directory in the
module's install prefix. This method could be used to set that
variable.
Args:
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the dependent package is built.
Package authors can call methods on it to alter the build environment.
dependent_spec (spack.spec.Spec): the spec of the dependent package
about to be built. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is
available as ``self.spec``
"""
if not hasattr(super(), "setup_dependent_build_environment"):
return
super().setup_dependent_build_environment(env, dependent_spec)
def __getitem__(self, idx): def __getitem__(self, idx):
key = self.phases[idx] key = self.phases[idx]
return self.callbacks[key] return self.callbacks[key]
def __len__(self): def __len__(self):
return len(self.phases) return len(self.phases)
def __repr__(self):
msg = "{0}({1})"
return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))
def __str__(self):
msg = '"{0}" builder for "{1}"'
return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))
# Export these names as standalone to be used in packages
run_after = PhaseCallbacksMeta.run_after
run_before = PhaseCallbacksMeta.run_before
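[Note] For orientation, a sketch of how calling code can drive the resulting builder. This is illustrative, not the actual install loop, and assumes `pkg` is a package object with a concrete spec:

builder = create(pkg)            # cached per package object via _BUILDERS
for phase in builder:            # iteration order matches builder.phases
    phase(pkg.spec, pkg.prefix)  # InstallationPhase.__call__(spec, prefix), shown above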


@@ -5,6 +5,7 @@
"""Caches used by Spack to store data""" """Caches used by Spack to store data"""
import os import os
from typing import Union
import llnl.util.lang import llnl.util.lang
from llnl.util.filesystem import mkdirp from llnl.util.filesystem import mkdirp
@@ -31,8 +32,12 @@ def _misc_cache():
return spack.util.file_cache.FileCache(path) return spack.util.file_cache.FileCache(path)
FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
#: Spack's cache for small data #: Spack's cache for small data
MISC_CACHE: spack.util.file_cache.FileCache = llnl.util.lang.Singleton(_misc_cache) # type: ignore MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_misc_cache)
)
def fetch_cache_location(): def fetch_cache_location():
@@ -69,4 +74,6 @@ def store(self, fetcher, relative_dest):
#: Spack's local cache for downloaded source archives #: Spack's local cache for downloaded source archives
FETCH_CACHE: spack.fetch_strategy.FsCache = llnl.util.lang.Singleton(_fetch_cache) # type: ignore FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_fetch_cache)
)
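[Note] The annotation churn above exists because `MISC_CACHE` and `FETCH_CACHE` are lazy singletons, not plain instances. A minimal sketch of that pattern (this is not the actual `llnl.util.lang.Singleton` implementation):

class LazySingleton:
    """Defer construction until first attribute access."""

    def __init__(self, factory):
        self._factory = factory
        self._instance = None

    def __getattr__(self, name):  # only called for attributes not found normally
        if self._instance is None:
            self._instance = self._factory()
        return getattr(self._instance, name)

misc_cache = LazySingleton(_misc_cache)  # no FileCache is created until first use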


@@ -32,13 +32,11 @@
import spack import spack
import spack.binary_distribution as bindist import spack.binary_distribution as bindist
import spack.builder
import spack.concretize import spack.concretize
import spack.config as cfg import spack.config as cfg
import spack.error import spack.environment as ev
import spack.main import spack.main
import spack.mirrors.mirror import spack.mirror
import spack.mirrors.utils
import spack.paths import spack.paths
import spack.repo import spack.repo
import spack.spec import spack.spec
@@ -97,6 +95,8 @@ def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror" TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"] SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = ( JOB_NAME_FORMAT = (
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}" "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
) )
@@ -201,11 +201,11 @@ def _remove_satisfied_deps(deps, satisfied_list):
return nodes, edges, stages return nodes, edges, stages
def _print_staging_summary(spec_labels, stages, rebuild_decisions): def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
if not stages: if not stages:
return return
mirrors = spack.mirrors.mirror.MirrorCollection(binary=True) mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
tty.msg("Checked the following mirrors for binaries:") tty.msg("Checked the following mirrors for binaries:")
for m in mirrors.values(): for m in mirrors.values():
tty.msg(f" {m.fetch_url}") tty.msg(f" {m.fetch_url}")
@@ -252,14 +252,21 @@ def _spec_matches(spec, match_string):
return spec.intersects(match_string) return spec.intersects(match_string)
def _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions): def _format_job_needs(
dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
):
needs_list = [] needs_list = []
for dep_job in dep_jobs: for dep_job in dep_jobs:
dep_spec_key = _spec_ci_label(dep_job) dep_spec_key = _spec_ci_label(dep_job)
rebuild = rebuild_decisions[dep_spec_key].rebuild rebuild = rebuild_decisions[dep_spec_key].rebuild
if not prune_dag or rebuild: if not prune_dag or rebuild:
needs_list.append({"job": get_job_name(dep_job, build_group), "artifacts": False}) needs_list.append(
{
"job": get_job_name(dep_job, build_group),
"artifacts": enable_artifacts_buildcache,
}
)
return needs_list return needs_list
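[Note] Illustrative shape of one returned entry (the job name is hypothetical, following JOB_NAME_FORMAT). With the old signature, `artifacts` mirrored `enable_artifacts_buildcache`; the new code always emits `False`:

needs_list = [
    {"job": "zlib@1.3.1 /abcdefg %gcc@12.3.0 arch=linux-ubuntu22.04-x86_64",
     "artifacts": False}
]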
@@ -403,6 +410,12 @@ def __init__(self, ci_config, spec_labels, stages):
self.ir = { self.ir = {
"jobs": {}, "jobs": {},
"temporary-storage-url-prefix": self.ci_config.get(
"temporary-storage-url-prefix", None
),
"enable-artifacts-buildcache": self.ci_config.get(
"enable-artifacts-buildcache", False
),
"rebuild-index": self.ci_config.get("rebuild-index", True), "rebuild-index": self.ci_config.get("rebuild-index", True),
"broken-specs-url": self.ci_config.get("broken-specs-url", None), "broken-specs-url": self.ci_config.get("broken-specs-url", None),
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []), "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
@@ -685,13 +698,14 @@ def generate_gitlab_ci_yaml(
prune_dag=False, prune_dag=False,
check_index_only=False, check_index_only=False,
artifacts_root=None, artifacts_root=None,
remote_mirror_override=None,
): ):
"""Generate a gitlab yaml file to run a dynamic child pipeline from """Generate a gitlab yaml file to run a dynamic child pipeline from
the spec matrix in the active environment. the spec matrix in the active environment.
Arguments: Arguments:
env (spack.environment.Environment): Activated environment object env (spack.environment.Environment): Activated environment object
which must contain a ci section describing how to map which must contain a gitlab-ci section describing how to map
specs to runners specs to runners
print_summary (bool): Should we print a summary of all the jobs in print_summary (bool): Should we print a summary of all the jobs in
the stages in which they were placed. the stages in which they were placed.
@@ -706,21 +720,39 @@ def generate_gitlab_ci_yaml(
artifacts_root (str): Path where artifacts like logs, environment artifacts_root (str): Path where artifacts like logs, environment
files (spack.yaml, spack.lock), etc should be written. GitLab files (spack.yaml, spack.lock), etc should be written. GitLab
requires this to be within the project directory. requires this to be within the project directory.
remote_mirror_override (str): Typically only needed when one spack.yaml
is used to populate several mirrors with binaries, based on some
criteria. Spack protected pipelines populate different mirrors based
on branch name, facilitated by this option. DEPRECATED
""" """
with spack.concretize.disable_compiler_existence_check(): with spack.concretize.disable_compiler_existence_check():
with env.write_transaction(): with env.write_transaction():
env.concretize() env.concretize()
env.write() env.write()
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
# Get the joined "ci" config with all of the current scopes resolved # Get the joined "ci" config with all of the current scopes resolved
ci_config = cfg.get("ci") ci_config = cfg.get("ci")
config_deprecated = False
if not ci_config: if not ci_config:
raise SpackCIError("Environment does not have a `ci` configuration") tty.warn("Environment does not have `ci` a configuration")
gitlabci_config = yaml_root.get("gitlab-ci")
if not gitlabci_config:
tty.die("Environment yaml does not have `gitlab-ci` config section. Cannot recover.")
tty.warn(
"The `gitlab-ci` configuration is deprecated in favor of `ci`.\n",
"To update run \n\t$ spack env update /path/to/ci/spack.yaml",
)
translate_deprecated_config(gitlabci_config)
ci_config = gitlabci_config
config_deprecated = True
# Default target is gitlab...and only target is gitlab # Default target is gitlab...and only target is gitlab
if not ci_config.get("target", "gitlab") == "gitlab": if not ci_config.get("target", "gitlab") == "gitlab":
raise SpackCIError('Spack CI module only generates target "gitlab"') tty.die('Spack CI module only generates target "gitlab"')
cdash_config = cfg.get("cdash") cdash_config = cfg.get("cdash")
cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
@@ -781,6 +813,12 @@ def generate_gitlab_ci_yaml(
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None) spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)
copy_only_pipeline = spack_pipeline_type == "spack_copy_only" copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
if copy_only_pipeline and config_deprecated:
tty.warn(
"SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
"deprecated ci configuration, a no-op pipeline will be generated\n",
"instead.",
)
def ensure_expected_target_path(path): def ensure_expected_target_path(path):
"""Returns passed paths with all Windows path separators exchanged """Returns passed paths with all Windows path separators exchanged
@@ -798,17 +836,39 @@ def ensure_expected_target_path(path):
path = path.replace("\\", "/") path = path.replace("\\", "/")
return path return path
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True) pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False
buildcache_destination = None buildcache_destination = None
if "buildcache-destination" not in pipeline_mirrors: if "buildcache-destination" in pipeline_mirrors:
raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'") if remote_mirror_override:
tty.die(
"Using the deprecated --buildcache-destination cli option and "
"having a mirror named 'buildcache-destination' at the same time "
"is not allowed"
)
buildcache_destination = pipeline_mirrors["buildcache-destination"]
else:
deprecated_mirror_config = True
# TODO: This will be an error in Spack 0.23
buildcache_destination = pipeline_mirrors["buildcache-destination"] # TODO: Remove this block in spack 0.23
remote_mirror_url = None
if deprecated_mirror_config:
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
tty.die("spack ci generate requires an env containing a mirror")
ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None) spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
if spack_buildcache_copy: if spack_buildcache_copy:
buildcache_copies = {} buildcache_copies = {}
buildcache_copy_src_prefix = buildcache_destination.fetch_url buildcache_copy_src_prefix = (
buildcache_destination.fetch_url
if buildcache_destination
else remote_mirror_override or remote_mirror_url
)
buildcache_copy_dest_prefix = spack_buildcache_copy buildcache_copy_dest_prefix = spack_buildcache_copy
# Check for a list of "known broken" specs that we should not bother # Check for a list of "known broken" specs that we should not bother
@@ -818,10 +878,55 @@ def ensure_expected_target_path(path):
if "broken-specs-url" in ci_config: if "broken-specs-url" in ci_config:
broken_specs_url = ci_config["broken-specs-url"] broken_specs_url = ci_config["broken-specs-url"]
enable_artifacts_buildcache = False
if "enable-artifacts-buildcache" in ci_config:
tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]
rebuild_index_enabled = True rebuild_index_enabled = True
if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False: if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
rebuild_index_enabled = False rebuild_index_enabled = False
temp_storage_url_prefix = None
if "temporary-storage-url-prefix" in ci_config:
tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
# generate.
# TODO: Remove this block in Spack 0.23
mirrors_to_check = None
if deprecated_mirror_config and remote_mirror_override:
if spack_pipeline_type == "spack_protected_branch":
# Overriding the main mirror in this case might result
# in skipping jobs on a release pipeline because specs are
# up to date in develop. Eventually we want to notice and take
# advantage of this by scheduling a job to copy the spec from
# develop to the release, but until we have that, this makes
# sure we schedule a rebuild job if the spec isn't already in
# override mirror.
mirrors_to_check = {"override": remote_mirror_override}
# If we have a remote override and we want generate pipeline using
# --check-index-only, then the override mirror needs to be added to
# the configured mirrors when bindist.update() is run, or else we
# won't fetch its index and include in our local cache.
spack.mirror.add(
spack.mirror.Mirror(remote_mirror_override, name="ci_pr_mirror"),
cfg.default_modify_scope(),
)
# TODO: Remove this block in Spack 0.23
shared_pr_mirror = None
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
spack.mirror.add(
spack.mirror.Mirror(shared_pr_mirror, name="ci_shared_pr_mirror"),
cfg.default_modify_scope(),
)
pipeline_artifacts_dir = artifacts_root pipeline_artifacts_dir = artifacts_root
if not pipeline_artifacts_dir: if not pipeline_artifacts_dir:
proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd()) proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
@@ -830,8 +935,9 @@ def ensure_expected_target_path(path):
pipeline_artifacts_dir = os.path.abspath(pipeline_artifacts_dir) pipeline_artifacts_dir = os.path.abspath(pipeline_artifacts_dir)
concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment") concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")
# Copy the environment manifest file into the concrete environment directory, # Now that we've added the mirrors we know about, they should be properly
# along with the spack.lock file. # reflected in the environment manifest file, so copy that into the
# concrete environment directory, along with the spack.lock file.
if not os.path.exists(concrete_env_dir): if not os.path.exists(concrete_env_dir):
os.makedirs(concrete_env_dir) os.makedirs(concrete_env_dir)
shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml")) shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
@@ -856,12 +962,18 @@ def ensure_expected_target_path(path):
env_includes.extend(include_scopes) env_includes.extend(include_scopes)
env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes] env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]
if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
translate_deprecated_config(env_yaml_root["spack"]["ci"])
with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd: with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False)) fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs") job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction") job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests") job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
# TODO: Remove this line in Spack 0.23
local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data") user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
# We communicate relative paths to the downstream jobs to avoid issues in # We communicate relative paths to the downstream jobs to avoid issues in
@@ -875,6 +987,8 @@ def ensure_expected_target_path(path):
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir) rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir) rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir) rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
# TODO: Remove this line in Spack 0.23
rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir) rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
# Speed up staging by first fetching binary indices from all mirrors # Speed up staging by first fetching binary indices from all mirrors
@@ -936,7 +1050,7 @@ def ensure_expected_target_path(path):
continue continue
up_to_date_mirrors = bindist.get_mirrors_for_spec( up_to_date_mirrors = bindist.get_mirrors_for_spec(
spec=release_spec, index_only=check_index_only spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
) )
spec_record.rebuild = not up_to_date_mirrors spec_record.rebuild = not up_to_date_mirrors
@@ -980,14 +1094,25 @@ def main_script_replacements(cmd):
job_object["needs"] = [] job_object["needs"] = []
if spec_label in dependencies: if spec_label in dependencies:
# In this case, "needs" is only used for scheduling if enable_artifacts_buildcache:
# purposes, so we only get the direct dependencies. # Get dependencies transitively, so they're all
dep_jobs = [] # available in the artifacts buildcache.
for dep_label in dependencies[spec_label]: dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
dep_jobs.append(spec_labels[dep_label]) else:
# In this case, "needs" is only used for scheduling
# purposes, so we only get the direct dependencies.
dep_jobs = []
for dep_label in dependencies[spec_label]:
dep_jobs.append(spec_labels[dep_label])
job_object["needs"].extend( job_object["needs"].extend(
_format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions) _format_job_needs(
dep_jobs,
build_group,
prune_dag,
rebuild_decisions,
enable_artifacts_buildcache,
)
) )
rebuild_spec = spec_record.rebuild rebuild_spec = spec_record.rebuild
@@ -1069,6 +1194,19 @@ def main_script_replacements(cmd):
}, },
) )
# TODO: Remove this block in Spack 0.23
if enable_artifacts_buildcache:
bc_root = os.path.join(local_mirror_dir, "build_cache")
job_object["artifacts"]["paths"].extend(
[
os.path.join(bc_root, p)
for p in [
bindist.tarball_name(release_spec, ".spec.json"),
bindist.tarball_directory_name(release_spec),
]
]
)
job_object["stage"] = stage_name job_object["stage"] = stage_name
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS} job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
job_object["interruptible"] = True job_object["interruptible"] = True
@@ -1083,7 +1221,15 @@ def main_script_replacements(cmd):
job_id += 1 job_id += 1
if print_summary: if print_summary:
_print_staging_summary(spec_labels, stages, rebuild_decisions) _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)
# Clean up remote mirror override if enabled
# TODO: Remove this block in Spack 0.23
if deprecated_mirror_config:
if remote_mirror_override:
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
if spack_pipeline_type == "spack_pull_request":
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
tty.debug(f"{job_id} build jobs generated in {stage_id} stages") tty.debug(f"{job_id} build jobs generated in {stage_id} stages")
@@ -1105,7 +1251,7 @@ def main_script_replacements(cmd):
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"], "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
} }
if copy_only_pipeline: if copy_only_pipeline and not config_deprecated:
stage_names.append("copy") stage_names.append("copy")
sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"]) sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
sync_job["stage"] = "copy" sync_job["stage"] = "copy"
@@ -1115,12 +1261,17 @@ def main_script_replacements(cmd):
if "variables" not in sync_job: if "variables" not in sync_job:
sync_job["variables"] = {} sync_job["variables"] = {}
sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = buildcache_destination.fetch_url sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
buildcache_destination.fetch_url
if buildcache_destination
else remote_mirror_override or remote_mirror_url
)
if "buildcache-source" not in pipeline_mirrors: if "buildcache-source" in pipeline_mirrors:
raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'") buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
else:
buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url # TODO: Remove this condition in Spack 0.23
buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
sync_job["dependencies"] = [] sync_job["dependencies"] = []
@@ -1128,6 +1279,27 @@ def main_script_replacements(cmd):
job_id += 1 job_id += 1
if job_id > 0: if job_id > 0:
# TODO: Remove this block in Spack 0.23
if temp_storage_url_prefix:
# There were some rebuild jobs scheduled, so we will need to
# schedule a job to clean up the temporary storage location
# associated with this pipeline.
stage_names.append("cleanup-temp-storage")
cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])
cleanup_job["stage"] = "cleanup-temp-storage"
cleanup_job["when"] = "always"
cleanup_job["retry"] = service_job_retries
cleanup_job["interruptible"] = True
cleanup_job["script"] = _unpack_script(
cleanup_job["script"],
op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
)
cleanup_job["dependencies"] = []
output_object["cleanup"] = cleanup_job
if ( if (
"script" in spack_ci_ir["jobs"]["signing"]["attributes"] "script" in spack_ci_ir["jobs"]["signing"]["attributes"]
and spack_pipeline_type == "spack_protected_branch" and spack_pipeline_type == "spack_protected_branch"
@@ -1144,9 +1316,11 @@ def main_script_replacements(cmd):
signing_job["interruptible"] = True signing_job["interruptible"] = True
if "variables" not in signing_job: if "variables" not in signing_job:
signing_job["variables"] = {} signing_job["variables"] = {}
signing_job["variables"][ signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
"SPACK_BUILDCACHE_DESTINATION" buildcache_destination.push_url # need the s3 url for aws s3 sync
] = buildcache_destination.push_url if buildcache_destination
else remote_mirror_override or remote_mirror_url
)
signing_job["dependencies"] = [] signing_job["dependencies"] = []
output_object["sign-pkgs"] = signing_job output_object["sign-pkgs"] = signing_job
@@ -1157,7 +1331,9 @@ def main_script_replacements(cmd):
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"] final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
final_job["stage"] = "stage-rebuild-index" final_job["stage"] = "stage-rebuild-index"
target_mirror = buildcache_destination.push_url target_mirror = remote_mirror_override or remote_mirror_url
if buildcache_destination:
target_mirror = buildcache_destination.push_url
final_job["script"] = _unpack_script( final_job["script"] = _unpack_script(
final_job["script"], final_job["script"],
op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror), op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
@@ -1183,11 +1359,17 @@ def main_script_replacements(cmd):
"SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir, "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
"SPACK_VERSION": spack_version, "SPACK_VERSION": spack_version,
"SPACK_CHECKOUT_VERSION": version_to_clone, "SPACK_CHECKOUT_VERSION": version_to_clone,
# TODO: Remove this line in Spack 0.23
"SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
"SPACK_JOB_LOG_DIR": rel_job_log_dir, "SPACK_JOB_LOG_DIR": rel_job_log_dir,
"SPACK_JOB_REPRO_DIR": rel_job_repro_dir, "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
"SPACK_JOB_TEST_DIR": rel_job_test_dir, "SPACK_JOB_TEST_DIR": rel_job_test_dir,
# TODO: Remove this line in Spack 0.23
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type), "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"), "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
# TODO: Remove this line in Spack 0.23
"SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag), "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything), "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"), "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
@@ -1196,6 +1378,10 @@ def main_script_replacements(cmd):
for item, val in output_vars.items(): for item, val in output_vars.items():
output_vars[item] = ensure_expected_target_path(val) output_vars[item] = ensure_expected_target_path(val)
# TODO: Remove this block in Spack 0.23
if deprecated_mirror_config and remote_mirror_override:
(output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override
spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None) spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
if spack_stack_name: if spack_stack_name:
output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
@@ -1222,8 +1408,15 @@ def main_script_replacements(cmd):
noop_job["retry"] = 0 noop_job["retry"] = 0
noop_job["allow_failure"] = True noop_job["allow_failure"] = True
tty.debug("No specs to rebuild, generating no-op job") if copy_only_pipeline and config_deprecated:
output_object = {"no-specs-to-rebuild": noop_job} tty.debug("Generating no-op job as copy-only is unsupported here.")
noop_job["script"] = [
'echo "copy-only pipelines are not supported with deprecated ci configs"'
]
output_object = {"unsupported-copy": noop_job}
else:
tty.debug("No specs to rebuild, generating no-op job")
output_object = {"no-specs-to-rebuild": noop_job}
# Ensure the child pipeline always runs # Ensure the child pipeline always runs
output_object["workflow"] = {"rules": [{"when": "always"}]} output_object["workflow"] = {"rules": [{"when": "always"}]}
@@ -1324,7 +1517,7 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
""" """
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})") tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
signing_key = bindist.select_signing_key() if sign_binaries else None signing_key = bindist.select_signing_key() if sign_binaries else None
mirror = spack.mirrors.mirror.Mirror.from_url(mirror_url) mirror = spack.mirror.Mirror.from_url(mirror_url)
try: try:
with bindist.make_uploader(mirror, signing_key=signing_key) as uploader: with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec]) uploader.push_or_raise([spec])
@@ -1344,7 +1537,7 @@ def remove_other_mirrors(mirrors_to_keep, scope=None):
mirrors_to_remove.append(name) mirrors_to_remove.append(name)
for mirror_name in mirrors_to_remove: for mirror_name in mirrors_to_remove:
spack.mirrors.utils.remove(mirror_name, scope) spack.mirror.remove(mirror_name, scope)
def copy_files_to_artifacts(src, artifacts_dir): def copy_files_to_artifacts(src, artifacts_dir):
@@ -1389,11 +1582,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
stage_dir = job_pkg.stage.path stage_dir = job_pkg.stage.path
tty.debug(f"stage dir: {stage_dir}") tty.debug(f"stage dir: {stage_dir}")
for file in [ for file in [job_pkg.log_path, job_pkg.env_mods_path, *job_pkg.builder.archive_files]:
job_pkg.log_path,
job_pkg.env_mods_path,
*spack.builder.create(job_pkg).archive_files,
]:
copy_files_to_artifacts(file, job_log_dir) copy_files_to_artifacts(file, job_log_dir)
@@ -2138,7 +2327,7 @@ def build_name(self):
Returns: (str) current spec's CDash build name.""" Returns: (str) current spec's CDash build name."""
spec = self.current_spec spec = self.current_spec
if spec: if spec:
build_name = f"{spec.name}@{spec.version} \ build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})" hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}") tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
return build_name return build_name
@@ -2265,6 +2454,83 @@ def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optiona
reporter.test_skipped_report(report_dir, spec, reason) reporter.test_skipped_report(report_dir, spec, reason)
class SpackCIError(spack.error.SpackError): def translate_deprecated_config(config):
def __init__(self, msg): # Remove all deprecated keys from config
super().__init__(msg) mappings = config.pop("mappings", [])
match_behavior = config.pop("match_behavior", "first")
build_job = {}
if "image" in config:
build_job["image"] = config.pop("image")
if "tags" in config:
build_job["tags"] = config.pop("tags")
if "variables" in config:
build_job["variables"] = config.pop("variables")
# Scripts always override in old CI
if "before_script" in config:
build_job["before_script:"] = config.pop("before_script")
if "script" in config:
build_job["script:"] = config.pop("script")
if "after_script" in config:
build_job["after_script:"] = config.pop("after_script")
signing_job = None
if "signing-job-attributes" in config:
signing_job = {"signing-job": config.pop("signing-job-attributes")}
service_job_attributes = None
if "service-job-attributes" in config:
service_job_attributes = config.pop("service-job-attributes")
# If this config already has pipeline-gen, do nothing more
if "pipeline-gen" in config:
return True if mappings or build_job or signing_job or service_job_attributes else False
config["target"] = "gitlab"
config["pipeline-gen"] = []
pipeline_gen = config["pipeline-gen"]
# Build Job
submapping = []
for section in mappings:
submapping_section = {"match": section["match"]}
if "runner-attributes" in section:
remapped_attributes = {}
if match_behavior == "first":
for key, value in section["runner-attributes"].items():
# Scripts always override in old CI
if key == "script":
remapped_attributes["script:"] = value
elif key == "before_script":
remapped_attributes["before_script:"] = value
elif key == "after_script":
remapped_attributes["after_script:"] = value
else:
remapped_attributes[key] = value
else:
# Handle "merge" behavior be allowing scripts to merge in submapping section
remapped_attributes = section["runner-attributes"]
submapping_section["build-job"] = remapped_attributes
if "remove-attributes" in section:
# Old format only allowed tags in this section, so no extra checks are needed
submapping_section["build-job-remove"] = section["remove-attributes"]
submapping.append(submapping_section)
pipeline_gen.append({"submapping": submapping, "match_behavior": match_behavior})
if build_job:
pipeline_gen.append({"build-job": build_job})
# Signing Job
if signing_job:
pipeline_gen.append(signing_job)
# Service Jobs
if service_job_attributes:
pipeline_gen.append({"reindex-job": service_job_attributes})
pipeline_gen.append({"noop-job": service_job_attributes})
pipeline_gen.append({"cleanup-job": service_job_attributes})
return True
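[Note] A small hypothetical before/after of what this translation produces, abbreviated to the keys handled above:

old = {
    "mappings": [
        {
            "match": ["os=ubuntu22.04"],
            "runner-attributes": {"tags": ["x86_64"], "script": ["spack ci rebuild"]},
        }
    ],
    "service-job-attributes": {"tags": ["service"]},
}
translate_deprecated_config(old)
# old has been rewritten in place into the modern "ci" shape:
# {"target": "gitlab",
#  "pipeline-gen": [
#      {"submapping": [{"match": ["os=ubuntu22.04"],
#                       "build-job": {"tags": ["x86_64"], "script:": ["spack ci rebuild"]}}],
#       "match_behavior": "first"},
#      {"reindex-job": {"tags": ["service"]}},
#      {"noop-job": {"tags": ["service"]}},
#      {"cleanup-job": {"tags": ["service"]}}]}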


@@ -4,13 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse import argparse
import difflib
import importlib import importlib
import os import os
import re import re
import sys import sys
from collections import Counter from typing import List, Union
from typing import List, Optional, Union
import llnl.string import llnl.string
import llnl.util.tty as tty import llnl.util.tty as tty
@@ -19,14 +17,12 @@
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize from llnl.util.tty.color import colorize
import spack.concretize
import spack.config # breaks a cycle. import spack.config # breaks a cycle.
import spack.environment as ev import spack.environment as ev
import spack.error import spack.error
import spack.extensions import spack.extensions
import spack.parser import spack.parser
import spack.paths import spack.paths
import spack.repo
import spack.spec import spack.spec
import spack.store import spack.store
import spack.traverse as traverse import spack.traverse as traverse
@@ -34,8 +30,6 @@
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
from ..enums import InstallRecordStatus
# cmd has a submodule called "list" so preserve the python list module # cmd has a submodule called "list" so preserve the python list module
python_list = list python_list = list
@@ -126,8 +120,6 @@ def get_module(cmd_name):
tty.debug("Imported {0} from built-in commands".format(pname)) tty.debug("Imported {0} from built-in commands".format(pname))
except ImportError: except ImportError:
module = spack.extensions.get_module(cmd_name) module = spack.extensions.get_module(cmd_name)
if not module:
raise CommandNotFoundError(cmd_name)
attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "") attr_setdefault(module, DESCRIPTION, "")
@@ -181,66 +173,10 @@ def parse_specs(
arg_string = " ".join([quote_kvp(arg) for arg in args]) arg_string = " ".join([quote_kvp(arg) for arg in args])
specs = spack.parser.parse(arg_string) specs = spack.parser.parse(arg_string)
if not concretize: for spec in specs:
return specs if concretize:
spec.concretize(tests=tests)
to_concretize = [(s, None) for s in specs] return specs
return _concretize_spec_pairs(to_concretize, tests=tests)
def _concretize_spec_pairs(to_concretize, tests=False):
"""Helper method that concretizes abstract specs from a list of abstract,concrete pairs.
Any spec with a concrete spec associated with it will concretize to that spec. Any spec
with ``None`` for its concrete spec will be newly concretized. This method respects unification
rules from config."""
unify = spack.config.get("concretizer:unify", False)
# Special case for concretizing a single spec
if len(to_concretize) == 1:
abstract, concrete = to_concretize[0]
return [concrete or abstract.concretized()]
# Special case if every spec is either concrete or has an abstract hash
if all(
concrete or abstract.concrete or abstract.abstract_hash
for abstract, concrete in to_concretize
):
# Get all the concrete specs
ret = [
concrete or (abstract if abstract.concrete else abstract.lookup_hash())
for abstract, concrete in to_concretize
]
# If unify: true, check that specs don't conflict
# Since all concrete, "when_possible" is not relevant
if unify is True: # True, "when_possible", False are possible values
runtimes = spack.repo.PATH.packages_with_tags("runtime")
specs_per_name = Counter(
spec.name
for spec in traverse.traverse_nodes(
ret, deptype=("link", "run"), key=traverse.by_dag_hash
)
if spec.name not in runtimes # runtimes are allowed multiple times
)
conflicts = sorted(name for name, count in specs_per_name.items() if count > 1)
if conflicts:
raise spack.error.SpecError(
"Specs conflict and `concretizer:unify` is configured true.",
f" specs depend on multiple versions of {', '.join(conflicts)}",
)
return ret
# Standard case
concretize_method = spack.concretize.concretize_separately # unify: false
if unify is True:
concretize_method = spack.concretize.concretize_together
elif unify == "when_possible":
concretize_method = spack.concretize.concretize_together_when_possible
concretized = concretize_method(to_concretize, tests=tests)
return [concrete for _, concrete in concretized]
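[Note] A sketch of the behavior implemented above, with hypothetical specs; the `unify` mode is read from `concretizer:unify` in config:

import spack.spec

# unify == False            -> each abstract spec is solved separately
# unify == True             -> one combined solve; duplicate link/run deps raise SpecError
# unify == "when_possible"  -> combined solves where satisfiable, separate otherwise
pairs = [(spack.spec.Spec("zlib"), None), (spack.spec.Spec("hdf5"), None)]
concrete = _concretize_spec_pairs(pairs)
assert all(s.concrete for s in concrete)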
def matching_spec_from_env(spec): def matching_spec_from_env(spec):
@@ -256,64 +192,39 @@ def matching_spec_from_env(spec):
return spec.concretized() return spec.concretized()
def matching_specs_from_env(specs): def disambiguate_spec(spec, env, local=False, installed=True, first=False):
"""
Same as ``matching_spec_from_env`` but respects spec unification rules.
For each spec, if there is a matching spec in the environment it is used. If no
matching spec is found, this will return the given spec but concretized in the
context of the active environment and other given specs, with unification rules applied.
"""
env = ev.active_environment()
spec_pairs = [(spec, env.matching_spec(spec) if env else None) for spec in specs]
additional_concrete_specs = (
[(concrete, concrete) for _, concrete in env.concretized_specs()] if env else []
)
return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]
def disambiguate_spec(
spec: spack.spec.Spec,
env: Optional[ev.Environment],
local: bool = False,
installed: Union[bool, InstallRecordStatus] = True,
first: bool = False,
) -> spack.spec.Spec:
"""Given a spec, figure out which installed package it refers to. """Given a spec, figure out which installed package it refers to.
Args: Arguments:
spec: a spec to disambiguate spec (spack.spec.Spec): a spec to disambiguate
env: a spack environment, if one is active, or None if no environment is active env (spack.environment.Environment): a spack environment,
local: do not search chained spack instances if one is active, or None if no environment is active
installed: install status argument passed to database query. local (bool): do not search chained spack instances
first: returns the first matching spec, even if more than one match is found installed (bool or spack.database.InstallStatus or typing.Iterable):
install status argument passed to database query.
See ``spack.database.Database._query`` for details.
""" """
hashes = env.all_hashes() if env else None hashes = env.all_hashes() if env else None
return disambiguate_spec_from_hashes(spec, hashes, local, installed, first) return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
def disambiguate_spec_from_hashes( def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, first=False):
spec: spack.spec.Spec,
hashes: List[str],
local: bool = False,
installed: Union[bool, InstallRecordStatus] = True,
first: bool = False,
) -> spack.spec.Spec:
"""Given a spec and a list of hashes, get concrete spec the spec refers to. """Given a spec and a list of hashes, get concrete spec the spec refers to.
Arguments: Arguments:
spec: a spec to disambiguate spec (spack.spec.Spec): a spec to disambiguate
hashes: a set of hashes of specs among which to disambiguate hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
local: if True, do not search chained spack instances local (bool): do not search chained spack instances
installed: install status argument passed to database query. installed (bool or spack.database.InstallStatus or typing.Iterable):
first: returns the first matching spec, even if more than one match is found install status argument passed to database query.
See ``spack.database.Database._query`` for details.
""" """
if local: if local:
matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed) matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
else: else:
matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed) matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
if not matching_specs: if not matching_specs:
tty.die(f"Spec '{spec}' matches no installed packages.") tty.die("Spec '%s' matches no installed packages." % spec)
elif first: elif first:
return matching_specs[0] return matching_specs[0]
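
The disambiguation contract is easier to see in isolation: no match is fatal, first=True short-circuits to the first hit, and (in the portion of the function not shown in this hunk) multiple matches are presumably rejected. A toy sketch of that contract, not the real database query:

    from typing import List

    def disambiguate(matches: List[str], first: bool = False) -> str:
        # Zero matches is fatal; ambiguity is only tolerated with first=True.
        if not matches:
            raise SystemExit("Spec matches no installed packages.")
        if len(matches) > 1 and not first:
            raise SystemExit(f"Spec matches multiple packages: {matches}")
        return matches[0]

    print(disambiguate(["zlib@1.3 /abcdef"]))      # unique match is returned
    print(disambiguate(["a", "b"], first=True))    # first=True picks "a"
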
@@ -372,13 +283,8 @@ def iter_groups(specs, indent, all_headers):
index = index_by(specs, ("architecture", "compiler")) index = index_by(specs, ("architecture", "compiler"))
ispace = indent * " " ispace = indent * " "
def _key(item):
if item is None:
return ""
return str(item)
# Traverse the index and print out each package # Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index, key=_key)): for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0: if i > 0:
print() print()
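
The new _key helper exists because Python 3 refuses to order None against strings, so sorted() over (architecture, compiler) keys that may contain None would raise TypeError. A standalone illustration of the same idea, as a slight variant that maps None to "" per component (the helper above stringifies the whole key):

    items = [("linux", "gcc"), (None, "clang"), ("darwin", None)]

    def _key(pair):
        # Map None to "" so tuple comparison never mixes None with str.
        return tuple("" if part is None else str(part) for part in pair)

    print(sorted(items, key=_key))
    # -> [(None, 'clang'), ('darwin', None), ('linux', 'gcc')]
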
@@ -436,7 +342,6 @@ def display_specs(specs, args=None, **kwargs):
""" """
# FIXME (compiler as nodes): remove the "show full compiler" arguments, and its use
def get_arg(name, default=None): def get_arg(name, default=None):
"""Prefer kwargs, then args, then default.""" """Prefer kwargs, then args, then default."""
if name in kwargs: if name in kwargs:
@@ -451,6 +356,7 @@ def get_arg(name, default=None):
hashes = get_arg("long", False) hashes = get_arg("long", False)
namespaces = get_arg("namespaces", False) namespaces = get_arg("namespaces", False)
flags = get_arg("show_flags", False) flags = get_arg("show_flags", False)
full_compiler = get_arg("show_full_compiler", False)
variants = get_arg("variants", False) variants = get_arg("variants", False)
groups = get_arg("groups", True) groups = get_arg("groups", True)
all_headers = get_arg("all_headers", False) all_headers = get_arg("all_headers", False)
@@ -472,7 +378,10 @@ def get_arg(name, default=None):
if format_string is None: if format_string is None:
nfmt = "{fullname}" if namespaces else "{name}" nfmt = "{fullname}" if namespaces else "{name}"
ffmt = "" ffmt = ""
if flags: if full_compiler or flags:
ffmt += "{%compiler.name}"
if full_compiler:
ffmt += "{@compiler.version}"
ffmt += " {compiler_flags}" ffmt += " {compiler_flags}"
vfmt = "{variants}" if variants else "" vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + ffmt + vfmt format_string = nfmt + "{@version}" + ffmt + vfmt
@@ -600,18 +509,6 @@ def __init__(self, name):
super().__init__("{0} is not a permissible Spack command name.".format(name)) super().__init__("{0} is not a permissible Spack command name.".format(name))
class MultipleSpecsMatch(Exception):
"""Raised when multiple specs match a constraint, in a context where
this is not allowed.
"""
class NoSpecMatches(Exception):
"""Raised when no spec matches a constraint, in a context where
this is not allowed.
"""
######################################## ########################################
# argparse types for argument validation # argparse types for argument validation
######################################## ########################################
@@ -696,24 +593,3 @@ def find_environment(args):
def first_line(docstring): def first_line(docstring):
"""Return the first line of the docstring.""" """Return the first line of the docstring."""
return docstring.split("\n")[0] return docstring.split("\n")[0]
class CommandNotFoundError(spack.error.SpackError):
"""Exception class thrown when a requested command is not recognized as
such.
"""
def __init__(self, cmd_name):
msg = (
f"{cmd_name} is not a recognized Spack command or extension command; "
"check with `spack commands`."
)
long_msg = None
similar = difflib.get_close_matches(cmd_name, all_commands())
if 1 <= len(similar) <= 5:
long_msg = "\nDid you mean one of the following commands?\n "
long_msg += "\n ".join(similar)
super().__init__(msg, long_msg)
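
The "did you mean" hint relies on difflib.get_close_matches from the standard library; the same call against a made-up command list:

    import difflib

    all_commands = ["install", "uninstall", "info", "find", "compiler"]
    print(difflib.get_close_matches("instal", all_commands))
    # -> ['install', 'uninstall']
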
View File
@@ -19,23 +19,12 @@
def setup_parser(subparser): def setup_parser(subparser):
# DEPRECATED: equivalent to --generic --target
subparser.add_argument( subparser.add_argument(
"-g", "-g", "--generic-target", action="store_true", help="show the best generic target"
"--generic-target",
action="store_true",
help="show the best generic target (deprecated)",
) )
subparser.add_argument( subparser.add_argument(
"--known-targets", action="store_true", help="show a list of all known targets and exit" "--known-targets", action="store_true", help="show a list of all known targets and exit"
) )
target_type = subparser.add_mutually_exclusive_group()
target_type.add_argument(
"--family", action="store_true", help="print generic ISA (x86_64, aarch64, ppc64le, ...)"
)
target_type.add_argument(
"--generic", action="store_true", help="print feature level (x86_64_v3, armv8.4a, ...)"
)
parts = subparser.add_mutually_exclusive_group() parts = subparser.add_mutually_exclusive_group()
parts2 = subparser.add_mutually_exclusive_group() parts2 = subparser.add_mutually_exclusive_group()
parts.add_argument( parts.add_argument(
@@ -91,7 +80,6 @@ def display_target_group(header, target_group):
def arch(parser, args): def arch(parser, args):
if args.generic_target: if args.generic_target:
# TODO: add deprecation warning in 0.24
print(archspec.cpu.host().generic) print(archspec.cpu.host().generic)
return return
@@ -108,10 +96,6 @@ def arch(parser, args):
host_platform = spack.platforms.host() host_platform = spack.platforms.host()
host_os = host_platform.operating_system(os_args) host_os = host_platform.operating_system(os_args)
host_target = host_platform.target(target_args) host_target = host_platform.target(target_args)
if args.family:
host_target = host_target.family
elif args.generic:
host_target = host_target.generic
architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target))) architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
if args.platform: if args.platform:
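
The removed --family/--generic handling maps the detected microarchitecture to its ISA family or generic feature level via archspec, which exposes both as properties on the host object (assuming the archspec package is importable; printed values depend on the machine):

    import archspec.cpu

    host = archspec.cpu.host()
    print(host.name)     # e.g. "skylake" (machine-dependent)
    print(host.family)   # e.g. "x86_64"
    print(host.generic)  # e.g. "x86_64_v3"
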
View File
@@ -16,7 +16,7 @@
import spack.bootstrap.config import spack.bootstrap.config
import spack.bootstrap.core import spack.bootstrap.core
import spack.config import spack.config
import spack.mirrors.utils import spack.mirror
import spack.spec import spack.spec
import spack.stage import spack.stage
import spack.util.path import spack.util.path
@@ -400,7 +400,7 @@ def _mirror(args):
llnl.util.tty.set_msg_enabled(False) llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized() spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse(): for node in spec.traverse():
spack.mirrors.utils.create(mirror_dir, [node]) spack.mirror.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True) llnl.util.tty.set_msg_enabled(True)
if args.binary_packages: if args.binary_packages:
View File
@@ -21,7 +21,7 @@
import spack.deptypes as dt import spack.deptypes as dt
import spack.environment as ev import spack.environment as ev
import spack.error import spack.error
import spack.mirrors.mirror import spack.mirror
import spack.oci.oci import spack.oci.oci
import spack.spec import spack.spec
import spack.stage import spack.stage
@@ -34,8 +34,6 @@
from spack.cmd.common import arguments from spack.cmd.common import arguments
from spack.spec import Spec, save_dependency_specfiles from spack.spec import Spec, save_dependency_specfiles
from ..enums import InstallRecordStatus
description = "create, download and install binary packages" description = "create, download and install binary packages"
section = "packaging" section = "packaging"
level = "long" level = "long"
@@ -310,10 +308,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
def _matching_specs(specs: List[Spec]) -> List[Spec]: def _matching_specs(specs: List[Spec]) -> List[Spec]:
"""Disambiguate specs and return a list of matching specs""" """Disambiguate specs and return a list of matching specs"""
return [ return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=InstallRecordStatus.ANY)
for s in specs
]
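
InstallRecordStatus (imported above from spack's enums) is used as a combinable install-status filter. A minimal sketch of how such a flag enum behaves; the member names and values here are assumptions for illustration, not Spack's actual definition:

    import enum

    class RecordStatus(enum.Flag):   # assumed shape, for illustration only
        INSTALLED = 1
        DEPRECATED = 2
        MISSING = 4
        ANY = 7                      # INSTALLED | DEPRECATED | MISSING

    wanted = RecordStatus.INSTALLED | RecordStatus.DEPRECATED
    print(RecordStatus.DEPRECATED in wanted)   # True
    print(RecordStatus.MISSING in wanted)      # False
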
def _format_spec(spec: Spec) -> str: def _format_spec(spec: Spec) -> str:
@@ -392,7 +387,7 @@ def push_fn(args):
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots() roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
mirror = args.mirror mirror = args.mirror
assert isinstance(mirror, spack.mirrors.mirror.Mirror) assert isinstance(mirror, spack.mirror.Mirror)
push_url = mirror.push_url push_url = mirror.push_url
@@ -750,7 +745,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
copy_buildcache_file(copy_file["src"], dest) copy_buildcache_file(copy_file["src"], dest)
def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False): def update_index(mirror: spack.mirror.Mirror, update_keys=False):
# Special case OCI images for now. # Special case OCI images for now.
try: try:
image_ref = spack.oci.oci.image_from_mirror(mirror) image_ref = spack.oci.oci.image_from_mirror(mirror)
View File
@@ -20,7 +20,7 @@
import spack.config as cfg import spack.config as cfg
import spack.environment as ev import spack.environment as ev
import spack.hash_types as ht import spack.hash_types as ht
import spack.mirrors.mirror import spack.mirror
import spack.util.gpg as gpg_util import spack.util.gpg as gpg_util
import spack.util.timer as timer import spack.util.timer as timer
import spack.util.url as url_util import spack.util.url as url_util
@@ -62,6 +62,13 @@ def setup_parser(subparser):
"path to the file where generated jobs file should be written. " "path to the file where generated jobs file should be written. "
"default is .gitlab-ci.yml in the root of the repository", "default is .gitlab-ci.yml in the root of the repository",
) )
generate.add_argument(
"--copy-to",
default=None,
help="path to additional directory for job files\n\n"
"this option provides an absolute path to a directory where the generated "
"jobs yaml file should be copied. default is not to copy",
)
generate.add_argument( generate.add_argument(
"--optimize", "--optimize",
action="store_true", action="store_true",
@@ -76,6 +83,12 @@ def setup_parser(subparser):
default=False, default=False,
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)", help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
) )
generate.add_argument(
"--buildcache-destination",
default=None,
help="override the mirror configured in the environment\n\n"
"allows for pushing binaries from the generated pipeline to a different location",
)
prune_group = generate.add_mutually_exclusive_group() prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument( prune_group.add_argument(
"--prune-dag", "--prune-dag",
@@ -201,10 +214,20 @@ def ci_generate(args):
env = spack.cmd.require_active_env(cmd_name="ci generate") env = spack.cmd.require_active_env(cmd_name="ci generate")
if args.copy_to:
tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")
if args.buildcache_destination:
tty.warn(
"The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
)
output_file = args.output_file output_file = args.output_file
copy_yaml_to = args.copy_to
prune_dag = args.prune_dag prune_dag = args.prune_dag
index_only = args.index_only index_only = args.index_only
artifacts_root = args.artifacts_root artifacts_root = args.artifacts_root
buildcache_destination = args.buildcache_destination
if not output_file: if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml") output_file = os.path.abspath(".gitlab-ci.yml")
@@ -222,8 +245,15 @@ def ci_generate(args):
prune_dag=prune_dag, prune_dag=prune_dag,
check_index_only=index_only, check_index_only=index_only,
artifacts_root=artifacts_root, artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination,
) )
if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)
if not os.path.exists(copy_to_dir):
os.makedirs(copy_to_dir)
shutil.copyfile(output_file, copy_yaml_to)
def ci_reindex(args): def ci_reindex(args):
"""rebuild the buildcache index for the remote mirror """rebuild the buildcache index for the remote mirror
@@ -240,7 +270,7 @@ def ci_reindex(args):
ci_mirrors = yaml_root["mirrors"] ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()] mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0] remote_mirror_url = mirror_urls[0]
mirror = spack.mirrors.mirror.Mirror(remote_mirror_url) mirror = spack.mirror.Mirror(remote_mirror_url)
buildcache.update_index(mirror, update_keys=True) buildcache.update_index(mirror, update_keys=True)
@@ -268,13 +298,22 @@ def ci_rebuild(args):
job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR") job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR") job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR") repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
# TODO: Remove this in Spack 0.23
local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR") concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
ci_job_name = os.environ.get("CI_JOB_NAME") ci_job_name = os.environ.get("CI_JOB_NAME")
signing_key = os.environ.get("SPACK_SIGNING_KEY") signing_key = os.environ.get("SPACK_SIGNING_KEY")
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME") job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH") job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE") spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
# TODO: Remove this in Spack 0.23
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
# TODO: Remove this in Spack 0.23
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME") spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
# TODO: Remove this in Spack 0.23
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING") rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING") require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
@@ -294,10 +333,12 @@ def ci_rebuild(args):
job_log_dir = os.path.join(ci_project_dir, job_log_dir) job_log_dir = os.path.join(ci_project_dir, job_log_dir)
job_test_dir = os.path.join(ci_project_dir, job_test_dir) job_test_dir = os.path.join(ci_project_dir, job_test_dir)
repro_dir = os.path.join(ci_project_dir, repro_dir) repro_dir = os.path.join(ci_project_dir, repro_dir)
local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir) concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)
# Debug print some of the key environment variables we should have received # Debug print some of the key environment variables we should have received
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir)) tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name)) tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
# Query the environment manifest to find out whether we're reporting to a # Query the environment manifest to find out whether we're reporting to a
@@ -328,12 +369,52 @@ def ci_rebuild(args):
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True) pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False
buildcache_destination = None buildcache_destination = None
if "buildcache-destination" not in pipeline_mirrors: if "buildcache-destination" in pipeline_mirrors:
tty.die("spack ci rebuild requires a mirror named 'buildcache-destination") buildcache_destination = pipeline_mirrors["buildcache-destination"]
else:
deprecated_mirror_config = True
# TODO: This will be an error in Spack 0.23
buildcache_destination = pipeline_mirrors["buildcache-destination"] # If no override url exists, then just push binary package to the
# normal remote mirror url.
# TODO: Remove in Spack 0.23
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
if buildcache_destination:
buildcache_mirror_url = buildcache_destination.push_url
# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build
# artifacts from upstream to downstream jobs.
# TODO: Remove this in Spack 0.23
pipeline_mirror_url = None
# TODO: Remove this in Spack 0.23
temp_storage_url_prefix = None
if "temporary-storage-url-prefix" in ci_config:
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)
# TODO: Remove this in Spack 0.23
enable_artifacts_mirror = False
if "enable-artifacts-buildcache" in ci_config:
enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
if enable_artifacts_mirror or (
spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
):
# If you explicitly enabled the artifacts buildcache feature, or
# if this is a PR pipeline but you did not enable either of the
# per-pipeline temporary storage features, we force the use of
# artifacts buildcache. Otherwise jobs will not have binary
# dependencies from previous stages available since we do not
# allow pushing binaries to the remote mirror during PR pipelines.
enable_artifacts_mirror = True
pipeline_mirror_url = url_util.path_to_file_url(local_mirror_dir)
mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
tty.debug(mirror_msg)
# Get the concrete spec to be built by this job. # Get the concrete spec to be built by this job.
try: try:
@@ -408,7 +489,48 @@ def ci_rebuild(args):
fd.write(spack_info.encode("utf8")) fd.write(spack_info.encode("utf8"))
fd.write(b"\n") fd.write(b"\n")
matches = None if full_rebuild else bindist.get_mirrors_for_spec(job_spec, index_only=False) pipeline_mirrors = []
# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# built for this spec.
# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
spack.mirror.add(mirror, cfg.default_modify_scope())
pipeline_mirrors.append(pipeline_mirror_url)
# Check configured mirrors for a built spec with a matching hash
# TODO: Remove this block in Spack 0.23
mirrors_to_check = None
if remote_mirror_override:
if spack_pipeline_type == "spack_protected_branch":
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
mirrors_to_check = {"override": remote_mirror_override}
# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add(
spack.mirror.Mirror(remote_mirror_override, name="mirror_override"),
cfg.default_modify_scope(),
)
pipeline_mirrors.append(remote_mirror_override)
# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
if shared_pr_mirror_url != "None":
pipeline_mirrors.append(shared_pr_mirror_url)
matches = (
None
if full_rebuild
else bindist.get_mirrors_for_spec(
job_spec, mirrors_to_check=mirrors_to_check, index_only=False
)
)
if matches: if matches:
# Got a hash match on at least one configured mirror. All # Got a hash match on at least one configured mirror. All
@@ -420,10 +542,25 @@ def ci_rebuild(args):
tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name)) tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
for match in matches: for match in matches:
tty.msg(" {0}".format(match["mirror_url"])) tty.msg(" {0}".format(match["mirror_url"]))
# TODO: Remove this block in Spack 0.23
if enable_artifacts_mirror:
matching_mirror = matches[0]["mirror_url"]
build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
tty.debug("Getting {0} buildcache from {1}".format(job_spec_pkg_name, matching_mirror))
tty.debug("Downloading to {0}".format(build_cache_dir))
bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)
# Now we are done and successful # Now we are done and successful
return 0 return 0
# Before beginning the install, if this is a "rebuild everything" pipeline, we
# only want to keep the mirror being used by the current pipeline as its binary
# package destination. This ensures that when we rebuild everything, we only
# consume binary dependencies built in this pipeline.
# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and full_rebuild:
spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())
# No hash match anywhere means we need to rebuild spec # No hash match anywhere means we need to rebuild spec
# Start with spack arguments # Start with spack arguments
@@ -544,11 +681,17 @@ def ci_rebuild(args):
cdash_handler.copy_test_results(reports_dir, job_test_dir) cdash_handler.copy_test_results(reports_dir, job_test_dir)
if install_exit_code == 0: if install_exit_code == 0:
# If the install succeeded, push it to the buildcache destination. Failure to push # If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
# will result in a non-zero exit code. Pushing is best-effort. # will result in a non-zero exit code. Pushing is best-effort.
mirror_urls = [buildcache_mirror_url]
# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
mirror_urls.append(pipeline_mirror_url)
for result in spack_ci.create_buildcache( for result in spack_ci.create_buildcache(
input_spec=job_spec, input_spec=job_spec,
destination_mirror_urls=[buildcache_destination.push_url], destination_mirror_urls=mirror_urls,
sign_binaries=spack_ci.can_sign_binaries(), sign_binaries=spack_ci.can_sign_binaries(),
): ):
if not result.success: if not result.success:
View File
@@ -105,8 +105,7 @@ def clean(parser, args):
# Then do the cleaning falling through the cases # Then do the cleaning falling through the cases
if args.specs: if args.specs:
specs = spack.cmd.parse_specs(args.specs, concretize=False) specs = spack.cmd.parse_specs(args.specs, concretize=False)
specs = spack.cmd.matching_specs_from_env(specs) specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
for spec in specs: for spec in specs:
msg = "Cleaning build stage [{0}]" msg = "Cleaning build stage [{0}]"
tty.msg(msg.format(spec.short_spec)) tty.msg(msg.format(spec.short_spec))
View File
@@ -14,8 +14,7 @@
import spack.config import spack.config
import spack.deptypes as dt import spack.deptypes as dt
import spack.environment as ev import spack.environment as ev
import spack.mirrors.mirror import spack.mirror
import spack.mirrors.utils
import spack.reporters import spack.reporters
import spack.spec import spack.spec
import spack.store import spack.store
@@ -582,51 +581,23 @@ def add_concretizer_args(subparser):
def add_connection_args(subparser, add_help): def add_connection_args(subparser, add_help):
def add_argument_string_or_variable(parser, arg: str, *, deprecate_str: bool = True, **kwargs): subparser.add_argument(
group = parser.add_mutually_exclusive_group() "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
group.add_argument(arg, **kwargs)
# Update help string
if "help" in kwargs:
kwargs["help"] = "environment variable containing " + kwargs["help"]
group.add_argument(arg + "-variable", **kwargs)
s3_connection_parser = subparser.add_argument_group("S3 Connection")
add_argument_string_or_variable(
s3_connection_parser,
"--s3-access-key-id",
help="ID string to use to connect to this S3 mirror",
) )
add_argument_string_or_variable( subparser.add_argument(
s3_connection_parser, "--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
"--s3-access-key-secret",
help="secret string to use to connect to this S3 mirror",
) )
add_argument_string_or_variable( subparser.add_argument(
s3_connection_parser, "--s3-access-token", help="access token to use to connect to this S3 mirror"
"--s3-access-token",
help="access token to use to connect to this S3 mirror",
) )
s3_connection_parser.add_argument( subparser.add_argument(
"--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
) )
s3_connection_parser.add_argument( subparser.add_argument(
"--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror" "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
) )
subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
oci_connection_parser = subparser.add_argument_group("OCI Connection") subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")
add_argument_string_or_variable(
oci_connection_parser,
"--oci-username",
deprecate_str=False,
help="username to use to connect to this OCI mirror",
)
add_argument_string_or_variable(
oci_connection_parser,
"--oci-password",
help="password to use to connect to this OCI mirror",
)
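
The add_argument_string_or_variable helper registers each credential twice: --flag takes the secret directly, --flag-variable names an environment variable holding it, and argparse enforces that only one is given. A self-contained sketch of the pattern (the helper and option names here are illustrative):

    import argparse

    def add_string_or_variable(parser: argparse.ArgumentParser, flag: str, help_text: str) -> None:
        # Register --flag and --flag-variable as mutually exclusive alternatives.
        group = parser.add_mutually_exclusive_group()
        group.add_argument(flag, help=help_text)
        group.add_argument(flag + "-variable", help="environment variable containing " + help_text)

    parser = argparse.ArgumentParser()
    add_string_or_variable(parser, "--s3-access-token", "access token for this mirror")
    print(parser.parse_args(["--s3-access-token-variable", "MY_TOKEN"]))
    # -> Namespace(s3_access_token=None, s3_access_token_variable='MY_TOKEN')
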
def use_buildcache(cli_arg_value): def use_buildcache(cli_arg_value):
@@ -689,32 +660,34 @@ def mirror_name_or_url(m):
# accidentally to a dir in the current working directory. # accidentally to a dir in the current working directory.
# If there's a \ or / in the name, it's interpreted as a path or url. # If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m or m in (".", ".."): if "/" in m or "\\" in m:
return spack.mirrors.mirror.Mirror(m) return spack.mirror.Mirror(m)
# Otherwise, the named mirror is required to exist. # Otherwise, the named mirror is required to exist.
try: try:
return spack.mirrors.utils.require_mirror_name(m) return spack.mirror.require_mirror_name(m)
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e raise argparse.ArgumentTypeError(
str(e) + ". Did you mean {}?".format(os.path.join(".", m))
)
def mirror_url(url): def mirror_url(url):
try: try:
return spack.mirrors.mirror.Mirror.from_url(url) return spack.mirror.Mirror.from_url(url)
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e raise argparse.ArgumentTypeError(str(e))
def mirror_directory(path): def mirror_directory(path):
try: try:
return spack.mirrors.mirror.Mirror.from_local_path(path) return spack.mirror.Mirror.from_local_path(path)
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e raise argparse.ArgumentTypeError(str(e))
def mirror_name(name): def mirror_name(name):
try: try:
return spack.mirrors.utils.require_mirror_name(name) return spack.mirror.require_mirror_name(name)
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e raise argparse.ArgumentTypeError(str(e))
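
These functions are argparse "type" callables: argparse calls them on the raw string and turns ArgumentTypeError into a clean usage error at parse time. A toy validator wired the same way (the path-or-URL rule below is a simplification, not Spack's full logic):

    import argparse

    def mirror_like(value: str) -> str:
        # Toy rule: require something path- or URL-shaped.
        if "/" not in value and "\\" not in value and value not in (".", ".."):
            raise argparse.ArgumentTypeError(f"'{value}' is neither a path nor a URL")
        return value

    parser = argparse.ArgumentParser()
    parser.add_argument("mirror", type=mirror_like)
    print(parser.parse_args(["./local-mirror"]).mirror)   # ./local-mirror
    # parser.parse_args(["no-such-mirror"]) would exit with a usage error
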
View File
@@ -5,14 +5,13 @@
import argparse import argparse
import sys import sys
import warnings
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import index_by from llnl.util.lang import index_by
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize from llnl.util.tty.color import colorize
import spack.compilers.config import spack.compilers
import spack.config import spack.config
import spack.spec import spack.spec
from spack.cmd.common import arguments from spack.cmd.common import arguments
@@ -36,13 +35,13 @@ def setup_parser(subparser):
"--mixed-toolchain", "--mixed-toolchain",
action="store_true", action="store_true",
default=sys.platform == "darwin", default=sys.platform == "darwin",
help="(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)", help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
) )
mixed_toolchain_group.add_argument( mixed_toolchain_group.add_argument(
"--no-mixed-toolchain", "--no-mixed-toolchain",
action="store_false", action="store_false",
dest="mixed_toolchain", dest="mixed_toolchain",
help="(DEPRECATED) Do not allow mixed toolchains (for example: clang, clang++, gfortran)", help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
) )
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER) find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
find_parser.add_argument( find_parser.add_argument(
@@ -81,97 +80,77 @@ def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and """Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration. add them to Spack's configuration.
""" """
if args.mixed_toolchain:
warnings.warn(
"The '--mixed-toolchain' option has been deprecated in Spack v0.23, and currently "
"has no effect. The option will be removed in Spack v0.25"
)
paths = args.add_paths or None paths = args.add_paths or None
new_compilers = spack.compilers.config.find_compilers( new_compilers = spack.compilers.find_compilers(
path_hints=paths, scope=args.scope, max_workers=args.jobs path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,
) )
if new_compilers: if new_compilers:
n = len(new_compilers) n = len(new_compilers)
s = "s" if n > 1 else "" s = "s" if n > 1 else ""
filename = spack.config.CONFIG.get_config_filename(args.scope, "packages") filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
tty.msg(f"Added {n:d} new compiler{s} to {filename}") tty.msg(f"Added {n:d} new compiler{s} to {filename}")
compiler_strs = sorted(f"{spec.name}@{spec.versions}" for spec in new_compilers) compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
colify(reversed(compiler_strs), indent=4) colify(reversed(compiler_strs), indent=4)
else: else:
tty.msg("Found no new compilers") tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:") tty.msg("Compilers are defined in the following files:")
colify(spack.compilers.config.compiler_config_files(), indent=4) colify(spack.compilers.compiler_config_files(), indent=4)
def compiler_remove(args): def compiler_remove(args):
remover = spack.compilers.config.CompilerRemover(spack.config.CONFIG) compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
candidates = remover.mark_compilers(match=args.compiler_spec, scope=args.scope) candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
if not candidates:
tty.die(f"No compiler matches '{args.compiler_spec}'")
compiler_strs = reversed(sorted(f"{spec.name}@{spec.versions}" for spec in candidates)) if not candidate_compilers:
tty.die("No compilers match spec %s" % compiler_spec)
if not args.all and len(candidates) > 1: if not args.all and len(candidate_compilers) > 1:
tty.error(f"multiple compilers match the spec '{args.compiler_spec}':") tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
print() colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
colify(compiler_strs, indent=4) tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
print()
print(
"Either use a stricter spec to select only one, or use `spack compiler remove -a`"
" to remove all of them."
)
sys.exit(1) sys.exit(1)
remover.flush() for current_compiler in candidate_compilers:
tty.msg("The following compilers have been removed:") spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
print() tty.msg(f"{current_compiler.spec.display_str} has been removed")
colify(compiler_strs, indent=4)
print()
def compiler_info(args): def compiler_info(args):
"""Print info about all compilers matching a spec.""" """Print info about all compilers matching a spec."""
query = spack.spec.Spec(args.compiler_spec) cspec = spack.spec.CompilerSpec(args.compiler_spec)
all_compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False) compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
compilers = [x for x in all_compilers if x.satisfies(query)]
if not compilers: if not compilers:
tty.die(f"No compilers match spec {query.cformat()}") tty.die("No compilers match spec %s" % cspec)
else: else:
for c in compilers: for c in compilers:
print(f"{c.cformat()}:") print(c.spec.display_str + ":")
print(f" prefix: {c.external_path}") print("\tpaths:")
extra_attributes = getattr(c, "extra_attributes", {}) for cpath in ["cc", "cxx", "f77", "fc"]:
if "compilers" in extra_attributes: print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
print(" compilers:") if c.flags:
for language, exe in extra_attributes.get("compilers", {}).items(): print("\tflags:")
print(f" {language}: {exe}") for flag, flag_value in c.flags.items():
if "flags" in extra_attributes: print("\t\t%s = %s" % (flag, flag_value))
print(" flags:") if len(c.environment) != 0:
for flag, flag_value in extra_attributes["flags"].items(): if len(c.environment.get("set", {})) != 0:
print(f" {flag} = {flag_value}") print("\tenvironment:")
# FIXME (compiler as nodes): recover this printing print("\t set:")
# if "environment" in extra_attributes: for key, value in c.environment["set"].items():
# if len(c.environment.get("set", {})) != 0: print("\t %s = %s" % (key, value))
# print("\tenvironment:") if c.extra_rpaths:
# print("\t set:") print("\tExtra rpaths:")
# for key, value in c.environment["set"].items(): for extra_rpath in c.extra_rpaths:
# print("\t %s = %s" % (key, value)) print("\t\t%s" % extra_rpath)
if "extra_rpaths" in extra_attributes: print("\tmodules = %s" % c.modules)
print(" extra rpaths:") print("\toperating system = %s" % c.operating_system)
for extra_rpath in extra_attributes["extra_rpaths"]:
print(f" {extra_rpath}")
if getattr(c, "external_modules", []):
print(" modules: ")
for module in c.external_modules:
print(f" {module}")
print()
def compiler_list(args): def compiler_list(args):
compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False) compilers = spack.compilers.all_compilers(scope=args.scope, init_config=False)
# If there are no compilers in any scope, and we're outputting to a tty, give a # If there are no compilers in any scope, and we're outputting to a tty, give a
# hint to the user. # hint to the user.
@@ -184,7 +163,7 @@ def compiler_list(args):
tty.msg(msg) tty.msg(msg)
return return
index = index_by(compilers, spack.compilers.config.name_os_target) index = index_by(compilers, lambda c: (c.spec.name, c.operating_system, c.target))
tty.msg("Available compilers") tty.msg("Available compilers")
@@ -203,10 +182,10 @@ def compiler_list(args):
name, os, target = key name, os, target = key
os_str = os os_str = os
if target: if target:
os_str += f"-{target}" os_str += "-%s" % target
cname = f"{spack.spec.COMPILER_COLOR}{{{name}}} {os_str}" cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
tty.hline(colorize(cname), char="-") tty.hline(colorize(cname), char="-")
colify(reversed(sorted(c.format("{name}@{version}") for c in compilers))) colify(reversed(sorted(c.spec.display_str for c in compilers)))
def compiler(parser, args): def compiler(parser, args):
View File
@@ -518,6 +518,8 @@ def config_prefer_upstream(args):
for spec in pref_specs: for spec in pref_specs:
# Collect all the upstream compilers and versions for this package. # Collect all the upstream compilers and versions for this package.
pkg = pkgs.get(spec.name, {"version": []}) pkg = pkgs.get(spec.name, {"version": []})
all = pkgs.get("all", {"compiler": []})
pkgs["all"] = all
pkgs[spec.name] = pkg pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version. # We have no existing variant if this is our first added version.
@@ -527,6 +529,10 @@ def config_prefer_upstream(args):
if version not in pkg["version"]: if version not in pkg["version"]:
pkg["version"].append(version) pkg["version"].append(version)
compiler = str(spec.compiler)
if compiler not in all["compiler"]:
all["compiler"].append(compiler)
# Get and list all the variants that differ from the default. # Get and list all the variants that differ from the default.
variants = [] variants = []
for var_name, variant in spec.variants.items(): for var_name, variant in spec.variants.items():
View File
@@ -99,5 +99,5 @@ def deconcretize(parser, args):
" Use `spack deconcretize --all` to deconcretize ALL specs.", " Use `spack deconcretize --all` to deconcretize ALL specs.",
) )
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None] specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
deconcretize_specs(args, specs) deconcretize_specs(args, specs)
View File
@@ -23,10 +23,9 @@
import spack.installer import spack.installer
import spack.store import spack.store
from spack.cmd.common import arguments from spack.cmd.common import arguments
from spack.database import InstallStatuses
from spack.error import SpackError from spack.error import SpackError
from ..enums import InstallRecordStatus
description = "replace one package with another via symlinks" description = "replace one package with another via symlinks"
section = "admin" section = "admin"
level = "long" level = "long"
@@ -96,12 +95,8 @@ def deprecate(parser, args):
if len(specs) != 2: if len(specs) != 2:
raise SpackError("spack deprecate requires exactly two specs") raise SpackError("spack deprecate requires exactly two specs")
deprecate = spack.cmd.disambiguate_spec( install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
specs[0], deprecate = spack.cmd.disambiguate_spec(specs[0], env, local=True, installed=install_query)
env,
local=True,
installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
)
if args.install: if args.install:
deprecator = specs[1].concretized() deprecator = specs[1].concretized()
View File
@@ -10,12 +10,11 @@
import sys import sys
import tempfile import tempfile
from pathlib import Path from pathlib import Path
from typing import List, Optional, Set from typing import List, Optional
import llnl.string as string import llnl.string as string
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.symlink import islink, symlink
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
from llnl.util.tty.color import cescape, colorize from llnl.util.tty.color import cescape, colorize
@@ -51,8 +50,6 @@
"update", "update",
"revert", "revert",
"depfile", "depfile",
"track",
"untrack",
] ]
@@ -60,41 +57,35 @@
# env create # env create
# #
def env_create_setup_parser(subparser): def env_create_setup_parser(subparser):
"""create a new environment """create a new environment"""
subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
create a new environment or, optionally, copy an existing environment
a manifest file results in a new abstract environment while a lock file
creates a new concrete environment
"""
subparser.add_argument(
"env_name", metavar="env", help="name or directory of the new environment"
)
subparser.add_argument( subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory" "-d", "--dir", action="store_true", help="create an environment in a specific directory"
) )
subparser.add_argument( subparser.add_argument(
"--keep-relative", "--keep-relative",
action="store_true", action="store_true",
help="copy envfile's relative develop paths verbatim", help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
) )
view_opts = subparser.add_mutually_exclusive_group() view_opts = subparser.add_mutually_exclusive_group()
view_opts.add_argument( view_opts.add_argument(
"--without-view", action="store_true", help="do not maintain a view for this environment" "--without-view", action="store_true", help="do not maintain a view for this environment"
) )
view_opts.add_argument( view_opts.add_argument(
"--with-view", help="maintain view at WITH_VIEW (vs. environment's directory)" "--with-view",
help="specify that this environment should maintain a view at the"
" specified path (by default the view is maintained in the"
" environment directory)",
) )
subparser.add_argument( subparser.add_argument(
"envfile", "envfile",
nargs="?", nargs="?",
default=None, default=None,
help="manifest or lock file (ends with '.json' or '.lock')", help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
) )
subparser.add_argument( subparser.add_argument(
"--include-concrete", "--include-concrete", action="append", help="name of old environment to copy specs from"
action="append",
help="copy concrete specs from INCLUDE_CONCRETE's environment",
) )
@@ -182,7 +173,7 @@ def _env_create(
# env activate # env activate
# #
def env_activate_setup_parser(subparser): def env_activate_setup_parser(subparser):
"""set the active environment""" """set the current environment"""
shells = subparser.add_mutually_exclusive_group() shells = subparser.add_mutually_exclusive_group()
shells.add_argument( shells.add_argument(
"--sh", "--sh",
@@ -222,14 +213,14 @@ def env_activate_setup_parser(subparser):
view_options = subparser.add_mutually_exclusive_group() view_options = subparser.add_mutually_exclusive_group()
view_options.add_argument( view_options.add_argument(
"-v",
"--with-view", "--with-view",
"-v",
metavar="name", metavar="name",
help="set runtime environment variables for the named view", help="set runtime environment variables for specific view",
) )
view_options.add_argument( view_options.add_argument(
"-V",
"--without-view", "--without-view",
"-V",
action="store_true", action="store_true",
help="do not set runtime environment variables for any view", help="do not set runtime environment variables for any view",
) )
@@ -239,14 +230,14 @@ def env_activate_setup_parser(subparser):
"--prompt", "--prompt",
action="store_true", action="store_true",
default=False, default=False,
help="add the active environment to the command line prompt", help="decorate the command line prompt when activating",
) )
subparser.add_argument( subparser.add_argument(
"--temp", "--temp",
action="store_true", action="store_true",
default=False, default=False,
help="create and activate in a temporary directory", help="create and activate an environment in a temporary directory",
) )
subparser.add_argument( subparser.add_argument(
"--create", "--create",
@@ -258,12 +249,13 @@ def env_activate_setup_parser(subparser):
"--envfile", "--envfile",
nargs="?", nargs="?",
default=None, default=None,
help="manifest or lock file (ends with '.json' or '.lock')", help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
) )
subparser.add_argument( subparser.add_argument(
"--keep-relative", "--keep-relative",
action="store_true", action="store_true",
help="copy envfile's relative develop paths verbatim when create", help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
) )
subparser.add_argument( subparser.add_argument(
"-d", "-d",
@@ -277,7 +269,10 @@ def env_activate_setup_parser(subparser):
dest="env_name", dest="env_name",
nargs="?", nargs="?",
default=None, default=None,
help=("name or directory of the environment being activated"), help=(
"name of managed environment or directory of the independent env"
" (when using --dir/-d) to activate"
),
) )
@@ -390,7 +385,7 @@ def env_activate(args):
# env deactivate # env deactivate
# #
def env_deactivate_setup_parser(subparser): def env_deactivate_setup_parser(subparser):
"""deactivate the active environment""" """deactivate any active environment in the shell"""
shells = subparser.add_mutually_exclusive_group() shells = subparser.add_mutually_exclusive_group()
shells.add_argument( shells.add_argument(
"--sh", "--sh",
@@ -449,253 +444,104 @@ def env_deactivate(args):
sys.stdout.write(cmds) sys.stdout.write(cmds)
#
# env track
#
def env_track_setup_parser(subparser):
"""track an environment from a directory in Spack"""
subparser.add_argument("-n", "--name", help="custom environment name")
subparser.add_argument("dir", help="path to environment")
arguments.add_common_arguments(subparser, ["yes_to_all"])
def env_track(args):
src_path = os.path.abspath(args.dir)
if not ev.is_env_dir(src_path):
tty.die("Cannot track environment. Path doesn't contain an environment")
if args.name:
name = args.name
else:
name = os.path.basename(src_path)
try:
dst_path = ev.environment_dir_from_name(name, exists_ok=False)
except ev.SpackEnvironmentError:
tty.die(
f"An environment named {name} already exists. Set a name with:"
"\n\n"
f" spack env track --name NAME {src_path}\n"
)
symlink(src_path, dst_path)
tty.msg(f"Tracking environment in {src_path}")
tty.msg(
"You can now activate this environment with the following command:\n\n"
f" spack env activate {name}\n"
)
#
# env remove & untrack helpers
#
def filter_managed_env_names(env_names: Set[str]) -> Set[str]:
tracked_env_names = {e for e in env_names if islink(ev.environment_dir_from_name(e))}
managed_env_names = env_names - set(tracked_env_names)
num_managed_envs = len(managed_env_names)
managed_envs_str = " ".join(managed_env_names)
if num_managed_envs >= 2:
tty.error(
f"The following are not tracked environments. "
"To remove them completely run,"
"\n\n"
f" spack env rm {managed_envs_str}\n"
)
elif num_managed_envs > 0:
tty.error(
f"'{managed_envs_str}' is not a tracked env. "
"To remove it completely run,"
"\n\n"
f" spack env rm {managed_envs_str}\n"
)
return tracked_env_names
def get_valid_envs(env_names: Set[str]) -> Set[ev.Environment]:
valid_envs = set()
for env_name in env_names:
try:
env = ev.read(env_name)
valid_envs.add(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
pass
return valid_envs
def _env_untrack_or_remove(
env_names: List[str], remove: bool = False, force: bool = False, yes_to_all: bool = False
):
all_env_names = set(ev.all_environment_names())
known_env_names = set(env_names).intersection(all_env_names)
unknown_env_names = set(env_names) - known_env_names
# print error for unknown environments
for env_name in unknown_env_names:
tty.error(f"Environment '{env_name}' does not exist")
# if only unlinking is allowed, remove all environments
# which do not point internally at symlinks
if not remove:
env_names_to_remove = filter_managed_env_names(known_env_names)
else:
env_names_to_remove = known_env_names
# initialize all environments with valid spack.yaml configs
all_valid_envs = get_valid_envs(all_env_names)
# build a task list of environments and bad env names to remove
envs_to_remove = [e for e in all_valid_envs if e.name in env_names_to_remove]
bad_env_names_to_remove = env_names_to_remove - {e.name for e in envs_to_remove}
for remove_env in envs_to_remove:
for env in all_valid_envs:
# don't check if an environment is included to itself
if env.name == remove_env.name:
continue
# check if an environment is included in another
if remove_env.path in env.included_concrete_envs:
msg = f"Environment '{remove_env.name}' is used by environment '{env.name}'"
if force:
tty.warn(msg)
else:
tty.error(msg)
envs_to_remove.remove(remove_env)
# ask the user if they really want to remove the known environments
# force should do the same as yes-to-all here, following the semantics of rm
if not (yes_to_all or force) and (envs_to_remove or bad_env_names_to_remove):
environments = string.plural(len(env_names_to_remove), "environment", show_n=False)
envs = string.comma_and(list(env_names_to_remove))
answer = tty.get_yes_or_no(
f"Really {'remove' if remove else 'untrack'} {environments} {envs}?", default=False
)
if not answer:
tty.die("Will not remove any environments")
# keep track of the environments we remove for later printing the exit code
removed_env_names = []
for env in envs_to_remove:
name = env.name
if not force and env.active:
tty.error(
f"Environment '{name}' can't be "
f"{'removed' if remove else 'untracked'} while activated."
)
continue
# Get path to check if environment is a tracked / symlinked environment
if islink(env.path):
real_env_path = os.path.realpath(env.path)
os.unlink(env.path)
tty.msg(
f"Sucessfully untracked environment '{name}', "
"but it can still be found at:\n\n"
f" {real_env_path}\n"
)
else:
env.destroy()
tty.msg(f"Successfully removed environment '{name}'")
removed_env_names.append(env.name)
for bad_env_name in bad_env_names_to_remove:
shutil.rmtree(
spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
)
tty.msg(f"Successfully removed environment '{bad_env_name}'")
removed_env_names.append(bad_env_name)
# Following the design of Unix rm, we should exit with a status of 1
# anytime we cannot delete every environment the user asks for.
# However, we should still process all the environments we know about
# and delete them instead of failing on the first unknown environment.
if len(removed_env_names) < len(known_env_names):
sys.exit(1)
#
# env untrack
#
def env_untrack_setup_parser(subparser):
"""track an environment from a directory in Spack"""
subparser.add_argument("env", nargs="+", help="tracked environment name")
subparser.add_argument(
"-f", "--force", action="store_true", help="force unlink even when environment is active"
)
arguments.add_common_arguments(subparser, ["yes_to_all"])
def env_untrack(args):
_env_untrack_or_remove(
env_names=args.env, force=args.force, yes_to_all=args.yes_to_all, remove=False
)
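
Under the hood, tracking is just a symlink from Spack's environments directory into the source checkout, and untracking unlinks it while leaving the checkout intact. A POSIX-only sketch with temporary paths (not Spack's real directory layout):

    import os
    import tempfile

    root = tempfile.mkdtemp()
    src = os.path.join(root, "my-env-src")     # stands in for the user's env dir
    os.makedirs(src)
    link = os.path.join(root, "my-env")        # stands in for the managed env path
    os.symlink(src, link)                      # what `spack env track` does
    print(os.path.islink(link))                # True
    os.unlink(link)                            # what `spack env untrack` does
    print(os.path.exists(src))                 # True: the source dir is untouched
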
# #
# env remove # env remove
# #
def env_remove_setup_parser(subparser): def env_remove_setup_parser(subparser):
"""remove managed environment(s) """remove an existing environment"""
subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
remove existing environment(s) managed by Spack
directory environments and manifests embedded in repositories must be
removed manually
"""
subparser.add_argument(
"rm_env", metavar="env", nargs="+", help="name(s) of the environment(s) being removed"
)
arguments.add_common_arguments(subparser, ["yes_to_all"]) arguments.add_common_arguments(subparser, ["yes_to_all"])
subparser.add_argument( subparser.add_argument(
"-f", "-f",
"--force", "--force",
action="store_true", action="store_true",
help="force removal even when included in other environment(s)", help="remove the environment even if it is included in another environment",
) )
def env_remove(args): def env_remove(args):
"""remove existing environment(s)""" """Remove a *named* environment.
_env_untrack_or_remove(
env_names=args.rm_env, remove=True, force=args.force, yes_to_all=args.yes_to_all This removes an environment managed by Spack. Directory environments
) and manifests embedded in repositories should be removed manually.
"""
remove_envs = []
valid_envs = []
bad_envs = []
for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
valid_envs.append(env)
if env_name in args.rm_env:
remove_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
if env_name in args.rm_env:
bad_envs.append(env_name)
# Check if remove_env is included from another env before trying to remove
for env in valid_envs:
for remove_env in remove_envs:
# don't check if an environment is included in itself
if env.name == remove_env.name:
continue
if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
if args.force:
tty.warn(msg)
else:
tty.die(msg)
if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False)
envs = string.comma_and(args.rm_env)
answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
if not answer:
tty.die("Will not remove any environments")
for env in remove_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")
env.destroy()
tty.msg(f"Successfully removed environment '{name}'")
for bad_env_name in bad_envs:
shutil.rmtree(
spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
)
tty.msg(f"Successfully removed environment '{bad_env_name}'")
# #
# env rename # env rename
# #
def env_rename_setup_parser(subparser): def env_rename_setup_parser(subparser):
"""rename an existing environment """rename an existing environment"""
rename a managed environment or move an independent/directory environment
operation cannot be performed to or from an active environment
"""
subparser.add_argument( subparser.add_argument(
"mv_from", metavar="from", help="current name or directory of the environment" "mv_from", metavar="from", help="name (or path) of existing environment"
)
subparser.add_argument(
"mv_to", metavar="to", help="new name (or path) for existing environment"
) )
subparser.add_argument("mv_to", metavar="to", help="new name or directory for the environment")
subparser.add_argument( subparser.add_argument(
"-d", "-d",
"--dir", "--dir",
action="store_true", action="store_true",
help="positional arguments are environment directory paths", help="the specified arguments correspond to directory paths",
) )
subparser.add_argument( subparser.add_argument(
"-f", "-f", "--force", action="store_true", help="allow overwriting of an existing environment"
"--force",
action="store_true",
help="force renaming even if overwriting an existing environment",
) )
def env_rename(args): def env_rename(args):
"""rename or move an existing environment""" """Rename an environment.
This renames a managed environment or moves an independent environment.
"""
# Directory option has been specified # Directory option has been specified
if args.dir: if args.dir:
@@ -744,7 +590,7 @@ def env_rename(args):
# env list # env list
# #
def env_list_setup_parser(subparser): def env_list_setup_parser(subparser):
"""list all managed environments""" """list managed environments"""
def env_list(args): def env_list(args):
@@ -780,14 +626,13 @@ def actions():
# env view # env view
# #
def env_view_setup_parser(subparser): def env_view_setup_parser(subparser):
"""manage the environment's view """manage a view associated with the environment"""
provide the path when enabling a view with a non-default path
"""
subparser.add_argument( subparser.add_argument(
"action", choices=ViewAction.actions(), help="action to take for the environment's view" "action", choices=ViewAction.actions(), help="action to take for the environment's view"
) )
subparser.add_argument("view_path", nargs="?", help="view's non-default path when enabling it") subparser.add_argument(
"view_path", nargs="?", help="when enabling a view, optionally set the path manually"
)
def env_view(args): def env_view(args):
@@ -815,7 +660,7 @@ def env_view(args):
# env status # env status
# #
def env_status_setup_parser(subparser): def env_status_setup_parser(subparser):
"""print active environment status""" """print whether there is an active environment"""
def env_status(args): def env_status(args):
@@ -875,22 +720,14 @@ def env_loads(args):
 def env_update_setup_parser(subparser):
-    """update the environment manifest to the latest schema format
-
-    update the environment to the latest schema format, which may not be
-    readable by older versions of spack
-
-    a backup copy of the manifest is retained in case there is a need to revert
-    this operation
-    """
+    """update environments to the latest format"""
     subparser.add_argument(
-        metavar="env", dest="update_env", help="name or directory of the environment"
+        metavar="env", dest="update_env", help="name or directory of the environment to activate"
     )
     spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])


 def env_update(args):
-    """update the manifest to the latest format"""
     manifest_file = ev.manifest_file(args.update_env)
     backup_file = manifest_file + ".bkp"
@@ -920,22 +757,14 @@ def env_update(args):
 def env_revert_setup_parser(subparser):
-    """restore the environment manifest to its previous format
-
-    revert the environment's manifest to the schema format from its last
-    'spack env update'
-
-    the current manifest will be overwritten by the backup copy and the backup
-    copy will be removed
-    """
+    """restore environments to their state before update"""
     subparser.add_argument(
-        metavar="env", dest="revert_env", help="name or directory of the environment"
+        metavar="env", dest="revert_env", help="name or directory of the environment to activate"
     )
     spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])


 def env_revert(args):
-    """restore the environment manifest to its previous format"""
     manifest_file = ev.manifest_file(args.revert_env)
     backup_file = manifest_file + ".bkp"
@@ -967,19 +796,15 @@ def env_revert(args):
 def env_depfile_setup_parser(subparser):
-    """generate a depfile to exploit parallel builds across specs
-
-    requires the active environment to be concrete
-    """
+    """generate a depfile from the concrete environment specs"""
     subparser.add_argument(
         "--make-prefix",
         "--make-target-prefix",
         default=None,
         metavar="TARGET",
-        help="prefix Makefile targets/variables with <TARGET>/<name>,\n"
-        "which can be an empty string (--make-prefix '')\n"
-        "defaults to the absolute path of the environment's makedeps\n"
-        "environment metadata dir\n",
+        help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
+        "the absolute path to the directory makedeps under the environment metadata dir is "
+        "used. can be set to an empty string --make-prefix ''",
     )
     subparser.add_argument(
         "--make-disable-jobserver",
@@ -994,8 +819,8 @@ def env_depfile_setup_parser(subparser):
         type=arguments.use_buildcache,
         default="package:auto,dependencies:auto",
         metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
-        help="use `only` to prune redundant build dependencies\n"
-        "option is also passed to generated spack install commands",
+        help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
+        "this flag is passed on to the generated spack install commands",
     )
subparser.add_argument( subparser.add_argument(
"-o", "-o",
@@ -1009,14 +834,14 @@ def env_depfile_setup_parser(subparser):
         "--generator",
         default="make",
         choices=("make",),
-        help="specify the depfile type (only supports `make`)",
+        help="specify the depfile type\n\ncurrently only make is supported",
     )
     subparser.add_argument(
         metavar="specs",
         dest="specs",
         nargs=argparse.REMAINDER,
         default=None,
-        help="limit the generated file to matching specs",
+        help="generate a depfile only for matching specs in the environment",
     )
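A typical invocation writes the depfile and hands it to make, e.g. `spack -e myenv env depfile -o Makefile && make -j8` (environment name hypothetical), which installs the environment's specs in parallel while respecting their dependency order.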
@@ -1085,12 +910,7 @@ def setup_parser(subparser):
         setup_parser_cmd_name = "env_%s_setup_parser" % name
         setup_parser_cmd = globals()[setup_parser_cmd_name]

-        subsubparser = sp.add_parser(
-            name,
-            aliases=aliases,
-            description=setup_parser_cmd.__doc__,
-            help=spack.cmd.first_line(setup_parser_cmd.__doc__),
-        )
+        subsubparser = sp.add_parser(name, aliases=aliases, help=setup_parser_cmd.__doc__)
         setup_parser_cmd(subsubparser)
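The left-hand version splits each subcommand docstring between argparse's `help` (the one-line summary shown in the subcommand list) and `description` (the full text shown by `-h` on the subcommand itself). A minimal sketch of that pattern, with a hypothetical `first_line` helper standing in for `spack.cmd.first_line`:

    import argparse

    def first_line(docstring):
        """Return the first non-empty line of a docstring."""
        return docstring.lstrip().splitlines()[0]

    def env_demo_setup_parser(subparser):
        """rename an existing environment

        operation cannot be performed to or from an active environment
        """

    parser = argparse.ArgumentParser(prog="spack env")
    sp = parser.add_subparsers(metavar="SUBCOMMAND")
    subsubparser = sp.add_parser(
        "demo",
        description=env_demo_setup_parser.__doc__,  # full text, shown by `spack env demo -h`
        help=first_line(env_demo_setup_parser.__doc__),  # summary, shown by `spack env -h`
    )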

View File

@@ -17,8 +17,7 @@
 import spack.spec
 import spack.store
 from spack.cmd.common import arguments
-from ..enums import InstallRecordStatus
+from spack.database import InstallStatuses

 description = "list and search installed packages"
 section = "basic"
@@ -99,7 +98,7 @@ def setup_parser(subparser):
         "--show-full-compiler",
         action="store_true",
         dest="show_full_compiler",
-        help="(DEPRECATED) show full compiler specs. Currently it's a no-op",
+        help="show full compiler specs",
     )
     implicit_explicit = subparser.add_mutually_exclusive_group()
     implicit_explicit.add_argument(
@@ -138,22 +137,21 @@ def setup_parser(subparser):
     subparser.add_argument(
         "--loaded", action="store_true", help="show only packages loaded in the user environment"
     )
-    only_missing_or_deprecated = subparser.add_mutually_exclusive_group()
-    only_missing_or_deprecated.add_argument(
+    subparser.add_argument(
         "-M",
         "--only-missing",
         action="store_true",
         dest="only_missing",
         help="show only missing dependencies",
     )
-    only_missing_or_deprecated.add_argument(
-        "--only-deprecated", action="store_true", help="show only deprecated packages"
-    )
     subparser.add_argument(
         "--deprecated",
         action="store_true",
         help="show deprecated packages as well as installed specs",
     )
+    subparser.add_argument(
+        "--only-deprecated", action="store_true", help="show only deprecated packages"
+    )
     subparser.add_argument(
         "--install-tree",
         action="store",
@@ -167,35 +165,26 @@ def setup_parser(subparser):
 def query_arguments(args):
-    if args.only_missing and (args.deprecated or args.missing):
-        raise RuntimeError("cannot use --only-missing with --deprecated, or --missing")
-
-    if args.only_deprecated and (args.deprecated or args.missing):
-        raise RuntimeError("cannot use --only-deprecated with --deprecated, or --missing")
-
-    installed = InstallRecordStatus.INSTALLED
-    if args.only_missing:
-        installed = InstallRecordStatus.MISSING
-    elif args.only_deprecated:
-        installed = InstallRecordStatus.DEPRECATED
-
-    if args.missing:
-        installed |= InstallRecordStatus.MISSING
-
-    if args.deprecated:
-        installed |= InstallRecordStatus.DEPRECATED
-
-    predicate_fn = None
+    # Set up query arguments.
+    installed = []
+    if not (args.only_missing or args.only_deprecated):
+        installed.append(InstallStatuses.INSTALLED)
+    if (args.deprecated or args.only_deprecated) and not args.only_missing:
+        installed.append(InstallStatuses.DEPRECATED)
+    if (args.missing or args.only_missing) and not args.only_deprecated:
+        installed.append(InstallStatuses.MISSING)
+
+    known = any
     if args.unknown:
-        predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)
+        known = False

-    explicit = None
+    explicit = any
     if args.explicit:
         explicit = True
     if args.implicit:
         explicit = False

-    q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}
+    q_args = {"installed": installed, "known": known, "explicit": explicit}

     install_tree = args.install_tree
     upstreams = spack.config.get("upstreams", {})
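The `|=` in the left-hand version only works because `InstallRecordStatus` behaves like a flag enum whose members can be OR-ed together; a standalone sketch of that idea (this enum is a stand-in, not Spack's actual definition):

    import enum

    class InstallRecordStatus(enum.Flag):
        INSTALLED = enum.auto()
        DEPRECATED = enum.auto()
        MISSING = enum.auto()

    installed = InstallRecordStatus.INSTALLED
    installed |= InstallRecordStatus.MISSING   # e.g. `spack find --missing`
    assert InstallRecordStatus.MISSING in installed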
@@ -233,9 +222,11 @@ def decorator(spec, fmt):
 def display_env(env, args, decorator, results):
     """Display extra find output when running in an environment.

-    In an environment, `spack find` outputs a preliminary section
-    showing the root specs of the environment (this is in addition
-    to the section listing out specs matching the query parameters).
+    Find in an environment outputs 2 or 3 sections:
+
+    1. Root specs
+    2. Concretized roots (if asked for with -c)
+    3. Installed specs
     """
     tty.msg("In environment %s" % env.name)
@@ -279,6 +270,7 @@ def root_decorator(spec, string):
         # these enforce details in the root specs to show what the user asked for
         namespaces=True,
         show_flags=True,
+        show_full_compiler=True,
         decorator=root_decorator,
         variants=True,
     )
@@ -301,61 +293,12 @@ def root_decorator(spec, string):
         decorator=lambda s, f: color.colorize("@*{%s}" % f),
         namespace=True,
         show_flags=True,
+        show_full_compiler=True,
         variants=True,
     )
     print()


-def _find_query(args, env):
-    q_args = query_arguments(args)
-    concretized_but_not_installed = list()
-    if env:
-        all_env_specs = env.all_specs()
-        if args.constraint:
-            init_specs = cmd.parse_specs(args.constraint)
-            env_specs = env.all_matching_specs(*init_specs)
-        else:
-            env_specs = all_env_specs
-
-        spec_hashes = set(x.dag_hash() for x in env_specs)
-        specs_meeting_q_args = set(spack.store.STORE.db.query(hashes=spec_hashes, **q_args))
-
-        results = list()
-        with spack.store.STORE.db.read_transaction():
-            for spec in env_specs:
-                if not spec.installed:
-                    concretized_but_not_installed.append(spec)
-                if spec in specs_meeting_q_args:
-                    results.append(spec)
-    else:
-        results = args.specs(**q_args)
-
-    # use groups by default except with format.
-    if args.groups is None:
-        args.groups = not args.format
-
-    # Exit early with an error code if no package matches the constraint
-    if concretized_but_not_installed and args.show_concretized:
-        pass
-    elif results:
-        pass
-    elif args.constraint:
-        raise cmd.NoSpecMatches()
-
-    # If tags have been specified on the command line, filter by tags
-    if args.tags:
-        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
-        results = [x for x in results if x.name in packages_with_tags]
-        concretized_but_not_installed = [
-            x for x in concretized_but_not_installed if x.name in packages_with_tags
-        ]
-
-    if args.loaded:
-        results = cmd.filter_loaded_specs(results)
-
-    return results, concretized_but_not_installed
-
-
 def find(parser, args):
     env = ev.active_environment()
@@ -364,12 +307,34 @@ def find(parser, args):
     if not env and args.show_concretized:
         tty.die("-c / --show-concretized requires an active environment")

-    try:
-        results, concretized_but_not_installed = _find_query(args, env)
-    except cmd.NoSpecMatches:
-        # Note: this uses args.constraint vs. args.constraint_specs because
-        # the latter only exists if you call args.specs()
-        tty.die(f"No package matches the query: {' '.join(args.constraint)}")
+    if env:
+        if args.constraint:
+            init_specs = spack.cmd.parse_specs(args.constraint)
+            results = env.all_matching_specs(*init_specs)
+        else:
+            results = env.all_specs()
+    else:
+        q_args = query_arguments(args)
+        results = args.specs(**q_args)
+
+    decorator = make_env_decorator(env) if env else lambda s, f: f
+
+    # use groups by default except with format.
+    if args.groups is None:
+        args.groups = not args.format
+
+    # Exit early with an error code if no package matches the constraint
+    if not results and args.constraint:
+        constraint_str = " ".join(str(s) for s in args.constraint_specs)
+        tty.die(f"No package matches the query: {constraint_str}")
+
+    # If tags have been specified on the command line, filter by tags
+    if args.tags:
+        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
+        results = [x for x in results if x.name in packages_with_tags]
+
+    if args.loaded:
+        results = spack.cmd.filter_loaded_specs(results)

     if args.install_status or args.show_concretized:
         status_fn = spack.spec.Spec.install_status
@@ -380,16 +345,14 @@ def find(parser, args):
     if args.json:
         cmd.display_specs_as_json(results, deps=args.deps)
     else:
-        decorator = make_env_decorator(env) if env else lambda s, f: f
-
         if not args.format:
             if env:
                 display_env(env, args, decorator, results)

         if not args.only_roots:
-            display_results = list(results)
-            if args.show_concretized:
-                display_results += concretized_but_not_installed
+            display_results = results
+            if not args.show_concretized:
+                display_results = list(x for x in results if x.installed)
             cmd.display_specs(
                 display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
             )
@@ -407,9 +370,13 @@ def find(parser, args):
             concretized_suffix += " (show with `spack find -c`)"

         pkg_type = "loaded" if args.loaded else "installed"
-        cmd.print_how_many_pkgs(results, pkg_type, suffix=installed_suffix)
+        spack.cmd.print_how_many_pkgs(
+            list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
+        )

         if env:
-            cmd.print_how_many_pkgs(
-                concretized_but_not_installed, "concretized", suffix=concretized_suffix
-            )
+            spack.cmd.print_how_many_pkgs(
+                list(x for x in results if not x.installed),
+                "concretized",
+                suffix=concretized_suffix,
+            )
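Both counts in the right-hand version come from partitioning a single `results` list on `spec.installed`; the idiom in isolation (the spec class here is a hypothetical stand-in):

    class FakeSpec:
        def __init__(self, name, installed):
            self.name = name
            self.installed = installed

    results = [FakeSpec("zlib", True), FakeSpec("cmake", False)]

    installed = [x for x in results if x.installed]        # "N installed packages"
    concretized = [x for x in results if not x.installed]  # "M concretized packages"
    assert [x.name for x in concretized] == ["cmake"]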

View File

@@ -8,7 +8,7 @@
 import tempfile

 import spack.binary_distribution
-import spack.mirrors.mirror
+import spack.mirror
 import spack.paths
 import spack.stage
 import spack.util.gpg
@@ -217,11 +217,11 @@ def gpg_publish(args):
     mirror = None
     if args.directory:
         url = spack.util.url.path_to_file_url(args.directory)
-        mirror = spack.mirrors.mirror.Mirror(url, url)
+        mirror = spack.mirror.Mirror(url, url)
     elif args.mirror_name:
-        mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
+        mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
     elif args.mirror_url:
-        mirror = spack.mirrors.mirror.Mirror(args.mirror_url, args.mirror_url)
+        mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)

     with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
         spack.binary_distribution._url_push_keys(
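Turning a local directory into a `file://` URL, as `path_to_file_url` does for `--directory`, can be sketched with the standard library (this stand-in is not Spack's implementation):

    import pathlib

    def path_to_file_url(path):
        # file:// URLs must be absolute, so resolve the path first
        return pathlib.Path(path).resolve().as_uri()

    print(path_to_file_url("."))  # e.g. file:///home/user/current-dir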

View File

@@ -78,8 +78,8 @@
     boxlib @B{dim=2}        boxlib built for 2 dimensions
     libdwarf @g{%intel} ^libelf@g{%gcc}
         libdwarf, built with intel compiler, linked to libelf built with gcc
-    mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
-        mvapich2, built with gcc compiler, with support for multiple fabrics
+    mvapich2 @g{%pgi} @B{fabrics=psm,mrail,sock}
+        mvapich2, built with pgi compiler, with support for multiple fabrics
 """

View File

@@ -11,7 +11,6 @@
 import llnl.util.tty.color as color
 from llnl.util.tty.colify import colify

-import spack.builder
 import spack.deptypes as dt
 import spack.fetch_strategy as fs
 import spack.install_test
@@ -203,13 +202,11 @@ def print_namespace(pkg, args):
 def print_phases(pkg, args):
     """output installation phases"""

-    builder = spack.builder.create(pkg)
-    if hasattr(builder, "phases") and builder.phases:
+    if hasattr(pkg.builder, "phases") and pkg.builder.phases:
         color.cprint("")
         color.cprint(section_title("Installation Phases:"))
         phase_str = ""
-        for phase in builder.phases:
+        for phase in pkg.builder.phases:
             phase_str += " {0}".format(phase)
         color.cprint(phase_str)
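Both sides guard on a non-empty `phases` attribute before printing anything; the guard in isolation, with a hypothetical builder class standing in for Spack's:

    class FakeBuilder:
        phases = ("cmake", "build", "install")

    def print_phases(builder):
        # print only when the builder defines at least one phase
        if hasattr(builder, "phases") and builder.phases:
            print("Installation Phases:")
            print("    " + " ".join(builder.phases))

    print_phases(FakeBuilder())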

View File

@@ -10,8 +10,7 @@
 import spack.cmd
 import spack.store
 from spack.cmd.common import arguments
-from ..enums import InstallRecordStatus
+from spack.database import InstallStatuses

 description = "mark packages as explicitly or implicitly installed"
 section = "admin"
@@ -68,7 +67,8 @@ def find_matching_specs(specs, allow_multiple_matches=False):
     has_errors = False
     for spec in specs:
-        matching = spack.store.STORE.db.query_local(spec, installed=InstallRecordStatus.INSTALLED)
+        install_query = [InstallStatuses.INSTALLED]
+        matching = spack.store.STORE.db.query_local(spec, installed=install_query)
         # For each spec provided, make sure it refers to only one package.
         # Fail and ask user to be unambiguous if it doesn't
         if not allow_multiple_matches and len(matching) > 1:
@@ -80,8 +80,8 @@ def find_matching_specs(specs, allow_multiple_matches=False):
             has_errors = True

     # No installed package matches the query
-    if len(matching) == 0 and spec is not None:
-        tty.die(f"{spec} does not match any installed packages.")
+    if len(matching) == 0 and spec is not any:
+        tty.die("{0} does not match any installed packages.".format(spec))

     specs_from_cli.extend(matching)
@@ -98,9 +98,8 @@ def do_mark(specs, explicit):
         specs (list): list of specs to be marked
         explicit (bool): whether to mark specs as explicitly installed
     """
-    with spack.store.STORE.db.write_transaction():
-        for spec in specs:
-            spack.store.STORE.db.mark(spec, "explicit", explicit)
+    for spec in specs:
+        spack.store.STORE.db.update_explicit(spec, explicit)

 def mark_specs(args, specs):
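The left-hand `do_mark` takes one write transaction around the whole loop rather than opening one per spec, which batches the database lock and flush; a sketch with a context-manager stand-in (not Spack's actual database class):

    from contextlib import contextmanager

    class FakeDB:
        @contextmanager
        def write_transaction(self):
            print("acquire write lock")  # once for the whole batch
            yield
            print("flush and release")

        def mark(self, spec, key, value):
            print(f"mark {spec}: {key}={value}")

    db = FakeDB()
    with db.write_transaction():
        for spec in ["zlib", "cmake"]:
            db.mark(spec, "explicit", True)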
@@ -117,6 +116,6 @@ def mark(parser, args):
             " Use `spack mark --all` to mark ALL packages.",
         )

-    # [None] here handles the --all case by forcing all specs to be returned
-    specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
+    # [any] here handles the --all case by forcing all specs to be returned
+    specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
     mark_specs(args, specs)

View File

@@ -14,8 +14,7 @@
 import spack.concretize
 import spack.config
 import spack.environment as ev
-import spack.mirrors.mirror
-import spack.mirrors.utils
+import spack.mirror
 import spack.repo
 import spack.spec
 import spack.util.web as web_util
@@ -232,133 +231,31 @@ def setup_parser(subparser):
     )


-def _configure_access_pair(
-    args, id_tok, id_variable_tok, secret_tok, secret_variable_tok, default=None
-):
-    """Configure the access_pair options"""
-
-    # Check if any of the arguments are set to update this access_pair.
-    # If none are set, then skip computing the new access pair
-    args_id = getattr(args, id_tok)
-    args_id_variable = getattr(args, id_variable_tok)
-    args_secret = getattr(args, secret_tok)
-    args_secret_variable = getattr(args, secret_variable_tok)
-    if not any([args_id, args_id_variable, args_secret, args_secret_variable]):
-        return None
-
-    def _default_value(id_):
-        if isinstance(default, list):
-            return default[0] if id_ == "id" else default[1]
-        elif isinstance(default, dict):
-            return default.get(id_)
-        else:
-            return None
-
-    def _default_variable(id_):
-        if isinstance(default, dict):
-            return default.get(id_ + "_variable")
-        else:
-            return None
-
-    id_ = None
-    id_variable = None
-    secret = None
-    secret_variable = None
-
-    # Get the value/default value if the argument of the inverse
-    if not args_id_variable:
-        id_ = getattr(args, id_tok) or _default_value("id")
-    if not args_id:
-        id_variable = getattr(args, id_variable_tok) or _default_variable("id")
-    if not args_secret_variable:
-        secret = getattr(args, secret_tok) or _default_value("secret")
-    if not args_secret:
-        secret_variable = getattr(args, secret_variable_tok) or _default_variable("secret")
-
-    if (id_ or id_variable) and (secret or secret_variable):
-        if secret:
-            if not id_:
-                raise SpackError("Cannot add mirror with a variable id and text secret")
-            return [id_, secret]
-        else:
-            return dict(
-                [
-                    (("id", id_) if id_ else ("id_variable", id_variable)),
-                    ("secret_variable", secret_variable),
-                ]
-            )
-    else:
-        if id_ or id_variable or secret or secret_variable is not None:
-            id_arg_tok = id_tok.replace("_", "-")
-            secret_arg_tok = secret_tok.replace("_", "-")
-            tty.warn(
-                "Expected both parts of the access pair to be specified. "
-                f"(i.e. --{id_arg_tok} and --{secret_arg_tok})"
-            )
-
-        return None
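The removed helper returns one of two shapes for the mirror's `access_pair`: a plain `[id, secret]` list when the secret is given as text, or a dict with `*_variable` keys when it should be read from the environment at use time. Hypothetical values illustrating both shapes:

    # produced by --s3-access-key-id / --s3-access-key-secret (plain-text secret)
    access_pair = ["EXAMPLE_KEY_ID", "example-secret"]

    # produced by the --*-variable flags: only environment variable names are
    # stored in the config, never the secret itself (names are hypothetical)
    access_pair = {"id": "EXAMPLE_KEY_ID", "secret_variable": "MY_MIRROR_SECRET"}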
 def mirror_add(args):
     """add a mirror to Spack"""
     if (
         args.s3_access_key_id
         or args.s3_access_key_secret
         or args.s3_access_token
-        or args.s3_access_key_id_variable
-        or args.s3_access_key_secret_variable
-        or args.s3_access_token_variable
         or args.s3_profile
         or args.s3_endpoint_url
         or args.type
         or args.oci_username
         or args.oci_password
-        or args.oci_username_variable
-        or args.oci_password_variable
         or args.autopush
         or args.signed is not None
     ):
         connection = {"url": args.url}
-        # S3 Connection
-        if args.s3_access_key_secret:
-            tty.warn(
-                "Configuring mirror secrets as plain text with --s3-access-key-secret is "
-                "deprecated. Use --s3-access-key-secret-variable instead"
-            )
-        if args.oci_password:
-            tty.warn(
-                "Configuring mirror secrets as plain text with --oci-password is deprecated. "
-                "Use --oci-password-variable instead"
-            )
-        access_pair = _configure_access_pair(
-            args,
-            "s3_access_key_id",
-            "s3_access_key_id_variable",
-            "s3_access_key_secret",
-            "s3_access_key_secret_variable",
-        )
-        if access_pair:
-            connection["access_pair"] = access_pair
+        if args.s3_access_key_id and args.s3_access_key_secret:
+            connection["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
         if args.s3_access_token:
             connection["access_token"] = args.s3_access_token
-        elif args.s3_access_token_variable:
-            connection["access_token_variable"] = args.s3_access_token_variable
         if args.s3_profile:
             connection["profile"] = args.s3_profile
         if args.s3_endpoint_url:
             connection["endpoint_url"] = args.s3_endpoint_url
-        # OCI Connection
-        access_pair = _configure_access_pair(
-            args, "oci_username", "oci_username_variable", "oci_password", "oci_password_variable"
-        )
-        if access_pair:
-            connection["access_pair"] = access_pair
+        if args.oci_username and args.oci_password:
+            connection["access_pair"] = [args.oci_username, args.oci_password]
         if args.type:
             connection["binary"] = "binary" in args.type
             connection["source"] = "source" in args.type
@@ -366,15 +263,15 @@ def mirror_add(args):
         connection["autopush"] = args.autopush
         if args.signed is not None:
             connection["signed"] = args.signed
-        mirror = spack.mirrors.mirror.Mirror(connection, name=args.name)
+        mirror = spack.mirror.Mirror(connection, name=args.name)
     else:
-        mirror = spack.mirrors.mirror.Mirror(args.url, name=args.name)
-    spack.mirrors.utils.add(mirror, args.scope)
+        mirror = spack.mirror.Mirror(args.url, name=args.name)
+    spack.mirror.add(mirror, args.scope)


 def mirror_remove(args):
     """remove a mirror by name"""
-    spack.mirrors.utils.remove(args.name, args.scope)
+    spack.mirror.remove(args.name, args.scope)


 def _configure_mirror(args):
@@ -383,40 +280,21 @@ def _configure_mirror(args):
     if args.name not in mirrors:
         tty.die(f"No mirror found with name {args.name}.")

-    entry = spack.mirrors.mirror.Mirror(mirrors[args.name], args.name)
+    entry = spack.mirror.Mirror(mirrors[args.name], args.name)
     direction = "fetch" if args.fetch else "push" if args.push else None
     changes = {}
     if args.url:
         changes["url"] = args.url
-
-    default_access_pair = entry._get_value("access_pair", direction or "fetch")
-    # TODO: Init access_pair args with the fetch/push/base values in the current mirror state
-    access_pair = _configure_access_pair(
-        args,
-        "s3_access_key_id",
-        "s3_access_key_id_variable",
-        "s3_access_key_secret",
-        "s3_access_key_secret_variable",
-        default=default_access_pair,
-    )
-    if access_pair:
-        changes["access_pair"] = access_pair
+    if args.s3_access_key_id and args.s3_access_key_secret:
+        changes["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
     if args.s3_access_token:
         changes["access_token"] = args.s3_access_token
     if args.s3_profile:
         changes["profile"] = args.s3_profile
     if args.s3_endpoint_url:
         changes["endpoint_url"] = args.s3_endpoint_url
-    access_pair = _configure_access_pair(
-        args,
-        "oci_username",
-        "oci_username_variable",
-        "oci_password",
-        "oci_password_variable",
-        default=default_access_pair,
-    )
-    if access_pair:
-        changes["access_pair"] = access_pair
+    if args.oci_username and args.oci_password:
+        changes["access_pair"] = [args.oci_username, args.oci_password]
     if getattr(args, "signed", None) is not None:
         changes["signed"] = args.signed
     if getattr(args, "autopush", None) is not None:
@@ -450,7 +328,7 @@ def mirror_set_url(args):
 def mirror_list(args):
     """print out available mirrors to the console"""

-    mirrors = spack.mirrors.mirror.MirrorCollection(scope=args.scope)
+    mirrors = spack.mirror.MirrorCollection(scope=args.scope)
     if not mirrors:
         tty.msg("No mirrors configured.")
         return
@@ -490,9 +368,9 @@ def concrete_specs_from_user(args):
 def extend_with_additional_versions(specs, num_versions):
     if num_versions == "all":
-        mirror_specs = spack.mirrors.utils.get_all_versions(specs)
+        mirror_specs = spack.mirror.get_all_versions(specs)
     else:
-        mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
+        mirror_specs = spack.mirror.get_matching_versions(specs, num_versions=num_versions)
     mirror_specs = [x.concretized() for x in mirror_specs]
     return mirror_specs
@@ -571,7 +449,7 @@ def concrete_specs_from_environment():
 def all_specs_with_all_versions():
     specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
-    mirror_specs = spack.mirrors.utils.get_all_versions(specs)
+    mirror_specs = spack.mirror.get_all_versions(specs)
     mirror_specs.sort(key=lambda s: (s.name, s.version))
     return mirror_specs
@@ -660,21 +538,19 @@ def _specs_and_action(args):
 def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
-    mirror_cache, mirror_stats = spack.mirrors.utils.mirror_cache_and_stats(
+    mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
         path, skip_unstable_versions=skip_unstable_versions
     )
     for candidate in mirror_specs:
         pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
         pkg_obj = pkg_cls(spack.spec.Spec(candidate))
         mirror_stats.next_spec(pkg_obj.spec)
-        spack.mirrors.utils.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
+        spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
     process_mirror_stats(*mirror_stats.stats())


 def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
-    present, mirrored, error = spack.mirrors.utils.create(
-        path, mirror_specs, skip_unstable_versions
-    )
+    present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
     tty.msg("Summary for mirror in {}".format(path))
     process_mirror_stats(present, mirrored, error)
@@ -684,7 +560,7 @@ def mirror_destroy(args):
     mirror_url = None
     if args.mirror_name:
-        result = spack.mirrors.mirror.MirrorCollection().lookup(args.mirror_name)
+        result = spack.mirror.MirrorCollection().lookup(args.mirror_name)
         mirror_url = result.push_url
     elif args.mirror_url:
         mirror_url = args.mirror_url

View File

@@ -19,7 +19,6 @@
 import spack.modules
 import spack.modules.common
 import spack.repo
-from spack.cmd import MultipleSpecsMatch, NoSpecMatches
 from spack.cmd.common import arguments

 description = "manipulate module files"
@@ -92,6 +91,18 @@ def add_loads_arguments(subparser):
     arguments.add_common_arguments(subparser, ["recurse_dependencies"])


+class MultipleSpecsMatch(Exception):
+    """Raised when multiple specs match a constraint, in a context where
+    this is not allowed.
+    """
+
+
+class NoSpecMatches(Exception):
+    """Raised when no spec matches a constraint, in a context where
+    this is not allowed.
+    """
+
+
 def one_spec_or_raise(specs):
     """Ensures exactly one spec has been selected, or raises the appropriate
     exception.
@@ -367,10 +378,7 @@ def refresh(module_type, specs, args):
 def modules_cmd(parser, args, module_type, callbacks=callbacks):
     # Qualifiers to be used when querying the db for specs
     constraint_qualifiers = {
-        "refresh": {
-            "installed": True,
-            "predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
-        }
+        "refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
     }
     query_args = constraint_qualifiers.get(args.subparser_name, {})
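Note the inverted polarity between the two sides: the left-hand `predicate_fn` returns True for records whose package recipe still exists, while the old `known` callable returned True for names missing from the repo. The predicate style in isolation (record and repo are stand-ins):

    from types import SimpleNamespace

    KNOWN_PACKAGES = {"zlib", "cmake"}  # stand-in for spack.repo.PATH

    records = [SimpleNamespace(spec=SimpleNamespace(name=n)) for n in ("zlib", "ghost")]

    predicate_fn = lambda rec: rec.spec.name in KNOWN_PACKAGES
    kept = [r for r in records if predicate_fn(r)]
    assert [r.spec.name for r in kept] == ["zlib"]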

View File

@@ -8,7 +8,6 @@
 import spack.cmd.common.arguments
 import spack.cmd.modules
 import spack.config
-import spack.modules
 import spack.modules.lmod

View File

@@ -7,7 +7,6 @@
 import spack.cmd.common.arguments
 import spack.cmd.modules
 import spack.config
-import spack.modules
 import spack.modules.tcl

View File

@@ -33,9 +33,8 @@ def patch(parser, args):
         spack.config.set("config:checksum", False, scope="command_line")

     specs = spack.cmd.parse_specs(args.specs, concretize=False)
-    specs = spack.cmd.matching_specs_from_env(specs)
     for spec in specs:
-        _patch(spec.package)
+        _patch(spack.cmd.matching_spec_from_env(spec).package)


 def _patch_env(env: ev.Environment):

Some files were not shown because too many files have changed in this diff