Compare commits


4 Commits

Author              SHA1          Message                                                                    Date
Wouter Deconinck    a5f8cfd3f3    RPackages with custom url: url -> urls=[url]                               2024-08-20 13:27:57 -05:00
Wouter Deconinck    d38b4024bc    RPackages with bioc: remove redundant homepage and url                     2024-08-20 13:27:24 -05:00
Wouter Deconinck    4d09bc13d9    RPackage: urls=[bioc/src/contrib,data/annotation/src/contrib] for bioc    2024-08-20 13:22:50 -05:00
Wouter Deconinck    1794cd598c    RPackage: urls=[src/contrib,src/contrib/Archive], list_url=src/contrib    2024-08-20 13:21:41 -05:00
883 changed files with 10509 additions and 13118 deletions


@@ -1,5 +1,4 @@
{
"name": "Ubuntu 20.04",
"image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
"postCreateCommand": "./.devcontainer/postCreateCommand.sh"
}


@@ -1,5 +0,0 @@
{
"name": "Ubuntu 22.04",
"image": "ghcr.io/spack/ubuntu-22.04:v2024-05-07",
"postCreateCommand": "./.devcontainer/postCreateCommand.sh"
}

.github/workflows/audit.yaml (new file, 73 lines)

@@ -0,0 +1,73 @@
name: audit
on:
workflow_call:
inputs:
with_coverage:
required: true
type: string
python_version:
required: true
type: string
concurrency:
group: audit-${{inputs.python_version}}-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
cancel-in-progress: true
jobs:
# Run audits on all the packages in the built-in repository
package-audits:
runs-on: ${{ matrix.system.os }}
strategy:
matrix:
system:
- { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
- { os: ubuntu-latest, shell: bash }
- { os: macos-latest, shell: bash }
defaults:
run:
shell: ${{ matrix.system.shell }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml]
- name: Setup for Windows run
if: runner.os == 'Windows'
run: |
python -m pip install --upgrade pywin32
- name: Package audits (with coverage)
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) audit configs
coverage run $(which spack) -d audit externals
coverage combine
coverage xml
- name: Package audits (without coverage)
if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
run: |
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit configs
spack -d audit externals
- name: Package audits (without coverage)
if: ${{ runner.os == 'Windows' }}
run: |
. share/spack/setup-env.sh
spack -d audit packages
./share/spack/qa/validate_last_exit.ps1
spack -d audit configs
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

.github/workflows/bootstrap.yml (new file, 170 lines)

@@ -0,0 +1,170 @@
name: Bootstrapping
on:
# This Workflow can be triggered manually
workflow_dispatch:
workflow_call:
schedule:
# nightly at 2:16 AM
- cron: '16 2 * * *'
concurrency:
group: bootstrap-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
cancel-in-progress: true
jobs:
distros-clingo-sources:
runs-on: ubuntu-latest
container: ${{ matrix.image }}
strategy:
matrix:
image: ["fedora:latest", "opensuse/leap:latest"]
steps:
- name: Setup Fedora
if: ${{ matrix.image == 'fedora:latest' }}
run: |
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Setup OpenSUSE
if: ${{ matrix.image == 'opensuse/leap:latest' }}
run: |
# Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
zypper update -y || zypper update -y
zypper install -y \
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
clingo-sources:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
run: |
brew install cmake bison tree
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: "3.12"
- name: Bootstrap clingo
env:
SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
run: |
${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find --not-buildable cmake bison
spack -d solve zlib
${{ env.VALIDATE_LAST_EXIT }}
tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
gnupg-sources:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: |
brew install tree gawk
sudo rm -rf $(command -v gpg gpg2)
- name: Setup Ubuntu
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: sudo rm -rf $(command -v gpg gpg2 patchelf)
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack -d gpg list
tree ~/.spack/bootstrap/store/
from-binaries:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: |
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Setup Ubuntu
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: |
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: |
3.8
3.9
3.10
3.11
3.12
- name: Set bootstrap sources
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
- name: Bootstrap clingo
run: |
set -e
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
if [[ -d "$ver_dir" ]] ; then
echo "Testing $ver_dir"
if $ver_dir/python --version ; then
export PYTHON="$ver_dir/python"
not_found=0
old_path="$PATH"
export PATH="$ver_dir:$PATH"
./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
export PATH="$old_path"
fi
fi
if (($not_found)) ; then
echo Required python version $ver not found in runner!
exit 1
fi
done
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack -d gpg list
tree ~/.spack/bootstrap/store/

.github/workflows/build-containers.yml (new file, 133 lines)

@@ -0,0 +1,133 @@
name: Containers
on:
# This Workflow can be triggered manually
workflow_dispatch:
# Build new Spack develop containers nightly.
schedule:
- cron: '34 0 * * *'
# Run on pull requests that modify this file
pull_request:
branches:
- develop
paths:
- '.github/workflows/build-containers.yml'
- 'share/spack/docker/*'
- 'share/spack/templates/container/*'
- 'lib/spack/spack/container/*'
# Let's also build & tag Spack containers on releases.
release:
types: [published]
concurrency:
group: build_containers-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
cancel-in-progress: true
jobs:
deploy-images:
runs-on: ubuntu-latest
permissions:
packages: write
strategy:
# Even if one container fails to build we still want the others
# to continue their builds.
fail-fast: false
# A matrix of Dockerfile paths, associated tags, and which architectures
# they support.
matrix:
# Meaning of the various items in the matrix list
# 0: Container name (e.g. ubuntu-bionic)
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
id: docker_meta
with:
images: |
ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
tags: |
type=schedule,pattern=nightly
type=schedule,pattern=develop
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=ref,event=branch
type=ref,event=pr
- name: Generate the Dockerfile
env:
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
run: |
.github/workflows/bin/generate_spack_yaml_containerize.sh
. share/spack/setup-env.sh
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
printf "Preparing to build ${{ env.container }} from dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile"
if [ ! -f "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile" ]; then
printf "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile does not exist"
exit 1;
fi
- name: Upload Dockerfile
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
with:
name: dockerfiles_${{ matrix.dockerfile[0] }}
path: dockerfiles
- name: Set up QEMU
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
- name: Log in to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
merge-dockerfiles:
runs-on: ubuntu-latest
needs: deploy-images
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
with:
name: dockerfiles
pattern: dockerfiles_*
delete-merged: true


@@ -74,3 +74,19 @@ jobs:
# job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
# setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
#
bootstrap:
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
needs: [ prechecks, changes ]
uses: ./.github/workflows/bootstrap.yml
secrets: inherit
unit-tests:
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
needs: [ prechecks, changes ]
uses: ./.github/workflows/unit_tests.yaml
secrets: inherit
all:
needs: [ unit-tests, bootstrap ]
runs-on: ubuntu-latest
steps:
- name: Success
run: "true"


@@ -0,0 +1,31 @@
name: Windows Paraview Nightly
on:
schedule:
- cron: '0 2 * * *' # Run at 2 am
defaults:
run:
shell:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools coverage
- name: Build Test
run: |
spack compiler find
spack external find cmake ninja win-sdk win-wdk wgl msmpi
spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
exit 0

.github/workflows/unit_tests.yaml (new file, 247 lines)

@@ -0,0 +1,247 @@
name: unit tests
on:
workflow_dispatch:
workflow_call:
concurrency:
group: unit_tests-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
cancel-in-progress: true
jobs:
# Run unit tests with different configurations on linux
ubuntu:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
include:
- python-version: '3.6'
os: ubuntu-20.04
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.7'
os: ubuntu-latest
on_develop: false
- python-version: '3.8'
os: ubuntu-latest
on_develop: false
- python-version: '3.9'
os: ubuntu-latest
on_develop: false
- python-version: '3.10'
os: ubuntu-latest
on_develop: false
- python-version: '3.11'
os: ubuntu-latest
on_develop: false
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
run: |
sudo apt-get -y update
# Needed for unit tests
sudo apt-get -y install \
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
cmake bison libbison-dev kcov
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
- name: Bootstrap clingo
if: ${{ matrix.concretizer == 'clingo' }}
env:
SPACK_PYTHON: python
run: |
. share/spack/setup-env.sh
spack bootstrap disable spack-install
spack bootstrap now
spack -v solve zlib
- name: Run unit tests
env:
SPACK_PYTHON: python
SPACK_TEST_PARALLEL: 2
COVERAGE: true
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
flags: unittests,linux,${{ matrix.concretizer }}
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Test shell integration
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: '3.11'
- name: Install System packages
run: |
sudo apt-get -y update
# Needed for shell tests
sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
- name: Run shell tests
env:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
flags: shelltests,linux
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Test RHEL8 UBI with platform Python. This job is run
# only on PRs modifying core Spack
rhel8-platform-python:
runs-on: ubuntu-latest
container: registry.access.redhat.com/ubi8/ubi
steps:
- name: Install dependencies
run: |
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Run unit tests
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack -d bootstrap now --dev
spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
# Test for the clingo based solver (using clingo-cffi)
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: '3.11'
- name: Install System packages
run: |
sudo apt-get -y update
sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
- name: Run unit tests (full suite with coverage)
env:
COVERAGE: true
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
flags: unittests,linux,clingo
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Run unit tests on MacOS
macos:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-13, macos-14]
python-version: ["3.11"]
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
run: |
pip install --upgrade pip setuptools
pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
- name: Setup Homebrew packages
run: |
brew install dash fish gcc gnupg2 kcov
- name: Run unit tests
env:
SPACK_TEST_PARALLEL: 4
run: |
git --version
. .github/workflows/bin/setup_git.sh
. share/spack/setup-env.sh
$(which spack) bootstrap disable spack-install
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
flags: unittests,macos
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Run unit tests on Windows
windows:
defaults:
run:
shell:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
runs-on: windows-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/bin/setup_git.ps1
- name: Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true


@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: '3.11'
cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: '3.11'
cache: 'pip'
@@ -54,4 +54,36 @@ jobs:
- name: Run style tests
run: |
share/spack/qa/run-style-tests
audit:
uses: ./.github/workflows/audit.yaml
secrets: inherit
with:
with_coverage: ${{ inputs.with_coverage }}
python_version: '3.11'
# Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
bootstrap-dev-rhel8:
runs-on: ubuntu-latest
container: registry.access.redhat.com/ubi8/ubi
steps:
- name: Install dependencies
run: |
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Bootstrap Spack development environment
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack debug report
spack -d bootstrap now --dev
spack style -t black
spack unit-test -V


@@ -115,6 +115,12 @@ config:
suppress_gpg_warnings: false
# If set to true, Spack will attempt to build any compiler on the spec
# that is not already available. If set to False, Spack will only use
# compilers already configured in compilers.yaml
install_missing_compilers: false
# If set to true, Spack will always check checksums after downloading
# archives. If false, Spack skips the checksum step.
checksum: true


@@ -72,13 +72,3 @@ packages:
permissions:
read: world
write: user
cray-mpich:
buildable: false
cray-mvapich2:
buildable: false
fujitsu-mpi:
buildable: false
hpcx-mpi:
buildable: false
spectrum-mpi:
buildable: false


@@ -218,7 +218,6 @@ def setup(sphinx):
("py:class", "spack.spec.SpecfileReaderBase"),
("py:class", "spack.install_test.Pb"),
("py:class", "spack.filesystem_view.SimpleFilesystemView"),
("py:class", "spack.traverse.EdgeAndDepth"),
]
# The reST default role (used for this markup: `text`) to use for all documents.


@@ -181,6 +181,10 @@ Spec-related modules
:mod:`spack.parser`
Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
:mod:`spack.concretize`
Contains :class:`~spack.concretize.Concretizer` implementation,
which allows site administrators to change Spack's :ref:`concretization-policies`.
:mod:`spack.version`
Implements a simple :class:`~spack.version.Version` class with simple
comparison semantics. Also implements :class:`~spack.version.VersionRange`


@@ -863,7 +863,7 @@ named list ``compilers`` is ``['%gcc', '%clang', '%intel']`` on
spack:
definitions:
- compilers: ['%gcc', '%clang']
- when: arch.satisfies('target=x86_64:')
- when: arch.satisfies('x86_64:')
compilers: ['%intel']
.. note::
@@ -893,9 +893,8 @@ The valid variables for a ``when`` clause are:
#. ``env``. The user environment (usually ``os.environ`` in Python).
#. ``hostname``. The hostname of the system.
#. ``full_hostname``. The fully qualified hostname of the system.
#. ``hostname``. The hostname of the system (if ``hostname`` is an
executable in the user's PATH).
^^^^^^^^^^^^^^^^^^^^^^^^
SpecLists as Constraints


@@ -1263,11 +1263,6 @@ Git fetching supports the following parameters to ``version``:
option ``--depth 1`` will be used if the version of git and the specified
transport protocol support it, and ``--single-branch`` will be used if the
version of git supports it.
* ``git_sparse_paths``: Use ``sparse-checkout`` to clone only these relative paths.
This feature requires ``git`` version ``2.25.0`` or later and is useful for
large repositories with separate portions that can be built independently.
If the paths provided are directories, all of their subdirectories and associated
files will also be cloned.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1366,41 +1361,6 @@ Submodules
For more information about git submodules see the manpage of git: ``man
git-submodule``.
Sparse-Checkout
You can supply ``git_sparse_paths`` at the package or version level to use git's
sparse-checkout feature. Only the paths specified in the ``git_sparse_paths``
attribute, together with the files in the top-level directory, will be cloned.
This lets you clone only what you need from a large repository.
Note that this is a newer git feature and requires git ``2.25.0`` or greater.
If ``git_sparse_paths`` is supplied but the git version is too old,
a warning is issued and the package falls back to the standard cloning operations.
``git_sparse_paths`` can be supplied as a list of paths, as a callable (for versions),
or as a more complex package attribute using the ``@property`` decorator; a callable
implementation of ``git_sparse_paths`` should return a list.
.. code-block:: python
   def sparse_path_function(package):
       """A callable that can be used inside a version directive."""
       # Paths can be directories or files; all subdirectories and files are included.
       paths = ["doe", "rae", "me/file.cpp"]
       if package.spec.version > Version("1.2.0"):
           paths.extend(["fae"])
       return paths

   class MyPackage(Package):
       # Can also be a package attribute, used when not specified on a version.
       git_sparse_paths = ["doe", "rae"]

       # use the package attribute
       version("1.0.0")
       version("1.1.0")

       # use the function
       version("1.1.5", git_sparse_paths=sparse_path_function)
       version("1.2.0", git_sparse_paths=sparse_path_function)
       version("1.2.5", git_sparse_paths=sparse_path_function)
.. _github-fetch:
^^^^^^


@@ -663,7 +663,11 @@ build the package.
When including a bootstrapping phase as in the example above, the result is that
the bootstrapped compiler packages will be pushed to the binary mirror (and the
local artifacts mirror) before the actual release specs are built.
local artifacts mirror) before the actual release specs are built. In this case,
the jobs corresponding to subsequent release specs are configured to
``install_missing_compilers``, so that if spack is asked to install a package
with a compiler it doesn't know about, it can be quickly installed from the
binary mirror first.
Since bootstrapping compilers is optional, those items can be left out of the
environment/stack file, and in that case no bootstrapping will be done (only the


@@ -5,8 +5,8 @@ sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.3
urllib3==2.2.2
pytest==8.3.2
isort==5.13.2
black==24.8.0
flake8==7.1.1


@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
astunparse
----------------


@@ -1265,29 +1265,27 @@ def _distro_release_info(self) -> Dict[str, str]:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
else:
try:
with os.scandir(self.etc_dir) as it:
etc_files = [
p.path for p in it
if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
]
basenames = [
basename
for basename in os.listdir(self.etc_dir)
if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
and os.path.isfile(os.path.join(self.etc_dir, basename))
]
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
etc_files.sort()
basenames.sort()
except OSError:
# This may occur when /etc is not readable but we can't be
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
etc_files = [
os.path.join(self.etc_dir, basename)
for basename in _DISTRO_RELEASE_BASENAMES
]
for filepath in etc_files:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
basenames = _DISTRO_RELEASE_BASENAMES
for basename in basenames:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
if match is None:
continue
filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches.
if "name" not in distro_info:


@@ -231,6 +231,96 @@ def is_host_name(instance):
return True
try:
# The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
import idna
except ImportError:
pass
else:
@_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
def is_idn_host_name(instance):
if not isinstance(instance, str_types):
return True
idna.encode(instance)
return True
try:
import rfc3987
except ImportError:
try:
from rfc3986_validator import validate_rfc3986
except ImportError:
pass
else:
@_checks_drafts(name="uri")
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI_reference")
else:
@_checks_drafts(draft7="iri", raises=ValueError)
def is_iri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI")
@_checks_drafts(draft7="iri-reference", raises=ValueError)
def is_iri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI_reference")
@_checks_drafts(name="uri", raises=ValueError)
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI_reference")
try:
from strict_rfc3339 import validate_rfc3339
except ImportError:
try:
from rfc3339_validator import validate_rfc3339
except ImportError:
validate_rfc3339 = None
if validate_rfc3339:
@_checks_drafts(name="date-time")
def is_datetime(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3339(instance)
@_checks_drafts(draft7="time")
def is_time(instance):
if not isinstance(instance, str_types):
return True
return is_datetime("1970-01-01T" + instance)
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
@@ -250,3 +340,86 @@ def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
try:
import webcolors
except ImportError:
pass
else:
def is_css_color_code(instance):
return webcolors.normalize_hex(instance)
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
def is_css21_color(instance):
if (
not isinstance(instance, str_types) or
instance.lower() in webcolors.css21_names_to_hex
):
return True
return is_css_color_code(instance)
def is_css3_color(instance):
if instance.lower() in webcolors.css3_names_to_hex:
return True
return is_css_color_code(instance)
try:
import jsonpointer
except ImportError:
pass
else:
@_checks_drafts(
draft6="json-pointer",
draft7="json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_json_pointer(instance):
if not isinstance(instance, str_types):
return True
return jsonpointer.JsonPointer(instance)
# TODO: I don't want to maintain this, so it
# needs to go either into jsonpointer (pending
# https://github.com/stefankoegl/python-json-pointer/issues/34) or
# into a new external library.
@_checks_drafts(
draft7="relative-json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_relative_json_pointer(instance):
# Definition taken from:
# https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
if not isinstance(instance, str_types):
return True
non_negative_integer, rest = [], ""
for i, character in enumerate(instance):
if character.isdigit():
non_negative_integer.append(character)
continue
if not non_negative_integer:
return False
rest = instance[i:]
break
return (rest == "#") or jsonpointer.JsonPointer(rest)
try:
import uritemplate.exceptions
except ImportError:
pass
else:
@_checks_drafts(
draft6="uri-template",
draft7="uri-template",
raises=uritemplate.exceptions.InvalidTemplate,
)
def is_uri_template(
instance,
template_validator=uritemplate.Validator().force_balanced_braces(),
):
template = uritemplate.URITemplate(instance)
return template_validator.validate(template)


@@ -47,11 +47,7 @@ def decorator(factory):
def partial_uarch(
name: str = "",
vendor: str = "",
features: Optional[Set[str]] = None,
generation: int = 0,
cpu_part: str = "",
name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
) -> Microarchitecture:
"""Construct a partial microarchitecture, from information gathered during system scan."""
return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
features=features or set(),
compilers={},
generation=generation,
cpu_part=cpu_part,
)
@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
return partial_uarch(
vendor=_canonicalize_aarch64_vendor(data),
features=_feature_set(data, key="Features"),
cpu_part=data.get("CPU part", ""),
)
if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
generic_candidates = [c for c in candidates if c.vendor == "generic"]
best_generic = max(generic_candidates, key=sorting_fn)
# Relevant for AArch64. Filter on "cpu_part" if we have any match
if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
# Filter the candidates to be descendant of the best generic candidate.
# This is to avoid that the lack of a niche feature that can be disabled
# from e.g. BIOS prevents detection of a reasonably performant architecture


@@ -2,7 +2,9 @@
# Archspec Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Types and functions to manage information on CPU microarchitectures."""
"""Types and functions to manage information
on CPU microarchitectures.
"""
import functools
import platform
import re
@@ -63,24 +65,21 @@ class Microarchitecture:
passed in as argument above.
* versions: versions that support this micro-architecture.
generation (int): generation of the micro-architecture, if relevant.
cpu_part (str): cpu part of the architecture, if relevant.
generation (int): generation of the micro-architecture, if
relevant.
"""
# pylint: disable=too-many-arguments,too-many-instance-attributes
# pylint: disable=too-many-arguments
#: Aliases for micro-architecture's features
feature_aliases = FEATURE_ALIASES
def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.name = name
self.parents = parents
self.vendor = vendor
self.features = features
self.compilers = compilers
# Only relevant for PowerPC
self.generation = generation
# Only relevant for AArch64
self.cpu_part = cpu_part
# Cache the ancestor computation
self._ancestors = None
@@ -112,7 +111,6 @@ def __eq__(self, other):
and self.parents == other.parents # avoid ancestors here
and self.compilers == other.compilers
and self.generation == other.generation
and self.cpu_part == other.cpu_part
)
@coerce_target_names
@@ -145,8 +143,7 @@ def __repr__(self):
cls_name = self.__class__.__name__
fmt = (
cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
"{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
"cpu_part={0.cpu_part!r})"
"{0.features!r}, {0.compilers!r}, {0.generation!r})"
)
return fmt.format(self)
@@ -193,7 +190,6 @@ def to_dict(self):
"generation": self.generation,
"parents": [str(x) for x in self.parents],
"compilers": self.compilers,
"cpupart": self.cpu_part,
}
@staticmethod
@@ -206,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
features=set(data["features"]),
compilers=data.get("compilers", {}),
generation=data.get("generation", 0),
cpu_part=data.get("cpupart", ""),
)
def optimization_flags(self, compiler, version):
@@ -365,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
features = set(values["features"])
compilers = values.get("compilers", {})
generation = values.get("generation", 0)
cpu_part = values.get("cpupart", "")
targets[name] = Microarchitecture(
name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
)
targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
known_targets = {}
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]


@@ -2225,14 +2225,10 @@
],
"nvhpc": [
{
"versions": "21.11:23.8",
"versions": "21.11:",
"name": "zen3",
"flags": "-tp {name}",
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
},
{
"versions": "23.9:",
"flags": "-tp {name}"
"warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
}
]
}
@@ -2715,8 +2711,7 @@
"flags": "-mcpu=thunderx2t99"
}
]
},
"cpupart": "0x0af"
}
},
"a64fx": {
"from": ["armv8.2a"],
@@ -2784,8 +2779,7 @@
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
}
]
},
"cpupart": "0x001"
}
},
"cortex_a72": {
"from": ["aarch64"],
@@ -2822,8 +2816,7 @@
"flags" : "-mcpu=cortex-a72"
}
]
},
"cpupart": "0xd08"
}
},
"neoverse_n1": {
"from": ["cortex_a72", "armv8.2a"],
@@ -2909,8 +2902,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd0c"
}
},
"neoverse_v1": {
"from": ["neoverse_n1", "armv8.4a"],
@@ -2934,6 +2926,8 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
@@ -3034,8 +3028,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd40"
}
},
"neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"],
@@ -3059,10 +3052,13 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
@@ -3070,12 +3066,18 @@
"sb",
"dcpodp",
"sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
"bf16",
"dgh"
],
"compilers" : {
"gcc": [
@@ -3100,19 +3102,15 @@
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:11.3.99",
"versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.4:11.99",
"flags" : "-mcpu=neoverse-v2"
},
{
"versions": "12.0:12.2.99",
"versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
},
{
"versions": "12.3:",
"versions": "13.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
@@ -3147,113 +3145,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd4f"
},
"neoverse_n2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"dcpodp",
"sve2",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:10.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-n1",
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd49"
}
},
"m1": {
"from": ["armv8.4a"],
@@ -3319,8 +3211,7 @@
"flags" : "-mcpu=apple-m1"
}
]
},
"cpupart": "0x022"
}
},
"m2": {
"from": ["m1", "armv8.5a"],
@@ -3398,8 +3289,7 @@
"flags" : "-mcpu=apple-m2"
}
]
},
"cpupart": "0x032"
}
},
"arm": {
"from": [],


@@ -52,9 +52,6 @@
}
}
}
},
"cpupart": {
"type": "string"
}
},
"required": [
@@ -110,4 +107,4 @@
"additionalProperties": false
}
}
}
}


@@ -1,45 +0,0 @@
diff --git a/lib/spack/external/_vendoring/distro/distro.py b/lib/spack/external/_vendoring/distro/distro.py
index 89e1868047..50c3b18d4d 100644
--- a/lib/spack/external/_vendoring/distro/distro.py
+++ b/lib/spack/external/_vendoring/distro/distro.py
@@ -1265,27 +1265,29 @@ def _distro_release_info(self) -> Dict[str, str]:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
else:
try:
- basenames = [
- basename
- for basename in os.listdir(self.etc_dir)
- if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
- and os.path.isfile(os.path.join(self.etc_dir, basename))
- ]
+ with os.scandir(self.etc_dir) as it:
+ etc_files = [
+ p.path for p in it
+ if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
+ ]
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
- basenames.sort()
+ etc_files.sort()
except OSError:
# This may occur when /etc is not readable but we can't be
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
- basenames = _DISTRO_RELEASE_BASENAMES
- for basename in basenames:
- match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ etc_files = [
+ os.path.join(self.etc_dir, basename)
+ for basename in _DISTRO_RELEASE_BASENAMES
+ ]
+
+ for filepath in etc_files:
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
if match is None:
continue
- filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches.
if "name" not in distro_info:


@@ -13,191 +13,3 @@ index 6b630cdfbb..1791fe7fbf 100644
-__version__ = metadata.version("jsonschema")
+
+__version__ = "3.2.0"
diff --git a/lib/spack/external/_vendoring/jsonschema/_format.py b/lib/spack/external/_vendoring/jsonschema/_format.py
index 281a7cfcff..29061e3661 100644
--- a/lib/spack/external/_vendoring/jsonschema/_format.py
+++ b/lib/spack/external/_vendoring/jsonschema/_format.py
@@ -231,96 +231,6 @@ def is_host_name(instance):
return True
-try:
- # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
- import idna
-except ImportError:
- pass
-else:
- @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
- def is_idn_host_name(instance):
- if not isinstance(instance, str_types):
- return True
- idna.encode(instance)
- return True
-
-
-try:
- import rfc3987
-except ImportError:
- try:
- from rfc3986_validator import validate_rfc3986
- except ImportError:
- pass
- else:
- @_checks_drafts(name="uri")
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI_reference")
-
-else:
- @_checks_drafts(draft7="iri", raises=ValueError)
- def is_iri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI")
-
- @_checks_drafts(draft7="iri-reference", raises=ValueError)
- def is_iri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI_reference")
-
- @_checks_drafts(name="uri", raises=ValueError)
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI_reference")
-
-
-try:
- from strict_rfc3339 import validate_rfc3339
-except ImportError:
- try:
- from rfc3339_validator import validate_rfc3339
- except ImportError:
- validate_rfc3339 = None
-
-if validate_rfc3339:
- @_checks_drafts(name="date-time")
- def is_datetime(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3339(instance)
-
- @_checks_drafts(draft7="time")
- def is_time(instance):
- if not isinstance(instance, str_types):
- return True
- return is_datetime("1970-01-01T" + instance)
-
-
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
@@ -340,86 +250,3 @@ def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
-try:
- import webcolors
-except ImportError:
- pass
-else:
- def is_css_color_code(instance):
- return webcolors.normalize_hex(instance)
-
- @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
- def is_css21_color(instance):
- if (
- not isinstance(instance, str_types) or
- instance.lower() in webcolors.css21_names_to_hex
- ):
- return True
- return is_css_color_code(instance)
-
- def is_css3_color(instance):
- if instance.lower() in webcolors.css3_names_to_hex:
- return True
- return is_css_color_code(instance)
-
-
-try:
- import jsonpointer
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="json-pointer",
- draft7="json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_json_pointer(instance):
- if not isinstance(instance, str_types):
- return True
- return jsonpointer.JsonPointer(instance)
-
- # TODO: I don't want to maintain this, so it
- # needs to go either into jsonpointer (pending
- # https://github.com/stefankoegl/python-json-pointer/issues/34) or
- # into a new external library.
- @_checks_drafts(
- draft7="relative-json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_relative_json_pointer(instance):
- # Definition taken from:
- # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
- if not isinstance(instance, str_types):
- return True
- non_negative_integer, rest = [], ""
- for i, character in enumerate(instance):
- if character.isdigit():
- non_negative_integer.append(character)
- continue
-
- if not non_negative_integer:
- return False
-
- rest = instance[i:]
- break
- return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
-try:
- import uritemplate.exceptions
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="uri-template",
- draft7="uri-template",
- raises=uritemplate.exceptions.InvalidTemplate,
- )
- def is_uri_template(
- instance,
- template_validator=uritemplate.Validator().force_balanced_braces(),
- ):
- template = uritemplate.URITemplate(instance)
- return template_validator.validate(template)


@@ -27,6 +27,8 @@
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
from spack.util.executable import Executable, which
from ..path import path_to_os_path, system_path_filter
if sys.platform != "win32":
@@ -51,6 +53,7 @@
"find_all_headers",
"find_libraries",
"find_system_libraries",
"fix_darwin_install_name",
"force_remove",
"force_symlink",
"getuid",
@@ -245,6 +248,42 @@ def path_contains_subdirectory(path, root):
return norm_path.startswith(norm_root)
@memoized
def file_command(*args):
"""Creates entry point to `file` system command with provided arguments"""
file_cmd = which("file", required=True)
for arg in args:
file_cmd.add_default_arg(arg)
return file_cmd
@memoized
def _get_mime_type():
"""Generate method to call `file` system command to aquire mime type
for a specified path
"""
if sys.platform == "win32":
# -h option (no-dereference) does not exist in Windows
return file_command("-b", "--mime-type")
else:
return file_command("-b", "-h", "--mime-type")
def mime_type(filename):
"""Returns the mime type and subtype of a file.
Args:
filename: file to be analyzed
Returns:
Tuple containing the MIME type and subtype
"""
output = _get_mime_type()(filename, output=str, error=str).strip()
tty.debug("==> " + output)
type, _, subtype = output.partition("/")
return type, subtype
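For illustration, a minimal usage sketch of the helper above; the path and the result shown are hypothetical, and the import assumes this hunk belongs to llnl.util.filesystem:

    from llnl.util.filesystem import mime_type

    # Hypothetical file: `file -b --mime-type` typically reports something like
    # "application/gzip" here, which mime_type() splits on the first "/".
    kind, subtype = mime_type("/tmp/example.tar.gz")
    print(kind, subtype)  # e.g. "application gzip" (exact subtype depends on the magic database)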
#: This generates the library filenames that may appear on any OS.
library_extensions = ["a", "la", "so", "tbd", "dylib"]
@@ -1585,12 +1624,6 @@ def remove_linked_tree(path):
shutil.rmtree(os.path.realpath(path), **kwargs)
os.unlink(path)
else:
if sys.platform == "win32":
# Adding this prefix allows shutil to remove long paths on windows
# https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
long_path_pfx = "\\\\?\\"
if not path.startswith(long_path_pfx):
path = long_path_pfx + path
shutil.rmtree(path, **kwargs)
@@ -1640,6 +1673,41 @@ def safe_remove(*files_or_dirs):
raise
@system_path_filter
def fix_darwin_install_name(path):
"""Fix install name of dynamic libraries on Darwin to have full path.
There are two parts of this task:
1. Use ``install_name('-id', ...)`` to change install name of a single lib
2. Use ``install_name('-change', ...)`` to change the cross linking between
libs. The function assumes that all libraries are in one folder and
currently won't follow subfolders.
Parameters:
path (str): directory in which .dylib files are located
"""
libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
install_name_tool = Executable("install_name_tool")
install_name_tool("-id", lib, lib)
otool = Executable("otool")
long_deps = otool("-L", lib, output=str).split("\n")
deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
# We really want to check for either
# dep == os.path.basename(loc) or
# dep == join_path(builddir, os.path.basename(loc)),
# but we don't know builddir (nor how symbolic links look
# in builddir). We thus only compare the basenames.
if os.path.basename(dep) == os.path.basename(loc):
install_name_tool("-change", dep, loc, lib)
break
def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]:
"""Find the first file matching a pattern.


@@ -6,6 +6,7 @@
import collections.abc
import contextlib
import functools
import inspect
import itertools
import os
import re
@@ -15,7 +16,7 @@
from typing import Any, Callable, Iterable, List, Tuple
# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
ignore_modules = [r"^\.#", "~$"]
def index_by(objects, *funcs):
@@ -83,6 +84,20 @@ def index_by(objects, *funcs):
return result
def caller_locals():
"""This will return the locals of the *parent* of the caller.
This allows a function to insert variables into its caller's
scope. Yes, this is some black magic, and yes it's useful
for implementing things like depends_on and provides.
"""
# Passing zero here skips line context for speed.
stack = inspect.stack(0)
try:
return stack[2][0].f_locals
finally:
del stack
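As a rough illustration of that trick (hypothetical names; it relies on the caller being a class or module body, where the frame's f_locals is the live namespace):

    from llnl.util.lang import caller_locals

    def inject(name, value):
        # Write into the namespace of whoever called inject().
        caller_locals()[name] = value

    class Example:
        inject("flavor", "vanilla")  # afterwards, Example.flavor == "vanilla"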
def attr_setdefault(obj, name, value):
"""Like dict.setdefault, but for objects."""
if not hasattr(obj, name):
@@ -90,6 +105,15 @@ def attr_setdefault(obj, name, value):
return getattr(obj, name)
def has_method(cls, name):
for base in inspect.getmro(cls):
if base is object:
continue
if name in base.__dict__:
return True
return False
def union_dicts(*dicts):
"""Use update() to combine all dicts into one.
@@ -154,22 +178,19 @@ def list_modules(directory, **kwargs):
order."""
list_directories = kwargs.setdefault("directories", True)
ignore = re.compile(ignore_modules)
for name in os.listdir(directory):
if name == "__init__.py":
continue
with os.scandir(directory) as it:
for entry in it:
if entry.name == "__init__.py" or entry.name == "__pycache__":
continue
path = os.path.join(directory, name)
if list_directories and os.path.isdir(path):
init_py = os.path.join(path, "__init__.py")
if os.path.isfile(init_py):
yield name
if (
list_directories
and entry.is_dir()
and os.path.isfile(os.path.join(entry.path, "__init__.py"))
):
yield entry.name
elif entry.name.endswith(".py") and entry.is_file() and not ignore.search(entry.name):
yield entry.name[:-3] # strip .py
elif name.endswith(".py"):
if not any(re.search(pattern, name) for pattern in ignore_modules):
yield re.sub(".py$", "", name)
def decorator_with_or_without_args(decorator):
@@ -216,8 +237,8 @@ def setter(name, value):
value.__name__ = name
setattr(cls, name, value)
if not hasattr(cls, "_cmp_key"):
raise TypeError(f"'{cls.__name__}' doesn't define _cmp_key().")
if not has_method(cls, "_cmp_key"):
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
@@ -367,8 +388,8 @@ def cd_fun():
TypeError: If the class does not have a ``_cmp_iter`` method
"""
if not hasattr(cls, "_cmp_iter"):
raise TypeError(f"'{cls.__name__}' doesn't define _cmp_iter().")
if not has_method(cls, "_cmp_iter"):
raise TypeError("'%s' doesn't define _cmp_iter()." % cls.__name__)
# comparison operators are implemented in terms of lazy_eq and lazy_lt
def eq(self, other):
@@ -843,19 +864,20 @@ def uniq(sequence):
return uniq_list
def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
def elide_list(line_list, max_num=10):
"""Takes a long list and limits it to a smaller number of elements,
replacing intervening elements with '...'. For example::
elide_list(["1", "2", "3", "4", "5", "6"], 4)
elide_list([1,2,3,4,5,6], 4)
gives::
["1", "2", "3", "...", "6"]
[1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
return [*line_list[: max_num - 1], "...", line_list[-1]]
return line_list
return line_list[: max_num - 1] + ["..."] + line_list[-1:]
else:
return line_list
@contextlib.contextmanager


@@ -10,7 +10,6 @@
import errno
import io
import multiprocessing
import multiprocessing.connection
import os
import re
import select

lib/spack/spack/abi.py (new file, 131 lines)

@@ -0,0 +1,131 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from llnl.util.lang import memoized
import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.util.executable import Executable, ProcessError
class ABI:
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec
) -> bool:
"""Return true if architecture of target spec is ABI compatible
to the architecture of constraint spec. If either the target
or constraint specs have no architecture, target is also defined
as architecture ABI compatible to constraint."""
return (
not target.architecture
or not constraint.architecture
or target.architecture.intersects(constraint.architecture)
)
@memoized
def _gcc_get_libstdcxx_version(self, version):
"""Returns gcc ABI compatibility info by getting the library version of
a compiler's libstdc++ or libgcc_s"""
from spack.build_environment import dso_suffix
spec = spack.spec.CompilerSpec("gcc", version)
compilers = spack.compilers.compilers_for_spec(spec)
if not compilers:
return None
compiler = compilers[0]
rungcc = None
libname = None
output = None
if compiler.cxx:
rungcc = Executable(compiler.cxx)
libname = "libstdc++." + dso_suffix
elif compiler.cc:
rungcc = Executable(compiler.cc)
libname = "libgcc_s." + dso_suffix
else:
return None
try:
# Some gcc's are actually clang and don't respond properly to
# --print-file-name (they just print the filename, not the
# full path). Ignore these and expect them to be handled as clang.
if Clang.default_version(rungcc.exe[0]) != "unknown":
return None
output = rungcc("--print-file-name=%s" % libname, output=str)
except ProcessError:
return None
if not output:
return None
libpath = os.path.realpath(output.strip())
if not libpath:
return None
return os.path.basename(libpath)
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
are ABI compatible."""
plib = self._gcc_get_libstdcxx_version(pversion)
clib = self._gcc_get_libstdcxx_version(cversion)
if not plib or not clib:
return False
return plib == clib
def _intel_compiler_compare(
self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
) -> bool:
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
# Test major and minor versions. Ignore build version.
pv = pversion.lo
cv = cversion.lo
return pv.up_to(2) == cv.up_to(2)
def compiler_compatible(
self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
) -> bool:
"""Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
if parent.compiler.name != child.compiler.name:
# Different compiler families are assumed ABI incompatible
return False
if loose:
return True
# TODO: Can we move the specialized ABI matching stuff
# TODO: into compiler classes?
for pversion in parent.compiler.versions:
for cversion in child.compiler.versions:
# For a few compilers use specialized comparisons.
# Otherwise match on version match.
if pversion.intersects(cversion):
return True
elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
pversion, cversion
):
return True
elif parent.compiler.name == "intel" and self._intel_compiler_compare(
pversion, cversion
):
return True
return False
def compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
) -> bool:
"""Returns true if target spec is ABI compatible to constraint spec"""
return self.architecture_compatible(target, constraint) and self.compiler_compatible(
target, constraint, loose=loose
)
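The new abi.py leans on @memoized so the expensive compiler probes run at most once per version. A rough standalone sketch of that caching shape, using functools.lru_cache as a stand-in for llnl.util.lang.memoized and a fake probe instead of the real --print-file-name call:

import functools


@functools.lru_cache(maxsize=None)  # stand-in for llnl.util.lang.memoized
def probe_libstdcxx(version: str) -> str:
    # Placeholder for the real compiler invocation in _gcc_get_libstdcxx_version.
    print(f"probing gcc {version}")
    return f"libstdc++.so.6-for-{version.split('.')[0]}"


def gcc_abi_compatible(pversion: str, cversion: str) -> bool:
    # Versions that resolve to the same runtime library name are treated as compatible.
    return probe_libstdcxx(pversion) == probe_libstdcxx(cversion)


print(gcc_abi_compatible("12.3.0", "12.2.0"))  # True; each version is probed only once
print(gcc_abi_compatible("12.3.0", "13.1.0"))  # False; cached results are reused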

View File

@@ -39,21 +39,19 @@ def _search_duplicate_compilers(error_cls):
import collections
import collections.abc
import glob
import inspect
import io
import itertools
import os
import pathlib
import pickle
import re
import warnings
from typing import Iterable, List, Set, Tuple
from urllib.request import urlopen
import llnl.util.lang
import spack.config
import spack.patch
import spack.paths
import spack.repo
import spack.spec
import spack.util.crypto
@@ -75,9 +73,7 @@ def __init__(self, summary, details):
self.details = tuple(details)
def __str__(self):
if self.details:
return f"{self.summary}\n" + "\n".join(f" {detail}" for detail in self.details)
return self.summary
return self.summary + "\n" + "\n".join([" " + detail for detail in self.details])
def __eq__(self, other):
if self.summary != other.summary or self.details != other.details:
@@ -214,11 +210,6 @@ def _search_duplicate_compilers(error_cls):
group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
)
#: Sanity checks on packages.yaml
config_repos = AuditClass(
group="configs", tag="CFG-REPOS", description="Sanity checks on repositories", kwargs=()
)
@config_packages
def _search_duplicate_specs_in_externals(error_cls):
@@ -261,6 +252,40 @@ def _search_duplicate_specs_in_externals(error_cls):
return errors
@config_packages
def _deprecated_preferences(error_cls):
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
# TODO (v0.23): remove this audit as the attributes will not be allowed in config
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
def make_error(attribute_name, config_data, summary):
s = io.StringIO()
s.write("Occurring in the following file:\n")
dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
syaml.dump_config(dict_view, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])
if "all" in packages_yaml and "version" in packages_yaml["all"]:
summary = "Using the deprecated 'version' attribute under 'packages:all'"
errors.append(make_error("version", packages_yaml["all"], summary))
for package_name in packages_yaml:
if package_name == "all":
continue
package_conf = packages_yaml[package_name]
for attribute in ("compiler", "providers", "target"):
if attribute not in package_conf:
continue
summary = (
f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
)
errors.append(make_error(attribute, package_conf, summary))
return errors
@config_packages
def _avoid_mismatched_variants(error_cls):
"""Warns if variant preferences have mismatched types or names."""
@@ -342,27 +367,6 @@ def _ensure_all_virtual_packages_have_default_providers(error_cls):
]
@config_repos
def _ensure_no_folders_without_package_py(error_cls):
"""Check that we don't leave any folder without a package.py in repos"""
errors = []
for repository in spack.repo.PATH.repos:
missing = []
for entry in os.scandir(repository.packages_path):
if not entry.is_dir():
continue
package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
if not package_py.exists():
missing.append(entry.path)
if missing:
summary = (
f"The '{repository.namespace}' repository misses a package.py file"
f" in the following folders"
)
errors.append(error_cls(summary=summary, details=[f"{x}" for x in missing]))
return errors
def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
@@ -494,7 +498,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
name_definitions = collections.defaultdict(list)
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for cls_item in pkg_cls.__mro__:
for cls_item in inspect.getmro(pkg_cls):
for name in RESERVED_NAMES:
current_value = cls_item.__dict__.get(name)
if current_value is None:
@@ -523,7 +527,7 @@ def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
badname_regex, errors = re.compile(r"[_A-Z]"), []
for pkg_name in pkgs:
if badname_regex.search(pkg_name):
error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
error_msg = "Package name '{}' is either lowercase or conatine '_'".format(pkg_name)
errors.append(error_cls(error_msg, []))
return errors
@@ -683,88 +687,6 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
return errors
class DeprecatedMagicGlobals(ast.NodeVisitor):
def __init__(self, magic_globals: Iterable[str]):
super().__init__()
self.magic_globals: Set[str] = set(magic_globals)
# State to track whether we're in a class function
self.depth: int = 0
self.in_function: bool = False
self.path = (ast.Module, ast.ClassDef, ast.FunctionDef)
# Defined locals in the current function (heuristically at least)
self.locals: Set[str] = set()
# List of (name, lineno) tuples for references to magic globals
self.references_to_globals: List[Tuple[str, int]] = []
def descend_in_function_def(self, node: ast.AST) -> None:
if not isinstance(node, self.path[self.depth]):
return
self.depth += 1
if self.depth == len(self.path):
self.in_function = True
super().generic_visit(node)
if self.depth == len(self.path):
self.in_function = False
self.locals.clear()
self.depth -= 1
def generic_visit(self, node: ast.AST) -> None:
# Recurse into function definitions
if self.depth < len(self.path):
return self.descend_in_function_def(node)
elif not self.in_function:
return
elif isinstance(node, ast.Global):
for name in node.names:
if name in self.magic_globals:
self.references_to_globals.append((name, node.lineno))
elif isinstance(node, ast.Assign):
# visit the rhs before lhs
super().visit(node.value)
for target in node.targets:
super().visit(target)
elif isinstance(node, ast.Name) and node.id in self.magic_globals:
if isinstance(node.ctx, ast.Load) and node.id not in self.locals:
self.references_to_globals.append((node.id, node.lineno))
elif isinstance(node.ctx, ast.Store):
self.locals.add(node.id)
else:
super().generic_visit(node)
@package_properties
def _uses_deprecated_globals(pkgs, error_cls):
"""Ensure that packages do not use deprecated globals"""
errors = []
for pkg_name in pkgs:
# some packages scheduled to be removed in v0.23 are not worth fixing.
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
if all(v.get("deprecated", False) for v in pkg_cls.versions.values()):
continue
file = spack.repo.PATH.filename_for_package_name(pkg_name)
tree = ast.parse(open(file).read())
visitor = DeprecatedMagicGlobals(("std_cmake_args",))
visitor.visit(tree)
if visitor.references_to_globals:
errors.append(
error_cls(
f"Package '{pkg_name}' uses deprecated globals",
[
f"{file}:{line} references '{name}'"
for name, line in visitor.references_to_globals
],
)
)
return errors
@package_https_directives
def _linting_package_file(pkgs, error_cls):
"""Check for correctness of links"""

View File

@@ -6,6 +6,7 @@
import codecs
import collections
import concurrent.futures
import contextlib
import copy
import hashlib
import io
@@ -24,7 +25,7 @@
import urllib.request
import warnings
from contextlib import closing
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
from typing import Dict, Generator, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
import llnl.util.filesystem as fsys
import llnl.util.lang
@@ -54,7 +55,6 @@
import spack.util.archive
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.filesystem as ssys
import spack.util.gpg
import spack.util.parallel
import spack.util.path
@@ -106,7 +106,7 @@ class BuildCacheDatabase(spack_db.Database):
record_fields = ("spec", "ref_count", "in_buildcache")
def __init__(self, root):
super().__init__(root, lock_cfg=spack_db.NO_LOCK, layout=None)
super().__init__(root, lock_cfg=spack_db.NO_LOCK)
self._write_transaction_impl = llnl.util.lang.nullcontext
self._read_transaction_impl = llnl.util.lang.nullcontext
@@ -688,7 +688,7 @@ def get_buildfile_manifest(spec):
# Non-symlinks.
for rel_path in visitor.files:
abs_path = os.path.join(root, rel_path)
m_type, m_subtype = ssys.mime_type(abs_path)
m_type, m_subtype = fsys.mime_type(abs_path)
if relocate.needs_binary_relocation(m_type, m_subtype):
# Why is this branch not part of needs_binary_relocation? :(
@@ -789,9 +789,7 @@ def sign_specfile(key: str, specfile_path: str) -> str:
return signed_specfile_path
def _read_specs_and_push_index(
file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
):
def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
"""Read all the specs listed in the provided list, using thread given thread parallelism,
generate the index, and push it to the mirror.
@@ -815,7 +813,7 @@ def _read_specs_and_push_index(
else:
continue
db.add(fetched_spec)
db.add(fetched_spec, None)
db.mark(fetched_spec, "in_buildcache", True)
# Now generate the index, compute its hash, and push the two files to
@@ -960,7 +958,7 @@ def _spec_files_from_cache(url: str):
raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(url))
def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
def generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
"""Create or replace the build cache index on the given mirror. The
buildcache index contains an entry for each binary package under the
cache_prefix.
@@ -1121,7 +1119,7 @@ def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuil
return ExistsInBuildcache(signed, unsigned, tarball)
def _url_upload_tarball_and_specfile(
def _upload_tarball_and_specfile(
spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
):
files = BuildcacheFiles(spec, tmpdir, out_url)
@@ -1156,146 +1154,49 @@ def _url_upload_tarball_and_specfile(
)
class Uploader:
def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
self.mirror = mirror
self.force = force
self.update_index = update_index
self.tmpdir: str
self.executor: concurrent.futures.Executor
def __enter__(self):
self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
self._executor = spack.util.parallel.make_concurrent_executor()
self.tmpdir = self._tmpdir.__enter__()
self.executor = self.executor = self._executor.__enter__()
return self
def __exit__(self, *args):
self._executor.__exit__(*args)
self._tmpdir.__exit__(*args)
def push_or_raise(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
skipped, errors = self.push(specs)
if errors:
raise PushToBuildCacheError(
f"Failed to push {len(errors)} specs to {self.mirror.push_url}:\n"
+ "\n".join(
f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors
)
)
return skipped
def push(
self, specs: List[spack.spec.Spec]
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
raise NotImplementedError
def tag(self, tag: str, roots: List[spack.spec.Spec]):
"""Make a list of selected specs together available under the given tag"""
pass
class OCIUploader(Uploader):
def __init__(
self,
mirror: spack.mirror.Mirror,
force: bool,
update_index: bool,
base_image: Optional[str],
) -> None:
super().__init__(mirror, force, update_index)
self.target_image = spack.oci.oci.image_from_mirror(mirror)
self.base_image = ImageReference.from_string(base_image) if base_image else None
def push(
self, specs: List[spack.spec.Spec]
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
skipped, base_images, checksums, upload_errors = _oci_push(
target_image=self.target_image,
base_image=self.base_image,
installed_specs_with_deps=specs,
force=self.force,
tmpdir=self.tmpdir,
executor=self.executor,
)
self._base_images = base_images
self._checksums = checksums
# only update index if any binaries were uploaded
if self.update_index and len(skipped) + len(upload_errors) < len(specs):
_oci_update_index(self.target_image, self.tmpdir, self.executor)
return skipped, upload_errors
def tag(self, tag: str, roots: List[spack.spec.Spec]):
tagged_image = self.target_image.with_tag(tag)
# _push_oci may not populate self._base_images if binaries were already in the registry
for spec in roots:
_oci_update_base_images(
base_image=self.base_image,
target_image=self.target_image,
spec=spec,
base_image_cache=self._base_images,
)
_oci_put_manifest(
self._base_images, self._checksums, tagged_image, self.tmpdir, None, None, *roots
)
class URLUploader(Uploader):
def __init__(
self,
mirror: spack.mirror.Mirror,
force: bool,
update_index: bool,
signing_key: Optional[str],
) -> None:
super().__init__(mirror, force, update_index)
self.url = mirror.push_url
self.signing_key = signing_key
def push(
self, specs: List[spack.spec.Spec]
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
return _url_push(
specs,
out_url=self.url,
force=self.force,
update_index=self.update_index,
signing_key=self.signing_key,
tmpdir=self.tmpdir,
executor=self.executor,
)
def make_uploader(
mirror: spack.mirror.Mirror,
force: bool = False,
update_index: bool = False,
signing_key: Optional[str] = None,
base_image: Optional[str] = None,
) -> Uploader:
"""Builder for the appropriate uploader based on the mirror type"""
if mirror.push_url.startswith("oci://"):
return OCIUploader(
mirror=mirror, force=force, update_index=update_index, base_image=base_image
)
else:
return URLUploader(
mirror=mirror, force=force, update_index=update_index, signing_key=signing_key
)
def _format_spec(spec: Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
@contextlib.contextmanager
def default_push_context() -> Generator[Tuple[str, concurrent.futures.Executor], None, None]:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
yield tmpdir, executor
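default_push_context above pairs a scratch directory with a concurrent executor inside a single with-block. A standalone sketch of the same shape using only the standard library (ThreadPoolExecutor and a plain TemporaryDirectory stand in for Spack's stage root and make_concurrent_executor):

import contextlib
import tempfile
from concurrent.futures import Executor, ThreadPoolExecutor
from typing import Generator, Tuple


@contextlib.contextmanager
def push_context_sketch() -> Generator[Tuple[str, Executor], None, None]:
    # Yield both resources at once; they are torn down together when the block exits.
    with tempfile.TemporaryDirectory() as tmpdir, ThreadPoolExecutor(max_workers=4) as executor:
        yield tmpdir, executor


with push_context_sketch() as (tmpdir, executor):
    future = executor.submit(lambda: f"would stage tarballs under {tmpdir}")
    print(future.result())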
def push_or_raise(
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
force: bool = False,
update_index: bool = False,
) -> List[Spec]:
"""Same as push, but raises an exception on error. Returns a list of skipped specs already
present in the build cache when force=False."""
skipped, errors = push(specs, out_url, signing_key, force, update_index)
if errors:
raise PushToBuildCacheError(
f"Failed to push {len(errors)} specs to {out_url}:\n"
+ "\n".join(f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors)
)
return skipped
def push(
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
force: bool = False,
update_index: bool = False,
) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
"""Pushes to the provided build cache, and returns a list of skipped specs that were already
present (when force=False). Does not raise on error."""
with default_push_context() as (tmpdir, executor):
return _push(specs, out_url, signing_key, force, update_index, tmpdir, executor)
class FancyProgress:
def __init__(self, total: int):
self.n = 0
@@ -1333,7 +1234,7 @@ def fail(self) -> None:
tty.info(f"{self.pre}Failed to push {self.pretty_spec}")
def _url_push(
def _push(
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
@@ -1378,7 +1279,7 @@ def _url_push(
upload_futures = [
executor.submit(
_url_upload_tarball_and_specfile,
_upload_tarball_and_specfile,
spec,
tmpdir,
out_url,
@@ -1408,12 +1309,12 @@ def _url_push(
if signing_key:
keys_tmpdir = os.path.join(tmpdir, "keys")
os.mkdir(keys_tmpdir)
_url_push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
if update_index:
index_tmpdir = os.path.join(tmpdir, "index")
os.mkdir(index_tmpdir)
_url_generate_package_index(out_url, index_tmpdir)
generate_package_index(out_url, index_tmpdir)
return skipped, errors
@@ -1530,9 +1431,12 @@ def _oci_put_manifest(
for s in expected_blobs:
# If a layer for a dependency has gone missing (due to removed manifest in the registry, a
# failed push, or a local forced uninstall), we cannot create a runnable container image.
# If an OCI registry is only used for storage, this is not a hard error, but for now we
# raise an exception unconditionally, until someone requests a more lenient behavior.
checksum = checksums.get(s.dag_hash())
if checksum:
config["rootfs"]["diff_ids"].append(str(checksum.uncompressed_digest))
if not checksum:
raise MissingLayerError(f"missing layer for {_format_spec(s)}")
config["rootfs"]["diff_ids"].append(str(checksum.uncompressed_digest))
# Set the environment variables
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
@@ -1577,7 +1481,6 @@ def _oci_put_manifest(
"size": checksums[s.dag_hash()].size,
}
for s in expected_blobs
if s.dag_hash() in checksums
),
],
}
@@ -1616,7 +1519,7 @@ def _oci_update_base_images(
)
def _oci_push(
def _push_oci(
*,
target_image: ImageReference,
base_image: Optional[ImageReference],
@@ -1768,7 +1671,7 @@ def _oci_update_index(
for spec_dict in spec_dicts:
spec = Spec.from_dict(spec_dict)
db.add(spec)
db.add(spec, directory_layout=None)
db.mark(spec, "in_buildcache", True)
# Create the index.json file
@@ -2564,8 +2467,9 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
with spack.util.path.filter_padding():
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force)
spec.package.windows_establish_runtime_linkage()
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec)
spack.store.STORE.db.add(spec, spack.store.STORE.layout)
def install_single_spec(spec, unsigned=False, force=False):
@@ -2741,7 +2645,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
)
def _url_push_keys(
def push_keys(
*mirrors: Union[spack.mirror.Mirror, str],
keys: List[str],
tmpdir: str,
@@ -3192,3 +3096,7 @@ class CannotListKeys(GenerateIndexError):
class PushToBuildCacheError(spack.error.SpackError):
"""Raised when unable to push objects to binary mirror"""
class MissingLayerError(spack.error.SpackError):
"""Raised when a required layer for a dependency is missing in an OCI registry."""

View File

@@ -9,7 +9,6 @@
all_core_root_specs,
ensure_clingo_importable_or_raise,
ensure_core_dependencies,
ensure_file_in_path_or_raise,
ensure_gpg_in_path_or_raise,
ensure_patchelf_in_path_or_raise,
)
@@ -20,7 +19,6 @@
"is_bootstrapping",
"ensure_bootstrap_configuration",
"ensure_core_dependencies",
"ensure_file_in_path_or_raise",
"ensure_gpg_in_path_or_raise",
"ensure_clingo_importable_or_raise",
"ensure_patchelf_in_path_or_raise",

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import importlib
import os.path
import re
import sys
@@ -29,7 +28,7 @@
def _python_import(module: str) -> bool:
try:
importlib.import_module(module)
__import__(module)
except ImportError:
return False
return True
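The two import helpers above differ mainly for dotted names: importlib.import_module("a.b") returns the submodule itself, while a bare __import__("a.b") returns the top-level package "a". A small standalone check of the probe pattern:

import importlib


def can_import(module: str) -> bool:
    # Return True when the named module (including dotted submodules) imports cleanly.
    try:
        importlib.import_module(module)
    except ImportError:
        return False
    return True


print(can_import("email.message"), can_import("no_such_module_here"))  # True False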

View File

@@ -143,7 +143,11 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()
new_compilers = spack.compilers.find_new_compilers(
mixed_toolchain=sys.platform == "darwin"
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers)
@contextlib.contextmanager
@@ -152,7 +156,7 @@ def _ensure_bootstrap_configuration() -> Generator:
bootstrap_store_path = store_path()
user_configuration = _read_and_sanitize_configuration()
with spack.environment.no_active_environment():
with spack.platforms.use_platform(
with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
spack.platforms.real_host()
), spack.repo.use_repositories(spack.paths.packages_path):
# Default configuration scopes excluding command line

View File

@@ -472,8 +472,7 @@ def ensure_clingo_importable_or_raise() -> None:
def gnupg_root_spec() -> str:
"""Return the root spec used to bootstrap GnuPG"""
root_spec_name = "win-gpg" if IS_WINDOWS else "gnupg"
return _root_spec(f"{root_spec_name}@2.3:")
return _root_spec("gnupg@2.3:")
def ensure_gpg_in_path_or_raise() -> None:
@@ -483,19 +482,6 @@ def ensure_gpg_in_path_or_raise() -> None:
)
def file_root_spec() -> str:
"""Return the root spec used to bootstrap file"""
root_spec_name = "win-file" if IS_WINDOWS else "file"
return _root_spec(root_spec_name)
def ensure_file_in_path_or_raise() -> None:
"""Ensure file is in the PATH or raise"""
return ensure_executables_in_path_or_raise(
executables=["file"], abstract_spec=file_root_spec()
)
def patchelf_root_spec() -> str:
"""Return the root spec used to bootstrap patchelf"""
# 0.13.1 is the last version not to require C++17.
@@ -579,15 +565,14 @@ def ensure_core_dependencies() -> None:
"""Ensure the presence of all the core dependencies."""
if sys.platform.lower() == "linux":
ensure_patchelf_in_path_or_raise()
elif sys.platform == "win32":
ensure_file_in_path_or_raise()
ensure_gpg_in_path_or_raise()
if not IS_WINDOWS:
ensure_gpg_in_path_or_raise()
ensure_clingo_importable_or_raise()
def all_core_root_specs() -> List[str]:
"""Return a list of all the core root specs that may be used to bootstrap Spack"""
return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]
return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
def bootstrapping_sources(scope: Optional[str] = None):

View File

@@ -88,7 +88,7 @@ def _core_requirements() -> List[RequiredResponseType]:
def _buildcache_requirements() -> List[RequiredResponseType]:
_buildcache_exes = {
"file": _missing("file", "required to analyze files for buildcaches", system_only=False),
"file": _missing("file", "required to analyze files for buildcaches"),
("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
}
if platform.system().lower() == "darwin":
@@ -124,7 +124,7 @@ def _development_requirements() -> List[RequiredResponseType]:
# Ensure we trigger environment modifications if we have an environment
if BootstrapEnvironment.spack_yaml().exists():
with BootstrapEnvironment() as env:
env.load()
env.update_syspath_and_environ()
return [
_required_executable(

View File

@@ -457,12 +457,9 @@ def set_wrapper_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
# Find ccache binary and hand it to build environment
if spack.config.get("config:ccache"):
# Enable ccache in the compiler wrapper
env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
else:
# Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
env.set("CCACHE_DISABLE", "1")
# Gather information about various types of dependencies
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -1553,21 +1550,21 @@ class ModuleChangePropagator:
_PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")
def __init__(self, package: spack.package_base.PackageBase) -> None:
def __init__(self, package):
self._set_self_attributes("package", package)
self._set_self_attributes("current_module", package.module)
#: Modules for the classes in the MRO up to PackageBase
modules_in_mro = []
for cls in package.__class__.__mro__:
module = getattr(cls, "module", None)
for cls in inspect.getmro(type(package)):
module = cls.module
if module is None or module is spack.package_base:
break
if module is self.current_module:
if module == self.current_module:
continue
if module == spack.package_base:
break
modules_in_mro.append(module)
self._set_self_attributes("modules_in_mro", modules_in_mro)
self._set_self_attributes("_set_attributes", {})

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
import os.path
import stat
@@ -548,12 +549,13 @@ def autoreconf(self, pkg, spec, prefix):
tty.warn("* a custom AUTORECONF phase in the package *")
tty.warn("*********************************************************")
with fs.working_dir(self.configure_directory):
m = inspect.getmodule(self.pkg)
# This line is what is needed most of the time
# --install, --verbose, --force
autoreconf_args = ["-ivf"]
autoreconf_args += self.autoreconf_search_path_args
autoreconf_args += self.autoreconf_extra_args
self.pkg.module.autoreconf(*autoreconf_args)
m.autoreconf(*autoreconf_args)
@property
def autoreconf_search_path_args(self):
@@ -577,9 +579,7 @@ def set_configure_or_die(self):
raise RuntimeError(msg.format(self.configure_directory))
# Monkey-patch the configure script in the corresponding module
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
globals_for_pkg.configure = Executable(self.configure_abs_path)
globals_for_pkg.propagate_changes_to_mro()
inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)
def configure_args(self):
"""Return the list of all the arguments that must be passed to configure,
@@ -596,7 +596,7 @@ def configure(self, pkg, spec, prefix):
options += self.configure_args()
with fs.working_dir(self.build_directory, create=True):
pkg.module.configure(*options)
inspect.getmodule(self.pkg).configure(*options)
def build(self, pkg, spec, prefix):
"""Run "make" on the build targets specified by the builder."""
@@ -604,12 +604,12 @@ def build(self, pkg, spec, prefix):
params = ["V=1"]
params += self.build_targets
with fs.working_dir(self.build_directory):
pkg.module.make(*params)
inspect.getmodule(self.pkg).make(*params)
def install(self, pkg, spec, prefix):
"""Run "make" on the install targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.make(*self.install_targets)
inspect.getmodule(self.pkg).make(*self.install_targets)
spack.builder.run_after("build")(execute_build_time_tests)

View File

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import llnl.util.filesystem as fs
import spack.builder
@@ -70,7 +72,9 @@ def check_args(self):
def build(self, pkg, spec, prefix):
"""Runs ``cargo install`` in the source directory"""
with fs.working_dir(self.build_directory):
pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
inspect.getmodule(pkg).cargo(
"install", "--root", "out", "--path", ".", *self.build_args
)
def install(self, pkg, spec, prefix):
"""Copy build files into package prefix."""
@@ -82,4 +86,4 @@ def install(self, pkg, spec, prefix):
def check(self):
"""Run "cargo test"."""
with fs.working_dir(self.build_directory):
self.pkg.module.cargo("test", *self.check_args)
inspect.getmodule(self.pkg).cargo("test", *self.check_args)

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import inspect
import os
import pathlib
import platform
@@ -107,11 +108,6 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
if _supports_compilation_databases(pkg):
args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
# Enable MACOSX_RPATH by default when cmake_minimum_required < 3
# https://cmake.org/cmake/help/latest/policy/CMP0042.html
if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
def generator(*names: str, default: Optional[str] = None):
"""The build system generator to use.
@@ -543,24 +539,24 @@ def cmake(self, pkg, spec, prefix):
options += self.cmake_args()
options.append(os.path.abspath(self.root_cmakelists_dir))
with fs.working_dir(self.build_directory, create=True):
pkg.module.cmake(*options)
inspect.getmodule(self.pkg).cmake(*options)
def build(self, pkg, spec, prefix):
"""Make the build targets"""
with fs.working_dir(self.build_directory):
if self.generator == "Unix Makefiles":
pkg.module.make(*self.build_targets)
inspect.getmodule(self.pkg).make(*self.build_targets)
elif self.generator == "Ninja":
self.build_targets.append("-v")
pkg.module.ninja(*self.build_targets)
inspect.getmodule(self.pkg).ninja(*self.build_targets)
def install(self, pkg, spec, prefix):
"""Make the install targets"""
with fs.working_dir(self.build_directory):
if self.generator == "Unix Makefiles":
pkg.module.make(*self.install_targets)
inspect.getmodule(self.pkg).make(*self.install_targets)
elif self.generator == "Ninja":
pkg.module.ninja(*self.install_targets)
inspect.getmodule(self.pkg).ninja(*self.install_targets)
spack.builder.run_after("build")(execute_build_time_tests)

View File

@@ -3,9 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
from typing import Iterable, List
import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.multimethod import when
@@ -47,7 +44,6 @@ class CudaPackage(PackageBase):
"87",
"89",
"90",
"90a",
)
# FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -74,27 +70,6 @@ def cuda_flags(arch_list):
for s in arch_list
]
@staticmethod
def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
"""Adds a decimal place to each CUDA arch.
>>> compute_capabilities(['90', '90a'])
['9.0', '9.0a']
Args:
arch_list: A list of integer strings, optionally followed by a suffix.
Returns:
A list of float strings, optionally followed by a suffix
"""
pattern = re.compile(r"(\d+)")
capabilities = []
for arch in arch_list:
_, number, letter = re.split(pattern, arch)
number = "{0:.1f}".format(float(number) / 10.0)
capabilities.append(number + letter)
return capabilities
depends_on("cuda", when="+cuda")
# CUDA version vs Architecture
@@ -163,7 +138,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -171,7 +146,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import llnl.util.filesystem as fs
import spack.builder
@@ -80,7 +82,7 @@ def check_args(self):
def build(self, pkg, spec, prefix):
"""Runs ``go build`` in the source directory"""
with fs.working_dir(self.build_directory):
pkg.module.go("build", *self.build_args)
inspect.getmodule(pkg).go("build", *self.build_args)
def install(self, pkg, spec, prefix):
"""Install built binaries into prefix bin."""
@@ -93,4 +95,4 @@ def install(self, pkg, spec, prefix):
def check(self):
"""Run ``go test .`` in the source directory"""
with fs.working_dir(self.build_directory):
self.pkg.module.go("test", *self.check_args)
inspect.getmodule(self.pkg).go("test", *self.check_args)

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import List
import llnl.util.filesystem as fs
@@ -102,12 +103,12 @@ def edit(self, pkg, spec, prefix):
def build(self, pkg, spec, prefix):
"""Run "make" on the build targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.make(*self.build_targets)
inspect.getmodule(self.pkg).make(*self.build_targets)
def install(self, pkg, spec, prefix):
"""Run "make" on the install targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.make(*self.install_targets)
inspect.getmodule(self.pkg).make(*self.install_targets)
spack.builder.run_after("build")(execute_build_time_tests)

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
from typing import List
@@ -194,19 +195,19 @@ def meson(self, pkg, spec, prefix):
options += self.std_meson_args
options += self.meson_args()
with fs.working_dir(self.build_directory, create=True):
pkg.module.meson(*options)
inspect.getmodule(self.pkg).meson(*options)
def build(self, pkg, spec, prefix):
"""Make the build targets"""
options = ["-v"]
options += self.build_targets
with fs.working_dir(self.build_directory):
pkg.module.ninja(*options)
inspect.getmodule(self.pkg).ninja(*options)
def install(self, pkg, spec, prefix):
"""Make the install targets"""
with fs.working_dir(self.build_directory):
pkg.module.ninja(*self.install_targets)
inspect.getmodule(self.pkg).ninja(*self.install_targets)
spack.builder.run_after("build")(execute_build_time_tests)

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import List # novm
import llnl.util.filesystem as fs
@@ -103,7 +104,7 @@ def msbuild_install_args(self):
def build(self, pkg, spec, prefix):
"""Run "msbuild" on the build targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.msbuild(
inspect.getmodule(self.pkg).msbuild(
*self.std_msbuild_args,
*self.msbuild_args(),
self.define_targets(*self.build_targets),
@@ -113,6 +114,6 @@ def install(self, pkg, spec, prefix):
"""Run "msbuild" on the install targets specified by the builder.
This is INSTALL by default"""
with fs.working_dir(self.build_directory):
pkg.module.msbuild(
inspect.getmodule(self.pkg).msbuild(
*self.msbuild_install_args(), self.define_targets(*self.install_targets)
)

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import List # novm
import llnl.util.filesystem as fs
@@ -131,7 +132,9 @@ def build(self, pkg, spec, prefix):
if self.makefile_name:
opts.append("/F{}".format(self.makefile_name))
with fs.working_dir(self.build_directory):
pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
inspect.getmodule(self.pkg).nmake(
*opts, *self.build_targets, ignore_quotes=self.ignore_quotes
)
def install(self, pkg, spec, prefix):
"""Run "nmake" on the install targets specified by the builder.
@@ -143,4 +146,6 @@ def install(self, pkg, spec, prefix):
opts.append("/F{}".format(self.makefile_name))
opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
with fs.working_dir(self.build_directory):
pkg.module.nmake(*opts, *self.install_targets, ignore_quotes=self.ignore_quotes)
inspect.getmodule(self.pkg).nmake(
*opts, *self.install_targets, ignore_quotes=self.ignore_quotes
)

View File

@@ -2,6 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import spack.builder
import spack.package_base
from spack.directives import build_system, extends
@@ -45,7 +47,7 @@ class OctaveBuilder(BaseBuilder):
def install(self, pkg, spec, prefix):
"""Install the package from the archive file"""
pkg.module.octave(
inspect.getmodule(self.pkg).octave(
"--quiet",
"--norc",
"--built-in-docstrings-file=/dev/null",

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
from typing import Iterable
@@ -133,7 +134,7 @@ def build_method(self):
def build_executable(self):
"""Returns the executable method to build the perl package"""
if self.build_method == "Makefile.PL":
build_executable = self.pkg.module.make
build_executable = inspect.getmodule(self.pkg).make
elif self.build_method == "Build.PL":
build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
return build_executable
@@ -157,7 +158,7 @@ def configure(self, pkg, spec, prefix):
options = ["Build.PL", "--install_base", prefix]
options += self.configure_args()
pkg.module.perl(*options)
inspect.getmodule(self.pkg).perl(*options)
# It is possible that the shebang in the Build script that is created from
# Build.PL may be too long causing the build to fail. Patching the shebang

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import functools
import inspect
import operator
import os
import re
@@ -16,7 +17,7 @@
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import HeaderList, LibraryList, join_path
from llnl.util.filesystem import HeaderList, LibraryList
import spack.builder
import spack.config
@@ -119,12 +120,6 @@ def skip_modules(self) -> Iterable[str]:
"""
return []
@property
def bindir(self) -> str:
"""Path to Python package's bindir, bin on unix like OS's Scripts on Windows"""
windows = self.spec.satisfies("platform=windows")
return join_path(self.spec.prefix, "Scripts" if windows else "bin")
def view_file_conflicts(self, view, merge_map):
"""Report all file conflicts, excepting special cases for python.
Specifically, this does not report errors for duplicate
@@ -227,7 +222,7 @@ def test_imports(self) -> None:
# Make sure we are importing the installed modules,
# not the ones in the source directory
python = self.module.python
python = inspect.getmodule(self).python # type: ignore[union-attr]
for module in self.import_modules:
with test_part(
self,
@@ -314,9 +309,9 @@ def get_external_python_for_prefix(self):
)
python_externals_detected = [
spec
for spec in python_externals_detection.get("python", [])
if spec.external_path == self.spec.external_path
d.spec
for d in python_externals_detection.get("python", [])
if d.prefix == self.spec.external_path
]
if python_externals_detected:
return python_externals_detected[0]

View File

@@ -2,6 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from llnl.util.filesystem import working_dir
import spack.builder
@@ -64,17 +66,17 @@ def qmake_args(self):
def qmake(self, pkg, spec, prefix):
"""Run ``qmake`` to configure the project and generate a Makefile."""
with working_dir(self.build_directory):
pkg.module.qmake(*self.qmake_args())
inspect.getmodule(self.pkg).qmake(*self.qmake_args())
def build(self, pkg, spec, prefix):
"""Make the build targets"""
with working_dir(self.build_directory):
pkg.module.make()
inspect.getmodule(self.pkg).make()
def install(self, pkg, spec, prefix):
"""Make the install targets"""
with working_dir(self.build_directory):
pkg.module.make("install")
inspect.getmodule(self.pkg).make("install")
def check(self):
"""Search the Makefile for a ``check:`` target and runs it if found."""

View File

@@ -2,10 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import Optional, Tuple
import llnl.util.lang as lang
from llnl.util.filesystem import mkdirp
from spack.directives import extends
@@ -37,7 +37,6 @@ def configure_vars(self):
def install(self, pkg, spec, prefix):
"""Installs an R package."""
mkdirp(pkg.module.r_lib_dir)
config_args = self.configure_args()
config_vars = self.configure_vars()
@@ -45,14 +44,14 @@ def install(self, pkg, spec, prefix):
args = ["--vanilla", "CMD", "INSTALL"]
if config_args:
args.append(f"--configure-args={' '.join(config_args)}")
args.append("--configure-args={0}".format(" ".join(config_args)))
if config_vars:
args.append(f"--configure-vars={' '.join(config_vars)}")
args.append("--configure-vars={0}".format(" ".join(config_vars)))
args.extend([f"--library={pkg.module.r_lib_dir}", self.stage.source_path])
args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path])
pkg.module.R(*args)
inspect.getmodule(self.pkg).R(*args)
class RPackage(Package):
@@ -81,21 +80,35 @@ class RPackage(Package):
@lang.classproperty
def homepage(cls):
if cls.cran:
return f"https://cloud.r-project.org/package={cls.cran}"
return "https://cloud.r-project.org/package=" + cls.cran
elif cls.bioc:
return f"https://bioconductor.org/packages/{cls.bioc}"
return "https://bioconductor.org/packages/" + cls.bioc
@lang.classproperty
def url(cls):
def urls(cls):
if cls.cran:
return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
return [
"https://cloud.r-project.org/src/contrib/"
+ f"{cls.cran}_{str(list(cls.versions)[0])}.tar.gz",
"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+ f"{cls.cran}_{str(list(cls.versions)[0])}.tar.gz",
]
elif cls.bioc:
return [
"https://bioconductor.org/packages/release/bioc/src/contrib/"
+ f"{cls.bioc}_{str(list(cls.versions)[0])}.tar.gz",
"https://bioconductor.org/packages/release/data/annotation/src/contrib/"
+ f"{cls.bioc}_{str(list(cls.versions)[0])}.tar.gz",
]
else:
return [cls.url]
@lang.classproperty
def list_url(cls):
if cls.cran:
return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
return "https://cloud.r-project.org/src/contrib/"
@property
def git(self):
if self.bioc:
return f"https://git.bioconductor.org/packages/{self.bioc}"
return "https://git.bioconductor.org/packages/" + self.bioc

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import glob
import inspect
import spack.builder
import spack.package_base
@@ -51,10 +52,10 @@ def build(self, pkg, spec, prefix):
gemspecs = glob.glob("*.gemspec")
rakefiles = glob.glob("Rakefile")
if gemspecs:
pkg.module.gem("build", "--norc", gemspecs[0])
inspect.getmodule(self.pkg).gem("build", "--norc", gemspecs[0])
elif rakefiles:
jobs = pkg.module.make_jobs
pkg.module.rake("package", "-j{0}".format(jobs))
jobs = inspect.getmodule(self.pkg).make_jobs
inspect.getmodule(self.pkg).rake("package", "-j{0}".format(jobs))
else:
# Some Ruby packages only ship `*.gem` files, so nothing to build
pass
@@ -69,6 +70,6 @@ def install(self, pkg, spec, prefix):
# if --install-dir is not used, GEM_PATH is deleted from the
# environement, and Gems required to build native extensions will
# not be found. Those extensions are built during `gem install`.
pkg.module.gem(
inspect.getmodule(self.pkg).gem(
"install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0]
)

View File

@@ -2,6 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import spack.builder
import spack.package_base
from spack.directives import build_system, depends_on
@@ -61,7 +63,8 @@ def build_args(self, spec, prefix):
def build(self, pkg, spec, prefix):
"""Build the package."""
pkg.module.scons(*self.build_args(spec, prefix))
args = self.build_args(spec, prefix)
inspect.getmodule(self.pkg).scons(*args)
def install_args(self, spec, prefix):
"""Arguments to pass to install."""
@@ -69,7 +72,9 @@ def install_args(self, spec, prefix):
def install(self, pkg, spec, prefix):
"""Install the package."""
pkg.module.scons("install", *self.install_args(spec, prefix))
args = self.install_args(spec, prefix)
inspect.getmodule(self.pkg).scons("install", *args)
def build_test(self):
"""Run unit tests after build.

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
import re
@@ -85,13 +86,14 @@ def import_modules(self):
def python(self, *args, **kwargs):
"""The python ``Executable``."""
self.pkg.module.python(*args, **kwargs)
inspect.getmodule(self).python(*args, **kwargs)
def test_imports(self):
"""Attempts to import modules of the installed package."""
# Make sure we are importing the installed modules,
# not the ones in the source directory
python = inspect.getmodule(self).python
for module in self.import_modules:
with spack.install_test.test_part(
self,
@@ -99,7 +101,7 @@ def test_imports(self):
purpose="checking import of {0}".format(module),
work_dir="spack-test",
):
self.python("-c", "import {0}".format(module))
python("-c", "import {0}".format(module))
@spack.builder.builder("sip")
@@ -134,7 +136,7 @@ def configure(self, pkg, spec, prefix):
"""Configure the package."""
# https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
args = ["--verbose", "--target-dir", pkg.module.python_platlib]
args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
args.extend(self.configure_args())
# https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766
@@ -153,7 +155,7 @@ def build(self, pkg, spec, prefix):
args = self.build_args()
with working_dir(self.build_directory):
pkg.module.make(*args)
inspect.getmodule(self.pkg).make(*args)
def build_args(self):
"""Arguments to pass to build."""
@@ -164,7 +166,7 @@ def install(self, pkg, spec, prefix):
args = self.install_args()
with working_dir(self.build_directory):
pkg.module.make("install", *args)
inspect.getmodule(self.pkg).make("install", *args)
def install_args(self):
"""Arguments to pass to install."""

View File

@@ -2,6 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from llnl.util.filesystem import working_dir
import spack.builder
@@ -88,11 +90,11 @@ def build_directory(self):
def python(self, *args, **kwargs):
"""The python ``Executable``."""
self.pkg.module.python(*args, **kwargs)
inspect.getmodule(self.pkg).python(*args, **kwargs)
def waf(self, *args, **kwargs):
"""Runs the waf ``Executable``."""
jobs = self.pkg.module.make_jobs
jobs = inspect.getmodule(self.pkg).make_jobs
with working_dir(self.build_directory):
self.python("waf", "-j{0}".format(jobs), *args, **kwargs)

View File

@@ -6,12 +6,12 @@
import collections.abc
import copy
import functools
import inspect
from typing import List, Optional, Tuple
from llnl.util import lang
import spack.build_environment
import spack.multimethod
#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS = {}
@@ -96,10 +96,11 @@ class hierarchy (look at AspellDictPackage for an example of that)
Args:
pkg (spack.package_base.PackageBase): package object for which we need a builder
"""
package_module = inspect.getmodule(pkg)
package_buildsystem = buildsystem_name(pkg)
default_builder_cls = BUILDER_CLS[package_buildsystem]
builder_cls_name = default_builder_cls.__name__
builder_cls = getattr(pkg.module, builder_cls_name, None)
builder_cls = getattr(package_module, builder_cls_name, None)
if builder_cls:
return builder_cls(pkg)
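The lookup above lets a package's own module override the default builder class by defining a class with the same name. A standalone sketch of that getattr fallback, with hypothetical builder classes and a SimpleNamespace standing in for the package module:

import types


class CMakeBuilderDefault:
    pass


class CMakeBuilderOverride(CMakeBuilderDefault):
    pass


BUILDER_REGISTRY = {"cmake": CMakeBuilderDefault}

# A package module that defines its own class under the default builder's name.
pkg_module = types.SimpleNamespace(CMakeBuilderDefault=CMakeBuilderOverride)

default_cls = BUILDER_REGISTRY["cmake"]
builder_cls = getattr(pkg_module, default_cls.__name__, None) or default_cls
print(builder_cls.__name__)  # CMakeBuilderOverride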
@@ -294,11 +295,7 @@ def _decorator(fn):
return _decorator
class BuilderMeta(
PhaseCallbacksMeta,
spack.multimethod.MultiMethodMeta,
type(collections.abc.Sequence), # type: ignore
):
class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)): # type: ignore
pass

View File

@@ -9,11 +9,11 @@
import llnl.util.lang
from llnl.util.filesystem import mkdirp
from llnl.util.symlink import symlink
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.paths
import spack.util.file_cache
import spack.util.path
@@ -74,6 +74,23 @@ def store(self, fetcher, relative_dest):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def symlink(self, mirror_ref):
"""Symlink a human readible path in our mirror to the actual
storage location."""
cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
storage_path = os.path.join(self.root, mirror_ref.storage_path)
relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
if not os.path.exists(cosmetic_path):
if os.path.lexists(cosmetic_path):
# In this case the link itself exists but it is broken: remove
# it and recreate it (in order to fix any symlinks broken prior
# to https://github.com/spack/spack/pull/13908)
os.unlink(cosmetic_path)
mkdirp(os.path.dirname(cosmetic_path))
symlink(relative_dst, cosmetic_path)
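The symlink helper above creates a human-readable alias pointing at the real storage path via a relative target, so the cache stays valid if its root moves. A standalone sketch of the same relative-link step (hypothetical paths, and it simply removes any existing link first):

import os


def relink(cosmetic_path: str, storage_path: str) -> None:
    # Compute the target relative to the link's directory, then (re)create the link.
    relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
    if os.path.lexists(cosmetic_path):
        os.unlink(cosmetic_path)
    os.makedirs(os.path.dirname(cosmetic_path), exist_ok=True)
    os.symlink(relative_dst, cosmetic_path)


relink("cache/by-name/zlib-1.3.tar.gz", "cache/_blobs/ab12cd.tar.gz")
print(os.readlink("cache/by-name/zlib-1.3.tar.gz"))  # ../_blobs/ab12cd.tar.gz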
#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (

View File

@@ -1110,8 +1110,7 @@ def main_script_replacements(cmd):
cdash_handler.populate_buildgroup(all_job_names)
except (SpackError, HTTPError, URLError, TimeoutError) as err:
tty.warn(f"Problem populating buildgroup: {err}")
elif cdash_config:
# warn only if there was actually a CDash configuration.
else:
tty.warn("Unable to populate buildgroup without CDash credentials")
service_job_retries = {
@@ -1383,10 +1382,8 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
"""
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
signing_key = bindist.select_signing_key() if sign_binaries else None
mirror = spack.mirror.Mirror.from_url(mirror_url)
try:
with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
bindist.push_or_raise([spec], out_url=mirror_url, signing_key=signing_key)
return True
except bindist.PushToBuildCacheError as e:
tty.error(f"Problem writing to {mirror_url}: {e}")
@@ -1436,6 +1433,10 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
job_log_dir: path into which build log should be copied
"""
tty.debug(f"job spec: {job_spec}")
if not job_spec:
msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
tty.error(msg)
return
try:
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import importlib
import os
import re
import sys
@@ -115,8 +114,8 @@ def get_module(cmd_name):
try:
# Try to import the command from the built-in directory
module_name = f"{__name__}.{pname}"
module = importlib.import_module(module_name)
module_name = "%s.%s" % (__name__, pname)
module = __import__(module_name, fromlist=[pname, SETUP_PARSER, DESCRIPTION], level=0)
tty.debug("Imported {0} from built-in commands".format(pname))
except ImportError:
module = spack.extensions.get_module(cmd_name)

View File

@@ -115,11 +115,15 @@ def audit(parser, args):
def _process_reports(reports):
for check, errors in reports:
if errors:
status = f"{len(errors)} issue{'' if len(errors) == 1 else 's'} found"
print(cl.colorize(f"{check}: @*r{{{status}}}"))
numdigits = len(str(len(errors)))
msg = "{0}: {1} issue{2} found".format(
check, len(errors), "" if len(errors) == 1 else "s"
)
header = "@*b{" + msg + "}"
print(cl.colorize(header))
for idx, error in enumerate(errors):
print(f"{idx + 1:>{numdigits}}. {error}")
print(str(idx + 1) + ". " + str(error))
raise SystemExit(1)
else:
print(cl.colorize(f"{check}: @*g{{passed}}"))
msg = "{0}: 0 issues found.".format(check)
header = "@*b{" + msg + "}"
print(cl.colorize(header))
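The f-string variant in the hunk above right-aligns the error counter to the width of the largest index by nesting one format spec inside another. A tiny standalone demonstration:

errors = [f"issue {i}" for i in range(1, 12)]
numdigits = len(str(len(errors)))  # 11 errors -> width 2
for idx, error in enumerate(errors):
    print(f"{idx + 1:>{numdigits}}. {error}")
# prints " 1. issue 1" through "11. issue 11" with aligned counters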

View File

@@ -37,6 +37,7 @@
from spack import traverse
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import ImageReference
from spack.spec import Spec, save_dependency_specfiles
description = "create, download and install binary packages"
@@ -391,8 +392,13 @@ def push_fn(args):
else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
mirror = args.mirror
assert isinstance(mirror, spack.mirror.Mirror)
mirror: spack.mirror.Mirror = args.mirror
# Check if this is an OCI image.
try:
target_image = spack.oci.oci.image_from_mirror(mirror)
except ValueError:
target_image = None
push_url = mirror.push_url
@@ -403,11 +409,14 @@ def push_fn(args):
unsigned = not (args.key or args.signed)
# For OCI images, we require dependencies to be pushed for now.
if mirror.push_url.startswith("oci://") and not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
if target_image:
if "dependencies" not in args.things_to_install:
tty.die("Dependencies must be pushed for OCI images.")
if not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
unsigned = True
# Select a signing key, or None if unsigned.
@@ -438,17 +447,49 @@ def push_fn(args):
(s, PackageNotInstalledError("package not installed")) for s in not_installed
)
with bindist.make_uploader(
mirror=mirror,
force=args.force,
update_index=args.update_index,
signing_key=signing_key,
base_image=args.base_image,
) as uploader:
skipped, upload_errors = uploader.push(specs=specs)
failed.extend(upload_errors)
if not upload_errors and args.tag:
uploader.tag(args.tag, roots)
with bindist.default_push_context() as (tmpdir, executor):
if target_image:
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
skipped, base_images, checksums, upload_errors = bindist._push_oci(
target_image=target_image,
base_image=base_image,
installed_specs_with_deps=specs,
force=args.force,
tmpdir=tmpdir,
executor=executor,
)
if upload_errors:
failed.extend(upload_errors)
# Apart from creating manifests for each individual spec, we allow users to create a
# separate image tag for all root specs and their runtime dependencies.
elif args.tag:
tagged_image = target_image.with_tag(args.tag)
# _push_oci may not populate base_images if binaries were already in the registry
for spec in roots:
bindist._oci_update_base_images(
base_image=base_image,
target_image=target_image,
spec=spec,
base_image_cache=base_images,
)
bindist._oci_put_manifest(
base_images, checksums, tagged_image, tmpdir, None, None, *roots
)
tty.info(f"Tagged {tagged_image}")
else:
skipped, upload_errors = bindist._push(
specs,
out_url=push_url,
force=args.force,
update_index=args.update_index,
signing_key=signing_key,
tmpdir=tmpdir,
executor=executor,
)
failed.extend(upload_errors)
if skipped:
if len(specs) == 1:
@@ -460,7 +501,7 @@ def push_fn(args):
"The following {} specs were skipped as they already exist in the buildcache:\n"
" {}\n"
" Use --force to overwrite them.".format(
len(skipped), ", ".join(elide_list([_format_spec(s) for s in skipped], 5))
len(skipped), ", ".join(elide_list(skipped, 5))
)
)
@@ -481,6 +522,13 @@ def push_fn(args):
),
)
# Update the OCI index if requested
if target_image and len(skipped) < len(specs) and args.update_index:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
bindist._oci_update_index(target_image, tmpdir, executor)
def install_fn(args):
"""install from a binary package"""
@@ -768,7 +816,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
url = mirror.push_url
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
bindist._url_generate_package_index(url, tmpdir)
bindist.generate_package_index(url, tmpdir)
if update_keys:
keys_url = url_util.join(


@@ -7,7 +7,6 @@
import copy
import os
import re
import shlex
import sys
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
@@ -19,7 +18,6 @@
import spack.cmd
import spack.main
import spack.paths
import spack.platforms
from spack.main import section_descriptions
description = "list available spack commands"
@@ -141,7 +139,7 @@ def usage(self, usage: str) -> str:
cmd = self.parser.prog.replace(" ", "-")
if cmd in self.documented:
string = f"{string}\n:ref:`More documentation <cmd-{cmd}>`\n"
string += "\n:ref:`More documentation <cmd-{0}>`\n".format(cmd)
return string
@@ -251,27 +249,33 @@ def body(
Function body.
"""
if positionals:
return f"""
return """
if $list_options
then
{self.optionals(optionals)}
{0}
else
{self.positionals(positionals)}
{1}
fi
"""
""".format(
self.optionals(optionals), self.positionals(positionals)
)
elif subcommands:
return f"""
return """
if $list_options
then
{self.optionals(optionals)}
{0}
else
{self.subcommands(subcommands)}
{1}
fi
"""
""".format(
self.optionals(optionals), self.subcommands(subcommands)
)
else:
return f"""
{self.optionals(optionals)}
"""
return """
{0}
""".format(
self.optionals(optionals)
)
def positionals(self, positionals: Sequence[str]) -> str:
"""Return the syntax for reporting positional arguments.
@@ -300,7 +304,7 @@ def optionals(self, optionals: Sequence[str]) -> str:
Returns:
Syntax for optional flags.
"""
return f'SPACK_COMPREPLY="{" ".join(optionals)}"'
return 'SPACK_COMPREPLY="{0}"'.format(" ".join(optionals))
def subcommands(self, subcommands: Sequence[str]) -> str:
"""Return the syntax for reporting subcommands.
@@ -311,7 +315,7 @@ def subcommands(self, subcommands: Sequence[str]) -> str:
Returns:
Syntax for subcommand parsers
"""
return f'SPACK_COMPREPLY="{" ".join(subcommands)}"'
return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))
# Map argument destination names to their complete commands
@@ -391,7 +395,7 @@ def _fish_dest_get_complete(prog: str, dest: str) -> Optional[str]:
subcmd = s[1] if len(s) == 2 else ""
for (prog_key, pos_key), value in _dest_to_fish_complete.items():
if subcmd.startswith(prog_key) and re.match(f"^{pos_key}$", dest):
if subcmd.startswith(prog_key) and re.match("^" + pos_key + "$", dest):
return value
return None
@@ -423,6 +427,24 @@ def format(self, cmd: Command) -> str:
+ self.complete(cmd.prog, positionals, optionals, subcommands)
)
def _quote(self, string: str) -> str:
"""Quote string and escape special characters if necessary.
Args:
string: Input string.
Returns:
Quoted string.
"""
# Goal here is to match fish_indent behavior
# Strings without spaces (or other special characters) do not need to be escaped
if not any([sub in string for sub in [" ", "'", '"']]):
return string
string = string.replace("'", r"\'")
return f"'{string}'"
def optspecs(
self,
prog: str,
@@ -441,7 +463,7 @@ def optspecs(
optspec_var = "__fish_spack_optspecs_" + prog.replace(" ", "_").replace("-", "_")
if optionals is None:
return f"set -g {optspec_var}\n"
return "set -g %s\n" % optspec_var
# Build optspec by iterating over options
args = []
@@ -468,11 +490,11 @@ def optspecs(
long = [f[2:] for f in flags if f.startswith("--")]
while len(short) > 0 and len(long) > 0:
arg = f"{short.pop()}/{long.pop()}{required}"
arg = "%s/%s%s" % (short.pop(), long.pop(), required)
while len(short) > 0:
arg = f"{short.pop()}/{required}"
arg = "%s/%s" % (short.pop(), required)
while len(long) > 0:
arg = f"{long.pop()}{required}"
arg = "%s%s" % (long.pop(), required)
args.append(arg)
@@ -481,7 +503,7 @@ def optspecs(
# indicate that such subcommand exists.
args = " ".join(args)
return f"set -g {optspec_var} {args}\n"
return "set -g %s %s\n" % (optspec_var, args)
@staticmethod
def complete_head(
@@ -502,14 +524,12 @@ def complete_head(
subcmd = s[1] if len(s) == 2 else ""
if index is None:
return f"complete -c {s[0]} -n '__fish_spack_using_command {subcmd}'"
return "complete -c %s -n '__fish_spack_using_command %s'" % (s[0], subcmd)
elif nargs in [argparse.ZERO_OR_MORE, argparse.ONE_OR_MORE, argparse.REMAINDER]:
return (
f"complete -c {s[0]} -n '__fish_spack_using_command_pos_remainder "
f"{index} {subcmd}'"
)
head = "complete -c %s -n '__fish_spack_using_command_pos_remainder %d %s'"
else:
return f"complete -c {s[0]} -n '__fish_spack_using_command_pos {index} {subcmd}'"
head = "complete -c %s -n '__fish_spack_using_command_pos %d %s'"
return head % (s[0], index, subcmd)
def complete(
self,
@@ -577,18 +597,25 @@ def positionals(
if choices is not None:
# If there are choices, we provide a completion for all possible values.
commands.append(f"{head} -f -a {shlex.quote(' '.join(choices))}")
commands.append(head + " -f -a %s" % self._quote(" ".join(choices)))
else:
# Otherwise, we try to find a predefined completion for it
value = _fish_dest_get_complete(prog, args)
if value is not None:
commands.append(f"{head} {value}")
commands.append(head + " " + value)
return "\n".join(commands) + "\n"
def prog_comment(self, prog: str) -> str:
"""Return a comment line for the command."""
return f"\n# {prog}\n"
"""Return a comment line for the command.
Args:
prog: Program name.
Returns:
Comment line.
"""
return "\n# %s\n" % prog
def optionals(
self,
@@ -631,28 +658,28 @@ def optionals(
for f in flags:
if f.startswith("--"):
long = f[2:]
prefix = f"{prefix} -l {long}"
prefix += " -l %s" % long
elif f.startswith("-"):
short = f[1:]
assert len(short) == 1
prefix = f"{prefix} -s {short}"
prefix += " -s %s" % short
# Check if option require argument.
# Currently multi-argument options are not supported, so we treat it like one argument.
if nargs != 0:
prefix = f"{prefix} -r"
prefix += " -r"
if dest is not None:
# If there are choices, we provide a completion for all possible values.
commands.append(f"{prefix} -f -a {shlex.quote(' '.join(dest))}")
commands.append(prefix + " -f -a %s" % self._quote(" ".join(dest)))
else:
# Otherwise, we try to find a predefined completion for it
value = _fish_dest_get_complete(prog, dest)
if value is not None:
commands.append(f"{prefix} {value}")
commands.append(prefix + " " + value)
if help:
commands.append(f"{prefix} -d {shlex.quote(help)}")
commands.append(prefix + " -d %s" % self._quote(help))
return "\n".join(commands) + "\n"
@@ -670,11 +697,11 @@ def subcommands(self, prog: str, subcommands: List[Tuple[ArgumentParser, str, st
head = self.complete_head(prog, 0)
for _, subcommand, help in subcommands:
command = f"{head} -f -a {shlex.quote(subcommand)}"
command = head + " -f -a %s" % self._quote(subcommand)
if help is not None and len(help) > 0:
help = help.split("\n")[0]
command = f"{command} -d {shlex.quote(help)}"
command += " -d %s" % self._quote(help)
commands.append(command)
@@ -720,7 +747,7 @@ def rst_index(out: IO) -> None:
for i, cmd in enumerate(sorted(commands)):
description = description.capitalize() if i == 0 else ""
ref = f":ref:`{cmd} <spack-{cmd}>`"
ref = ":ref:`%s <spack-%s>`" % (cmd, cmd)
comma = "," if i != len(commands) - 1 else ""
bar = "| " if i % 8 == 0 else " "
out.write(line % (description, bar + ref + comma))
@@ -831,10 +858,10 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
# check header first so we don't open out files unnecessarily
if args.header and not os.path.exists(args.header):
tty.die(f"No such file: '{args.header}'")
tty.die("No such file: '%s'" % args.header)
if args.update:
tty.msg(f"Updating file: {args.update}")
tty.msg("Updating file: %s" % args.update)
with open(args.update, "w") as f:
prepend_header(args, f)
formatter(args, f)


@@ -50,7 +50,6 @@ def setup_parser(subparser):
default=lambda: spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
arguments.add_common_arguments(find_parser, ["jobs"])
# Remove
remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove compiler by spec")
@@ -79,21 +78,25 @@ def setup_parser(subparser):
def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration.
"""
# None signals spack.compiler.find_compilers to use its default logic
paths = args.add_paths or None
new_compilers = spack.compilers.find_compilers(
path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,
# Below scope=None because we want new compilers that don't appear
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(
paths, scope=None, mixed_toolchain=args.mixed_toolchain
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
n = len(new_compilers)
s = "s" if n > 1 else ""
filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
tty.msg(f"Added {n:d} new compiler{s} to {filename}")
compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
colify(reversed(compiler_strs), indent=4)
config = spack.config.CONFIG
filename = config.get_config_filename(args.scope, "compilers")
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
else:
tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:")


@@ -6,7 +6,6 @@
import re
import sys
import urllib.parse
from typing import List
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -15,15 +14,9 @@
import spack.stage
import spack.util.web
from spack.spec import Spec
from spack.url import (
UndetectableNameError,
UndetectableVersionError,
find_versions_of_archive,
parse_name,
parse_version,
)
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.executable import ProcessError, which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
@@ -96,20 +89,14 @@ class BundlePackageTemplate:
url_def = " # There is no URL since there is no code to download."
body_def = " # There is no need for install() since there is no code."
def __init__(self, name: str, versions, languages: List[str]):
def __init__(self, name, versions):
self.name = name
self.class_name = mod_to_class(name)
self.versions = versions
self.languages = languages
def write(self, pkg_path):
"""Writes the new package file."""
all_deps = [f' depends_on("{lang}", type="build")' for lang in self.languages]
if all_deps and self.dependencies:
all_deps.append("")
all_deps.append(self.dependencies)
# Write out a template for the file
with open(pkg_path, "w") as pkg_file:
pkg_file.write(
@@ -119,7 +106,7 @@ def write(self, pkg_path):
base_class_name=self.base_class_name,
url_def=self.url_def,
versions=self.versions,
dependencies="\n".join(all_deps),
dependencies=self.dependencies,
body_def=self.body_def,
)
)
@@ -138,8 +125,8 @@ def install(self, spec, prefix):
url_line = ' url = "{url}"'
def __init__(self, name, url, versions, languages: List[str]):
super().__init__(name, versions, languages)
def __init__(self, name, url, versions):
super().__init__(name, versions)
self.url_def = self.url_line.format(url=url)
@@ -227,13 +214,13 @@ def luarocks_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
if not name.startswith("lua-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to lua-{0}".format(name))
name = "lua-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class MesonPackageTemplate(PackageTemplate):
@@ -334,14 +321,14 @@ class RacketPackageTemplate(PackageTemplate):
# subdirectory = None
"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
if not name.startswith("rkt-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to rkt-{0}".format(name))
name = "rkt-{0}".format(name)
self.body_def = self.body_def.format(name[4:])
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class PythonPackageTemplate(PackageTemplate):
@@ -374,7 +361,7 @@ def config_settings(self, spec, prefix):
settings = {}
return settings"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
@@ -428,7 +415,7 @@ def __init__(self, name, url, versions, languages: List[str]):
+ self.url_line
)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class RPackageTemplate(PackageTemplate):
@@ -447,7 +434,7 @@ def configure_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
if not name.startswith("r-"):
# Make it more obvious that we are renaming the package
@@ -467,7 +454,7 @@ def __init__(self, name, url, versions, languages: List[str]):
if bioc:
self.url_line = ' url = "{0}"\n' ' bioc = "{1}"'.format(url, r_name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class PerlmakePackageTemplate(PackageTemplate):
@@ -487,14 +474,14 @@ def configure_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
if not name.startswith("perl-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to perl-{0}".format(name))
name = "perl-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class PerlbuildPackageTemplate(PerlmakePackageTemplate):
@@ -519,7 +506,7 @@ class OctavePackageTemplate(PackageTemplate):
# FIXME: Add additional dependencies if required.
# depends_on("octave-foo", type=("build", "run"))"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name octave-splines`, don't rename it
# octave-octave-splines
if not name.startswith("octave-"):
@@ -527,7 +514,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to octave-{0}".format(name))
name = "octave-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class RubyPackageTemplate(PackageTemplate):
@@ -547,7 +534,7 @@ def build(self, spec, prefix):
# FIXME: If not needed delete this function
pass"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name ruby-numpy`, don't rename it
# ruby-ruby-numpy
if not name.startswith("ruby-"):
@@ -555,7 +542,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to ruby-{0}".format(name))
name = "ruby-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class MakefilePackageTemplate(PackageTemplate):
@@ -593,14 +580,14 @@ def configure_args(self, spec, prefix):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
templates = {
@@ -671,48 +658,8 @@ def setup_parser(subparser):
)
#: C file extensions
C_EXT = {".c"}
#: C++ file extensions
CXX_EXT = {
".C",
".c++",
".cc",
".ccm",
".cpp",
".CPP",
".cxx",
".h++",
".hh",
".hpp",
".hxx",
".inl",
".ipp",
".ixx",
".tcc",
".tpp",
}
#: Fortran file extensions
FORTRAN_EXT = {
".f77",
".F77",
".f90",
".F90",
".f95",
".F95",
".f",
".F",
".for",
".FOR",
".ftn",
".FTN",
}
class BuildSystemAndLanguageGuesser:
"""An instance of BuildSystemAndLanguageGuesser provides a callable object to be used
class BuildSystemGuesser:
"""An instance of BuildSystemGuesser provides a callable object to be used
during ``spack create``. By passing this object to ``spack checksum``, we
can take a peek at the fetched tarball and discern the build system it uses
"""
@@ -720,119 +667,81 @@ class BuildSystemAndLanguageGuesser:
def __init__(self):
"""Sets the default build system."""
self.build_system = "generic"
self._c = False
self._cxx = False
self._fortran = False
# List of files in the archive ordered by their depth in the directory tree.
self._file_entries: List[str] = []
def __call__(self, archive: str, url: str) -> None:
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from."""
if url is not None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
return
if url.endswith(".gem"):
self.build_system = "ruby"
return
if url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
return
if url.endswith(".rock"):
self.build_system = "lua"
return
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
]
# Peek inside the compressed file.
if archive.endswith(".zip") or ".zip#" in archive:
if stage.archive_file.endswith(".zip") or ".zip#" in stage.archive_file:
try:
unzip = which("unzip")
assert unzip is not None
output = unzip("-lq", archive, output=str)
except Exception:
output = unzip("-lq", stage.archive_file, output=str)
except ProcessError:
output = ""
else:
try:
tar = which("tar")
assert tar is not None
output = tar("tf", archive, output=str)
except Exception:
output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
except ProcessError:
output = ""
self._file_entries[:] = output.splitlines()
lines = output.splitlines()
# Files closest to the root should be considered first when determining build system.
self._file_entries.sort(key=lambda p: p.count("/"))
self._determine_build_system(url)
self._determine_language()
def _determine_build_system(self, url: str) -> None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
elif url.endswith(".gem"):
self.build_system = "ruby"
elif url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
elif url.endswith(".rock"):
self.build_system = "lua"
elif self._file_entries:
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(re.compile(pattern), build_system)
for pattern, build_system in (
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
)
]
# Determine the build system based on the files contained in the archive.
for file in self._file_entries:
for pattern, build_system in clues:
if pattern.search(file):
self.build_system = build_system
return
def _determine_language(self):
for entry in self._file_entries:
_, ext = os.path.splitext(entry)
if not self._c and ext in C_EXT:
self._c = True
elif not self._cxx and ext in CXX_EXT:
self._cxx = True
elif not self._fortran and ext in FORTRAN_EXT:
self._fortran = True
if self._c and self._cxx and self._fortran:
return
@property
def languages(self) -> List[str]:
langs: List[str] = []
if self._c:
langs.append("c")
if self._cxx:
langs.append("cxx")
if self._fortran:
langs.append("fortran")
return langs
# Determine the build system based on the files contained
# in the archive.
for pattern, bs in clues:
if any(re.search(pattern, line) for line in lines):
self.build_system = bs
break
def get_name(name, url):
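
The guesser in the hunk above boils down to two passes over the archive listing: regex clues decide the build system, with files nearest the root considered first, and file extensions decide the languages. A condensed, standalone sketch of that idea, using only a handful of the patterns and extensions listed above:

import os
import re

# a small subset of the clue patterns and extension sets shown in the hunk
CLUES = [(re.compile(p), bs) for p, bs in (
    (r"/CMakeLists\.txt$", "cmake"),
    (r"/configure$", "autotools"),
    (r"/setup\.(py|cfg)$", "python"),
    (r"/meson\.build$", "meson"),
)]
C_EXT = {".c"}
CXX_EXT = {".cc", ".cpp", ".cxx", ".hpp"}
FORTRAN_EXT = {".f", ".F", ".f90", ".F90"}

def guess(file_entries):
    # files closest to the root win when determining the build system
    entries = sorted(file_entries, key=lambda p: p.count("/"))
    build_system = "generic"
    for entry in entries:
        matched = next((bs for rx, bs in CLUES if rx.search(entry)), None)
        if matched:
            build_system = matched
            break
    exts = {os.path.splitext(e)[1] for e in entries}
    languages = [name for name, ext_set in
                 (("c", C_EXT), ("cxx", CXX_EXT), ("fortran", FORTRAN_EXT)) if exts & ext_set]
    return build_system, languages

print(guess(["pkg-1.0/CMakeLists.txt", "pkg-1.0/src/main.cpp", "pkg-1.0/src/solver.f90"]))
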
@@ -902,7 +811,7 @@ def get_url(url):
def get_versions(args, name):
"""Returns a list of versions and hashes for a package.
Also returns a BuildSystemAndLanguageGuesser object.
Also returns a BuildSystemGuesser object.
Returns default values if no URL is provided.
@@ -911,7 +820,7 @@ def get_versions(args, name):
name (str): The name of the package
Returns:
tuple: versions and hashes, and a BuildSystemAndLanguageGuesser object
tuple: versions and hashes, and a BuildSystemGuesser object
"""
# Default version with hash
@@ -925,7 +834,7 @@ def get_versions(args, name):
# version("1.2.4")"""
# Default guesser
guesser = BuildSystemAndLanguageGuesser()
guesser = BuildSystemGuesser()
valid_url = True
try:
@@ -938,7 +847,7 @@ def get_versions(args, name):
if args.url is not None and args.template != "bundle" and valid_url:
# Find available versions
try:
url_dict = find_versions_of_archive(args.url)
url_dict = spack.url.find_versions_of_archive(args.url)
if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
if url_dict_filtered is None:
@@ -965,7 +874,7 @@ def get_versions(args, name):
return versions, guesser
def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGuesser) -> str:
def get_build_system(template, url, guesser):
"""Determine the build system template.
If a template is specified, always use that. Otherwise, if a URL
@@ -973,10 +882,11 @@ def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGue
build system it uses. Otherwise, use a generic template by default.
Args:
template: ``--template`` argument given to ``spack create``
url: ``url`` argument given to ``spack create``
guesser: The first_stage_function given to ``spack checksum`` which records the build
system it detects
template (str): ``--template`` argument given to ``spack create``
url (str): ``url`` argument given to ``spack create``
args (argparse.Namespace): The arguments given to ``spack create``
guesser (BuildSystemGuesser): The first_stage_function given to
``spack checksum`` which records the build system it detects
Returns:
str: The name of the build system template to use
@@ -1050,7 +960,7 @@ def create(parser, args):
build_system = get_build_system(args.template, url, guesser)
# Create the package template object
constr_args = {"name": name, "versions": versions, "languages": guesser.languages}
constr_args = {"name": name, "versions": versions}
package_class = templates[build_system]
if package_class != BundlePackageTemplate:
constr_args["url"] = url


@@ -14,6 +14,7 @@
installation and its deprecator.
"""
import argparse
import os
import llnl.util.tty as tty
from llnl.util.symlink import symlink
@@ -75,7 +76,12 @@ def setup_parser(sp):
)
sp.add_argument(
"-l", "--link-type", type=str, default=None, choices=["soft", "hard"], help="(deprecated)"
"-l",
"--link-type",
type=str,
default="soft",
choices=["soft", "hard"],
help="type of filesystem link to use for deprecation (default soft)",
)
sp.add_argument(
@@ -85,9 +91,6 @@ def setup_parser(sp):
def deprecate(parser, args):
"""Deprecate one spec in favor of another"""
if args.link_type is not None:
tty.warn("The --link-type option is deprecated and will be removed in a future release.")
env = ev.active_environment()
specs = spack.cmd.parse_specs(args.specs)
@@ -141,5 +144,7 @@ def deprecate(parser, args):
if not answer:
tty.die("Will not deprecate any packages.")
link_fn = os.link if args.link_type == "hard" else symlink
for dcate, dcator in zip(all_deprecate, all_deprecators):
dcate.package.do_deprecate(dcator, symlink)
dcate.package.do_deprecate(dcator, link_fn)


@@ -10,10 +10,8 @@
import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.stage
import spack.util.path
import spack.version
from spack.cmd.common import arguments
@@ -64,7 +62,7 @@ def change_fn(section):
spack.config.change_or_add("develop", find_fn, change_fn)
def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
def _retrieve_develop_source(spec, abspath):
# "steal" the source code via staging API. We ask for a stage
# to be created, then copy it afterwards somewhere else. It would be
# better if we can create the `source_path` directly into its final
@@ -73,13 +71,13 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
# We construct a package class ourselves, rather than asking for
# Spec.package, since Spec only allows this when it is concrete
package = pkg_cls(spec)
source_stage: spack.stage.Stage = package.stage[0]
source_stage = package.stage[0]
if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
source_stage.fetcher.get_full_repo = True
# If we retrieved this version before and cached it, we may have
# done so without cloning the full git repo; likewise, any
# mirror might store an instance with truncated history.
source_stage.default_fetcher_only = True
source_stage.disable_mirrors()
source_stage.fetcher.set_package(package)
package.stage.steal_source(abspath)


@@ -468,30 +468,32 @@ def env_remove(args):
This removes an environment managed by Spack. Directory environments
and manifests embedded in repositories should be removed manually.
"""
remove_envs = []
read_envs = []
valid_envs = []
bad_envs = []
invalid_envs = []
for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
valid_envs.append(env)
valid_envs.append(env_name)
if env_name in args.rm_env:
remove_envs.append(env)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
invalid_envs.append(env_name)
if env_name in args.rm_env:
bad_envs.append(env_name)
# Check if remove_env is included from another env before trying to remove
for env in valid_envs:
for remove_env in remove_envs:
# Check if env is linked to another before trying to remove
for name in valid_envs:
# don't check if environment is included to itself
if env.name == remove_env.name:
if name == env_name:
continue
if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
environ = ev.Environment(ev.root(name))
if ev.root(env_name) in environ.included_concrete_envs:
msg = f'Environment "{env_name}" is being used by environment "{name}"'
if args.force:
tty.warn(msg)
else:
@@ -504,7 +506,7 @@ def env_remove(args):
if not answer:
tty.die("Will not remove any environments")
for env in remove_envs:
for env in read_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")


@@ -224,7 +224,7 @@ def gpg_publish(args):
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution._url_push_keys(
spack.binary_distribution.push_keys(
mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
)


@@ -48,7 +48,6 @@ def setup_parser(subparser):
options = [
("--detectable", print_detectable.__doc__),
("--maintainers", print_maintainers.__doc__),
("--namespace", print_namespace.__doc__),
("--no-dependencies", "do not " + print_dependencies.__doc__),
("--no-variants", "do not " + print_variants.__doc__),
("--no-versions", "do not " + print_versions.__doc__),
@@ -190,15 +189,6 @@ def print_maintainers(pkg, args):
color.cprint(section_title("Maintainers: ") + mnt)
def print_namespace(pkg, args):
"""output package namespace"""
repo = spack.repo.PATH.get_repo(pkg.namespace)
color.cprint("")
color.cprint(section_title("Namespace:"))
color.cprint(f" @c{{{repo.namespace}}} at {repo.root}")
def print_phases(pkg, args):
"""output installation phases"""
@@ -512,7 +502,7 @@ def print_licenses(pkg, args):
def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
pkg = pkg_cls(spec)
# Output core package information
@@ -532,7 +522,6 @@ def info(parser, args):
# Now output optional information in expected order
sections = [
(args.all or args.maintainers, print_maintainers),
(args.all or args.namespace, print_namespace),
(args.all or args.detectable, print_detectable),
(args.all or args.tags, print_tags),
(args.all or not args.no_versions, print_versions),


@@ -23,6 +23,11 @@ def setup_parser(subparser):
output.add_argument(
"-s", "--safe", action="store_true", help="only list safe versions of the package"
)
output.add_argument(
"--safe-only",
action="store_true",
help="[deprecated] only list safe versions of the package",
)
output.add_argument(
"-r", "--remote", action="store_true", help="only list remote versions of the package"
)
@@ -42,13 +47,17 @@ def versions(parser, args):
safe_versions = pkg.versions
if args.safe_only:
tty.warn('"--safe-only" is deprecated. Use "--safe" instead.')
args.safe = args.safe_only
if not (args.remote or args.new):
if sys.stdout.isatty():
tty.msg("Safe versions (already checksummed):")
if not safe_versions:
if sys.stdout.isatty():
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug("Manually add versions to the package.")
else:
colify(sorted(safe_versions, reverse=True), indent=2)
@@ -74,12 +83,12 @@ def versions(parser, args):
if not remote_versions:
if sys.stdout.isatty():
if not fetched_versions:
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug(
"Check the list_url and list_depth attributes of "
"the package to help Spack find versions."
)
else:
tty.warn(f"Found no unchecksummed versions for {pkg.name}")
tty.warn("Found no unchecksummed versions for {0}".format(pkg.name))
else:
colify(sorted(remote_versions, reverse=True), indent=2)


@@ -29,9 +29,6 @@
__all__ = ["Compiler"]
PATH_INSTANCE_VARS = ["cc", "cxx", "f77", "fc"]
FLAG_INSTANCE_VARS = ["cflags", "cppflags", "cxxflags", "fflags"]
@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
@@ -703,30 +700,6 @@ def compiler_environment(self):
os.environ.clear()
os.environ.update(backup_env)
def to_dict(self):
flags_dict = {fname: " ".join(fvals) for fname, fvals in self.flags.items()}
flags_dict.update(
{attr: getattr(self, attr, None) for attr in FLAG_INSTANCE_VARS if hasattr(self, attr)}
)
result = {
"spec": str(self.spec),
"paths": {attr: getattr(self, attr, None) for attr in PATH_INSTANCE_VARS},
"flags": flags_dict,
"operating_system": str(self.operating_system),
"target": str(self.target),
"modules": self.modules or [],
"environment": self.environment or {},
"extra_rpaths": self.extra_rpaths or [],
}
if self.enable_implicit_rpaths is not None:
result["implicit_rpaths"] = self.enable_implicit_rpaths
if self.alias:
result["alias"] = self.alias
return result
class CompilerAccessError(spack.error.SpackError):
def __init__(self, compiler, paths):


@@ -6,11 +6,12 @@
"""This module contains functions related to finding compilers on the
system and configuring Spack to use multiple compilers.
"""
import importlib
import collections
import itertools
import multiprocessing.pool
import os
import sys
import warnings
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Tuple
import archspec.cpu
@@ -21,15 +22,16 @@
import spack.compiler
import spack.config
import spack.error
import spack.operating_systems
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.version
from spack.operating_systems import windows_os
from spack.util.environment import get_path
from spack.util.naming import mod_to_class
_path_instance_vars = ["cc", "cxx", "f77", "fc"]
_flags_instance_vars = ["cflags", "cppflags", "cxxflags", "fflags"]
_other_instance_vars = [
"modules",
"operating_system",
@@ -61,10 +63,6 @@
}
#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"
def pkg_spec_for_compiler(cspec):
"""Return the spec of the package that provides the compiler."""
for spec, package in _compiler_to_pkg.items():
@@ -87,7 +85,29 @@ def converter(cspec_like, *args, **kwargs):
def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
return {"compiler": compiler.to_dict()}
d = {}
d["spec"] = str(compiler.spec)
d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
d["flags"].update(
dict(
(attr, getattr(compiler, attr, None))
for attr in _flags_instance_vars
if hasattr(compiler, attr)
)
)
d["operating_system"] = str(compiler.operating_system)
d["target"] = str(compiler.target)
d["modules"] = compiler.modules or []
d["environment"] = compiler.environment or {}
d["extra_rpaths"] = compiler.extra_rpaths or []
if compiler.enable_implicit_rpaths is not None:
d["implicit_rpaths"] = compiler.enable_implicit_rpaths
if compiler.alias:
d["alias"] = compiler.alias
return {"compiler": d}
def get_compiler_config(
@@ -107,7 +127,7 @@ def get_compiler_config(
# Do not init config because there is a non-empty scope
return config
find_compilers(scope=scope)
_init_compiler_config(configuration, scope=scope)
config = configuration.get("compilers", scope=scope)
return config
@@ -116,8 +136,125 @@ def get_compiler_config_from_packages(
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
"""Return the compiler configuration from packages.yaml"""
packages_yaml = configuration.get("packages", scope=scope)
return CompilerConfigFactory.from_packages_yaml(packages_yaml)
config = configuration.get("packages", scope=scope)
if not config:
return []
packages = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in config.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
packages.extend(_compiler_config_from_package_config(externals_config))
return packages
def _compiler_config_from_package_config(config):
compilers = []
for entry in config:
compiler = _compiler_config_from_external(entry)
if compiler:
compilers.append(compiler)
return compilers
def _compiler_config_from_external(config):
extra_attributes_key = "extra_attributes"
compilers_key = "compilers"
c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
# Allow `@x.y.z` instead of `@=x.y.z`
spec = spack.spec.parse_with_version_concrete(config["spec"])
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
err_header = f"The external spec '{spec}' cannot be used as a compiler"
# If extra_attributes is not there I might not want to use this entry as a compiler,
# therefore just leave a debug message, but don't be loud with a warning.
if extra_attributes_key not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
return None
extra_attributes = config[extra_attributes_key]
# If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
if compilers_key not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
)
return None
attribute_compilers = extra_attributes[compilers_key]
if c_key not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{extra_attributes_key}:{compilers_key}'"
)
return None
c_compiler = attribute_compilers[c_key]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if cxx_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if fortran_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
paths = {
"cc": c_compiler,
"cxx": attribute_compilers.get(cxx_key, None),
"fc": attribute_compilers.get(fortran_key, None),
"f77": attribute_compilers.get(fortran_key, None),
}
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": config.get("modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
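
In rough terms, _compiler_config_from_external reshapes a packages.yaml external entry carrying an extra_attributes:compilers mapping into a compilers.yaml-style entry with cc/cxx/fc/f77 paths. A stripped-down sketch of that mapping, with invented values and without the operating-system/target handling shown above:

# an invented external entry as it might appear (already parsed) from packages.yaml
external = {
    "spec": "gcc@12.3.0",
    "prefix": "/usr",
    "extra_attributes": {
        "compilers": {"c": "/usr/bin/gcc", "cxx": "/usr/bin/g++", "fortran": "/usr/bin/gfortran"}
    },
}

comp = external["extra_attributes"]["compilers"]
compiler_entry = {
    "compiler": {
        "spec": external["spec"],
        # compilers.yaml uses cc/cxx/fc/f77, externals use c/cxx/fortran
        "paths": {
            "cc": comp.get("c"),
            "cxx": comp.get("cxx"),
            "fc": comp.get("fortran"),
            "f77": comp.get("fortran"),
        },
        "flags": external["extra_attributes"].get("flags", {}),
        "modules": external.get("modules", []),
    }
}
print(compiler_entry)
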
def _init_compiler_config(
configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
configuration.set("compilers", compilers_dict, scope=scope)
def compiler_config_files():
@@ -141,7 +278,9 @@ def add_compilers_to_config(compilers, scope=None):
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -190,7 +329,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
@@ -239,77 +380,79 @@ def all_compiler_specs(scope=None, init_config=True):
def find_compilers(
path_hints: Optional[List[str]] = None,
*,
scope: Optional[str] = None,
mixed_toolchain: bool = False,
max_workers: Optional[int] = None,
path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
) -> List["spack.compiler.Compiler"]:
"""Searches for compiler in the paths given as argument. If any new compiler is found, the
configuration is updated, and the list of new compiler objects is returned.
"""Return the list of compilers found in the paths given as arguments.
Args:
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: configuration scope to modify
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
max_workers: number of processes used to search for compilers
"""
import spack.detection
known_compilers = set(all_compilers(init_config=False))
if path_hints is None:
path_hints = get_path("PATH")
default_paths = fs.search_paths_for_executables(*path_hints)
if sys.platform == "win32":
default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)
detected_packages = spack.detection.by_path(
compiler_pkgs, path_hints=default_paths, max_workers=max_workers
# To detect the version of the compilers, we dispatch a certain number
# of function calls to different workers. Here we construct the list
# of arguments for each call.
arguments = []
for o in all_os_classes():
search_paths = getattr(o, "compiler_search_paths", default_paths)
arguments.extend(arguments_to_detect_version_fn(o, search_paths))
# Here we map the function arguments to the corresponding calls
tp = multiprocessing.pool.ThreadPool()
try:
detected_versions = tp.map(detect_version, arguments)
finally:
tp.close()
def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
value, error = item
if error is None:
return True
try:
# This will fail on Python 2.6 if a non ascii
# character is in the error
tty.debug(error)
except UnicodeEncodeError:
pass
return False
def remove_errors(
item: Tuple[Optional[DetectVersionArgs], Optional[str]]
) -> DetectVersionArgs:
value, _ = item
assert value is not None
return value
return make_compiler_list(
[remove_errors(detected) for detected in detected_versions if valid_version(detected)],
mixed_toolchain=mixed_toolchain,
)
valid_compilers = {}
for name, detected in detected_packages.items():
compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x)]
if not compilers:
continue
valid_compilers[name] = compilers
def _has_fortran_compilers(x):
if "compilers" not in x.extra_attributes:
return False
def find_new_compilers(
path_hints: Optional[List[str]] = None,
scope: Optional[str] = None,
*,
mixed_toolchain: bool = False,
):
"""Same as ``find_compilers`` but return only the compilers that are not
already in compilers.yaml.
return "fortran" in x.extra_attributes["compilers"]
Args:
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: scope to look for a compiler. If None consider the merged configuration.
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
"""
compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)
if mixed_toolchain:
gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
if gccs:
best_gcc = sorted(
gccs, key=lambda x: spack.spec.parse_with_version_concrete(x).version
)[-1]
gfortran = best_gcc.extra_attributes["compilers"]["fortran"]
for name in ("llvm", "apple-clang"):
if name not in valid_compilers:
continue
candidates = valid_compilers[name]
for candidate in candidates:
if _has_fortran_compilers(candidate):
continue
candidate.extra_attributes["compilers"]["fortran"] = gfortran
new_compilers = []
for name, detected in valid_compilers.items():
for config in CompilerConfigFactory.from_specs(detected):
c = _compiler_from_config_entry(config["compiler"])
if c in known_compilers:
continue
new_compilers.append(c)
add_compilers_to_config(new_compilers, scope=scope)
return new_compilers
return select_new_compilers(compilers, scope)
def select_new_compilers(compilers, scope=None):
@@ -319,9 +462,7 @@ def select_new_compilers(compilers, scope=None):
compilers_not_in_config = []
for c in compilers:
arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
same_specs = compilers_for_spec(
c.spec, arch_spec=arch_spec, scope=scope, init_config=False
)
same_specs = compilers_for_spec(c.spec, arch_spec, scope=scope, init_config=False)
if not same_specs:
compilers_not_in_config.append(c)
@@ -369,9 +510,8 @@ def replace_apple_clang(name):
return [replace_apple_clang(name) for name in all_compiler_module_names()]
@llnl.util.lang.memoized
def all_compiler_module_names() -> List[str]:
return list(llnl.util.lang.list_modules(spack.paths.compilers_path))
return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]
@_auto_compiler_spec
@@ -391,12 +531,7 @@ def find(compiler_spec, scope=None, init_config=True):
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
"""Return specs of available compilers that match the supplied
compiler spec. Return an empty list if nothing found."""
return [
c.spec
for c in compilers_for_spec(
compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
)
]
return [c.spec for c in compilers_for_spec(compiler_spec, arch_spec, scope, True, init_config)]
def all_compilers(scope=None, init_config=True):
@@ -418,11 +553,14 @@ def all_compilers_from(configuration, scope=None, init_config=True):
@_auto_compiler_spec
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
def compilers_for_spec(
compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
matches = set(find(compiler_spec, scope, init_config))
compilers = []
for cspec in matches:
@@ -431,7 +569,7 @@ def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config
def compilers_for_arch(arch_spec, scope=None):
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
config = all_compilers_config(spack.config.CONFIG, scope=scope)
return list(get_compilers(config, arch_spec=arch_spec))
@@ -463,15 +601,13 @@ def compiler_from_dict(items):
os = items.get("operating_system", None)
target = items.get("target", None)
if not (
"paths" in items and all(n in items["paths"] for n in spack.compiler.PATH_INSTANCE_VARS)
):
if not ("paths" in items and all(n in items["paths"] for n in _path_instance_vars)):
raise InvalidCompilerConfigurationError(cspec)
cls = class_for_compiler_name(cspec.name)
compiler_paths = []
for c in spack.compiler.PATH_INSTANCE_VARS:
for c in _path_instance_vars:
compiler_path = items["paths"][c]
if compiler_path != "None":
compiler_paths.append(compiler_path)
@@ -599,6 +735,24 @@ def compiler_for_spec(compiler_spec, arch_spec):
return compilers[0]
@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
config = spack.config.CONFIG
scope_to_compilers = {}
for scope in config.scopes:
compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec, scope=scope)
if compilers:
scope_to_compilers[scope] = compilers
cfg_file_to_duplicates = {}
for scope, compilers in scope_to_compilers.items():
config_file = config.get_config_filename(scope, "compilers")
cfg_file_to_duplicates[config_file] = compilers
return cfg_file_to_duplicates
@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
"""Given a compiler module name, get the corresponding Compiler class."""
@@ -612,7 +766,7 @@ def class_for_compiler_name(compiler_name):
submodule_name = compiler_name.replace("-", "_")
module_name = ".".join(["spack", "compilers", submodule_name])
module_obj = importlib.import_module(module_name)
module_obj = __import__(module_name, fromlist=[None])
cls = getattr(module_obj, mod_to_class(compiler_name))
# make a note of the name in the module so we can get to it easily.
@@ -621,10 +775,272 @@ def class_for_compiler_name(compiler_name):
return cls
def all_os_classes():
"""
Return the list of classes for all operating systems available on
this platform
"""
classes = []
platform = spack.platforms.host()
for os_class in platform.operating_sys.values():
classes.append(os_class)
return classes
def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
#: Gathers the attribute values by which a detected compiler is considered
#: unique in Spack.
#:
#: - os: the operating system
#: - compiler_name: the name of the compiler (e.g. 'gcc', 'clang', etc.)
#: - version: the version of the compiler
#:
CompilerID = collections.namedtuple("CompilerID", ["os", "compiler_name", "version"])
#: Variations on a matched compiler name
NameVariation = collections.namedtuple("NameVariation", ["prefix", "suffix"])
#: Groups together the arguments needed by `detect_version`. The four entries
#: in the tuple are:
#:
#: - id: An instance of the CompilerID named tuple (version can be set to None
#: as it will be detected later)
#: - variation: a NameVariation for file being tested
#: - language: compiler language being tested (one of 'cc', 'cxx', 'fc', 'f77')
#: - path: full path to the executable being tested
#:
DetectVersionArgs = collections.namedtuple(
"DetectVersionArgs", ["id", "variation", "language", "path"]
)
def arguments_to_detect_version_fn(
operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
) -> List[DetectVersionArgs]:
"""Returns a list of DetectVersionArgs tuples to be used in a
corresponding function to detect compiler versions.
The ``operating_system`` instance can customize the behavior of this
function by providing a method called with the same name.
Args:
operating_system: the operating system on which we are looking for compilers
paths: paths to search for compilers
Returns:
List of DetectVersionArgs tuples. Each item in the list will be later
mapped to the corresponding function call to detect the version of the
compilers in this OS.
"""
def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
command_arguments: List[DetectVersionArgs] = []
files_to_be_tested = fs.files_in(*search_paths)
for compiler_name in supported_compilers_for_host_platform():
compiler_cls = class_for_compiler_name(compiler_name)
for language in ("cc", "cxx", "f77", "fc"):
# Select only the files matching a regexp
for (file, full_path), regexp in itertools.product(
files_to_be_tested, compiler_cls.search_regexps(language)
):
match = regexp.match(file)
if match:
compiler_id = CompilerID(operating_system, compiler_name, None)
detect_version_args = DetectVersionArgs(
id=compiler_id,
variation=NameVariation(*match.groups()),
language=language,
path=full_path,
)
command_arguments.append(detect_version_args)
return command_arguments
fn = getattr(operating_system, "arguments_to_detect_version_fn", _default)
return fn(paths)
def detect_version(
detect_version_args: DetectVersionArgs,
) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
"""Computes the version of a compiler and adds it to the information
passed as input.
As this function is meant to be executed by worker processes it won't
raise any exception but instead will return a (value, error) tuple that
needs to be checked by the code dispatching the calls.
Args:
detect_version_args: information on the compiler for which we should detect the version.
Returns:
A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
version of the compiler was computed correctly and the first argument
of the tuple will contain it. Otherwise ``error`` is a string
containing an explanation on why the version couldn't be computed.
"""
def _default(fn_args):
compiler_id = fn_args.id
language = fn_args.language
compiler_cls = class_for_compiler_name(compiler_id.compiler_name)
path = fn_args.path
# Get compiler names and the callback to detect their versions
callback = getattr(compiler_cls, f"{language}_version")
try:
version = callback(path)
if version and str(version).strip() and version != "unknown":
value = fn_args._replace(id=compiler_id._replace(version=version))
return value, None
error = f"Couldn't get version for compiler {path}".format(path)
except spack.util.executable.ProcessError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except spack.util.executable.ProcessTimeoutError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
path, e.__class__.__name__, str(e)
)
return None, error
operating_system = detect_version_args.id.os
fn = getattr(operating_system, "detect_version", _default)
return fn(detect_version_args)
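
The worker convention in the docstring above, where a probe returns a (value, error) pair instead of raising, can be sketched without Spack; the paths and the reported version are invented:

import multiprocessing.pool

def probe(path):
    # workers never raise: they hand back either (result, None) or (None, error)
    try:
        if "bad" in path:
            raise RuntimeError("executable did not run")
        return f"{path} -> 1.2.3", None
    except Exception as e:
        return None, f"Couldn't get version for compiler {path}\n{e}"

paths = ["/usr/bin/gcc", "/opt/bad/cc", "/usr/bin/clang"]
with multiprocessing.pool.ThreadPool() as tp:
    results = tp.map(probe, paths)

# the dispatcher, not the worker, decides what to do with errors
print([value for value, error in results if error is None])
print([error for _, error in results if error is not None])
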
def make_compiler_list(
detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
) -> List["spack.compiler.Compiler"]:
"""Process a list of detected versions and turn them into a list of
compiler specs.
Args:
detected_versions: list of DetectVersionArgs containing a valid version
mixed_toolchain: allow mixing compilers from different toolchains if a language is missing
Returns:
list: list of Compiler objects
"""
group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn)
# Gather items in a dictionary by the id, name variation and language
compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
compiler_id, name_variation, language = sort_key
by_compiler_id = compilers_d.setdefault(compiler_id, {})
by_name_variation = by_compiler_id.setdefault(name_variation, {})
by_name_variation[language] = next(x.path for x in group)
def _default_make_compilers(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
compiler_cls = class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
# TODO: johnwparent - revisit the following line as per discussion at:
# https://github.com/spack/spack/pull/33385/files#r1040036318
target = archspec.cpu.host()
compiler = compiler_cls(spec, operating_system, str(target.family), paths)
return [compiler]
# For compilers with the same compiler id:
#
# - Prefer with C compiler to without
# - Prefer with C++ compiler to without
# - Prefer no variations to variations (e.g., clang to clang-gpu)
#
sort_fn = lambda variation: (
"cc" not in by_compiler_id[variation], # None last
"cxx" not in by_compiler_id[variation], # None last
getattr(variation, "prefix", None),
getattr(variation, "suffix", None),
)
# Flatten to a list of compiler id, primary variation and compiler dictionary
flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
for compiler_id, by_compiler_id in compilers_d.items():
ordered = sorted(by_compiler_id, key=sort_fn)
selected_variation = ordered[0]
selected = by_compiler_id[selected_variation]
# Fill any missing parts from subsequent entries (without mixing toolchains)
for lang in ["cxx", "f77", "fc"]:
if lang not in selected:
next_lang = next(
(by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None
)
if next_lang:
selected[lang] = next_lang
flat_compilers.append((compiler_id, selected_variation, selected))
# Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
if mixed_toolchain:
make_mixed_toolchain(flat_compilers)
# Finally, create the compiler list
compilers: List["spack.compiler.Compiler"] = []
for compiler_id, _, compiler in flat_compilers:
make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
candidates = make_compilers(compiler_id, compiler)
compilers.extend(x for x in candidates if x.cc is not None)
return compilers
def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
"""Add missing compilers across toolchains when they are missing for a particular language.
This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
fortran compiler (no flang)."""
# First collect the clangs that are missing a fortran compiler
clangs_without_flang = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name in ("clang", "apple-clang")
and "f77" not in compiler
and "fc" not in compiler
]
if not clangs_without_flang:
return
# Filter on GCCs with fortran compiler
gccs_with_fortran = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
]
# Sort these GCCs by "best variation" (no prefix / suffix first)
gccs_with_fortran.sort(
key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
)
# Attach the optimal GCC fortran compiler to the clangs that don't have one
for clang_id, _, clang_compiler in clangs_without_flang:
gcc_compiler = next(
(gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
)
if not gcc_compiler:
continue
# Update the fc / f77 entries
clang_compiler["f77"] = gcc_compiler["f77"]
clang_compiler["fc"] = gcc_compiler["fc"]
def is_mixed_toolchain(compiler):
"""Returns True if the current compiler is a mixed toolchain,
False otherwise.
@@ -671,164 +1087,20 @@ def name_matches(name, name_list):
return False
_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"
class CompilerConfigFactory:
"""Class aggregating all ways of constructing a list of compiler config entries."""
@staticmethod
def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
result = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for s in specs:
if s.name not in compiler_package_names:
continue
candidate = CompilerConfigFactory.from_external_spec(s)
if candidate is None:
continue
result.append(candidate)
return result
@staticmethod
def from_packages_yaml(packages_yaml) -> List[dict]:
compiler_specs = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in packages_yaml.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
current_specs = []
for current_external in externals_config:
compiler = CompilerConfigFactory._spec_from_external_config(current_external)
if compiler:
current_specs.append(compiler)
compiler_specs.extend(current_specs)
return CompilerConfigFactory.from_specs(compiler_specs)
@staticmethod
def _spec_from_external_config(config):
# Allow `@x.y.z` instead of `@=x.y.z`
err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
# If extra_attributes is missing, this entry may not be intended as a compiler,
# so just leave a debug message rather than a loud warning.
if _EXTRA_ATTRIBUTES_KEY not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
return None
extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
result = spack.spec.Spec(
str(spack.spec.parse_with_version_concrete(config["spec"])),
external_modules=config.get("modules"),
)
result.extra_attributes = extra_attributes
return result
@staticmethod
def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
spec = spack.spec.parse_with_version_concrete(spec)
extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
if extra_attributes is None:
return None
paths = CompilerConfigFactory._extract_compiler_paths(spec)
if paths is None:
return None
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)
compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": getattr(spec, "external_modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
@staticmethod
def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
err_header = f"The external spec '{spec}' cannot be used as a compiler"
extra_attributes = spec.extra_attributes
# If 'extra_attributes' is present, warn if 'compilers' is missing
# or if there is no C compiler
if _COMPILERS_KEY not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
)
return None
attribute_compilers = extra_attributes[_COMPILERS_KEY]
if _C_KEY not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
)
return None
c_compiler = attribute_compilers[_C_KEY]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if _CXX_KEY not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if _FORTRAN_KEY not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
return {
"cc": c_compiler,
"cxx": attribute_compilers.get(_CXX_KEY, None),
"fc": attribute_compilers.get(_FORTRAN_KEY, None),
"f77": attribute_compilers.get(_FORTRAN_KEY, None),
}
@staticmethod
def _extract_os_and_target(spec: "spack.spec.Spec"):
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
return operating_system, target
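# For reference, a hedged sketch of the kind of parsed packages.yaml external
# entry this factory is designed to consume; the paths and version are made up.
# The c/cxx/fortran keys under extra_attributes:compilers are the ones translated
# above into the cc/cxx/fc/f77 paths of a compilers.yaml entry.
_EXAMPLE_EXTERNAL_ENTRY = {
    "spec": "gcc@12.3.0",
    "prefix": "/usr",
    "extra_attributes": {
        "compilers": {
            "c": "/usr/bin/gcc",
            "cxx": "/usr/bin/g++",
            "fortran": "/usr/bin/gfortran",
        }
    },
}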
class InvalidCompilerConfigurationError(spack.error.SpackError):
def __init__(self, compiler_spec):
super().__init__(
f'Invalid configuration for [compiler "{compiler_spec}"]: ',
f"Compiler configuration must contain entries for "
f"all compilers: {spack.compiler.PATH_INSTANCE_VARS}",
'Invalid configuration for [compiler "%s"]: ' % compiler_spec,
"Compiler configuration must contain entries for all compilers: %s"
% _path_instance_vars,
)
class NoCompilersError(spack.error.SpackError):
def __init__(self):
super().__init__("Spack could not find any compilers!")
class UnknownCompilerError(spack.error.SpackError):
def __init__(self, compiler_name):
super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))
@@ -839,3 +1111,25 @@ def __init__(self, compiler_spec, target):
super().__init__(
"No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
)
class CompilerDuplicateError(spack.error.SpackError):
def __init__(self, compiler_spec, arch_spec):
config_file_to_duplicates = get_compiler_duplicates(compiler_spec, arch_spec)
duplicate_table = list((x, len(y)) for x, y in config_file_to_duplicates.items())
descriptor = lambda num: "time" if num == 1 else "times"
duplicate_msg = lambda cfgfile, count: "{0}: {1} {2}".format(
cfgfile, str(count), descriptor(count)
)
msg = (
"Compiler configuration contains entries with duplicate"
+ " specification ({0}, {1})".format(compiler_spec, arch_spec)
+ " in the following files:\n\t"
+ "\n\t".join(duplicate_msg(x, y) for x, y in duplicate_table)
)
super().__init__(msg)
class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
def __init__(self, compiler_spec):
super().__init__("Multiple compilers satisfy spec %s" % compiler_spec)

View File

@@ -223,30 +223,6 @@ def get_oneapi_root(pth: str):
)
self.msvc_compiler_environment = CmdCall(*env_cmds)
@property
def cxx11_flag(self):
return "/std:c++11"
@property
def cxx14_flag(self):
return "/std:c++14"
@property
def cxx17_flag(self):
return "/std:c++17"
@property
def cxx20_flag(self):
return "/std:c++20"
@property
def c11_flag(self):
return "/std:c11"
@property
def c17_flag(self):
return "/std:c17"
@property
def msvc_version(self):
"""This is the VCToolset version *NOT* the actual version of the cl compiler

View File

@@ -8,6 +8,7 @@
from contextlib import contextmanager
from itertools import chain
import spack.abi
import spack.compilers
import spack.config
import spack.environment
@@ -19,23 +20,35 @@
import spack.tengine
import spack.util.path
CHECK_COMPILER_EXISTENCE = True
class Concretizer:
"""(DEPRECATED) Only contains logic to enable/disable compiler existence checks."""
#: Controls whether we check that compiler versions actually exist
#: during concretization. Used for testing and for mirror creation
check_for_compiler_existence = None
def __init__(self):
if Concretizer.check_for_compiler_existence is None:
Concretizer.check_for_compiler_existence = not spack.config.get(
"config:install_missing_compilers", False
)
@contextmanager
def disable_compiler_existence_check():
global CHECK_COMPILER_EXISTENCE
CHECK_COMPILER_EXISTENCE, saved = False, CHECK_COMPILER_EXISTENCE
saved = Concretizer.check_for_compiler_existence
Concretizer.check_for_compiler_existence = False
yield
CHECK_COMPILER_EXISTENCE = saved
Concretizer.check_for_compiler_existence = saved
@contextmanager
def enable_compiler_existence_check():
global CHECK_COMPILER_EXISTENCE
CHECK_COMPILER_EXISTENCE, saved = True, CHECK_COMPILER_EXISTENCE
saved = Concretizer.check_for_compiler_existence
Concretizer.check_for_compiler_existence = True
yield
CHECK_COMPILER_EXISTENCE = saved
Concretizer.check_for_compiler_existence = saved
def find_spec(spec, condition, default=None):

View File

@@ -1090,7 +1090,7 @@ def validate(
def read_config_file(
path: str, schema: Optional[YamlConfigDict] = None
filename: str, schema: Optional[YamlConfigDict] = None
) -> Optional[YamlConfigDict]:
"""Read a YAML configuration file.
@@ -1100,9 +1100,21 @@ def read_config_file(
# to preserve flexibility in calling convention (don't need to provide
# schema when it's not necessary) while allowing us to validate against a
# known schema when the top-level key could be incorrect.
if not os.path.exists(filename):
# Ignore nonexistent files.
tty.debug(f"Skipping nonexistent config path {filename}", level=3)
return None
elif not os.path.isfile(filename):
raise ConfigFileError(f"Invalid configuration. {filename} exists but is not a file.")
elif not os.access(filename, os.R_OK):
raise ConfigFileError(f"Config file is not readable: {filename}")
try:
with open(path) as f:
tty.debug(f"Reading config from file {path}")
tty.debug(f"Reading config from file {filename}")
with open(filename) as f:
data = syaml.load_config(f)
if data:
@@ -1113,20 +1125,15 @@ def read_config_file(
return data
except FileNotFoundError:
# Ignore nonexistent files.
tty.debug(f"Skipping nonexistent config path {path}", level=3)
return None
except OSError as e:
raise ConfigFileError(f"Path is not a file or is not readable: {path}: {str(e)}") from e
except StopIteration as e:
raise ConfigFileError(f"Config file is empty or is not a valid YAML dict: {path}") from e
except StopIteration:
raise ConfigFileError(f"Config file is empty or is not a valid YAML dict: {filename}")
except syaml.SpackYAMLError as e:
raise ConfigFileError(str(e)) from e
except OSError as e:
raise ConfigFileError(f"Error reading configuration file {filename}: {str(e)}") from e
def _override(string: str) -> bool:
"""Test if a spack YAML string is an override.

View File

@@ -308,7 +308,8 @@ def __call__(self):
return t.render(**self.to_dict())
import spack.container.writers.docker # noqa: E402
# Import all the modules in this package after the function definition,
# so that registration of writers happens automatically
from . import docker # noqa: F401 E402
from . import singularity # noqa: F401 E402
import spack.container.writers.singularity # noqa: E402

View File

@@ -14,14 +14,12 @@
import llnl.util.tty as tty
import spack.cmd
import spack.compilers
import spack.deptypes as dt
import spack.error
import spack.hash_types as hash_types
import spack.platforms
import spack.repo
import spack.spec
import spack.store
from spack.schema.cray_manifest import schema as manifest_schema
#: Cray systems can store a Spack-compatible description of system
@@ -239,7 +237,7 @@ def read(path, apply_updates):
tty.debug(f"Include this\n{traceback.format_exc()}")
if apply_updates:
for spec in specs.values():
spack.store.STORE.db.add(spec)
spack.store.STORE.db.add(spec, directory_layout=None)
class ManifestValidationError(spack.error.SpackError):

View File

@@ -59,11 +59,7 @@
import spack.util.lock as lk
import spack.util.spack_json as sjson
import spack.version as vn
from spack.directory_layout import (
DirectoryLayout,
DirectoryLayoutError,
InconsistentInstallDirectoryError,
)
from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
from spack.error import SpackError
from spack.util.crypto import bit_length
@@ -207,12 +203,12 @@ class InstallRecord:
def __init__(
self,
spec: "spack.spec.Spec",
path: Optional[str],
path: str,
installed: bool,
ref_count: int = 0,
explicit: bool = False,
installation_time: Optional[float] = None,
deprecated_for: Optional[str] = None,
deprecated_for: Optional["spack.spec.Spec"] = None,
in_buildcache: bool = False,
origin=None,
):
@@ -599,11 +595,9 @@ class Database:
def __init__(
self,
root: str,
*,
upstream_dbs: Optional[List["Database"]] = None,
is_upstream: bool = False,
lock_cfg: LockConfiguration = DEFAULT_LOCK_CFG,
layout: Optional[DirectoryLayout] = None,
) -> None:
"""Database for Spack installations.
@@ -626,7 +620,6 @@ def __init__(
"""
self.root = root
self.database_directory = os.path.join(self.root, _DB_DIRNAME)
self.layout = layout
# Set up layout of database files within the db dir
self._index_path = os.path.join(self.database_directory, "index.json")
@@ -671,6 +664,14 @@ def __init__(
self.upstream_dbs = list(upstream_dbs) if upstream_dbs else []
# whether there was an error at the start of a read transaction
self._error = None
# For testing: if this is true, an exception is thrown when missing
# dependencies are detected (rather than just printing a warning
# message)
self._fail_when_missing_deps = False
self._write_transaction_impl = lk.WriteTransaction
self._read_transaction_impl = lk.ReadTransaction
@@ -773,13 +774,7 @@ def query_local_by_spec_hash(self, hash_key):
with self.read_transaction():
return self._data.get(hash_key, None)
def _assign_dependencies(
self,
spec_reader: Type["spack.spec.SpecfileReaderBase"],
hash_key: str,
installs: dict,
data: Dict[str, InstallRecord],
):
def _assign_dependencies(self, spec_reader, hash_key, installs, data):
# Add dependencies from other records in the install DB to
# form a full spec.
spec = data[hash_key].spec
@@ -792,20 +787,26 @@ def _assign_dependencies(
for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
yaml_deps
):
# It is important that we always check upstream installations in the same order,
# and that we always check the local installation first: if a downstream Spack
# installs a package then dependents in that installation could be using it. If a
# hash is installed locally and upstream, there isn't enough information to
# determine which one a local package depends on, so the convention ensures that
# this isn't an issue.
_, record = self.query_by_spec_hash(dhash, data=data)
# It is important that we always check upstream installations
# in the same order, and that we always check the local
# installation first: if a downstream Spack installs a package
# then dependents in that installation could be using it.
# If a hash is installed locally and upstream, there isn't
# enough information to determine which one a local package
# depends on, so the convention ensures that this isn't an
# issue.
upstream, record = self.query_by_spec_hash(dhash, data=data)
child = record.spec if record else None
if not child:
tty.warn(
f"Missing dependency not in database: "
f"{spec.cformat('{name}{/hash:7}')} needs {dname}-{dhash[:7]}"
msg = "Missing dependency not in database: " "%s needs %s-%s" % (
spec.cformat("{name}{/hash:7}"),
dname,
dhash[:7],
)
if self._fail_when_missing_deps:
raise MissingDependenciesError(msg)
tty.warn(msg)
continue
spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)
@@ -845,7 +846,7 @@ def check(cond, msg):
):
tty.warn(f"Spack database version changed from {version} to {_DB_VERSION}. Upgrading.")
self.reindex()
self.reindex(spack.store.STORE.layout)
installs = dict(
(k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
)
@@ -872,8 +873,8 @@ def invalid_record(hash_key, error):
# (i.e., its specs are a true Merkle DAG, unlike most specs.)
# Pass 1: Iterate through database and build specs w/o dependencies
data: Dict[str, InstallRecord] = {}
installed_prefixes: Set[str] = set()
data = {}
installed_prefixes = set()
for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
@@ -910,7 +911,7 @@ def invalid_record(hash_key, error):
self._data = data
self._installed_prefixes = installed_prefixes
def reindex(self):
def reindex(self, directory_layout):
"""Build database index from scratch based on a directory layout.
Locks the DB if it isn't locked already.
@@ -925,116 +926,105 @@ def _read_suppress_error():
if os.path.isfile(self._index_path):
self._read_from_file(self._index_path)
except CorruptDatabaseError as e:
tty.warn(f"Reindexing corrupt database, error was: {e}")
self._error = e
self._data = {}
self._installed_prefixes = set()
with lk.WriteTransaction(self.lock, acquire=_read_suppress_error, release=self._write):
old_installed_prefixes, self._installed_prefixes = self._installed_prefixes, set()
old_data, self._data = self._data, {}
transaction = lk.WriteTransaction(
self.lock, acquire=_read_suppress_error, release=self._write
)
with transaction:
if self._error:
tty.warn("Spack database was corrupt. Will rebuild. Error was:", str(self._error))
self._error = None
old_data = self._data
old_installed_prefixes = self._installed_prefixes
try:
self._reindex(old_data)
self._construct_from_directory_layout(directory_layout, old_data)
except BaseException:
# If anything explodes, restore old data, skip write.
self._data = old_data
self._installed_prefixes = old_installed_prefixes
raise
def _reindex(self, old_data: Dict[str, InstallRecord]):
# Specs on the file system are the source of truth for record.spec. The old database values
# if available are the source of truth for the rest of the record.
assert self.layout, "Database layout must be set to reindex"
def _construct_entry_from_directory_layout(
self, directory_layout, old_data, spec, deprecator=None
):
# Try to recover explicit value from old DB, but
# default it to True if DB was corrupt. This is
# just to be conservative in case a command like
# "autoremove" is run by the user after a reindex.
tty.debug("RECONSTRUCTING FROM SPEC.YAML: {0}".format(spec))
explicit = True
inst_time = os.stat(spec.prefix).st_ctime
if old_data is not None:
old_info = old_data.get(spec.dag_hash())
if old_info is not None:
explicit = old_info.explicit
inst_time = old_info.installation_time
specs_from_fs = self.layout.all_specs()
deprecated_for = self.layout.deprecated_for(specs_from_fs)
extra_args = {"explicit": explicit, "installation_time": inst_time}
self._add(spec, directory_layout, **extra_args)
if deprecator:
self._deprecate(spec, deprecator)
known_specs: List[spack.spec.Spec] = [
*specs_from_fs,
*(deprecated for _, deprecated in deprecated_for),
*(rec.spec for rec in old_data.values()),
]
def _construct_from_directory_layout(self, directory_layout, old_data):
# Read first the `spec.yaml` files in the prefixes. They should be
# considered authoritative with respect to DB reindexing, as
# entries in the DB may be corrupted in a way that still makes
# them readable. If we considered DB entries authoritative
# instead, we would perpetuate errors over a reindex.
with directory_layout.disable_upstream_check():
# Initialize data in the reconstructed DB
self._data = {}
self._installed_prefixes = set()
upstream_hashes = {
dag_hash for upstream in self.upstream_dbs for dag_hash in upstream._data
}
upstream_hashes.difference_update(spec.dag_hash() for spec in known_specs)
# Start inspecting the installed prefixes
processed_specs = set()
def create_node(edge: spack.spec.DependencySpec, is_upstream: bool):
if is_upstream:
return
for spec in directory_layout.all_specs():
self._construct_entry_from_directory_layout(directory_layout, old_data, spec)
processed_specs.add(spec)
self._data[edge.spec.dag_hash()] = InstallRecord(
spec=edge.spec.copy(deps=False),
path=edge.spec.external_path if edge.spec.external else None,
installed=edge.spec.external,
)
# Store all nodes of known specs, excluding ones found in upstreams
tr.traverse_breadth_first_with_visitor(
known_specs,
tr.CoverNodesVisitor(
NoUpstreamVisitor(upstream_hashes, create_node), key=tr.by_dag_hash
),
)
# Store the prefix and other information for specs that were found on the file system
for s in specs_from_fs:
record = self._data[s.dag_hash()]
record.path = s.prefix
record.installed = True
record.explicit = True # conservative assumption
record.installation_time = os.stat(s.prefix).st_ctime
# Deprecate specs
for new, old in deprecated_for:
self._data[old.dag_hash()].deprecated_for = new.dag_hash()
# Copy data we have from the old database
for old_record in old_data.values():
record = self._data[old_record.spec.dag_hash()]
record.explicit = old_record.explicit
record.installation_time = old_record.installation_time
record.origin = old_record.origin
record.deprecated_for = old_record.deprecated_for
# Warn when the spec has been removed from the file system (i.e. it was not detected)
if not record.installed and old_record.installed:
tty.warn(
f"Spec {old_record.spec.short_spec} was marked installed in the database "
"but was not found on the file system. It is now marked as missing."
for spec, deprecator in directory_layout.all_deprecated_specs():
self._construct_entry_from_directory_layout(
directory_layout, old_data, spec, deprecator
)
processed_specs.add(spec)
def create_edge(edge: spack.spec.DependencySpec, is_upstream: bool):
if not edge.parent:
return
parent_record = self._data[edge.parent.dag_hash()]
if is_upstream:
upstream, child_record = self.query_by_spec_hash(edge.spec.dag_hash())
assert upstream and child_record, "Internal error: upstream spec not found"
else:
child_record = self._data[edge.spec.dag_hash()]
parent_record.spec._add_dependency(
child_record.spec, depflag=edge.depflag, virtuals=edge.virtuals
)
for key, entry in old_data.items():
# We already took care of this spec using
# `spec.yaml` from its prefix.
if entry.spec in processed_specs:
msg = "SKIPPING RECONSTRUCTION FROM OLD DB: {0}"
msg += " [already reconstructed from spec.yaml]"
tty.debug(msg.format(entry.spec))
continue
# Then store edges
tr.traverse_breadth_first_with_visitor(
known_specs,
tr.CoverEdgesVisitor(
NoUpstreamVisitor(upstream_hashes, create_edge), key=tr.by_dag_hash
),
)
# If we arrived here it very likely means that
# we have external specs that are not dependencies
# of other specs. This may be the case for externally
# installed compilers or externally installed
# applications.
tty.debug("RECONSTRUCTING FROM OLD DB: {0}".format(entry.spec))
try:
layout = None if entry.spec.external else directory_layout
kwargs = {
"spec": entry.spec,
"directory_layout": layout,
"explicit": entry.explicit,
"installation_time": entry.installation_time,
}
self._add(**kwargs)
processed_specs.add(entry.spec)
except Exception as e:
# Something went wrong, so the spec was not restored
# from old data
tty.debug(e)
# Finally update the ref counts
for record in self._data.values():
for dep in record.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
dep_record = self._data.get(dep.dag_hash())
if dep_record: # dep might be upstream
dep_record.ref_count += 1
if record.deprecated_for:
self._data[record.deprecated_for].ref_count += 1
self._check_ref_counts()
self._check_ref_counts()
def _check_ref_counts(self):
"""Ensure consistency of reference counts in the DB.
@@ -1043,7 +1033,7 @@ def _check_ref_counts(self):
Does no locking.
"""
counts: Dict[str, int] = {}
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
for dep in rec.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
@@ -1127,23 +1117,29 @@ def _read(self):
def _add(
self,
spec: "spack.spec.Spec",
explicit: bool = False,
installation_time: Optional[float] = None,
allow_missing: bool = False,
spec,
directory_layout=None,
explicit=False,
installation_time=None,
allow_missing=False,
):
"""Add an install record for this spec to the database.
Also ensures dependencies are present and updated in the DB as either installed or missing.
Assumes spec is installed in ``directory_layout.path_for_spec(spec)``.
Also ensures dependencies are present and updated in the DB as
either installed or missing.
Args:
spec: spec to be added
spec (spack.spec.Spec): spec to be added
directory_layout: layout of the spec installation
explicit:
Possible values: True, False, any
A spec that was installed following a specific user request is marked as explicit.
If instead it was pulled-in as a dependency of a user requested spec it's
considered implicit.
A spec that was installed following a specific user
request is marked as explicit. If instead it was
pulled-in as a dependency of a user requested spec
it's considered implicit.
installation_time:
Date and time of installation
@@ -1154,42 +1150,48 @@ def _add(
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
key = spec.dag_hash()
spec_pkg_hash = spec._package_hash # type: ignore[attr-defined]
spec_pkg_hash = spec._package_hash
upstream, record = self.query_by_spec_hash(key)
if upstream:
return
# Retrieve optional arguments
installation_time = installation_time or _now()
for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
if edge.spec.dag_hash() in self._data:
continue
# allow missing build-only deps. This prevents excessive
# warnings when a spec is installed, and its build dep
# is missing a build dep; there's no need to install the
# build dep's build dep first, and there's no need to warn
# about it missing.
dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
self._add(
edge.spec,
directory_layout,
explicit=False,
installation_time=installation_time,
# allow missing build-only deps. This prevents excessive warnings when a spec is
# installed, and its build dep is missing a build dep; there's no need to install
# the build dep's build dep first, and there's no need to warn about it missing.
allow_missing=allow_missing or edge.depflag == dt.BUILD,
allow_missing=dep_allow_missing,
)
# Make sure the directory layout agrees whether the spec is installed
if not spec.external and self.layout:
path = self.layout.path_for_spec(spec)
if not spec.external and directory_layout:
path = directory_layout.path_for_spec(spec)
installed = False
try:
self.layout.ensure_installed(spec)
directory_layout.ensure_installed(spec)
installed = True
self._installed_prefixes.add(path)
except DirectoryLayoutError as e:
if not (allow_missing and isinstance(e, InconsistentInstallDirectoryError)):
action = "updated" if key in self._data else "registered"
tty.warn(
f"{spec.short_spec} is being {action} in the database with prefix {path}, "
msg = (
"{0} is being {1} in the database with prefix {2}, "
"but this directory does not contain an installation of "
f"the spec, due to: {e}"
"the spec, due to: {3}"
)
action = "updated" if key in self._data else "registered"
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
elif spec.external_path:
path = spec.external_path
installed = True
@@ -1200,27 +1202,23 @@ def _add(
if key not in self._data:
# Create a new install record with no deps initially.
new_spec = spec.copy(deps=False)
self._data[key] = InstallRecord(
new_spec,
path=path,
installed=installed,
ref_count=0,
explicit=explicit,
installation_time=installation_time,
origin=None if not hasattr(spec, "origin") else spec.origin,
)
extra_args = {"explicit": explicit, "installation_time": installation_time}
# Commands other than 'spack install' may add specs to the DB,
# we can record the source of an installed Spec with 'origin'
if hasattr(spec, "origin"):
extra_args["origin"] = spec.origin
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
# Connect dependencies from the DB to the new copy.
for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
assert record, f"Missing dependency {dep.spec.short_spec} in DB"
new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
if not upstream:
record.ref_count += 1
# Mark concrete once everything is built, and preserve the original hashes of concrete
# specs.
# Mark concrete once everything is built, and preserve
# the original hashes of concrete specs.
new_spec._mark_concrete()
new_spec._hash = key
new_spec._package_hash = spec_pkg_hash
@@ -1233,7 +1231,7 @@ def _add(
self._data[key].explicit = explicit
@_autospec
def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
def add(self, spec, directory_layout, explicit=False):
"""Add spec at path to database, locking and reading DB to sync.
``add()`` will lock and read from the DB on disk.
@@ -1242,9 +1240,9 @@ def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
# TODO: ensure that spec is concrete?
# Entire add is transactional.
with self.write_transaction():
self._add(spec, explicit=explicit)
self._add(spec, directory_layout, explicit=explicit)
def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
def _get_matching_spec_key(self, spec, **kwargs):
"""Get the exact spec OR get a single spec that matches."""
key = spec.dag_hash()
upstream, record = self.query_by_spec_hash(key)
@@ -1256,12 +1254,12 @@ def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
return key
@_autospec
def get_record(self, spec: "spack.spec.Spec", **kwargs) -> Optional[InstallRecord]:
def get_record(self, spec, **kwargs):
key = self._get_matching_spec_key(spec, **kwargs)
upstream, record = self.query_by_spec_hash(key)
return record
def _decrement_ref_count(self, spec: "spack.spec.Spec") -> None:
def _decrement_ref_count(self, spec):
key = spec.dag_hash()
if key not in self._data:
@@ -1278,7 +1276,7 @@ def _decrement_ref_count(self, spec: "spack.spec.Spec") -> None:
for dep in spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
self._decrement_ref_count(dep)
def _increment_ref_count(self, spec: "spack.spec.Spec") -> None:
def _increment_ref_count(self, spec):
key = spec.dag_hash()
if key not in self._data:
@@ -1287,14 +1285,14 @@ def _increment_ref_count(self, spec: "spack.spec.Spec") -> None:
rec = self._data[key]
rec.ref_count += 1
def _remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
def _remove(self, spec):
"""Non-locking version of remove(); does real work."""
key = self._get_matching_spec_key(spec)
rec = self._data[key]
# This install prefix is now free for other specs to use, even if the
# spec is only marked uninstalled.
if not rec.spec.external and rec.installed and rec.path:
if not rec.spec.external and rec.installed:
self._installed_prefixes.remove(rec.path)
if rec.ref_count > 0:
@@ -1318,7 +1316,7 @@ def _remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
return rec.spec
@_autospec
def remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
def remove(self, spec):
"""Removes a spec from the database. To be called on uninstall.
Reads the database, then:
@@ -1333,7 +1331,7 @@ def remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
with self.write_transaction():
return self._remove(spec)
def deprecator(self, spec: "spack.spec.Spec") -> Optional["spack.spec.Spec"]:
def deprecator(self, spec):
"""Return the spec that the given spec is deprecated for, or None"""
with self.read_transaction():
spec_key = self._get_matching_spec_key(spec)
@@ -1344,14 +1342,14 @@ def deprecator(self, spec: "spack.spec.Spec") -> Optional["spack.spec.Spec"]:
else:
return None
def specs_deprecated_by(self, spec: "spack.spec.Spec") -> List["spack.spec.Spec"]:
def specs_deprecated_by(self, spec):
"""Return all specs deprecated in favor of the given spec"""
with self.read_transaction():
return [
rec.spec for rec in self._data.values() if rec.deprecated_for == spec.dag_hash()
]
def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> None:
def _deprecate(self, spec, deprecator):
spec_key = self._get_matching_spec_key(spec)
spec_rec = self._data[spec_key]
@@ -1369,17 +1367,17 @@ def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") ->
self._data[spec_key] = spec_rec
@_autospec
def mark(self, spec: "spack.spec.Spec", key, value) -> None:
def mark(self, spec, key, value):
"""Mark an arbitrary record on a spec."""
with self.write_transaction():
return self._mark(spec, key, value)
def _mark(self, spec: "spack.spec.Spec", key, value) -> None:
def _mark(self, spec, key, value):
record = self._data[self._get_matching_spec_key(spec)]
setattr(record, key, value)
@_autospec
def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> None:
def deprecate(self, spec, deprecator):
"""Marks a spec as deprecated in favor of its deprecator"""
with self.write_transaction():
return self._deprecate(spec, deprecator)
@@ -1387,16 +1385,16 @@ def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> N
@_autospec
def installed_relatives(
self,
spec: "spack.spec.Spec",
direction: str = "children",
transitive: bool = True,
spec,
direction="children",
transitive=True,
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
) -> Set["spack.spec.Spec"]:
):
"""Return installed specs related to this one."""
if direction not in ("parents", "children"):
raise ValueError("Invalid direction: %s" % direction)
relatives: Set[spack.spec.Spec] = set()
relatives = set()
for spec in self.query(spec):
if transitive:
to_add = spec.traverse(direction=direction, root=False, deptype=deptype)
@@ -1407,13 +1405,17 @@ def installed_relatives(
for relative in to_add:
hash_key = relative.dag_hash()
_, record = self.query_by_spec_hash(hash_key)
upstream, record = self.query_by_spec_hash(hash_key)
if not record:
tty.warn(
f"Inconsistent state: "
f"{'dependent' if direction == 'parents' else 'dependency'} {hash_key} of "
f"{spec.dag_hash()} not in DB"
reltype = "Dependent" if direction == "parents" else "Dependency"
msg = "Inconsistent state! %s %s of %s not in DB" % (
reltype,
hash_key,
spec.dag_hash(),
)
if self._fail_when_missing_deps:
raise MissingDependenciesError(msg)
tty.warn(msg)
continue
if not record.installed:
@@ -1423,7 +1425,7 @@ def installed_relatives(
return relatives
@_autospec
def installed_extensions_for(self, extendee_spec: "spack.spec.Spec"):
def installed_extensions_for(self, extendee_spec):
"""Returns the specs of all packages that extend the given spec"""
for spec in self.query():
if spec.package.extends(extendee_spec):
@@ -1682,7 +1684,7 @@ def unused_specs(
self,
root_hashes: Optional[Container[str]] = None,
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.LINK | dt.RUN,
) -> List["spack.spec.Spec"]:
) -> "List[spack.spec.Spec]":
"""Return all specs that are currently installed but not needed by root specs.
By default, roots are all explicit specs in the database. If a set of root
@@ -1726,33 +1728,6 @@ def update_explicit(self, spec, explicit):
rec.explicit = explicit
class NoUpstreamVisitor:
"""Gives edges to upstream specs, but does follow edges from upstream specs."""
def __init__(
self,
upstream_hashes: Set[str],
on_visit: Callable[["spack.spec.DependencySpec", bool], None],
):
self.upstream_hashes = upstream_hashes
self.on_visit = on_visit
def accept(self, item: tr.EdgeAndDepth) -> bool:
self.on_visit(item.edge, self.is_upstream(item))
return True
def is_upstream(self, item: tr.EdgeAndDepth) -> bool:
return item.edge.spec.dag_hash() in self.upstream_hashes
def neighbors(self, item: tr.EdgeAndDepth):
# Prune edges from upstream nodes, only follow database tracked dependencies
return (
[]
if self.is_upstream(item)
else item.edge.spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES)
)
class UpstreamDatabaseLockingError(SpackError):
"""Raised when an operation would need to lock an upstream database"""

View File

@@ -2,11 +2,17 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import executable_prefix, set_virtuals_nonbuildable, update_configuration
from .common import (
DetectedPackage,
executable_prefix,
set_virtuals_nonbuildable,
update_configuration,
)
from .path import by_path, executables_in_path
from .test import detection_tests
__all__ = [
"DetectedPackage",
"by_path",
"executables_in_path",
"executable_prefix",

View File

@@ -6,9 +6,9 @@
function to update packages.yaml given a list of detected packages.
Ideally, each detection method should be placed in a specific subpackage
and implement at least a function that returns a list of specs.
The update in packages.yaml can then be done using the function provided here.
and implement at least a function that returns a list of DetectedPackage
objects. The update in packages.yaml can then be done using the function
provided here.
The module also contains other functions that might be useful across different
detection mechanisms.
@@ -17,10 +17,9 @@
import itertools
import os
import os.path
import pathlib
import re
import sys
from typing import Dict, List, Optional, Set, Tuple, Union
from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union
import llnl.util.tty
@@ -31,6 +30,25 @@
import spack.util.windows_registry
class DetectedPackage(NamedTuple):
"""Information on a package that has been detected."""
#: Spec that was detected
spec: spack.spec.Spec
#: Prefix of the spec
prefix: str
def __reduce__(self):
return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)
@staticmethod
def restore(
spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
) -> "DetectedPackage":
spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
return DetectedPackage(spec=spec, prefix=prefix)
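# A short sketch of the round trip that __reduce__ enables: pickling stores only
# the spec string, prefix, and extra attributes, and restore() rebuilds the spec
# when the object is loaded again (e.g. when results cross process boundaries).
def _roundtrip_example(detected: DetectedPackage) -> DetectedPackage:
    import pickle

    restored = pickle.loads(pickle.dumps(detected))
    assert str(restored.spec) == str(detected.spec)
    assert restored.prefix == detected.prefix
    return restored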
def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
"""Returns all the specs mentioned as externals in packages.yaml"""
packages_yaml = spack.config.get("packages")
@@ -45,7 +63,7 @@ def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
def _pkg_config_dict(
external_pkg_entries: List["spack.spec.Spec"],
external_pkg_entries: List[DetectedPackage],
) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
"""Generate a package specific config dict according to the packages.yaml schema.
@@ -65,19 +83,22 @@ def _pkg_config_dict(
pkg_dict = spack.util.spack_yaml.syaml_dict()
pkg_dict["externals"] = []
for e in external_pkg_entries:
if not _spec_is_valid(e):
if not _spec_is_valid(e.spec):
continue
external_items: List[Tuple[str, ExternalEntryType]] = [
("spec", str(e)),
("prefix", pathlib.Path(e.external_path).as_posix()),
("spec", str(e.spec)),
("prefix", e.prefix),
]
if e.external_modules:
external_items.append(("modules", e.external_modules))
if e.spec.external_modules:
external_items.append(("modules", e.spec.external_modules))
if e.extra_attributes:
if e.spec.extra_attributes:
external_items.append(
("extra_attributes", spack.util.spack_yaml.syaml_dict(e.extra_attributes.items()))
(
"extra_attributes",
spack.util.spack_yaml.syaml_dict(e.spec.extra_attributes.items()),
)
)
# external_items.extend(e.spec.extra_attributes.items())
@@ -198,32 +219,33 @@ def library_prefix(library_dir: str) -> str:
def update_configuration(
detected_packages: Dict[str, List["spack.spec.Spec"]],
detected_packages: Dict[str, List[DetectedPackage]],
scope: Optional[str] = None,
buildable: bool = True,
) -> List[spack.spec.Spec]:
"""Add the packages passed as arguments to packages.yaml
Args:
detected_packages: list of specs to be added
detected_packages: list of DetectedPackage objects to be added
scope: configuration scope where to add the detected packages
buildable: whether the detected packages are buildable or not
"""
predefined_external_specs = _externals_in_packages_yaml()
pkg_to_cfg, all_new_specs = {}, []
for package_name, entries in detected_packages.items():
new_entries = [s for s in entries if s not in predefined_external_specs]
new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]
pkg_config = _pkg_config_dict(new_entries)
external_entries = pkg_config.get("externals", [])
assert not isinstance(external_entries, bool), "unexpected value for external entry"
all_new_specs.extend(new_entries)
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
if buildable is False:
pkg_config["buildable"] = False
pkg_to_cfg[package_name] = pkg_config
pkgs_cfg = spack.config.get("packages", scope=scope)
pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
spack.config.set("packages", pkgs_cfg, scope=scope)
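# A hedged usage sketch of the flow above, as in the DetectedPackage-based
# variant of this module: detect externals under some path hints, then merge
# them into the packages configuration. The package name, path hint, and scope
# name are illustrative only.
def _update_configuration_example():
    from spack.detection import by_path

    detected = by_path(["cmake"], path_hints=["/usr/bin"])
    return update_configuration(detected, scope="user", buildable=True)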

View File

@@ -24,6 +24,7 @@
import spack.util.ld_so_conf
from .common import (
DetectedPackage,
WindowsCompilerExternalPaths,
WindowsKitExternalPaths,
_convert_to_iterable,
@@ -61,7 +62,7 @@ def common_windows_package_paths(pkg_cls=None) -> List[str]:
def file_identifier(path):
s = os.stat(path)
return s.st_dev, s.st_ino
return (s.st_dev, s.st_ino)
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
@@ -79,8 +80,6 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
constructed based on the PATH environment variable.
"""
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
# Make sure we don't doubly list /usr/lib and /lib etc.
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
return path_to_dict(search_paths)
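# A small sketch of why file_identifier is used as a dedupe key: two search
# paths that resolve to the same device and inode (e.g. /lib symlinked to
# /usr/lib) compare equal and are therefore scanned only once. The logic below
# is illustrative, not the implementation used by dedupe().
def _dedupe_by_inode_example(paths):
    import os

    seen, unique = set(), []
    for p in paths:
        st = os.stat(p)
        if (st.st_dev, st.st_ino) not in seen:
            seen.add((st.st_dev, st.st_ino))
            unique.append(p)
    return unique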
@@ -228,7 +227,7 @@ def prefix_from_path(self, *, path: str) -> str:
def detect_specs(
self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
) -> List["spack.spec.Spec"]:
) -> List[DetectedPackage]:
"""Given a list of files matching the search patterns, returns a list of detected specs.
Args:
@@ -294,16 +293,16 @@ def detect_specs(
warnings.warn(msg)
continue
if not spec.external_path:
spec.external_path = prefix
if spec.external_path:
prefix = spec.external_path
result.append(spec)
result.append(DetectedPackage(spec=spec, prefix=prefix))
return result
def find(
self, *, pkg_name: str, repository, initial_guess: Optional[List[str]] = None
) -> List["spack.spec.Spec"]:
) -> List[DetectedPackage]:
"""For a given package, returns a list of detected specs.
Args:
@@ -387,7 +386,7 @@ def by_path(
*,
path_hints: Optional[List[str]] = None,
max_workers: Optional[int] = None,
) -> Dict[str, List["spack.spec.Spec"]]:
) -> Dict[str, List[DetectedPackage]]:
"""Return the list of packages that have been detected on the system, keyed by
unqualified package name.

View File

@@ -68,7 +68,7 @@ def execute(self) -> List[spack.spec.Spec]:
with self._mock_layout() as path_hints:
entries = by_path([self.test.pkg_name], path_hints=path_hints)
_, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
specs = set(entries[unqualified_name])
specs = set(x.spec for x in entries[unqualified_name])
return list(specs)
@contextlib.contextmanager
@@ -104,9 +104,7 @@ def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[
@property
def expected_specs(self) -> List[spack.spec.Spec]:
return [
spack.spec.Spec.from_detection(
item.spec, external_path=self.tmpdir.name, extra_attributes=item.extra_attributes
)
spack.spec.Spec.from_detection(item.spec, extra_attributes=item.extra_attributes)
for item in self.test.results
]

View File

@@ -32,9 +32,10 @@ class OpenMpi(Package):
"""
import collections
import collections.abc
import functools
import os.path
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Set, Tuple, Union
import llnl.util.lang
import llnl.util.tty.color
@@ -47,7 +48,6 @@ class OpenMpi(Package):
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency
from spack.directives_meta import DirectiveError, DirectiveMeta
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -80,6 +80,22 @@ class OpenMpi(Package):
"redistribute",
]
#: These are variant names used by Spack internally; packages can't use them
reserved_names = [
"arch",
"architecture",
"dev_path",
"namespace",
"operating_system",
"os",
"patches",
"platform",
"target",
]
#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]
_patch_order_index = 0
@@ -139,6 +155,219 @@ def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
return spack.spec.Spec(value)
class DirectiveMeta(type):
"""Flushes the directives that were temporarily stored in the staging
area into the package.
"""
# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
# 1. in the order they were defined
# 2. following the MRO
attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass
# De-duplicates directives from base classes
attr_dict["_directives_to_be_executed"] = [
x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
]
# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
attr_dict["_directives_to_be_executed"].extend(
DirectiveMeta._directives_to_be_executed
)
DirectiveMeta._directives_to_be_executed = []
return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
setattr(cls, d, {})
# Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)
# Ignore any directives executed *within* top-level
# directives by clearing out the queue they're appended to
DirectiveMeta._directives_to_be_executed = []
super(DirectiveMeta, cls).__init__(name, bases, attr_dict)
@staticmethod
def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)
@staticmethod
def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)
@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()
@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in python.
Here's an example directive:
.. code-block:: python
@directive(dicts='versions')
version(pkg, ...):
...
This directive allows you to write:
.. code-block:: python
class Foo(Package):
version(...)
The ``@directive`` decorator handles a couple things for you:
1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.
2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.
The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.
This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names
if isinstance(dicts, str):
dicts = (dicts,)
if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)
# This decorator just returns the directive functions
def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)
# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
if decorated_function.__name__ == "version":
msg = (
'directive "{0}" cannot be used within a "when"'
' context since it does not support a "when=" '
"argument"
)
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)
when_constraints = [
spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
if kwargs.get("when"):
when_constraints.append(spack.spec.Spec(kwargs["when"]))
when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)
kwargs["when"] = when_spec
# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
# lazily. Instead, let the called directive handle them.
# This allows nested directive calls in packages. The
# caller can return the directive if it should be queued.
def remove_directives(arg):
directives = DirectiveMeta._directives_to_be_executed
if isinstance(arg, (list, tuple)):
# Descend into args that are lists or tuples
for a in arg:
remove_directives(a)
else:
# Remove directives args from the exec queue
remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)
# Nasty, but it's the best way I can think of to avoid
# side effects if directive results are passed as args
remove_directives(args)
remove_directives(list(kwargs.values()))
# A directive returns either something that is callable on a
# package or a sequence of them
result = decorated_function(*args, **kwargs)
# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
# wrapped function returns same result as original so
# that we can nest directives
return result
return _wrapper
return _decorator
SubmoduleCallback = Callable[["spack.package_base.PackageBase"], Union[str, List[str], bool]]
directive = DirectiveMeta.directive
@@ -617,7 +846,7 @@ def format_error(msg, pkg):
msg += " @*r{{[{0}, variant '{1}']}}"
return llnl.util.tty.color.colorize(msg.format(pkg.name, name))
if name in spack.variant.reserved_names:
if name in reserved_names:
def _raise_reserved_name(pkg):
msg = "The name '%s' is reserved by Spack" % name
@@ -881,6 +1110,10 @@ def _execute_languages(pkg: "spack.package_base.PackageBase"):
return _execute_languages
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
class DependencyError(DirectiveError):
"""This is raised when a dependency specification is invalid."""

View File

@@ -1,234 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import functools
from typing import List, Set
import llnl.util.lang
import spack.error
import spack.spec
#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]
class DirectiveMeta(type):
"""Flushes the directives that were temporarily stored in the staging
area into the package.
"""
# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
# 1. in the order they were defined
# 2. following the MRO
attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass
# De-duplicates directives from base classes
attr_dict["_directives_to_be_executed"] = [
x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
]
# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
attr_dict["_directives_to_be_executed"].extend(
DirectiveMeta._directives_to_be_executed
)
DirectiveMeta._directives_to_be_executed = []
return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
setattr(cls, d, {})
# Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)
# Ignore any directives executed *within* top-level
# directives by clearing out the queue they're appended to
DirectiveMeta._directives_to_be_executed = []
super(DirectiveMeta, cls).__init__(name, bases, attr_dict)
@staticmethod
def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)
@staticmethod
def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)
@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()
@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in python.
Here's an example directive:
.. code-block:: python
@directive(dicts='versions')
def version(pkg, ...):
...
This directive allows you to write:
.. code-block:: python
class Foo(Package):
version(...)
The ``@directive`` decorator handles a couple things for you:
1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.
2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.
The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.
This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names
if isinstance(dicts, str):
dicts = (dicts,)
if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)
# This decorator just returns the directive functions
def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)
# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
if decorated_function.__name__ == "version":
msg = (
'directive "{0}" cannot be used within a "when"'
' context since it does not support a "when=" '
"argument"
)
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)
when_constraints = [
spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
if kwargs.get("when"):
when_constraints.append(spack.spec.Spec(kwargs["when"]))
when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)
kwargs["when"] = when_spec
# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
# lazily. Instead, let the called directive handle them.
# This allows nested directive calls in packages. The
# caller can return the directive if it should be queued.
def remove_directives(arg):
directives = DirectiveMeta._directives_to_be_executed
if isinstance(arg, (list, tuple)):
# Descend into args that are lists or tuples
for a in arg:
remove_directives(a)
else:
# Remove directives args from the exec queue
remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)
# Nasty, but it's the best way I can think of to avoid
# side effects if directive results are passed as args
remove_directives(args)
remove_directives(list(kwargs.values()))
# A directive returns either something that is callable on a
# package or a sequence of them
result = decorated_function(*args, **kwargs)
# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
# wrapped function returns same result as original so
# that we can nest directives
return result
return _wrapper
return _decorator
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""

View File

@@ -4,14 +4,17 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import glob
import os
import posixpath
import re
import shutil
import sys
from contextlib import contextmanager
from pathlib import Path
from typing import List, Optional, Tuple
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.symlink import readlink
import spack.config
@@ -20,8 +23,13 @@
import spack.util.spack_json as sjson
from spack.error import SpackError
# Note: posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
default_projections = {
"all": "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
"all": posixpath.join(
"{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}"
)
}
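The switch to posixpath.join only changes behavior on Windows, where os.path.join would emit backslashes that Spec.format cannot accept at this stage. A quick illustrative comparison (template strings copied from the projection above):

    import ntpath, posixpath

    parts = ("{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}")
    print(posixpath.join(*parts))  # forward-slash separators on every platform
    print(ntpath.join(*parts))     # backslash separators, i.e. what os.path.join does on Windows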
@@ -31,42 +39,6 @@ def _check_concrete(spec):
raise ValueError("Specs passed to a DirectoryLayout must be concrete!")
def _get_spec(prefix: str) -> Optional["spack.spec.Spec"]:
"""Returns a spec if the prefix contains a spec file in the .spack subdir"""
for f in ("spec.json", "spec.yaml"):
try:
return spack.spec.Spec.from_specfile(os.path.join(prefix, ".spack", f))
except Exception:
continue
return None
def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
stack = [root]
specs = []
while stack:
prefix = stack.pop()
spec = _get_spec(prefix)
if spec:
spec.prefix = prefix
specs.append(spec)
continue
try:
scandir = os.scandir(prefix)
except OSError:
continue
with scandir as entries:
for entry in entries:
if entry.is_dir(follow_symlinks=False):
stack.append(entry.path)
return specs
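specs_from_metadata_dirs is an iterative depth-first walk that stops descending as soon as a prefix yields a spec file. The same traversal shape, stripped of spec parsing (marker name illustrative):

    import os

    def prefixes_with_marker(root, marker=".spack"):
        found, stack = [], [root]
        while stack:
            prefix = stack.pop()
            if os.path.isdir(os.path.join(prefix, marker)):
                found.append(prefix)   # treat as an install prefix; do not recurse further
                continue
            try:
                entries = os.scandir(prefix)
            except OSError:
                continue               # unreadable directory: skip, as the code above does
            with entries:
                for entry in entries:
                    if entry.is_dir(follow_symlinks=False):
                        stack.append(entry.path)
        return found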
class DirectoryLayout:
"""A directory layout is used to associate unique paths with specs.
Different installations are going to want different layouts for their
@@ -180,9 +152,20 @@ def read_spec(self, path):
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
# Attempts to convert to JSON if possible.
# Otherwise just returns the YAML.
yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
return yaml_path if os.path.exists(yaml_path) else json_path
if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
self.write_spec(spec, json_path)
try:
os.remove(yaml_path)
except OSError as err:
tty.debug("Could not remove deprecated {0}".format(yaml_path))
tty.debug(err)
elif os.path.exists(yaml_path):
return yaml_path
return json_path
def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
"""Gets full path to spec file for deprecated spec
@@ -216,7 +199,23 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
deprecated_spec.dag_hash() + "_" + self.spec_file_name,
)
return yaml_path if os.path.exists(yaml_path) else json_path
if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
self.write_spec(deprecated_spec, json_path)
try:
os.remove(yaml_path)
except (IOError, OSError) as err:
tty.debug("Could not remove deprecated {0}".format(yaml_path))
tty.debug(err)
elif os.path.exists(yaml_path):
return yaml_path
return json_path
@contextmanager
def disable_upstream_check(self):
self.check_upstream = False
yield
self.check_upstream = True
def metadata_path(self, spec):
return os.path.join(spec.prefix, self.metadata_dir)
@@ -272,6 +271,53 @@ def ensure_installed(self, spec):
"Spec file in %s does not match hash!" % spec_file_path
)
def all_specs(self):
if not os.path.isdir(self.root):
return []
specs = []
for _, path_scheme in self.projections.items():
path_elems = ["*"] * len(path_scheme.split(posixpath.sep))
# NOTE: Does not validate filename extension; should happen later
path_elems += [self.metadata_dir, "spec.json"]
pattern = os.path.join(self.root, *path_elems)
spec_files = glob.glob(pattern)
if not spec_files: # we're probably looking at legacy yaml...
path_elems += [self.metadata_dir, "spec.yaml"]
pattern = os.path.join(self.root, *path_elems)
spec_files = glob.glob(pattern)
specs.extend([self.read_spec(s) for s in spec_files])
return specs
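For a default projection, the glob pattern built above is one "*" per projection component plus the metadata suffix; roughly (root path illustrative):

    import os, posixpath

    root = "/opt/spack/opt/spack"
    path_scheme = "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
    path_elems = ["*"] * len(path_scheme.split(posixpath.sep)) + [".spack", "spec.json"]
    print(os.path.join(root, *path_elems))  # on POSIX: /opt/spack/opt/spack/*/*/*/.spack/spec.json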
def all_deprecated_specs(self):
if not os.path.isdir(self.root):
return []
deprecated_specs = set()
for _, path_scheme in self.projections.items():
path_elems = ["*"] * len(path_scheme.split(posixpath.sep))
# NOTE: Does not validate filename extension; should happen later
path_elems += [
self.metadata_dir,
self.deprecated_dir,
"*_spec.*",
] # + self.spec_file_name]
pattern = os.path.join(self.root, *path_elems)
spec_files = glob.glob(pattern)
get_depr_spec_file = lambda x: os.path.join(
os.path.dirname(os.path.dirname(x)), self.spec_file_name
)
deprecated_specs |= set(
(self.read_spec(s), self.read_spec(get_depr_spec_file(s))) for s in spec_files
)
return deprecated_specs
def specs_by_hash(self):
by_hash = {}
for spec in self.all_specs():
by_hash[spec.dag_hash()] = spec
return by_hash
def path_for_spec(self, spec):
"""Return absolute path from the root to a directory for the spec."""
_check_concrete(spec)
@@ -337,35 +383,6 @@ def remove_install_directory(self, spec, deprecated=False):
raise e
path = os.path.dirname(path)
def all_specs(self) -> List["spack.spec.Spec"]:
"""Returns a list of all specs detected in self.root, detected by `.spack` directories.
Their prefix is set to the directory containing the `.spack` directory. Note that these
specs may follow a different layout than the current layout if it was changed after
installation."""
return specs_from_metadata_dirs(self.root)
def deprecated_for(
self, specs: List["spack.spec.Spec"]
) -> List[Tuple["spack.spec.Spec", "spack.spec.Spec"]]:
"""Returns a list of tuples of specs (new, old) where new is deprecated for old"""
spec_with_deprecated = []
for spec in specs:
try:
deprecated = os.scandir(
os.path.join(str(spec.prefix), self.metadata_dir, self.deprecated_dir)
)
except OSError:
continue
with deprecated as entries:
for entry in entries:
try:
deprecated_spec = spack.spec.Spec.from_specfile(entry.path)
spec_with_deprecated.append((spec, deprecated_spec))
except Exception:
continue
return spec_with_deprecated
class DirectoryLayoutError(SpackError):
"""Superclass for directory layout errors."""

View File

@@ -58,8 +58,9 @@
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import SpecList
from spack.spec_list import InvalidSpecConstraintError, SpecList
from spack.util.path import substitute_path_variables
from spack.variant import UnknownVariantError
#: environment variable used to indicate the active environment
spack_env_var = "SPACK_ENV"
@@ -1213,6 +1214,7 @@ def scope_name(self):
def include_concrete_envs(self):
"""Copy and save the included envs' specs internally"""
lockfile_meta = None
root_hash_seen = set()
concrete_hash_seen = set()
self.included_concrete_spec_data = {}
@@ -1223,26 +1225,37 @@ def include_concrete_envs(self):
raise SpackEnvironmentError(f"Unable to find env at {env_path}")
env = Environment(env_path)
self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}
with open(env.lock_path) as f:
lockfile_as_dict = env._read_lockfile(f)
# Lockfile_meta must match each env and use at least format version 5
if lockfile_meta is None:
lockfile_meta = lockfile_as_dict["_meta"]
elif lockfile_meta != lockfile_as_dict["_meta"]:
raise SpackEnvironmentError("All lockfile _meta values must match")
elif lockfile_meta["lockfile-version"] < 5:
raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
# Copy unique root specs from env
for root_dict in env._concrete_roots_dict():
self.included_concrete_spec_data[env_path] = {"roots": []}
for root_dict in lockfile_as_dict["roots"]:
if root_dict["hash"] not in root_hash_seen:
self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
root_hash_seen.add(root_dict["hash"])
# Copy unique concrete specs from env
for dag_hash, spec_details in env._concrete_specs_dict().items():
if dag_hash not in concrete_hash_seen:
self.included_concrete_spec_data[env_path]["concrete_specs"].update(
{dag_hash: spec_details}
for concrete_spec in lockfile_as_dict["concrete_specs"]:
if concrete_spec not in concrete_hash_seen:
self.included_concrete_spec_data[env_path].update(
{"concrete_specs": lockfile_as_dict["concrete_specs"]}
)
concrete_hash_seen.add(dag_hash)
concrete_hash_seen.add(concrete_spec)
# Copy transitive include data
transitive = env.included_concrete_spec_data
if transitive:
self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
if "include_concrete" in lockfile_as_dict.keys():
self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
"include_concrete"
]
self._read_lockfile_dict(self._to_lockfile_dict())
self.write()
@@ -1624,10 +1637,10 @@ def _concretize_separately(self, tests=False):
# Concretize any new user specs that we haven't concretized yet
args, root_specs, i = [], [], 0
for uspec in self.user_specs:
for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
if uspec not in old_concretized_user_specs:
root_specs.append(uspec)
args.append((i, str(uspec), tests))
args.append((i, [str(x) for x in uspec_constraints], tests))
i += 1
# Ensure we don't try to bootstrap clingo in parallel
@@ -1643,7 +1656,7 @@ def _concretize_separately(self, tests=False):
# Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)
_ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)
# Early return if there is nothing to do
if len(args) == 0:
@@ -2160,23 +2173,16 @@ def _get_environment_specs(self, recurse_dependencies=True):
return specs
def _concrete_specs_dict(self):
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = {}
for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
# Assumes no legacy formats, since this was just created.
spec_dict[ht.dag_hash.name] = s.dag_hash()
concrete_specs[s.dag_hash()] = spec_dict
return concrete_specs
def _concrete_roots_dict(self):
hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = self._concrete_specs_dict()
root_specs = self._concrete_roots_dict()
spack_dict = {"version": spack.spack_version}
spack_commit = spack.main.get_spack_commit()
@@ -2197,7 +2203,7 @@ def _to_lockfile_dict(self):
# spack version information
"spack": spack_dict,
# users specs + hashes are the 'roots' of the environment
"roots": root_specs,
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
# Concrete specs by hash, including dependencies
"concrete_specs": concrete_specs,
}
@@ -2507,11 +2513,52 @@ def display_specs(specs):
print(tree_string)
def _concretize_from_constraints(spec_constraints, tests=False):
# Accept only valid constraints from list and concretize spec
# Get the named spec even if out of order
root_spec = [s for s in spec_constraints if s.name]
if len(root_spec) != 1:
m = "The constraints %s are not a valid spec " % spec_constraints
m += "concretization target. all specs must have a single name "
m += "constraint for concretization."
raise InvalidSpecConstraintError(m)
spec_constraints.remove(root_spec[0])
invalid_constraints = []
while True:
# Attach all anonymous constraints to one named spec
s = root_spec[0].copy()
for c in spec_constraints:
if c not in invalid_constraints:
s.constrain(c)
try:
return s.concretized(tests=tests)
except spack.spec.InvalidDependencyError as e:
invalid_deps_string = ["^" + d for d in e.invalid_deps]
invalid_deps = [
c
for c in spec_constraints
if any(c.satisfies(invd) for invd in invalid_deps_string)
]
if len(invalid_deps) != len(invalid_deps_string):
raise e
invalid_constraints.extend(invalid_deps)
except UnknownVariantError as e:
invalid_variants = e.unknown_variants
inv_variant_constraints = [
c for c in spec_constraints if any(name in c.variants for name in invalid_variants)
]
if len(inv_variant_constraints) != len(invalid_variants):
raise e
invalid_constraints.extend(inv_variant_constraints)
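_concretize_from_constraints retries with the offending constraints dropped until the solve succeeds or nothing more can be blamed. The same control flow reduced to plain dictionaries, with no Spec objects involved (names and validation rule are illustrative only):

    def merge_with_retry(root, constraints, allowed_keys):
        rejected = []
        while True:
            merged = dict(root)
            for c in constraints:
                if c not in rejected:
                    merged.update(c)
            unknown = [k for k in merged if k not in allowed_keys]
            if not unknown:
                return merged
            offenders = [c for c in constraints if any(k in c for k in unknown)]
            if not offenders:
                raise ValueError(f"cannot satisfy: {unknown}")
            rejected.extend(offenders)   # drop the constraints that caused the failure and retry

    print(merge_with_retry({"name": "foo"}, [{"cuda": True}, {"bogus": 1}], {"name", "cuda"}))
    # -> {'name': 'foo', 'cuda': True}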
def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
index, spec_str, tests = packed_arguments
index, spec_constraints, tests = packed_arguments
spec_constraints = [Spec(x) for x in spec_constraints]
with tty.SuppressOutput(msg_enabled=False):
start = time.time()
spec = Spec(spec_str).concretized(tests=tests)
spec = _concretize_from_constraints(spec_constraints, tests)
return index, spec, time.time() - start

View File

@@ -24,7 +24,6 @@
"""
import copy
import functools
import http.client
import os
import os.path
import re
@@ -55,11 +54,24 @@
import spack.version
import spack.version.git_ref_lookup
from spack.util.compression import decompressor_for
from spack.util.executable import CommandNotFoundError, Executable, which
from spack.util.executable import CommandNotFoundError, which
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE = (
"The contents of {subject} look like {content_type}. Either the URL"
" you are trying to use does not exist or you have an internet gateway"
" issue. You can remove the bad archive using 'spack clean"
" <package>', then try again using the correct URL."
)
def warn_content_type_mismatch(subject, content_type="HTML"):
tty.warn(
CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE.format(subject=subject, content_type=content_type)
)
def _needs_stage(fun):
"""Many methods on fetch strategies require a stage to be set
@@ -234,29 +246,33 @@ class URLFetchStrategy(FetchStrategy):
# these are checksum types. The generic 'checksum' is deprecated for
# specific hash names, but we need it for backward compatibility
optional_attrs = [*crypto.hashes.keys(), "checksum"]
optional_attrs = list(crypto.hashes.keys()) + ["checksum"]
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs) -> None:
def __init__(self, url=None, checksum=None, **kwargs):
super().__init__(**kwargs)
self.url = url
# Prefer values in kwargs to the positionals.
self.url = kwargs.get("url", url)
self.mirrors = kwargs.get("mirrors", [])
# digest can be set as the first argument, or from an explicit
# kwarg by the hash name.
self.digest: Optional[str] = checksum
self.digest = kwargs.get("checksum", checksum)
for h in self.optional_attrs:
if h in kwargs:
self.digest = kwargs[h]
self.expand_archive: bool = kwargs.get("expand", True)
self.extra_options: dict = kwargs.get("fetch_options", {})
self._curl: Optional[Executable] = None
self.extension: Optional[str] = kwargs.get("extension", None)
self._effective_url: Optional[str] = None
self.expand_archive = kwargs.get("expand", True)
self.extra_options = kwargs.get("fetch_options", {})
self._curl = None
self.extension = kwargs.get("extension", None)
if not self.url:
raise ValueError("URLFetchStrategy requires a url for fetching.")
@property
def curl(self) -> Executable:
def curl(self):
if not self._curl:
self._curl = web_util.require_curl()
return self._curl
@@ -309,13 +325,7 @@ def _check_headers(self, headers):
# redirects properly.
content_types = re.findall(r"Content-Type:[^\r\n]+", headers, flags=re.IGNORECASE)
if content_types and "text/html" in content_types[-1]:
msg = (
f"The contents of {self.archive_file or 'the archive'} fetched from {self.url} "
" looks like HTML. This can indicate a broken URL, or an internet gateway issue."
)
if self._effective_url != self.url:
msg += f" The URL redirected to {self._effective_url}."
tty.warn(msg)
warn_content_type_mismatch(self.archive_file or "the archive")
@_needs_stage
def _fetch_urllib(self, url):
@@ -338,14 +348,8 @@ def _fetch_urllib(self, url):
if os.path.lexists(save_file):
os.remove(save_file)
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)
# Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
# mirror that is broken, leading to spurious download failures. In that case it's helpful
# for users to know which URL was actually fetched.
if isinstance(response, http.client.HTTPResponse):
self._effective_url = response.geturl()
with open(save_file, "wb") as _open_file:
shutil.copyfileobj(response, _open_file)
self._check_headers(str(response.headers))
@@ -464,9 +468,9 @@ def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
if not self.digest:
raise NoDigestError(f"Attempt to check {self.__class__.__name__} with no digest.")
raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
verify_checksum(self.archive_file, self.digest, self.url, self._effective_url)
verify_checksum(self.archive_file, self.digest)
@_needs_stage
def reset(self):
@@ -475,8 +479,8 @@ def reset(self):
"""
if not self.archive_file:
raise NoArchiveFileError(
f"Tried to reset {self.__class__.__name__} before fetching",
f"Failed on reset() for URL{self.url}",
"Tried to reset URLFetchStrategy before fetching",
"Failed on reset() for URL %s" % self.url,
)
# Remove everything but the archive from the stage
@@ -489,10 +493,14 @@ def reset(self):
self.expand()
def __repr__(self):
return f"{self.__class__.__name__}<{self.url}>"
url = self.url if self.url else "no url"
return "%s<%s>" % (self.__class__.__name__, url)
def __str__(self):
return self.url
if self.url:
return self.url
else:
return "[no url]"
@fetcher
@@ -505,7 +513,7 @@ def fetch(self):
# check whether the cache file exists.
if not os.path.isfile(path):
raise NoCacheError(f"No cache of {path}")
raise NoCacheError("No cache of %s" % path)
# remove old symlink if one is there.
filename = self.stage.save_filename
@@ -515,8 +523,8 @@ def fetch(self):
# Symlink to local cached archive.
symlink(path, filename)
# Remove link if checksum fails, or subsequent fetchers will assume they don't need to
# download.
# Remove link if checksum fails, or subsequent fetchers
# will assume they don't need to download.
if self.digest:
try:
self.check()
@@ -525,12 +533,12 @@ def fetch(self):
raise
# Notify the user how we fetched.
tty.msg(f"Using cached archive: {path}")
tty.msg("Using cached archive: {0}".format(path))
class OCIRegistryFetchStrategy(URLFetchStrategy):
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
super().__init__(url=url, checksum=checksum, **kwargs)
def __init__(self, url=None, checksum=None, **kwargs):
super().__init__(url, checksum, **kwargs)
self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)
@@ -575,18 +583,18 @@ def __init__(self, **kwargs):
# Set a URL based on the type of fetch strategy.
self.url = kwargs.get(self.url_attr, None)
if not self.url:
raise ValueError(f"{self.__class__} requires {self.url_attr} argument.")
raise ValueError("%s requires %s argument." % (self.__class__, self.url_attr))
for attr in self.optional_attrs:
setattr(self, attr, kwargs.get(attr, None))
@_needs_stage
def check(self):
tty.debug(f"No checksum needed when fetching with {self.url_attr}")
tty.debug("No checksum needed when fetching with {0}".format(self.url_attr))
@_needs_stage
def expand(self):
tty.debug(f"Source fetched with {self.url_attr} is already expanded.")
tty.debug("Source fetched with %s is already expanded." % self.url_attr)
@_needs_stage
def archive(self, destination, *, exclude: Optional[str] = None):
@@ -606,10 +614,10 @@ def archive(self, destination, *, exclude: Optional[str] = None):
)
def __str__(self):
return f"VCS: {self.url}"
return "VCS: %s" % self.url
def __repr__(self):
return f"{self.__class__}<{self.url}>"
return "%s<%s>" % (self.__class__, self.url)
@fetcher
@@ -712,17 +720,11 @@ class GitFetchStrategy(VCSFetchStrategy):
"submodules",
"get_full_repo",
"submodules_delete",
"git_sparse_paths",
]
git_version_re = r"git version (\S+)"
def __init__(self, **kwargs):
self.commit: Optional[str] = None
self.tag: Optional[str] = None
self.branch: Optional[str] = None
# Discards the keywords in kwargs that may conflict with the next call
# to __init__
forwarded_args = copy.copy(kwargs)
@@ -733,7 +735,6 @@ def __init__(self, **kwargs):
self.submodules = kwargs.get("submodules", False)
self.submodules_delete = kwargs.get("submodules_delete", False)
self.get_full_repo = kwargs.get("get_full_repo", False)
self.git_sparse_paths = kwargs.get("git_sparse_paths", None)
@property
def git_version(self):
@@ -771,71 +772,68 @@ def git(self):
@property
def cachable(self):
return self.cache_enabled and bool(self.commit)
return self.cache_enabled and bool(self.commit or self.tag)
def source_id(self):
# TODO: tree-hash would secure download cache and mirrors, commit only secures checkouts.
return self.commit
return self.commit or self.tag
def mirror_id(self):
if self.commit:
repo_ref = self.commit or self.tag or self.branch
if repo_ref:
repo_path = urllib.parse.urlparse(self.url).path
result = os.path.sep.join(["git", repo_path, self.commit])
result = os.path.sep.join(["git", repo_path, repo_ref])
return result
def _repo_info(self):
args = ""
if self.commit:
args = f" at commit {self.commit}"
elif self.tag:
args = f" at tag {self.tag}"
elif self.branch:
args = f" on branch {self.branch}"
return f"{self.url}{args}"
if self.commit:
args = " at commit {0}".format(self.commit)
elif self.tag:
args = " at tag {0}".format(self.tag)
elif self.branch:
args = " on branch {0}".format(self.branch)
return "{0}{1}".format(self.url, args)
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug(f"Already fetched {self.stage.source_path}")
tty.debug("Already fetched {0}".format(self.stage.source_path))
return
if self.git_sparse_paths:
self._sparse_clone_src()
else:
self._clone_src()
self.submodule_operations()
self.clone(commit=self.commit, branch=self.branch, tag=self.tag)
def bare_clone(self, dest: str) -> None:
def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
"""
Execute a bare clone for metadata only
Clone a repository to a path.
Requires a destination since bare cloning does not provide source
and shouldn't be used for staging.
This method handles cloning from git, but does not require a stage.
Arguments:
dest (str or None): The path into which the code is cloned. If None,
requires a stage and uses the stage's source path.
commit (str or None): A commit to fetch from the remote. Only one of
commit, branch, and tag may be non-None.
branch (str or None): A branch to fetch from the remote.
tag (str or None): A tag to fetch from the remote.
bare (bool): Execute a "bare" git clone (--bare option to git)
"""
# Default to spack source path
tty.debug(f"Cloning git repository: {self._repo_info()}")
dest = dest or self.stage.source_path
tty.debug("Cloning git repository: {0}".format(self._repo_info()))
git = self.git
debug = spack.config.get("config:debug")
# We don't need to worry about which commit/branch/tag is checked out
clone_args = ["clone", "--bare"]
if not debug:
clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)
def _clone_src(self) -> None:
"""Clone a repository to a path using git."""
# Default to spack source path
dest = self.stage.source_path
tty.debug(f"Cloning git repository: {self._repo_info()}")
git = self.git
debug = spack.config.get("config:debug")
if self.commit:
if bare:
# We don't need to worry about which commit/branch/tag is checked out
clone_args = ["clone", "--bare"]
if not debug:
clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)
elif commit:
# Need to do a regular clone and check out everything if
# they asked for a particular commit.
clone_args = ["clone", self.url]
@@ -854,7 +852,7 @@ def _clone_src(self) -> None:
)
with working_dir(dest):
checkout_args = ["checkout", self.commit]
checkout_args = ["checkout", commit]
if not debug:
checkout_args.insert(1, "--quiet")
git(*checkout_args)
@@ -866,10 +864,10 @@ def _clone_src(self) -> None:
args.append("--quiet")
# If we want a particular branch ask for it.
if self.branch:
args.extend(["--branch", self.branch])
elif self.tag and self.git_version >= spack.version.Version("1.8.5.2"):
args.extend(["--branch", self.tag])
if branch:
args.extend(["--branch", branch])
elif tag and self.git_version >= spack.version.Version("1.8.5.2"):
args.extend(["--branch", tag])
# Try to be efficient if we're using a new enough git.
# This checks out only one branch's history
@@ -901,7 +899,7 @@ def _clone_src(self) -> None:
# For tags, be conservative and check them out AFTER
# cloning. Later git versions can do this with clone
# --branch, but older ones fail.
if self.tag and self.git_version < spack.version.Version("1.8.5.2"):
if tag and self.git_version < spack.version.Version("1.8.5.2"):
# pull --tags returns a "special" error code of 1 in
# older versions that we have to ignore.
# see: https://github.com/git/git/commit/19d122b
@@ -914,79 +912,6 @@ def _clone_src(self) -> None:
git(*pull_args, ignore_errors=1)
git(*co_args)
def _sparse_clone_src(self, **kwargs):
"""Use git's sparse checkout feature to clone portions of a git repository"""
dest = self.stage.source_path
git = self.git
if self.git_version < spack.version.Version("2.26.0"):
# technically this should be supported for 2.25, but bumping for OS issues
# see https://github.com/spack/spack/issues/45771
# code paths exist where the package is not set. Ensure some identifier for the
# package that was configured for sparse checkout exists in the error message
identifier = str(self.url)
if self.package:
identifier += f" ({self.package.name})"
tty.warn(
(
f"{identifier} is configured for git sparse-checkout "
"but the git version is too old to support sparse cloning. "
"Cloning the full repository instead."
)
)
self._clone_src()
else:
# default to depth=2 to allow for retention of some git properties
depth = kwargs.get("depth", 2)
needs_fetch = self.branch or self.tag
git_ref = self.branch or self.tag or self.commit
assert git_ref
clone_args = ["clone"]
if needs_fetch:
clone_args.extend(["--branch", git_ref])
if self.get_full_repo:
clone_args.append("--no-single-branch")
else:
clone_args.append("--single-branch")
clone_args.extend(
[f"--depth={depth}", "--no-checkout", "--filter=blob:none", self.url]
)
sparse_args = ["sparse-checkout", "set"]
if callable(self.git_sparse_paths):
sparse_args.extend(self.git_sparse_paths())
else:
sparse_args.extend([p for p in self.git_sparse_paths])
sparse_args.append("--cone")
checkout_args = ["checkout", git_ref]
if not spack.config.get("config:debug"):
clone_args.insert(1, "--quiet")
checkout_args.insert(1, "--quiet")
with temp_cwd():
git(*clone_args)
repo_name = get_single_file(".")
if self.stage:
self.stage.srcdir = repo_name
shutil.move(repo_name, dest)
with working_dir(dest):
git(*sparse_args)
git(*checkout_args)
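Outside of Spack, the sparse-clone path above corresponds to a short git command sequence. An illustrative sketch, assuming git >= 2.26 and a ref that is a branch or tag (this is not the strategy's API, just the equivalent commands driven through subprocess):

    import subprocess, tempfile

    def sparse_clone(url, ref, sparse_paths, depth=2):
        dest = tempfile.mkdtemp()
        subprocess.run(["git", "clone", "--branch", ref, "--single-branch", f"--depth={depth}",
                        "--no-checkout", "--filter=blob:none", url, dest], check=True)
        subprocess.run(["git", "-C", dest, "sparse-checkout", "set", "--cone", *sparse_paths],
                       check=True)
        subprocess.run(["git", "-C", dest, "checkout", ref], check=True)
        return dest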
def submodule_operations(self):
dest = self.stage.source_path
git = self.git
if self.submodules_delete:
with working_dir(dest):
for submodule_to_delete in self.submodules_delete:
@@ -1039,7 +964,7 @@ def protocol_supports_shallow_clone(self):
return not (self.url.startswith("http://") or self.url.startswith("/"))
def __str__(self):
return f"[git] {self._repo_info()}"
return "[git] {0}".format(self._repo_info())
@fetcher
@@ -1363,7 +1288,7 @@ def reset(self):
shutil.move(scrubbed, source_path)
def __str__(self):
return f"[hg] {self.url}"
return "[hg] %s" % self.url
@fetcher
@@ -1372,16 +1297,47 @@ class S3FetchStrategy(URLFetchStrategy):
url_attr = "s3"
def __init__(self, *args, **kwargs):
try:
super().__init__(*args, **kwargs)
except ValueError:
if not kwargs.get("url"):
raise ValueError("S3FetchStrategy requires a url for fetching.")
@_needs_stage
def fetch(self):
if not self.url.startswith("s3://"):
raise spack.error.FetchError(
f"{self.__class__.__name__} can only fetch from s3:// urls."
)
if self.archive_file:
tty.debug(f"Already downloaded {self.archive_file}")
return
self._fetch_urllib(self.url)
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "s3":
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
basename = os.path.basename(parsed_url.path)
request = urllib.request.Request(
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
)
with working_dir(self.stage.path):
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
raise FailedDownloadError(e) from e
tty.debug(f"Fetching {self.url}")
with open(basename, "wb") as f:
shutil.copyfileobj(response, f)
content_type = web_util.get_header(response.headers, "Content-type")
if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
if not self.archive_file:
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
@@ -1394,17 +1350,46 @@ class GCSFetchStrategy(URLFetchStrategy):
url_attr = "gs"
def __init__(self, *args, **kwargs):
try:
super().__init__(*args, **kwargs)
except ValueError:
if not kwargs.get("url"):
raise ValueError("GCSFetchStrategy requires a url for fetching.")
@_needs_stage
def fetch(self):
if not self.url.startswith("gs"):
raise spack.error.FetchError(
f"{self.__class__.__name__} can only fetch from gs:// urls."
)
if self.archive_file:
tty.debug(f"Already downloaded {self.archive_file}")
tty.debug("Already downloaded {0}".format(self.archive_file))
return
self._fetch_urllib(self.url)
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "gs":
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
basename = os.path.basename(parsed_url.path)
request = urllib.request.Request(
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
)
with working_dir(self.stage.path):
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
raise FailedDownloadError(e) from e
tty.debug(f"Fetching {self.url}")
with open(basename, "wb") as f:
shutil.copyfileobj(response, f)
content_type = web_util.get_header(response.headers, "Content-type")
if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
if not self.archive_file:
raise FailedDownloadError(
@@ -1418,7 +1403,7 @@ class FetchAndVerifyExpandedFile(URLFetchStrategy):
as well as after expanding it."""
def __init__(self, url, archive_sha256: str, expanded_sha256: str):
super().__init__(url=url, checksum=archive_sha256)
super().__init__(url, archive_sha256)
self.expanded_sha256 = expanded_sha256
def expand(self):
@@ -1434,26 +1419,21 @@ def expand(self):
if len(files) != 1:
raise ChecksumError(self, f"Expected a single file in {src_dir}.")
verify_checksum(
os.path.join(src_dir, files[0]), self.expanded_sha256, self.url, self._effective_url
)
verify_checksum(os.path.join(src_dir, files[0]), self.expanded_sha256)
def verify_checksum(file: str, digest: str, url: str, effective_url: Optional[str]) -> None:
def verify_checksum(file, digest):
checker = crypto.Checker(digest)
if not checker.check(file):
# On failure, provide some information about the file size and
# contents, so that we can quickly see what the issue is (redirect
# was not followed, empty file, text instead of binary, ...)
size, contents = fs.filesummary(file)
long_msg = (
raise ChecksumError(
f"{checker.hash_name} checksum failed for {file}",
f"Expected {digest} but got {checker.sum}. "
f"File size = {size} bytes. Contents = {contents!r}. "
f"URL = {url}"
f"File size = {size} bytes. Contents = {contents!r}",
)
if effective_url and effective_url != url:
long_msg += f", redirected to = {effective_url}"
raise ChecksumError(f"{checker.hash_name} checksum failed for {file}", long_msg)
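spack.util.crypto.Checker amounts to a streaming hash comparison; a generic stand-in using only hashlib (not the Spack API) looks like:

    import hashlib

    def checksum_matches(path, expected_sha256):
        hasher = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 16), b""):  # read in 64 KiB chunks
                hasher.update(chunk)
        return hasher.hexdigest() == expected_sha256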
def stable_target(fetcher):
@@ -1465,14 +1445,14 @@ def stable_target(fetcher):
return False
def from_url(url: str) -> URLFetchStrategy:
def from_url(url):
"""Given a URL, find an appropriate fetch strategy for it.
Currently just gives you a URLFetchStrategy that uses curl.
TODO: make this return appropriate fetch strategies for other
types of URLs.
"""
return URLFetchStrategy(url=url)
return URLFetchStrategy(url)
def from_kwargs(**kwargs):
@@ -1541,12 +1521,10 @@ def _check_version_attributes(fetcher, pkg, version):
def _extrapolate(pkg, version):
"""Create a fetcher from an extrapolated URL for this version."""
try:
return URLFetchStrategy(url=pkg.url_for_version(version), fetch_options=pkg.fetch_options)
return URLFetchStrategy(pkg.url_for_version(version), fetch_options=pkg.fetch_options)
except spack.package_base.NoURLError:
raise ExtrapolationError(
f"Can't extrapolate a URL for version {version} because "
f"package {pkg.name} defines no URLs"
)
msg = "Can't extrapolate a URL for version %s " "because package %s defines no URLs"
raise ExtrapolationError(msg % (version, pkg.name))
def _from_merged_attrs(fetcher, pkg, version):
@@ -1563,11 +1541,8 @@ def _from_merged_attrs(fetcher, pkg, version):
attrs["fetch_options"] = pkg.fetch_options
attrs.update(pkg.versions[version])
if fetcher.url_attr == "git":
pkg_attr_list = ["submodules", "git_sparse_paths"]
for pkg_attr in pkg_attr_list:
if hasattr(pkg, pkg_attr):
attrs.setdefault(pkg_attr, getattr(pkg, pkg_attr))
if fetcher.url_attr == "git" and hasattr(pkg, "submodules"):
attrs.setdefault("submodules", pkg.submodules)
return fetcher(**attrs)
@@ -1662,9 +1637,11 @@ def for_package_version(pkg, version=None):
raise InvalidArgsError(pkg, version, **args)
def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
def from_url_scheme(url, *args, **kwargs):
"""Finds a suitable FetchStrategy by matching its url_attr with the scheme
in the given url."""
url = kwargs.get("url", url)
parsed_url = urllib.parse.urlparse(url, scheme="file")
scheme_mapping = kwargs.get("scheme_mapping") or {
@@ -1681,9 +1658,11 @@ def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
for fetcher in all_strategies:
url_attr = getattr(fetcher, "url_attr", None)
if url_attr and url_attr == scheme:
return fetcher(url=url, **kwargs)
return fetcher(url, *args, **kwargs)
raise ValueError(f'No FetchStrategy found for url with scheme: "{parsed_url.scheme}"')
raise ValueError(
'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(SCHEME=parsed_url.scheme)
)
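One detail that makes this work for local paths: urlparse only falls back to the scheme= default when the URL carries no scheme of its own, which is how plain filesystem paths end up routed to the file fetcher:

    import urllib.parse

    print(urllib.parse.urlparse("https://example.com/pkg.tar.gz", scheme="file").scheme)  # https
    print(urllib.parse.urlparse("/tmp/pkg.tar.gz", scheme="file").scheme)                 # file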
def from_list_url(pkg):
@@ -1708,9 +1687,7 @@ def from_list_url(pkg):
)
# construct a fetcher
return URLFetchStrategy(
url=url_from_list, checksum=checksum, fetch_options=pkg.fetch_options
)
return URLFetchStrategy(url_from_list, checksum, fetch_options=pkg.fetch_options)
except KeyError as e:
tty.debug(e)
tty.msg("Cannot find version %s in url_list" % pkg.version)
@@ -1738,10 +1715,10 @@ def store(self, fetcher, relative_dest):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def fetcher(self, target_path: str, digest: Optional[str], **kwargs) -> CacheURLFetchStrategy:
def fetcher(self, target_path, digest, **kwargs):
path = os.path.join(self.root, target_path)
url = url_util.path_to_file_url(path)
return CacheURLFetchStrategy(url=url, checksum=digest, **kwargs)
return CacheURLFetchStrategy(url, digest, **kwargs)
def destroy(self):
shutil.rmtree(self.root, ignore_errors=True)

View File

@@ -20,7 +20,6 @@
systems (e.g. modules, lmod, etc.) or to add other custom
features.
"""
import importlib
from llnl.util.lang import ensure_last, list_modules
@@ -47,7 +46,11 @@ def _populate_hooks(cls):
for name in relative_names:
module_name = __name__ + "." + name
module_obj = importlib.import_module(module_name)
# When importing a module from a package, __import__('A.B', ...)
# returns package A when 'fromlist' is empty. If fromlist is not
# empty it returns the submodule B instead
# See: https://stackoverflow.com/a/2725668/771663
module_obj = __import__(module_name, fromlist=[None])
cls._hooks.append((module_name, module_obj))
@property

View File

@@ -24,6 +24,5 @@ def post_install(spec, explicit):
# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
signing_key = bindist.select_signing_key() if mirror.signed else None
with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
bindist.push_or_raise([spec], out_url=mirror.push_url, signing_key=signing_key, force=True)
tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")

View File

@@ -276,6 +276,52 @@ def _do_fake_install(pkg: "spack.package_base.PackageBase") -> None:
dump_packages(pkg.spec, packages_dir)
def _packages_needed_to_bootstrap_compiler(
compiler: "spack.spec.CompilerSpec", architecture: "spack.spec.ArchSpec", pkgs: list
) -> List[Tuple["spack.package_base.PackageBase", bool]]:
"""
Return a list of packages required to bootstrap `pkg`s compiler
Checks Spack's compiler configuration for a compiler that
matches the package spec.
Args:
compiler: the compiler to bootstrap
architecture: the architecture for which to bootstrap the compiler
pkgs: the packages that may need their compiler installed
Return:
list of tuples of packages and a boolean, for concretized compiler-related
packages that need to be installed and bool values specify whether the
package is the bootstrap compiler (``True``) or one of its dependencies
(``False``). The list will be empty if there are no compilers.
"""
tty.debug(f"Bootstrapping {compiler} compiler")
compilers = spack.compilers.compilers_for_spec(compiler, arch_spec=architecture)
if compilers:
return []
dep = spack.compilers.pkg_spec_for_compiler(compiler)
# Set the architecture for the compiler package in a way that allows the
# concretizer to back off if needed for the older bootstrapping compiler
dep.constrain(f"platform={str(architecture.platform)}")
dep.constrain(f"os={str(architecture.os)}")
dep.constrain(f"target={architecture.target.microarchitecture.family.name}:")
# concrete CompilerSpec has less info than concrete Spec
# concretize as Spec to add that information
dep.concretize()
# mark compiler as depended-on by the packages that use it
for pkg in pkgs:
dep._dependents.add(
spack.spec.DependencySpec(pkg.spec, dep, depflag=dt.BUILD, virtuals=())
)
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
packages.append((dep.package, True))
return packages
def _hms(seconds: int) -> str:
"""
Convert seconds to hours, minutes, seconds
@@ -405,7 +451,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
# Add to the DB
tty.debug(f"{pre} registering into DB")
spack.store.STORE.db.add(spec, explicit=explicit)
spack.store.STORE.db.add(spec, None, explicit=explicit)
def _process_binary_cache_tarball(
@@ -442,12 +488,13 @@ def _process_binary_cache_tarball(
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
pkg.windows_establish_runtime_linkage()
if hasattr(pkg, "_post_buildcache_install_hook"):
pkg._post_buildcache_install_hook()
pkg.installed_from_binary_cache = True
spack.store.STORE.db.add(pkg.spec, explicit=explicit)
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
return True
@@ -921,6 +968,26 @@ def __init__(
if package_id(d) != self.pkg_id
)
# Handle bootstrapped compiler
#
# The bootstrapped compiler is not a dependency in the spec, but it is
# a dependency of the build task. Here we add it to self.dependencies
compiler_spec = self.pkg.spec.compiler
arch_spec = self.pkg.spec.architecture
strict = spack.concretize.Concretizer().check_for_compiler_existence
if (
not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
and not strict
):
# The compiler is in the queue, identify it as dependency
dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
dep.constrain(f"platform={str(arch_spec.platform)}")
dep.constrain(f"os={str(arch_spec.os)}")
dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
dep.concretize()
dep_id = package_id(dep)
self.dependencies.add(dep_id)
# List of uninstalled dependencies, which is used to establish
# the priority of the build task.
#
@@ -1099,6 +1166,53 @@ def __str__(self) -> str:
installed = f"installed ({len(self.installed)}) = {self.installed}"
return f"{self.pid}: {requests}; {tasks}; {installed}; {failed}"
def _add_bootstrap_compilers(
self,
compiler: "spack.spec.CompilerSpec",
architecture: "spack.spec.ArchSpec",
pkgs: List["spack.package_base.PackageBase"],
request: BuildRequest,
all_deps,
) -> None:
"""
Add bootstrap compilers and dependencies to the build queue.
Args:
compiler: the compiler to bootstrap
architecture: the architecture for which to bootstrap the compiler
pkgs: the package list with possible compiler dependencies
request: the associated install request
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
"""
packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
for comp_pkg, is_compiler in packages:
pkgid = package_id(comp_pkg.spec)
if pkgid not in self.build_tasks:
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
elif is_compiler:
# ensure it's queued as a compiler
self._modify_existing_task(pkgid, "compiler", True)
def _modify_existing_task(self, pkgid: str, attr, value) -> None:
"""
Update a task in-place to modify its behavior.
Currently used to update the ``compiler`` field on tasks
that were originally created as a dependency of a compiler,
but are compilers in their own right.
For example, ``intel-oneapi-compilers-classic`` depends on
``intel-oneapi-compilers``, which can cause the latter to be
queued first as a non-compiler, and only later as a compiler.
"""
for i, tup in enumerate(self.build_pq):
key, task = tup
if task.pkg_id == pkgid:
tty.debug(f"Modifying task for {pkgid} to treat it as a compiler", level=2)
setattr(task, attr, value)
self.build_pq[i] = (key, task)
def _add_init_task(
self,
pkg: "spack.package_base.PackageBase",
@@ -1428,7 +1542,42 @@ def _add_tasks(self, request: BuildRequest, all_deps):
tty.warn(f"Installation request refused: {str(err)}")
return
install_compilers = spack.config.get("config:install_missing_compilers", False)
install_deps = request.install_args.get("install_deps")
# Bootstrap compilers first
if install_deps and install_compilers:
packages_per_compiler: Dict[
"spack.spec.CompilerSpec",
Dict["spack.spec.ArchSpec", List["spack.package_base.PackageBase"]],
] = {}
for dep in request.traverse_dependencies():
dep_pkg = dep.package
compiler = dep_pkg.spec.compiler
arch = dep_pkg.spec.architecture
if compiler not in packages_per_compiler:
packages_per_compiler[compiler] = {}
if arch not in packages_per_compiler[compiler]:
packages_per_compiler[compiler][arch] = []
packages_per_compiler[compiler][arch].append(dep_pkg)
compiler = request.pkg.spec.compiler
arch = request.pkg.spec.architecture
if compiler not in packages_per_compiler:
packages_per_compiler[compiler] = {}
if arch not in packages_per_compiler[compiler]:
packages_per_compiler[compiler][arch] = []
packages_per_compiler[compiler][arch].append(request.pkg)
for compiler, archs in packages_per_compiler.items():
for arch, packages in archs.items():
self._add_bootstrap_compilers(compiler, arch, packages, request, all_deps)
if install_deps:
for dep in request.traverse_dependencies():
@@ -1460,6 +1609,12 @@ def _add_tasks(self, request: BuildRequest, all_deps):
fail_fast = bool(request.install_args.get("fail_fast"))
self.fail_fast = self.fail_fast or fail_fast
def _add_compiler_package_to_config(self, pkg: "spack.package_base.PackageBase") -> None:
compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
spack.compilers.add_compilers_to_config(
spack.compilers.find_compilers([compiler_search_prefix])
)
def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
"""
Perform the installation of the requested spec and/or dependency
@@ -1487,6 +1642,8 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
if use_cache:
if _install_from_cache(pkg, explicit, unsigned):
self._update_installed(task)
if task.compiler:
self._add_compiler_package_to_config(pkg)
return
elif cache_only:
raise InstallError("No binary found when cache-only was specified", pkg=pkg)
@@ -1514,8 +1671,11 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
)
# Note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.STORE.db.add(pkg.spec, explicit=explicit)
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
# If a compiler, ensure it is added to the configuration
if task.compiler:
self._add_compiler_package_to_config(pkg)
except spack.build_environment.StopPhase as e:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
@@ -1916,6 +2076,10 @@ def install(self) -> None:
path = spack.util.path.debug_padded_filter(pkg.prefix)
_print_installed_pkg(path)
# It's an already installed compiler, add it to the config
if task.compiler:
self._add_compiler_package_to_config(pkg)
else:
# At this point we've failed to get a write or a read
# lock, which means another process has taken a write

View File

@@ -21,7 +21,6 @@
from typing import List, Optional, Union
import llnl.url
import llnl.util.symlink
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -29,8 +28,8 @@
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.oci.image
import spack.repo
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
@@ -427,74 +426,51 @@ def _determine_extension(fetcher):
return ext
class MirrorLayout:
"""A ``MirrorLayout`` object describes the relative path of a mirror entry."""
class MirrorReference:
"""A ``MirrorReference`` stores the relative paths where you can store a
package/resource in a mirror directory.
def __init__(self, path: str) -> None:
self.path = path
The appropriate storage location is given by ``storage_path``. The
``cosmetic_path`` property provides a reference that a human could generate
themselves based on reading the details of the package.
A user can iterate over a ``MirrorReference`` object to get all the
possible names that might be used to refer to the resource in a mirror;
this includes names generated by previous naming schemes that are no-longer
reported by ``storage_path`` or ``cosmetic_path``.
"""
def __init__(self, cosmetic_path, global_path=None):
self.global_path = global_path
self.cosmetic_path = cosmetic_path
@property
def storage_path(self):
if self.global_path:
return self.global_path
else:
return self.cosmetic_path
def __iter__(self):
"""Yield all paths including aliases where the resource can be found."""
yield self.path
def make_alias(self, root: str) -> None:
"""Make the entry ``root / self.path`` available under a human readable alias"""
pass
if self.global_path:
yield self.global_path
yield self.cosmetic_path
class DefaultLayout(MirrorLayout):
def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
# When we have a digest, it is used as the primary storage location. If not, then we use
# the human-readable alias. In case of mirrors of a VCS checkout, we currently do not have
# a digest, that's why an alias is required and a digest optional.
super().__init__(path=digest_path or alias_path)
self.alias = alias_path
self.digest_path = digest_path
class OCIImageLayout:
"""Follow the OCI Image Layout Specification to archive blobs
def make_alias(self, root: str) -> None:
"""Symlink a human readible path in our mirror to the actual storage location."""
# We already use the human-readable path as the main storage location.
if not self.digest_path:
return
alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
alias_dir = os.path.dirname(alias)
relative_dst = os.path.relpath(digest, start=alias_dir)
mkdirp(alias_dir)
tmp = f"{alias}.tmp"
llnl.util.symlink.symlink(relative_dst, tmp)
try:
os.rename(tmp, alias)
except OSError:
# Clean up the temporary if possible
try:
os.unlink(tmp)
except OSError:
pass
raise
def __iter__(self):
if self.digest_path:
yield self.digest_path
yield self.alias
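make_alias above relies on the create-then-rename idiom so readers never observe a half-made alias. The core of it, independent of Spack (POSIX rename semantics assumed):

    import os

    def atomic_symlink(relative_target, alias_path):
        tmp = alias_path + ".tmp"
        os.symlink(relative_target, tmp)   # build the link under a temporary name
        try:
            os.rename(tmp, alias_path)     # atomic replace on POSIX filesystems
        except OSError:
            try:
                os.unlink(tmp)             # best-effort cleanup of the temporary link
            except OSError:
                pass
            raise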
class OCILayout(MirrorLayout):
"""Follow the OCI Image Layout Specification to archive blobs where paths are of the form
``blobs/<algorithm>/<digest>``"""
Paths are of the form `blobs/<algorithm>/<digest>`
"""
def __init__(self, digest: spack.oci.image.Digest) -> None:
super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))
self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest)
def __iter__(self):
yield self.storage_path
def default_mirror_layout(
fetcher: "spack.fetch_strategy.FetchStrategy",
per_package_ref: str,
spec: Optional["spack.spec.Spec"] = None,
) -> MirrorLayout:
def mirror_archive_paths(fetcher, per_package_ref, spec=None):
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
ext = None
@@ -518,7 +494,7 @@ def default_mirror_layout(
if global_ref and ext:
global_ref += ".%s" % ext
return DefaultLayout(per_package_ref, global_ref)
return MirrorReference(per_package_ref, global_ref)
def get_all_versions(specs):

View File

@@ -46,6 +46,7 @@
import spack.deptypes as dt
import spack.environment
import spack.error
import spack.modules.common
import spack.paths
import spack.projections as proj
import spack.repo
@@ -351,7 +352,7 @@ def get_module(module_type, spec, get_full_path, module_set_name="default", requ
except spack.repo.UnknownPackageError:
upstream, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
if upstream:
module = upstream_module_index.upstream_module(spec, module_type)
module = spack.modules.common.upstream_module_index.upstream_module(spec, module_type)
if not module:
return None

View File

@@ -25,11 +25,14 @@
so package authors should use their judgement.
"""
import functools
import inspect
from contextlib import contextmanager
import spack.directives_meta
from llnl.util.lang import caller_locals
import spack.directives
import spack.error
import spack.spec
from spack.spec import Spec
class MultiMethodMeta(type):
@@ -132,7 +135,7 @@ def __call__(self, package_or_builder_self, *args, **kwargs):
# its superclasses for successive calls. We don't have that
# information within `SpecMultiMethod`, because it is not
# associated with the package class.
for cls in package_or_builder_self.__class__.__mro__[1:]:
for cls in inspect.getmro(package_or_builder_self.__class__)[1:]:
superself = cls.__dict__.get(self.__name__, None)
if isinstance(superself, SpecMultiMethod):
@@ -162,9 +165,9 @@ def __init__(self, condition):
condition (str): condition to be met
"""
if isinstance(condition, bool):
self.spec = spack.spec.Spec() if condition else None
self.spec = Spec() if condition else None
else:
self.spec = spack.spec.Spec(condition)
self.spec = Spec(condition)
def __call__(self, method):
"""This annotation lets packages declare multiple versions of
@@ -226,9 +229,11 @@ def install(self, prefix):
platform-specific versions. There's not much we can do to get
around this because of the way decorators work.
"""
assert (
MultiMethodMeta._locals is not None
), "cannot use multimethod, missing MultiMethodMeta metaclass?"
# In Python 2, Get the first definition of the method in the
# calling scope by looking at the caller's locals. In Python 3,
# we handle this using MultiMethodMeta.__prepare__.
if MultiMethodMeta._locals is None:
MultiMethodMeta._locals = caller_locals()
# Create a multimethod with this name if there is not one already
original_method = MultiMethodMeta._locals.get(method.__name__)
@@ -261,17 +266,17 @@ def __enter__(self):
and add their constraint to whatever may be already present in the directive
`when=` argument.
"""
spack.directives_meta.DirectiveMeta.push_to_context(str(self.spec))
spack.directives.DirectiveMeta.push_to_context(str(self.spec))
def __exit__(self, exc_type, exc_val, exc_tb):
spack.directives_meta.DirectiveMeta.pop_from_context()
spack.directives.DirectiveMeta.pop_from_context()
@contextmanager
def default_args(**kwargs):
spack.directives_meta.DirectiveMeta.push_default_args(kwargs)
spack.directives.DirectiveMeta.push_default_args(kwargs)
yield
spack.directives_meta.DirectiveMeta.pop_default_args()
spack.directives.DirectiveMeta.pop_default_args()
class MultiMethodError(spack.error.SpackError):
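The push/pop pattern behind the when(...) and default_args context managers, reduced to a standalone sketch (the real code delegates to DirectiveMeta):

from contextlib import contextmanager

_context = []  # stack of active constraints

@contextmanager
def when(constraint):
    _context.append(str(constraint))
    yield
    _context.pop()

with when("+mpi"):
    print(_context)  # ['+mpi']
print(_context)      # []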

View File

@@ -390,12 +390,15 @@ def make_stage(
) -> spack.stage.Stage:
_urlopen = _urlopen or spack.oci.opener.urlopen
fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy(
url=url, checksum=digest.digest, _urlopen=_urlopen
url, checksum=digest.digest, _urlopen=_urlopen
)
# Use blobs/<alg>/<encoded> as the cache path, which follows
# the OCI Image Layout Specification. What's missing though,
# is the `oci-layout` and `index.json` files, which are
# required by the spec.
return spack.stage.Stage(
fetch_strategy, mirror_paths=spack.mirror.OCILayout(digest), name=digest.digest, keep=keep
fetch_strategy,
mirror_paths=spack.mirror.OCIImageLayout(digest),
name=digest.digest,
keep=keep,
)
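For reference, a minimal sketch of the two files the comment says are still missing from a full OCI Image Layout next to blobs/ (file contents follow the OCI image-layout spec; the cache directory here is made up):

import json
import os

layout_root = "/tmp/oci-cache-example"  # hypothetical cache root
os.makedirs(os.path.join(layout_root, "blobs", "sha256"), exist_ok=True)

# Marker file identifying the directory as an OCI image layout.
with open(os.path.join(layout_root, "oci-layout"), "w") as f:
    json.dump({"imageLayoutVersion": "1.0.0"}, f)

# Top-level image index; an empty manifest list is valid.
with open(os.path.join(layout_root, "index.json"), "w") as f:
    json.dump({"schemaVersion": 2, "manifests": []}, f)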

View File

@@ -104,7 +104,6 @@
from spack.spec import InvalidSpecDetected, Spec
from spack.util.cpus import determine_number_of_jobs
from spack.util.executable import *
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
from spack.variant import (
any_combination_of,
auto_or_any_combination_of,

View File

@@ -15,7 +15,7 @@
import functools
import glob
import hashlib
import importlib
import inspect
import io
import os
import re
@@ -116,10 +116,11 @@ def preferred_version(pkg: "PackageBase"):
Arguments:
pkg: The package whose versions are to be assessed.
"""
from spack.solver.asp import concretization_version_order
version, _ = max(pkg.versions.items(), key=concretization_version_order)
return version
# Here we sort first on the fact that a version is marked
# as preferred in the package, then on the fact that the
# version is not develop, then lexicographically
key_fn = lambda v: (pkg.versions[v].get("preferred", False), not v.isdevelop(), v)
return max(pkg.versions, key=key_fn)
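The three-part key described in the comment, exercised on plain data (stand-ins for Spack's Version objects and pkg.versions dict):

versions = {
    "develop": {},
    "2.1": {},
    "2.0": {"preferred": True},
}

def is_develop(v):
    return v == "develop"  # stand-in for Version.isdevelop()

key_fn = lambda v: (versions[v].get("preferred", False), not is_develop(v), v)
print(max(versions, key=key_fn))  # 2.0: the preferred flag outranks being newer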
class WindowsRPath:
@@ -244,7 +245,10 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
if version_str:
objs_by_version[version_str].append(obj)
except Exception as e:
tty.debug(f"Cannot detect the version of '{obj}' [{str(e)}]")
msg = (
"An error occurred when trying to detect " 'the version of "{0}" [{1}]'
)
tty.debug(msg.format(obj, str(e)))
specs = []
for version_str, objs in objs_by_version.items():
@@ -257,23 +261,27 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
if isinstance(variant, str):
variant = (variant, {})
variant_str, extra_attributes = variant
spec_str = f"{cls.name}@{version_str} {variant_str}"
spec_str = "{0}@{1} {2}".format(cls.name, version_str, variant_str)
# Pop a few reserved keys from extra attributes, since
# they have a different semantics
external_path = extra_attributes.pop("prefix", None)
external_modules = extra_attributes.pop("modules", None)
try:
spec = spack.spec.Spec.from_detection(
spec = spack.spec.Spec(
spec_str,
external_path=external_path,
external_modules=external_modules,
extra_attributes=extra_attributes,
)
except Exception as e:
tty.debug(f'Parsing failed [spec_str="{spec_str}", error={str(e)}]')
msg = 'Parsing failed [spec_str="{0}", error={1}]'
tty.debug(msg.format(spec_str, str(e)))
else:
specs.append(spec)
specs.append(
spack.spec.Spec.from_detection(
spec, extra_attributes=extra_attributes
)
)
return sorted(specs)
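A hedged, standalone sketch of how one detected version/variant pair turns into a spec string as in this hunk (package name, variant, and prefix are invented):

name = "zlib"
objs_by_version = {"1.2.11": ["/usr/lib/libz.so"]}

for version_str, objs in objs_by_version.items():
    variant_str, extra_attributes = "+shared", {"prefix": "/usr"}
    spec_str = "{0}@{1} {2}".format(name, version_str, variant_str)
    # prefix/modules are reserved keys and are pulled out of extra_attributes
    external_path = extra_attributes.pop("prefix", None)
    external_modules = extra_attributes.pop("modules", None)
    print(spec_str, external_path, external_modules)
    # zlib@1.2.11 +shared /usr None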
@@ -732,7 +740,7 @@ def __init__(self, spec):
raise ValueError(msg.format(self))
# init internal variables
self._stage: Optional[StageComposite] = None
self._stage = None
self._fetcher = None
self._tester: Optional["PackageTest"] = None
@@ -860,7 +868,7 @@ def module(cls):
We use this to add variables to package modules. This makes
install() methods easier to write (e.g., can call configure())
"""
return importlib.import_module(cls.__module__)
return __import__(cls.__module__, fromlist=[cls.__name__])
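The difference between the two import styles in this hunk, shown on a standard-library module: with a non-empty fromlist, __import__ returns the submodule itself, just like importlib.import_module; without it, the top-level package comes back:

import importlib

m1 = importlib.import_module("os.path")
m2 = __import__("os.path", fromlist=["join"])  # fromlist forces the submodule
m3 = __import__("os.path")                     # no fromlist: top-level package

print(m1 is m2)      # True
print(m3.__name__)   # os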
@classproperty
def namespace(cls):
@@ -876,7 +884,7 @@ def fullname(cls):
def fullnames(cls):
"""Fullnames for this package and any packages from which it inherits."""
fullnames = []
for cls in cls.__mro__:
for cls in inspect.getmro(cls):
namespace = getattr(cls, "namespace", None)
if namespace:
fullnames.append("%s.%s" % (namespace, cls.name))
@@ -1090,10 +1098,9 @@ def _make_resource_stage(self, root_stage, resource):
root=root_stage,
resource=resource,
name=self._resource_stage(resource),
mirror_paths=spack.mirror.default_mirror_layout(
mirror_paths=spack.mirror.mirror_archive_paths(
resource.fetcher, os.path.join(self.name, pretty_resource_name)
),
mirrors=spack.mirror.MirrorCollection(source=True).values(),
path=self.path,
)
@@ -1105,7 +1112,7 @@ def _make_root_stage(self, fetcher):
# Construct a mirror path (TODO: get this out of package.py)
format_string = "{name}-{version}"
pretty_name = self.spec.format_path(format_string)
mirror_paths = spack.mirror.default_mirror_layout(
mirror_paths = spack.mirror.mirror_archive_paths(
fetcher, os.path.join(self.name, pretty_name), self.spec
)
# Construct a path where the stage should build..
@@ -1114,7 +1121,6 @@ def _make_root_stage(self, fetcher):
stage = Stage(
fetcher,
mirror_paths=mirror_paths,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
name=stage_name,
path=self.path,
search_fn=self._download_search,
@@ -1171,7 +1177,7 @@ def stage(self):
return self._stage
@stage.setter
def stage(self, stage: StageComposite):
def stage(self, stage):
"""Allow a stage object to be set to override the default."""
self._stage = stage
@@ -1465,7 +1471,6 @@ def do_fetch(self, mirror_only=False):
checksum
and (self.version not in self.versions)
and (not isinstance(self.version, GitVersion))
and ("dev_path" not in self.spec.variants)
):
tty.warn(
"There is no checksum on file to fetch %s safely."
@@ -1742,7 +1747,7 @@ def _has_make_target(self, target):
bool: True if 'target' is found, else False
"""
# Prevent altering LC_ALL for 'make' outside this function
make = copy.deepcopy(self.module.make)
make = copy.deepcopy(inspect.getmodule(self).make)
# Use English locale for missing target message comparison
make.add_default_env("LC_ALL", "C")
@@ -1792,7 +1797,7 @@ def _if_make_target_execute(self, target, *args, **kwargs):
"""
if self._has_make_target(target):
# Execute target
self.module.make(target, *args, **kwargs)
inspect.getmodule(self).make(target, *args, **kwargs)
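One plausible, self-contained way to exercise the LC_ALL pinning described above (the probe command and the matched message are illustrative and assume GNU make is on PATH; this is not Spack's exact check):

import os
import subprocess

def has_make_target(target, cwd="."):
    env = dict(os.environ, LC_ALL="C")  # force English diagnostics
    proc = subprocess.run(
        ["make", "-n", target],
        cwd=cwd,
        env=env,
        capture_output=True,
        text=True,
    )
    return "No rule to make target" not in proc.stderr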
def _has_ninja_target(self, target):
"""Checks to see if 'target' is a valid target in a Ninja build script.
@@ -1803,7 +1808,7 @@ def _has_ninja_target(self, target):
Returns:
bool: True if 'target' is found, else False
"""
ninja = self.module.ninja
ninja = inspect.getmodule(self).ninja
# Check if we have a Ninja build script
if not os.path.exists("build.ninja"):
@@ -1832,7 +1837,7 @@ def _if_ninja_target_execute(self, target, *args, **kwargs):
"""
if self._has_ninja_target(target):
# Execute target
self.module.ninja(target, *args, **kwargs)
inspect.getmodule(self).ninja(target, *args, **kwargs)
def _get_needed_resources(self):
# We use intersects here because it would also work if self.spec is abstract

View File

@@ -5,11 +5,10 @@
import stat
import warnings
import spack.config
import spack.error
import spack.repo
import spack.spec
from spack.config import ConfigError
from spack.util.path import canonicalize_path
from spack.version import Version
_lesser_spec_types = {"compiler": spack.spec.CompilerSpec, "version": Version}
@@ -155,6 +154,44 @@ def preferred_variants(cls, pkg_name):
)
def spec_externals(spec):
"""Return a list of external specs (w/external directory path filled in),
one for each known external installation.
"""
# break circular import.
from spack.util.module_cmd import path_from_modules # noqa: F401
def _package(maybe_abstract_spec):
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
return pkg_cls(maybe_abstract_spec)
allpkgs = spack.config.get("packages")
names = set([spec.name])
names |= set(vspec.name for vspec in _package(spec).virtuals_provided)
external_specs = []
for name in names:
pkg_config = allpkgs.get(name, {})
pkg_externals = pkg_config.get("externals", [])
for entry in pkg_externals:
spec_str = entry["spec"]
external_path = entry.get("prefix", None)
if external_path:
external_path = canonicalize_path(external_path)
external_modules = entry.get("modules", None)
external_spec = spack.spec.Spec.from_detection(
spack.spec.Spec(
spec_str, external_path=external_path, external_modules=external_modules
),
extra_attributes=entry.get("extra_attributes", {}),
)
if external_spec.intersects(spec):
external_specs.append(external_spec)
# Defensively copy returned specs
return [s.copy() for s in external_specs]
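The externals scan above, reduced to plain dictionaries (the real code goes through spack.config and builds Spec objects; the entries here are invented):

allpkgs = {
    "openmpi": {
        "externals": [
            {"spec": "openmpi@4.1.5", "prefix": "/opt/openmpi-4.1.5"},
            {"spec": "openmpi@3.1.6", "modules": ["openmpi/3.1.6"]},
        ]
    }
}

external_specs = []
for entry in allpkgs.get("openmpi", {}).get("externals", []):
    spec_str = entry["spec"]
    external_path = entry.get("prefix", None)
    external_modules = entry.get("modules", None)
    external_specs.append((spec_str, external_path, external_modules))

print(external_specs)
# [('openmpi@4.1.5', '/opt/openmpi-4.1.5', None), ('openmpi@3.1.6', None, ['openmpi/3.1.6'])]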
def is_spec_buildable(spec):
"""Return true if the spec is configured as buildable"""
allpkgs = spack.config.get("packages")

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import hashlib
import inspect
import os
import os.path
import pathlib
@@ -184,8 +185,8 @@ def __init__(
# search mro to look for the file
abs_path: Optional[str] = None
# At different times we call FilePatch on instances and classes
pkg_cls = pkg if isinstance(pkg, type) else pkg.__class__
for cls in pkg_cls.__mro__: # type: ignore
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
for cls in inspect.getmro(pkg_cls): # type: ignore
if not hasattr(cls, "module"):
# We've gone too far up the MRO
break
@@ -318,19 +319,18 @@ def stage(self) -> "spack.stage.Stage":
self.url, archive_sha256=self.archive_sha256, expanded_sha256=self.sha256
)
else:
fetcher = fs.URLFetchStrategy(url=self.url, sha256=self.sha256, expand=False)
fetcher = fs.URLFetchStrategy(self.url, sha256=self.sha256, expand=False)
# The same package can have multiple patches with the same name but
# with different contents, therefore apply a subset of the hash.
name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])
per_package_ref = os.path.join(self.owner.split(".")[-1], name)
mirror_ref = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
mirror_ref = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
self._stage = spack.stage.Stage(
fetcher,
name=f"{spack.stage.stage_prefix}patch-{fetch_digest}",
mirror_paths=mirror_ref,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
)
return self._stage
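How the per-package patch reference above is assembled, with made-up values:

import os

url = "https://example.com/fix-build.patch"  # hypothetical patch URL
fetch_digest = "94a1b2c3d4e5f60718293a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4"
owner = "builtin.zlib"                       # hypothetical owner namespace

name = "{0}-{1}".format(os.path.basename(url), fetch_digest[:7])
per_package_ref = os.path.join(owner.split(".")[-1], name)
print(per_package_ref)  # zlib/fix-build.patch-94a1b2c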

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
from ._functions import _host, by_name, platforms, reset
from ._functions import _host, by_name, platforms, prevent_cray_detection, reset
from ._platform import Platform
from .darwin import Darwin
from .freebsd import FreeBSD
@@ -23,6 +23,7 @@
"host",
"by_name",
"reset",
"prevent_cray_detection",
]
#: The "real" platform of the host running Spack. This should not be changed

Some files were not shown because too many files have changed in this diff.