Compare commits
5 Commits
hs/ci/regr...python/add

Commits:
474cca3005
868cb442e9
c0e64718c7
8b2749f95c
fbfbb9710d
@@ -1,5 +1,4 @@
{
  "name": "Ubuntu 20.04",
  "image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
  "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
}
@@ -1,5 +0,0 @@
{
  "name": "Ubuntu 22.04",
  "image": "ghcr.io/spack/ubuntu-22.04:v2024-05-07",
  "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
}
73 .github/workflows/audit.yaml vendored Normal file
@@ -0,0 +1,73 @@
name: audit

on:
  workflow_call:
    inputs:
      with_coverage:
        required: true
        type: string
      python_version:
        required: true
        type: string

concurrency:
  group: audit-${{inputs.python_version}}-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  cancel-in-progress: true

jobs:
  # Run audits on all the packages in the built-in repository
  package-audits:
    runs-on: ${{ matrix.system.os }}
    strategy:
      matrix:
        system:
          - { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
          - { os: ubuntu-latest, shell: bash }
          - { os: macos-latest, shell: bash }
    defaults:
      run:
        shell: ${{ matrix.system.shell }}
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: ${{inputs.python_version}}
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest coverage[toml]
      - name: Setup for Windows run
        if: runner.os == 'Windows'
        run: |
          python -m pip install --upgrade pywin32
      - name: Package audits (with coverage)
        if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
        run: |
          . share/spack/setup-env.sh
          coverage run $(which spack) audit packages
          coverage run $(which spack) audit configs
          coverage run $(which spack) -d audit externals
          coverage combine
          coverage xml
      - name: Package audits (without coverage)
        if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
        run: |
          . share/spack/setup-env.sh
          spack -d audit packages
          spack -d audit configs
          spack -d audit externals
      - name: Package audits (without coverage)
        if: ${{ runner.os == 'Windows' }}
        run: |
          . share/spack/setup-env.sh
          spack -d audit packages
          ./share/spack/qa/validate_last_exit.ps1
          spack -d audit configs
          ./share/spack/qa/validate_last_exit.ps1
          spack -d audit externals
          ./share/spack/qa/validate_last_exit.ps1
      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        if: ${{ inputs.with_coverage == 'true' }}
        with:
          flags: unittests,audits
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
170 .github/workflows/bootstrap.yml vendored Normal file
@@ -0,0 +1,170 @@
name: Bootstrapping

on:
  # This Workflow can be triggered manually
  workflow_dispatch:
  workflow_call:
  schedule:
    # nightly at 2:16 AM
    - cron: '16 2 * * *'

concurrency:
  group: bootstrap-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  cancel-in-progress: true

jobs:
  distros-clingo-sources:
    runs-on: ubuntu-latest
    container: ${{ matrix.image }}
    strategy:
      matrix:
        image: ["fedora:latest", "opensuse/leap:latest"]
    steps:
      - name: Setup Fedora
        if: ${{ matrix.image == 'fedora:latest' }}
        run: |
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
              make patch unzip which xz python3 python3-devel tree \
              cmake bison bison-devel libstdc++-static
      - name: Setup OpenSUSE
        if: ${{ matrix.image == 'opensuse/leap:latest' }}
        run: |
          # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
          zypper update -y || zypper update -y
          zypper install -y \
              bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
              make patch unzip which xz python3 python3-devel tree \
              cmake bison
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
          spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
          spack external find cmake bison
          spack -d solve zlib
          tree ~/.spack/bootstrap/store/

  clingo-sources:
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
    steps:
      - name: Setup macOS
        if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
        run: |
          brew install cmake bison tree
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: "3.12"
      - name: Bootstrap clingo
        env:
          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
          VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
        run: |
          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
          spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
          spack external find --not-buildable cmake bison
          spack -d solve zlib
          ${{ env.VALIDATE_LAST_EXIT }}
          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/

  gnupg-sources:
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
    steps:
      - name: Setup macOS
        if: ${{ matrix.runner != 'ubuntu-latest' }}
        run: |
          brew install tree gawk
          sudo rm -rf $(command -v gpg gpg2)
      - name: Setup Ubuntu
        if: ${{ matrix.runner == 'ubuntu-latest' }}
        run: sudo rm -rf $(command -v gpg gpg2 patchelf)
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
          spack solve zlib
          spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
          spack -d gpg list
          tree ~/.spack/bootstrap/store/

  from-binaries:
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
    steps:
      - name: Setup macOS
        if: ${{ matrix.runner != 'ubuntu-latest' }}
        run: |
          brew install tree
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Setup Ubuntu
        if: ${{ matrix.runner == 'ubuntu-latest' }}
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: |
            3.8
            3.9
            3.10
            3.11
            3.12
      - name: Set bootstrap sources
        run: |
          source share/spack/setup-env.sh
          spack bootstrap disable github-actions-v0.4
          spack bootstrap disable spack-install
      - name: Bootstrap clingo
        run: |
          set -e
          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
            not_found=1
            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
            if [[ -d "$ver_dir" ]] ; then
              echo "Testing $ver_dir"
              if $ver_dir/python --version ; then
                export PYTHON="$ver_dir/python"
                not_found=0
                old_path="$PATH"
                export PATH="$ver_dir:$PATH"
                ./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
                export PATH="$old_path"
              fi
            fi
            if (($not_found)) ; then
              echo Required python version $ver not found in runner!
              exit 1
            fi
          done
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
          spack -d gpg list
          tree ~/.spack/bootstrap/store/
133 .github/workflows/build-containers.yml vendored Normal file
@@ -0,0 +1,133 @@
name: Containers

on:
  # This Workflow can be triggered manually
  workflow_dispatch:
  # Build new Spack develop containers nightly.
  schedule:
    - cron: '34 0 * * *'
  # Run on pull requests that modify this file
  pull_request:
    branches:
      - develop
    paths:
      - '.github/workflows/build-containers.yml'
      - 'share/spack/docker/*'
      - 'share/spack/templates/container/*'
      - 'lib/spack/spack/container/*'
  # Let's also build & tag Spack containers on releases.
  release:
    types: [published]

concurrency:
  group: build_containers-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  cancel-in-progress: true

jobs:
  deploy-images:
    runs-on: ubuntu-latest
    permissions:
      packages: write
    strategy:
      # Even if one container fails to build we still want the others
      # to continue their builds.
      fail-fast: false
      # A matrix of Dockerfile paths, associated tags, and which architectures
      # they support.
      matrix:
        # Meaning of the various items in the matrix list
        # 0: Container name (e.g. ubuntu-bionic)
        # 1: Platforms to build for
        # 2: Base image (e.g. ubuntu:22.04)
        dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
                     [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                     [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                     [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
                     [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
                     [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
                     [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
                     [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                     [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
                     [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
                     [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
    name: Build ${{ matrix.dockerfile[0] }}
    if: github.repository == 'spack/spack'
    steps:
      - name: Checkout
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332

      - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
        id: docker_meta
        with:
          images: |
            ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
            ${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
          tags: |
            type=schedule,pattern=nightly
            type=schedule,pattern=develop
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{major}}
            type=ref,event=branch
            type=ref,event=pr

      - name: Generate the Dockerfile
        env:
          SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
        run: |
          .github/workflows/bin/generate_spack_yaml_containerize.sh
          . share/spack/setup-env.sh
          mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
          spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
          printf "Preparing to build ${{ env.container }} from dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile"
          if [ ! -f "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile" ]; then
            printf "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile does not exist"
            exit 1;
          fi

      - name: Upload Dockerfile
        uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
        with:
          name: dockerfiles_${{ matrix.dockerfile[0] }}
          path: dockerfiles

      - name: Set up QEMU
        uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Log in to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
        uses: docker/build-push-action@16ebe778df0e7752d2cfcbd924afdbbd89c1a755
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.docker_meta.outputs.tags }}
          labels: ${{ steps.docker_meta.outputs.labels }}

  merge-dockerfiles:
    runs-on: ubuntu-latest
    needs: deploy-images
    steps:
      - name: Merge Artifacts
        uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
        with:
          name: dockerfiles
          pattern: dockerfiles_*
          delete-merged: true
16 .github/workflows/ci.yaml vendored
@@ -74,3 +74,19 @@ jobs:
  # job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
  # setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
  #
  bootstrap:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
    needs: [ prechecks, changes ]
    uses: ./.github/workflows/bootstrap.yml
    secrets: inherit
  unit-tests:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
    needs: [ prechecks, changes ]
    uses: ./.github/workflows/unit_tests.yaml
    secrets: inherit
  all:
    needs: [ unit-tests, bootstrap ]
    runs-on: ubuntu-latest
    steps:
      - name: Success
        run: "true"
31 .github/workflows/nightly-win-builds.yml vendored Normal file
@@ -0,0 +1,31 @@
name: Windows Paraview Nightly

on:
  schedule:
    - cron: '0 2 * * *'  # Run at 2 am

defaults:
  run:
    shell:
      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}


jobs:
  build-paraview-deps:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip six pywin32 setuptools coverage
      - name: Build Test
        run: |
          spack compiler find
          spack external find cmake ninja win-sdk win-wdk wgl msmpi
          spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
          exit 0
261 .github/workflows/unit_tests.yaml vendored Normal file
@@ -0,0 +1,261 @@
name: unit tests

on:
  workflow_dispatch:
  workflow_call:

concurrency:
  group: unit_tests-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  cancel-in-progress: true

jobs:
  # Run unit tests with different configurations on linux
  ubuntu:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest]
        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
        concretizer: ['clingo']
        on_develop:
          - ${{ github.ref == 'refs/heads/develop' }}
        include:
          - python-version: '3.11'
            os: ubuntu-latest
            concretizer: original
            on_develop: ${{ github.ref == 'refs/heads/develop' }}
          - python-version: '3.6'
            os: ubuntu-20.04
            concretizer: clingo
            on_develop: ${{ github.ref == 'refs/heads/develop' }}
        exclude:
          - python-version: '3.7'
            os: ubuntu-latest
            concretizer: 'clingo'
            on_develop: false
          - python-version: '3.8'
            os: ubuntu-latest
            concretizer: 'clingo'
            on_develop: false
          - python-version: '3.9'
            os: ubuntu-latest
            concretizer: 'clingo'
            on_develop: false
          - python-version: '3.10'
            os: ubuntu-latest
            concretizer: 'clingo'
            on_develop: false
          - python-version: '3.11'
            os: ubuntu-latest
            concretizer: 'clingo'
            on_develop: false

    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
        run: |
          sudo apt-get -y update
          # Needed for unit tests
          sudo apt-get -y install \
              coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
              cmake bison libbison-dev kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/bin/setup_git.sh
      - name: Bootstrap clingo
        if: ${{ matrix.concretizer == 'clingo' }}
        env:
          SPACK_PYTHON: python
        run: |
          . share/spack/setup-env.sh
          spack bootstrap disable spack-install
          spack bootstrap now
          spack -v solve zlib
      - name: Run unit tests
        env:
          SPACK_PYTHON: python
          SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
          SPACK_TEST_PARALLEL: 2
          COVERAGE: true
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
  # Test shell integration
  shell:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: '3.11'
      - name: Install System packages
        run: |
          sudo apt-get -y update
          # Needed for shell tests
          sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/bin/setup_git.sh
      - name: Run shell tests
        env:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: shelltests,linux
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true

  # Test RHEL8 UBI with platform Python. This job is run
  # only on PRs modifying core Spack
  rhel8-platform-python:
    runs-on: ubuntu-latest
    container: registry.access.redhat.com/ubi8/ubi
    steps:
      - name: Install dependencies
        run: |
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch tcl unzip which xz
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
      - name: Setup repo and non-root user
        run: |
          git --version
          git config --global --add safe.directory /__w/spack/spack
          git fetch --unshallow
          . .github/workflows/bin/setup_git.sh
          useradd spack-test
          chown -R spack-test .
      - name: Run unit tests
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack -d bootstrap now --dev
          spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: '3.11'
      - name: Install System packages
        run: |
          sudo apt-get -y update
          sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
          . .github/workflows/bin/setup_git.sh
      - name: Run unit tests (full suite with coverage)
        env:
          COVERAGE: true
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: unittests,linux,clingo
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
  # Run unit tests on MacOS
  macos:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [macos-13, macos-14]
        python-version: ["3.11"]
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools
          pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
      - name: Setup Homebrew packages
        run: |
          brew install dash fish gcc gnupg2 kcov
      - name: Run unit tests
        env:
          SPACK_TEST_SOLVER: clingo
          SPACK_TEST_PARALLEL: 4
        run: |
          git --version
          . .github/workflows/bin/setup_git.sh
          . share/spack/setup-env.sh
          $(which spack) bootstrap disable spack-install
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: unittests,macos
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
  # Run unit tests on Windows
  windows:
    defaults:
      run:
        shell:
          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: 3.9
      - name: Install Python packages
        run: |
          python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
      - name: Create local develop
        run: |
          ./.github/workflows/bin/setup_git.ps1
      - name: Unit Test
        run: |
          spack unit-test -x --verbose --cov --cov-config=pyproject.toml
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
        with:
          flags: unittests,windows
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
38 .github/workflows/valid-style.yml vendored
@@ -19,7 +19,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
        with:
          fetch-depth: 0
      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
        with:
          python-version: '3.11'
          cache: 'pip'
@@ -54,4 +54,36 @@ jobs:
      - name: Run style tests
        run: |
          share/spack/qa/run-style-tests

  audit:
    uses: ./.github/workflows/audit.yaml
    secrets: inherit
    with:
      with_coverage: ${{ inputs.with_coverage }}
      python_version: '3.11'
  # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
  bootstrap-dev-rhel8:
    runs-on: ubuntu-latest
    container: registry.access.redhat.com/ubi8/ubi
    steps:
      - name: Install dependencies
        run: |
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch tcl unzip which xz
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
      - name: Setup repo and non-root user
        run: |
          git --version
          git config --global --add safe.directory /__w/spack/spack
          git fetch --unshallow
          . .github/workflows/bin/setup_git.sh
          useradd spack-test
          chown -R spack-test .
      - name: Bootstrap Spack development environment
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack debug report
          spack -d bootstrap now --dev
          spack style -t black
          spack unit-test -V

@@ -115,6 +115,12 @@ config:
  suppress_gpg_warnings: false


  # If set to true, Spack will attempt to build any compiler on the spec
  # that is not already available. If set to False, Spack will only use
  # compilers already configured in compilers.yaml
  install_missing_compilers: false


  # If set to true, Spack will always check checksums after downloading
  # archives. If false, Spack skips the checksum step.
  checksum: true
@@ -164,6 +170,23 @@ config:
  # If set to true, Spack will use ccache to cache C compiles.
  ccache: false


  # The concretization algorithm to use in Spack. Options are:
  #
  # 'clingo': Uses a logic solver under the hood to solve DAGs with full
  #           backtracking and optimization for user preferences. Spack will
  #           try to bootstrap the logic solver, if not already available.
  #
  # 'original': Spack's original greedy, fixed-point concretizer. This
  #             algorithm can make decisions too early and will not backtrack
  #             sufficiently for many specs. This will soon be deprecated in
  #             favor of clingo.
  #
  # See `concretizer.yaml` for more settings you can fine-tune when
  # using clingo.
  concretizer: clingo


  # How long to wait to lock the Spack installation database. This lock is used
  # when Spack needs to manage its own package metadata and all operations are
  # expected to complete within the default time limit. The timeout should
@@ -20,14 +20,12 @@ packages:
      awk: [gawk]
      armci: [armcimpi]
      blas: [openblas, amdblis]
      c: [gcc]
      cxx: [gcc]
      D: [ldc]
      daal: [intel-oneapi-daal]
      elf: [elfutils]
      fftw-api: [fftw, amdfftw]
      flame: [libflame, amdlibflame]
      fortran: [gcc]
      fortran-rt: [gcc-runtime, intel-oneapi-runtime]
      fuse: [libfuse]
      gl: [glx, osmesa]
@@ -72,13 +70,3 @@ packages:
    permissions:
      read: world
      write: user
  cray-mpich:
    buildable: false
  cray-mvapich2:
    buildable: false
  fujitsu-mpi:
    buildable: false
  hpcx-mpi:
    buildable: false
  spectrum-mpi:
    buildable: false
@@ -1,5 +1,6 @@
config:
  locks: false
  concretizer: clingo
  build_stage::
    - '$spack/.staging'
  stage_name: '{name}-{version}-{hash:7}'
@@ -206,7 +206,6 @@ def setup(sphinx):
    ("py:class", "six.moves.urllib.parse.ParseResult"),
    ("py:class", "TextIO"),
    ("py:class", "hashlib._Hash"),
    ("py:class", "concurrent.futures._base.Executor"),
    # Spack classes that are private and we don't want to expose
    ("py:class", "spack.provider_index._IndexBase"),
    ("py:class", "spack.repo._PrependFileLoader"),
@@ -218,7 +217,6 @@ def setup(sphinx):
    ("py:class", "spack.spec.SpecfileReaderBase"),
    ("py:class", "spack.install_test.Pb"),
    ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
    ("py:class", "spack.traverse.EdgeAndDepth"),
]

# The reST default role (used for this markup: `text`) to use for all documents.
@@ -181,6 +181,10 @@ Spec-related modules
:mod:`spack.parser`
  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.

:mod:`spack.concretize`
  Contains :class:`~spack.concretize.Concretizer` implementation,
  which allows site administrators to change Spack's :ref:`concretization-policies`.

:mod:`spack.version`
  Implements a simple :class:`~spack.version.Version` class with simple
  comparison semantics. Also implements :class:`~spack.version.VersionRange`
@@ -863,7 +863,7 @@ named list ``compilers`` is ``['%gcc', '%clang', '%intel']`` on
   spack:
     definitions:
     - compilers: ['%gcc', '%clang']
     - when: arch.satisfies('target=x86_64:')
     - when: arch.satisfies('x86_64:')
       compilers: ['%intel']

.. note::
@@ -893,9 +893,8 @@ The valid variables for a ``when`` clause are:

#. ``env``. The user environment (usually ``os.environ`` in Python).

#. ``hostname``. The hostname of the system.

#. ``full_hostname``. The fully qualified hostname of the system.
#. ``hostname``. The hostname of the system (if ``hostname`` is an
   executable in the user's PATH).

^^^^^^^^^^^^^^^^^^^^^^^^
SpecLists as Constraints
@@ -1263,11 +1263,6 @@ Git fetching supports the following parameters to ``version``:
  option ``--depth 1`` will be used if the version of git and the specified
  transport protocol support it, and ``--single-branch`` will be used if the
  version of git supports it.
* ``git_sparse_paths``: Use ``sparse-checkout`` to only clone these relative paths.
  This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
  large repositories that have separate portions that can be built independently.
  If paths provided are directories then all the subdirectories and associated files
  will also be cloned.

Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1366,41 +1361,6 @@ Submodules
For more information about git submodules see the manpage of git: ``man
git-submodule``.

Sparse-Checkout
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
sparse-checkout feature. This will only clone the paths that are specified in the
``git_sparse_paths`` attribute for the package along with the files in the top level directory.
This feature allows you to only clone what you need from a large repository.
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
If ``git_sparse_paths`` is supplied and the git version is too old
then a warning will be issued and that package will use the standard cloning operations instead.
``git_sparse_paths`` should be supplied as a list of paths, a callable function for versions,
or a more complex package attribute using the ``@property`` decorator. The return value should be
a list for a callable implementation of ``git_sparse_paths``.

.. code-block:: python

   def sparse_path_function(package):
       """a callable function that can be used inside a version"""
       # paths can be directories or functions, all subdirectories and files are included
       paths = ["doe", "rae", "me/file.cpp"]
       if package.spec.version > Version("1.2.0"):
           paths.extend(["fae"])
       return paths

   class MyPackage(package):
       # can also be a package attribute that will be used if not specified in versions
       git_sparse_paths = ["doe", "rae"]

       # use the package attribute
       version("1.0.0")
       version("1.1.0")
       # use the function
       version("1.1.5", git_sparse_paths=sparse_path_func)
       version("1.2.0", git_sparse_paths=sparse_path_func)
       version("1.2.5", git_sparse_paths=sparse_path_func)
       version("1.1.5", git_sparse_paths=sparse_path_func)

.. _github-fetch:

^^^^^^
@@ -663,7 +663,11 @@ build the package.

When including a bootstrapping phase as in the example above, the result is that
the bootstrapped compiler packages will be pushed to the binary mirror (and the
local artifacts mirror) before the actual release specs are built.
local artifacts mirror) before the actual release specs are built. In this case,
the jobs corresponding to subsequent release specs are configured to
``install_missing_compilers``, so that if spack is asked to install a package
with a compiler it doesn't know about, it can be quickly installed from the
binary mirror first.

Since bootstrapping compilers is optional, those items can be left out of the
environment/stack file, and in that case no bootstrapping will be done (only the
@@ -5,8 +5,8 @@ sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.3
urllib3==2.2.2
pytest==8.3.2
isort==5.13.2
black==24.8.0
flake8==7.1.1
2 lib/spack/external/__init__.py vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)

astunparse
----------------
24 lib/spack/external/_vendoring/distro/distro.py vendored
@@ -1265,29 +1265,27 @@ def _distro_release_info(self) -> Dict[str, str]:
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
        else:
            try:
                with os.scandir(self.etc_dir) as it:
                    etc_files = [
                        p.path for p in it
                        if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
                    ]
                basenames = [
                    basename
                    for basename in os.listdir(self.etc_dir)
                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
                    and os.path.isfile(os.path.join(self.etc_dir, basename))
                ]
                # We sort for repeatability in cases where there are multiple
                # distro specific files; e.g. CentOS, Oracle, Enterprise all
                # containing `redhat-release` on top of their own.
                etc_files.sort()
                basenames.sort()
            except OSError:
                # This may occur when /etc is not readable but we can't be
                # sure about the *-release files. Check common entries of
                # /etc for information. If they turn out to not be there the
                # error is handled in `_parse_distro_release_file()`.
                etc_files = [
                    os.path.join(self.etc_dir, basename)
                    for basename in _DISTRO_RELEASE_BASENAMES
                ]

            for filepath in etc_files:
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
            basenames = _DISTRO_RELEASE_BASENAMES
            for basename in basenames:
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
                if match is None:
                    continue
                filepath = os.path.join(self.etc_dir, basename)
                distro_info = self._parse_distro_release_file(filepath)
                # The name is always present if the pattern matches.
                if "name" not in distro_info:
173 lib/spack/external/_vendoring/jsonschema/_format.py vendored
@@ -231,6 +231,96 @@ def is_host_name(instance):
    return True


try:
    # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
    import idna
except ImportError:
    pass
else:
    @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
    def is_idn_host_name(instance):
        if not isinstance(instance, str_types):
            return True
        idna.encode(instance)
        return True


try:
    import rfc3987
except ImportError:
    try:
        from rfc3986_validator import validate_rfc3986
    except ImportError:
        pass
    else:
        @_checks_drafts(name="uri")
        def is_uri(instance):
            if not isinstance(instance, str_types):
                return True
            return validate_rfc3986(instance, rule="URI")

        @_checks_drafts(
            draft6="uri-reference",
            draft7="uri-reference",
            raises=ValueError,
        )
        def is_uri_reference(instance):
            if not isinstance(instance, str_types):
                return True
            return validate_rfc3986(instance, rule="URI_reference")

else:
    @_checks_drafts(draft7="iri", raises=ValueError)
    def is_iri(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="IRI")

    @_checks_drafts(draft7="iri-reference", raises=ValueError)
    def is_iri_reference(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="IRI_reference")

    @_checks_drafts(name="uri", raises=ValueError)
    def is_uri(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="URI")

    @_checks_drafts(
        draft6="uri-reference",
        draft7="uri-reference",
        raises=ValueError,
    )
    def is_uri_reference(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="URI_reference")


try:
    from strict_rfc3339 import validate_rfc3339
except ImportError:
    try:
        from rfc3339_validator import validate_rfc3339
    except ImportError:
        validate_rfc3339 = None

if validate_rfc3339:
    @_checks_drafts(name="date-time")
    def is_datetime(instance):
        if not isinstance(instance, str_types):
            return True
        return validate_rfc3339(instance)

    @_checks_drafts(draft7="time")
    def is_time(instance):
        if not isinstance(instance, str_types):
            return True
        return is_datetime("1970-01-01T" + instance)


@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
    if not isinstance(instance, str_types):
@@ -250,3 +340,86 @@ def is_draft3_time(instance):
    if not isinstance(instance, str_types):
        return True
    return datetime.datetime.strptime(instance, "%H:%M:%S")


try:
    import webcolors
except ImportError:
    pass
else:
    def is_css_color_code(instance):
        return webcolors.normalize_hex(instance)

    @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
    def is_css21_color(instance):
        if (
            not isinstance(instance, str_types) or
            instance.lower() in webcolors.css21_names_to_hex
        ):
            return True
        return is_css_color_code(instance)

    def is_css3_color(instance):
        if instance.lower() in webcolors.css3_names_to_hex:
            return True
        return is_css_color_code(instance)


try:
    import jsonpointer
except ImportError:
    pass
else:
    @_checks_drafts(
        draft6="json-pointer",
        draft7="json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_json_pointer(instance):
        if not isinstance(instance, str_types):
            return True
        return jsonpointer.JsonPointer(instance)

    # TODO: I don't want to maintain this, so it
    #       needs to go either into jsonpointer (pending
    #       https://github.com/stefankoegl/python-json-pointer/issues/34) or
    #       into a new external library.
    @_checks_drafts(
        draft7="relative-json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_relative_json_pointer(instance):
        # Definition taken from:
        # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
        if not isinstance(instance, str_types):
            return True
        non_negative_integer, rest = [], ""
        for i, character in enumerate(instance):
            if character.isdigit():
                non_negative_integer.append(character)
                continue

            if not non_negative_integer:
                return False

            rest = instance[i:]
            break
        return (rest == "#") or jsonpointer.JsonPointer(rest)


try:
    import uritemplate.exceptions
except ImportError:
    pass
else:
    @_checks_drafts(
        draft6="uri-template",
        draft7="uri-template",
        raises=uritemplate.exceptions.InvalidTemplate,
    )
    def is_uri_template(
        instance,
        template_validator=uritemplate.Validator().force_balanced_braces(),
    ):
        template = uritemplate.URITemplate(instance)
        return template_validator.validate(template)
12 lib/spack/external/archspec/cpu/detect.py vendored
@@ -47,11 +47,7 @@ def decorator(factory):


def partial_uarch(
    name: str = "",
    vendor: str = "",
    features: Optional[Set[str]] = None,
    generation: int = 0,
    cpu_part: str = "",
    name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
) -> Microarchitecture:
    """Construct a partial microarchitecture, from information gathered during system scan."""
    return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
        features=features or set(),
        compilers={},
        generation=generation,
        cpu_part=cpu_part,
    )


@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
        return partial_uarch(
            vendor=_canonicalize_aarch64_vendor(data),
            features=_feature_set(data, key="Features"),
            cpu_part=data.get("CPU part", ""),
        )

    if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
    generic_candidates = [c for c in candidates if c.vendor == "generic"]
    best_generic = max(generic_candidates, key=sorting_fn)

    # Relevant for AArch64. Filter on "cpu_part" if we have any match
    if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
        candidates = [c for c in candidates if info.cpu_part == c.cpu_part]

    # Filter the candidates to be descendant of the best generic candidate.
    # This is to avoid that the lack of a niche feature that can be disabled
    # from e.g. BIOS prevents detection of a reasonably performant architecture
@@ -2,7 +2,9 @@
# Archspec Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Types and functions to manage information on CPU microarchitectures."""
"""Types and functions to manage information
on CPU microarchitectures.
"""
import functools
import platform
import re
@@ -63,24 +65,21 @@ class Microarchitecture:
          passed in as argument above.
        * versions: versions that support this micro-architecture.

        generation (int): generation of the micro-architecture, if relevant.
        cpu_part (str): cpu part of the architecture, if relevant.
        generation (int): generation of the micro-architecture, if
            relevant.
    """

    # pylint: disable=too-many-arguments,too-many-instance-attributes
    # pylint: disable=too-many-arguments
    #: Aliases for micro-architecture's features
    feature_aliases = FEATURE_ALIASES

    def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
    def __init__(self, name, parents, vendor, features, compilers, generation=0):
        self.name = name
        self.parents = parents
        self.vendor = vendor
        self.features = features
        self.compilers = compilers
        # Only relevant for PowerPC
        self.generation = generation
        # Only relevant for AArch64
        self.cpu_part = cpu_part
        # Cache the ancestor computation
        self._ancestors = None

@@ -112,7 +111,6 @@ def __eq__(self, other):
            and self.parents == other.parents  # avoid ancestors here
            and self.compilers == other.compilers
            and self.generation == other.generation
            and self.cpu_part == other.cpu_part
        )

    @coerce_target_names
@@ -145,8 +143,7 @@ def __repr__(self):
        cls_name = self.__class__.__name__
        fmt = (
            cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
            "{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
            "cpu_part={0.cpu_part!r})"
            "{0.features!r}, {0.compilers!r}, {0.generation!r})"
        )
        return fmt.format(self)

@@ -193,7 +190,6 @@ def to_dict(self):
            "generation": self.generation,
            "parents": [str(x) for x in self.parents],
            "compilers": self.compilers,
            "cpupart": self.cpu_part,
        }

    @staticmethod
@@ -206,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
            features=set(data["features"]),
            compilers=data.get("compilers", {}),
            generation=data.get("generation", 0),
            cpu_part=data.get("cpupart", ""),
        )

    def optimization_flags(self, compiler, version):
@@ -365,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
        features = set(values["features"])
        compilers = values.get("compilers", {})
        generation = values.get("generation", 0)
        cpu_part = values.get("cpupart", "")

        targets[name] = Microarchitecture(
            name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
        )
        targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)

    known_targets = {}
    data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
|
@@ -2225,14 +2225,10 @@
|
||||
],
|
||||
"nvhpc": [
|
||||
{
|
||||
"versions": "21.11:23.8",
|
||||
"versions": "21.11:",
|
||||
"name": "zen3",
|
||||
"flags": "-tp {name}",
|
||||
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
|
||||
},
|
||||
{
|
||||
"versions": "23.9:",
|
||||
"flags": "-tp {name}"
|
||||
"warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -2715,8 +2711,7 @@
|
||||
"flags": "-mcpu=thunderx2t99"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0x0af"
|
||||
}
|
||||
},
|
||||
"a64fx": {
|
||||
"from": ["armv8.2a"],
|
||||
@@ -2784,8 +2779,7 @@
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0x001"
|
||||
}
|
||||
},
|
||||
"cortex_a72": {
|
||||
"from": ["aarch64"],
|
||||
@@ -2822,8 +2816,7 @@
|
||||
"flags" : "-mcpu=cortex-a72"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0xd08"
|
||||
}
|
||||
},
|
||||
"neoverse_n1": {
|
||||
"from": ["cortex_a72", "armv8.2a"],
|
||||
@@ -2909,8 +2902,7 @@
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0xd0c"
|
||||
}
|
||||
},
|
||||
"neoverse_v1": {
|
||||
"from": ["neoverse_n1", "armv8.4a"],
|
||||
@@ -2934,6 +2926,8 @@
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"sm3",
|
||||
"sm4",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"sve",
|
||||
@@ -3034,8 +3028,7 @@
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0xd40"
|
||||
}
|
||||
},
|
||||
"neoverse_v2": {
|
||||
"from": ["neoverse_n1", "armv9.0a"],
|
||||
@@ -3059,10 +3052,13 @@
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"sm3",
|
||||
"sm4",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"sve",
|
||||
"asimdfhm",
|
||||
"dit",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
@@ -3070,12 +3066,18 @@
|
||||
"sb",
|
||||
"dcpodp",
|
||||
"sve2",
|
||||
"sveaes",
|
||||
"svepmull",
|
||||
"svebitperm",
|
||||
"svesha3",
|
||||
"svesm4",
|
||||
"flagm2",
|
||||
"frint",
|
||||
"svei8mm",
|
||||
"svebf16",
|
||||
"i8mm",
|
||||
"bf16"
|
||||
"bf16",
|
||||
"dgh"
|
||||
],
|
||||
"compilers" : {
|
||||
"gcc": [
|
||||
@@ -3100,19 +3102,15 @@
|
||||
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
|
||||
},
|
||||
{
|
||||
"versions": "10.0:11.3.99",
|
||||
"versions": "10.0:11.99",
|
||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
|
||||
},
|
||||
{
|
||||
"versions": "11.4:11.99",
|
||||
"flags" : "-mcpu=neoverse-v2"
|
||||
},
|
||||
{
|
||||
"versions": "12.0:12.2.99",
|
||||
"versions": "12.0:12.99",
|
||||
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
|
||||
},
|
||||
{
|
||||
"versions": "12.3:",
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=neoverse-v2"
|
||||
}
|
||||
],
|
||||
@@ -3147,113 +3145,7 @@
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0xd4f"
|
||||
},
|
||||
"neoverse_n2": {
|
||||
"from": ["neoverse_n1", "armv9.0a"],
|
||||
"vendor": "ARM",
|
||||
"features": [
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"crc32",
|
||||
"atomics",
|
||||
"fphp",
|
||||
"asimdhp",
|
||||
"cpuid",
|
||||
"asimdrdm",
|
||||
"jscvt",
|
||||
"fcma",
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"sve",
|
||||
"asimdfhm",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
"ssbs",
|
||||
"sb",
|
||||
"dcpodp",
|
||||
"sve2",
|
||||
"flagm2",
|
||||
"frint",
|
||||
"svei8mm",
|
||||
"svebf16",
|
||||
"i8mm",
|
||||
"bf16"
|
||||
],
|
||||
"compilers" : {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "4.8:5.99",
|
||||
"flags": "-march=armv8-a"
|
||||
},
|
||||
{
|
||||
"versions": "6:6.99",
|
||||
"flags" : "-march=armv8.1-a"
|
||||
},
|
||||
{
|
||||
"versions": "7.0:7.99",
|
||||
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "8.0:8.99",
|
||||
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "9.0:9.99",
|
||||
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
|
||||
},
|
||||
{
|
||||
"versions": "10.0:10.99",
|
||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
|
||||
},
|
||||
{
|
||||
"versions": "11.0:",
|
||||
"flags" : "-mcpu=neoverse-n2"
|
||||
}
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "9.0:10.99",
|
||||
"flags" : "-march=armv8.5-a+sve"
|
||||
},
|
||||
{
|
||||
"versions": "11.0:13.99",
|
||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
|
||||
},
|
||||
{
|
||||
"versions": "14.0:15.99",
|
||||
"flags" : "-march=armv9-a+i8mm+bf16"
|
||||
},
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"flags" : "-mcpu=neoverse-n2"
|
||||
}
|
||||
],
|
||||
"arm" : [
|
||||
{
|
||||
"versions": "23.04.0:",
|
||||
"flags" : "-mcpu=neoverse-n2"
|
||||
}
|
||||
],
|
||||
"nvhpc" : [
|
||||
{
|
||||
"versions": "23.3:",
|
||||
"name": "neoverse-n1",
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0xd49"
|
||||
}
|
||||
},
|
||||
"m1": {
|
||||
"from": ["armv8.4a"],
|
||||
@@ -3319,8 +3211,7 @@
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0x022"
|
||||
}
|
||||
},
|
||||
"m2": {
|
||||
"from": ["m1", "armv8.5a"],
|
||||
@@ -3398,8 +3289,7 @@
|
||||
"flags" : "-mcpu=apple-m2"
|
||||
}
|
||||
]
|
||||
},
|
||||
"cpupart": "0x032"
|
||||
}
|
||||
},
|
||||
"arm": {
|
||||
"from": [],
|
||||
|
@@ -52,9 +52,6 @@
            }
          }
        }
      },
      "cpupart": {
        "type": "string"
      }
    },
    "required": [
@@ -110,4 +107,4 @@
    "additionalProperties": false
  }
}
}
}
45 lib/spack/external/patches/distro.patch vendored
@@ -1,45 +0,0 @@
diff --git a/lib/spack/external/_vendoring/distro/distro.py b/lib/spack/external/_vendoring/distro/distro.py
index 89e1868047..50c3b18d4d 100644
--- a/lib/spack/external/_vendoring/distro/distro.py
+++ b/lib/spack/external/_vendoring/distro/distro.py
@@ -1265,27 +1265,29 @@ def _distro_release_info(self) -> Dict[str, str]:
             match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
         else:
             try:
-                basenames = [
-                    basename
-                    for basename in os.listdir(self.etc_dir)
-                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
-                    and os.path.isfile(os.path.join(self.etc_dir, basename))
-                ]
+                with os.scandir(self.etc_dir) as it:
+                    etc_files = [
+                        p.path for p in it
+                        if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
+                    ]
                 # We sort for repeatability in cases where there are multiple
                 # distro specific files; e.g. CentOS, Oracle, Enterprise all
                 # containing `redhat-release` on top of their own.
-                basenames.sort()
+                etc_files.sort()
             except OSError:
                 # This may occur when /etc is not readable but we can't be
                 # sure about the *-release files. Check common entries of
                 # /etc for information. If they turn out to not be there the
                 # error is handled in `_parse_distro_release_file()`.
-                basenames = _DISTRO_RELEASE_BASENAMES
-            for basename in basenames:
-                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+                etc_files = [
+                    os.path.join(self.etc_dir, basename)
+                    for basename in _DISTRO_RELEASE_BASENAMES
+                ]
+
+            for filepath in etc_files:
+                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
                 if match is None:
                     continue
-                filepath = os.path.join(self.etc_dir, basename)
                 distro_info = self._parse_distro_release_file(filepath)
                 # The name is always present if the pattern matches.
                 if "name" not in distro_info:
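The patch above (dropped by this commit range) replaced an `os.listdir` plus `os.path.isfile` scan with `os.scandir`. A self-contained sketch of that idiom, with a hypothetical ignore set standing in for `_DISTRO_RELEASE_IGNORE_BASENAMES`:

import os

# os.scandir yields DirEntry objects with cached type information, so
# entry.is_file() typically avoids the extra stat() call that
# os.path.isfile(os.path.join(...)) would make after os.listdir.
ignored = {"lsb-release", "os-release"}  # hypothetical ignore list
with os.scandir("/etc") as it:
    release_files = sorted(
        entry.path for entry in it
        if entry.is_file() and entry.name not in ignored
    )
print(release_files)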
188  lib/spack/external/patches/jsonschema.patch  vendored
@@ -13,191 +13,3 @@ index 6b630cdfbb..1791fe7fbf 100644
-__version__ = metadata.version("jsonschema")
+
+__version__ = "3.2.0"
diff --git a/lib/spack/external/_vendoring/jsonschema/_format.py b/lib/spack/external/_vendoring/jsonschema/_format.py
index 281a7cfcff..29061e3661 100644
--- a/lib/spack/external/_vendoring/jsonschema/_format.py
+++ b/lib/spack/external/_vendoring/jsonschema/_format.py
@@ -231,96 +231,6 @@ def is_host_name(instance):
     return True


-try:
-    # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
-    import idna
-except ImportError:
-    pass
-else:
-    @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
-    def is_idn_host_name(instance):
-        if not isinstance(instance, str_types):
-            return True
-        idna.encode(instance)
-        return True
-
-
-try:
-    import rfc3987
-except ImportError:
-    try:
-        from rfc3986_validator import validate_rfc3986
-    except ImportError:
-        pass
-    else:
-        @_checks_drafts(name="uri")
-        def is_uri(instance):
-            if not isinstance(instance, str_types):
-                return True
-            return validate_rfc3986(instance, rule="URI")
-
-        @_checks_drafts(
-            draft6="uri-reference",
-            draft7="uri-reference",
-            raises=ValueError,
-        )
-        def is_uri_reference(instance):
-            if not isinstance(instance, str_types):
-                return True
-            return validate_rfc3986(instance, rule="URI_reference")
-
-else:
-    @_checks_drafts(draft7="iri", raises=ValueError)
-    def is_iri(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return rfc3987.parse(instance, rule="IRI")
-
-    @_checks_drafts(draft7="iri-reference", raises=ValueError)
-    def is_iri_reference(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return rfc3987.parse(instance, rule="IRI_reference")
-
-    @_checks_drafts(name="uri", raises=ValueError)
-    def is_uri(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return rfc3987.parse(instance, rule="URI")
-
-    @_checks_drafts(
-        draft6="uri-reference",
-        draft7="uri-reference",
-        raises=ValueError,
-    )
-    def is_uri_reference(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return rfc3987.parse(instance, rule="URI_reference")
-
-
-try:
-    from strict_rfc3339 import validate_rfc3339
-except ImportError:
-    try:
-        from rfc3339_validator import validate_rfc3339
-    except ImportError:
-        validate_rfc3339 = None
-
-if validate_rfc3339:
-    @_checks_drafts(name="date-time")
-    def is_datetime(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return validate_rfc3339(instance)
-
-    @_checks_drafts(draft7="time")
-    def is_time(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return is_datetime("1970-01-01T" + instance)
-
-
 @_checks_drafts(name="regex", raises=re.error)
 def is_regex(instance):
     if not isinstance(instance, str_types):
@@ -340,86 +250,3 @@ def is_draft3_time(instance):
     if not isinstance(instance, str_types):
         return True
     return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
-try:
-    import webcolors
-except ImportError:
-    pass
-else:
-    def is_css_color_code(instance):
-        return webcolors.normalize_hex(instance)
-
-    @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
-    def is_css21_color(instance):
-        if (
-            not isinstance(instance, str_types) or
-            instance.lower() in webcolors.css21_names_to_hex
-        ):
-            return True
-        return is_css_color_code(instance)
-
-    def is_css3_color(instance):
-        if instance.lower() in webcolors.css3_names_to_hex:
-            return True
-        return is_css_color_code(instance)
-
-
-try:
-    import jsonpointer
-except ImportError:
-    pass
-else:
-    @_checks_drafts(
-        draft6="json-pointer",
-        draft7="json-pointer",
-        raises=jsonpointer.JsonPointerException,
-    )
-    def is_json_pointer(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return jsonpointer.JsonPointer(instance)
-
-    # TODO: I don't want to maintain this, so it
-    # needs to go either into jsonpointer (pending
-    # https://github.com/stefankoegl/python-json-pointer/issues/34) or
-    # into a new external library.
-    @_checks_drafts(
-        draft7="relative-json-pointer",
-        raises=jsonpointer.JsonPointerException,
-    )
-    def is_relative_json_pointer(instance):
-        # Definition taken from:
-        # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
-        if not isinstance(instance, str_types):
-            return True
-        non_negative_integer, rest = [], ""
-        for i, character in enumerate(instance):
-            if character.isdigit():
-                non_negative_integer.append(character)
-                continue
-
-            if not non_negative_integer:
-                return False
-
-            rest = instance[i:]
-            break
-        return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
-try:
-    import uritemplate.exceptions
-except ImportError:
-    pass
-else:
-    @_checks_drafts(
-        draft6="uri-template",
-        draft7="uri-template",
-        raises=uritemplate.exceptions.InvalidTemplate,
-    )
-    def is_uri_template(
-        instance,
-        template_validator=uritemplate.Validator().force_balanced_braces(),
-    ):
-        template = uritemplate.URITemplate(instance)
-        return template_validator.validate(template)
@@ -27,6 +27,8 @@
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

from spack.util.executable import Executable, which

from ..path import path_to_os_path, system_path_filter

if sys.platform != "win32":
@@ -51,6 +53,7 @@
    "find_all_headers",
    "find_libraries",
    "find_system_libraries",
    "fix_darwin_install_name",
    "force_remove",
    "force_symlink",
    "getuid",
@@ -245,6 +248,42 @@ def path_contains_subdirectory(path, root):
    return norm_path.startswith(norm_root)


@memoized
def file_command(*args):
    """Creates entry point to `file` system command with provided arguments"""
    file_cmd = which("file", required=True)
    for arg in args:
        file_cmd.add_default_arg(arg)
    return file_cmd


@memoized
def _get_mime_type():
    """Generate method to call `file` system command to acquire mime type
    for a specified path
    """
    if sys.platform == "win32":
        # -h option (no-dereference) does not exist in Windows
        return file_command("-b", "--mime-type")
    else:
        return file_command("-b", "-h", "--mime-type")


def mime_type(filename):
    """Returns the mime type and subtype of a file.

    Args:
        filename: file to be analyzed

    Returns:
        Tuple containing the MIME type and subtype
    """
    output = _get_mime_type()(filename, output=str, error=str).strip()
    tty.debug("==> " + output)
    type, _, subtype = output.partition("/")
    return type, subtype


#: This generates the library filenames that may appear on any OS.
library_extensions = ["a", "la", "so", "tbd", "dylib"]
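As a rough standalone illustration of the `mime_type` helper above, here is the same logic calling `file` through the standard library instead of Spack's `Executable` wrapper (the example output is an assumption; it varies by system):

import subprocess

def mime_type(filename):
    # -b: no filename prefix; -h: do not dereference symlinks (POSIX `file`)
    output = subprocess.run(
        ["file", "-b", "-h", "--mime-type", filename],
        capture_output=True, text=True, check=True,
    ).stdout.strip()
    mtype, _, subtype = output.partition("/")
    return mtype, subtype

# mime_type("/bin/sh") might return ("application", "x-sharedlib") on Linux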
@@ -1585,12 +1624,6 @@ def remove_linked_tree(path):
        shutil.rmtree(os.path.realpath(path), **kwargs)
        os.unlink(path)
    else:
        if sys.platform == "win32":
            # Adding this prefix allows shutil to remove long paths on windows
            # https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
            long_path_pfx = "\\\\?\\"
            if not path.startswith(long_path_pfx):
                path = long_path_pfx + path
        shutil.rmtree(path, **kwargs)


@@ -1640,6 +1673,41 @@ def safe_remove(*files_or_dirs):
        raise


@system_path_filter
def fix_darwin_install_name(path):
    """Fix install name of dynamic libraries on Darwin to have full path.

    There are two parts of this task:

    1. Use ``install_name('-id', ...)`` to change install name of a single lib
    2. Use ``install_name('-change', ...)`` to change the cross linking between
       libs. The function assumes that all libraries are in one folder and
       currently won't follow subfolders.

    Parameters:
        path (str): directory in which .dylib files are located
    """
    libs = glob.glob(join_path(path, "*.dylib"))
    for lib in libs:
        # fix install name first:
        install_name_tool = Executable("install_name_tool")
        install_name_tool("-id", lib, lib)
        otool = Executable("otool")
        long_deps = otool("-L", lib, output=str).split("\n")
        deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
        # fix all dependencies:
        for dep in deps:
            for loc in libs:
                # We really want to check for either
                #     dep == os.path.basename(loc)   or
                #     dep == join_path(builddir, os.path.basename(loc)),
                # but we don't know builddir (nor how symbolic links look
                # in builddir). We thus only compare the basenames.
                if os.path.basename(dep) == os.path.basename(loc):
                    install_name_tool("-change", dep, loc, lib)
                    break


def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]:
    """Find the first file matching a pattern.

@@ -6,6 +6,7 @@
import collections.abc
import contextlib
import functools
import inspect
import itertools
import os
import re
@@ -15,7 +16,7 @@
from typing import Any, Callable, Iterable, List, Tuple

# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
ignore_modules = [r"^\.#", "~$"]


def index_by(objects, *funcs):
@@ -83,6 +84,20 @@ def index_by(objects, *funcs):
    return result


def caller_locals():
    """This will return the locals of the *parent* of the caller.

    This allows a function to insert variables into its caller's
    scope. Yes, this is some black magic, and yes it's useful
    for implementing things like depends_on and provides.
    """
    # Passing zero here skips line context for speed.
    stack = inspect.stack(0)
    try:
        return stack[2][0].f_locals
    finally:
        del stack

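A minimal sketch of what `caller_locals` above enables: a directive-style helper that plants a name into the scope that called it. The `declare` helper and `MyPackage` class are made up for illustration; note this only works reliably at class-body or module scope, where `f_locals` is the real namespace dict (writes to a function frame's locals are generally discarded by CPython):

import inspect

def caller_locals():
    stack = inspect.stack(0)
    try:
        return stack[2][0].f_locals
    finally:
        del stack

def declare(name, value):
    # Frame 2 is the caller of declare(): caller_locals -> declare -> caller
    caller_locals()[name] = value

class MyPackage:
    declare("homepage", "https://example.com")  # hypothetical directive

print(MyPackage.homepage)  # https://example.com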
def attr_setdefault(obj, name, value):
    """Like dict.setdefault, but for objects."""
    if not hasattr(obj, name):
@@ -90,6 +105,15 @@ def attr_setdefault(obj, name, value):
    return getattr(obj, name)


def has_method(cls, name):
    for base in inspect.getmro(cls):
        if base is object:
            continue
        if name in base.__dict__:
            return True
    return False


def union_dicts(*dicts):
    """Use update() to combine all dicts into one.

@@ -154,22 +178,19 @@ def list_modules(directory, **kwargs):
    order."""
    list_directories = kwargs.setdefault("directories", True)

    ignore = re.compile(ignore_modules)
    for name in os.listdir(directory):
        if name == "__init__.py":
            continue

    with os.scandir(directory) as it:
        for entry in it:
            if entry.name == "__init__.py" or entry.name == "__pycache__":
                continue
        path = os.path.join(directory, name)
        if list_directories and os.path.isdir(path):
            init_py = os.path.join(path, "__init__.py")
            if os.path.isfile(init_py):
                yield name

            if (
                list_directories
                and entry.is_dir()
                and os.path.isfile(os.path.join(entry.path, "__init__.py"))
            ):
                yield entry.name

            elif entry.name.endswith(".py") and entry.is_file() and not ignore.search(entry.name):
                yield entry.name[:-3]  # strip .py
        elif name.endswith(".py"):
            if not any(re.search(pattern, name) for pattern in ignore_modules):
                yield re.sub(".py$", "", name)


def decorator_with_or_without_args(decorator):
@@ -216,8 +237,8 @@ def setter(name, value):
        value.__name__ = name
        setattr(cls, name, value)

    if not hasattr(cls, "_cmp_key"):
        raise TypeError(f"'{cls.__name__}' doesn't define _cmp_key().")
    if not has_method(cls, "_cmp_key"):
        raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)

    setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
    setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
@@ -367,8 +388,8 @@ def cd_fun():
    TypeError: If the class does not have a ``_cmp_iter`` method

    """
    if not hasattr(cls, "_cmp_iter"):
        raise TypeError(f"'{cls.__name__}' doesn't define _cmp_iter().")
    if not has_method(cls, "_cmp_iter"):
        raise TypeError("'%s' doesn't define _cmp_iter()." % cls.__name__)

    # comparison operators are implemented in terms of lazy_eq and lazy_lt
    def eq(self, other):
@@ -843,19 +864,20 @@ def uniq(sequence):
    return uniq_list


def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
def elide_list(line_list, max_num=10):
    """Takes a long list and limits it to a smaller number of elements,
    replacing intervening elements with '...'.  For example::

        elide_list(["1", "2", "3", "4", "5", "6"], 4)
        elide_list([1,2,3,4,5,6], 4)

    gives::

        ["1", "2", "3", "...", "6"]
        [1, 2, 3, '...', 6]
    """
    if len(line_list) > max_num:
        return [*line_list[: max_num - 1], "...", line_list[-1]]
    return line_list
        return line_list[: max_num - 1] + ["..."] + line_list[-1:]
    else:
        return line_list
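Both spellings of `elide_list` above behave identically; a quick standalone check of the documented example:

def elide_list(line_list, max_num=10):
    # same logic as either variant above
    if len(line_list) > max_num:
        return line_list[: max_num - 1] + ["..."] + line_list[-1:]
    return line_list

assert elide_list(["1", "2", "3", "4", "5", "6"], 4) == ["1", "2", "3", "...", "6"]
assert elide_list(["1", "2"], 4) == ["1", "2"]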


@contextlib.contextmanager

@@ -10,7 +10,6 @@
import errno
import io
import multiprocessing
import multiprocessing.connection
import os
import re
import select

131  lib/spack/spack/abi.py  Normal file
@@ -0,0 +1,131 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

from llnl.util.lang import memoized

import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.util.executable import Executable, ProcessError


class ABI:
    """This class provides methods to test ABI compatibility between specs.
    The current implementation is rather rough and could be improved."""

    def architecture_compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec
    ) -> bool:
        """Return true if architecture of target spec is ABI compatible
        to the architecture of constraint spec. If either the target
        or constraint specs have no architecture, target is also defined
        as architecture ABI compatible to constraint."""
        return (
            not target.architecture
            or not constraint.architecture
            or target.architecture.intersects(constraint.architecture)
        )

    @memoized
    def _gcc_get_libstdcxx_version(self, version):
        """Returns gcc ABI compatibility info by getting the library version of
        a compiler's libstdc++ or libgcc_s"""
        from spack.build_environment import dso_suffix

        spec = spack.spec.CompilerSpec("gcc", version)
        compilers = spack.compilers.compilers_for_spec(spec)
        if not compilers:
            return None
        compiler = compilers[0]
        rungcc = None
        libname = None
        output = None
        if compiler.cxx:
            rungcc = Executable(compiler.cxx)
            libname = "libstdc++." + dso_suffix
        elif compiler.cc:
            rungcc = Executable(compiler.cc)
            libname = "libgcc_s." + dso_suffix
        else:
            return None
        try:
            # Some gcc's are actually clang and don't respond properly to
            # --print-file-name (they just print the filename, not the
            # full path). Ignore these and expect them to be handled as clang.
            if Clang.default_version(rungcc.exe[0]) != "unknown":
                return None

            output = rungcc("--print-file-name=%s" % libname, output=str)
        except ProcessError:
            return None
        if not output:
            return None
        libpath = os.path.realpath(output.strip())
        if not libpath:
            return None
        return os.path.basename(libpath)

    @memoized
    def _gcc_compiler_compare(self, pversion, cversion):
        """Returns true iff the gcc version pversion and cversion
        are ABI compatible."""
        plib = self._gcc_get_libstdcxx_version(pversion)
        clib = self._gcc_get_libstdcxx_version(cversion)
        if not plib or not clib:
            return False
        return plib == clib

    def _intel_compiler_compare(
        self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
    ) -> bool:
        """Returns true iff the intel version pversion and cversion
        are ABI compatible"""

        # Test major and minor versions. Ignore build version.
        pv = pversion.lo
        cv = cversion.lo
        return pv.up_to(2) == cv.up_to(2)

    def compiler_compatible(
        self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
    ) -> bool:
        """Return true if compilers for parent and child are ABI compatible."""
        if not parent.compiler or not child.compiler:
            return True

        if parent.compiler.name != child.compiler.name:
            # Different compiler families are assumed ABI incompatible
            return False

        if loose:
            return True

        # TODO: Can we move the specialized ABI matching stuff
        # TODO: into compiler classes?
        for pversion in parent.compiler.versions:
            for cversion in child.compiler.versions:
                # For a few compilers use specialized comparisons.
                # Otherwise match on version match.
                if pversion.intersects(cversion):
                    return True
                elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
                    pversion, cversion
                ):
                    return True
                elif parent.compiler.name == "intel" and self._intel_compiler_compare(
                    pversion, cversion
                ):
                    return True
        return False

    def compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
    ) -> bool:
        """Returns true if target spec is ABI compatible to constraint spec"""
        return self.architecture_compatible(target, constraint) and self.compiler_compatible(
            target, constraint, loose=loose
        )
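A rough sketch of how the `ABI` class above is meant to be used; the specs here are parsed ad hoc for illustration, whereas in Spack they would come from the concretizer:

from spack.abi import ABI
import spack.spec

abi = ABI()
target = spack.spec.Spec("zlib %gcc@12.2.0")
constraint = spack.spec.Spec("zlib %gcc@12.1.0")

# loose=True only requires matching compiler families (both gcc here); the
# default mode also compares versions (and libstdc++ identity for gcc).
print(abi.compatible(target, constraint, loose=True))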
@@ -39,21 +39,19 @@ def _search_duplicate_compilers(error_cls):
import collections
import collections.abc
import glob
import inspect
import io
import itertools
import os
import pathlib
import pickle
import re
import warnings
from typing import Iterable, List, Set, Tuple
from urllib.request import urlopen

import llnl.util.lang

import spack.config
import spack.patch
import spack.paths
import spack.repo
import spack.spec
import spack.util.crypto
@@ -75,9 +73,7 @@ def __init__(self, summary, details):
        self.details = tuple(details)

    def __str__(self):
        if self.details:
            return f"{self.summary}\n" + "\n".join(f"    {detail}" for detail in self.details)
        return self.summary
        return self.summary + "\n" + "\n".join(["    " + detail for detail in self.details])

    def __eq__(self, other):
        if self.summary != other.summary or self.details != other.details:
@@ -214,11 +210,6 @@ def _search_duplicate_compilers(error_cls):
    group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
)

#: Sanity checks on packages.yaml
config_repos = AuditClass(
    group="configs", tag="CFG-REPOS", description="Sanity checks on repositories", kwargs=()
)


@config_packages
def _search_duplicate_specs_in_externals(error_cls):
@@ -261,6 +252,40 @@ def _search_duplicate_specs_in_externals(error_cls):
    return errors


@config_packages
def _deprecated_preferences(error_cls):
    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
    errors = []
    packages_yaml = spack.config.CONFIG.get_config("packages")

    def make_error(attribute_name, config_data, summary):
        s = io.StringIO()
        s.write("Occurring in the following file:\n")
        dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
        syaml.dump_config(dict_view, stream=s, blame=True)
        return error_cls(summary=summary, details=[s.getvalue()])

    if "all" in packages_yaml and "version" in packages_yaml["all"]:
        summary = "Using the deprecated 'version' attribute under 'packages:all'"
        errors.append(make_error("version", packages_yaml["all"], summary))

    for package_name in packages_yaml:
        if package_name == "all":
            continue

        package_conf = packages_yaml[package_name]
        for attribute in ("compiler", "providers", "target"):
            if attribute not in package_conf:
                continue
            summary = (
                f"Using the deprecated '{attribute}' attribute under 'packages:{package_name}'"
            )
            errors.append(make_error(attribute, package_conf, summary))

    return errors


@config_packages
def _avoid_mismatched_variants(error_cls):
    """Warns if variant preferences have mismatched types or names."""
@@ -342,27 +367,6 @@ def _ensure_all_virtual_packages_have_default_providers(error_cls):
    ]


@config_repos
def _ensure_no_folders_without_package_py(error_cls):
    """Check that we don't leave any folder without a package.py in repos"""
    errors = []
    for repository in spack.repo.PATH.repos:
        missing = []
        for entry in os.scandir(repository.packages_path):
            if not entry.is_dir():
                continue
            package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
            if not package_py.exists():
                missing.append(entry.path)
        if missing:
            summary = (
                f"The '{repository.namespace}' repository misses a package.py file"
                f" in the following folders"
            )
            errors.append(error_cls(summary=summary, details=[f"{x}" for x in missing]))
    return errors


def _make_config_error(config_data, summary, error_cls):
    s = io.StringIO()
    s.write("Occurring in the following file:\n")
@@ -494,7 +498,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
        name_definitions = collections.defaultdict(list)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        for cls_item in pkg_cls.__mro__:
        for cls_item in inspect.getmro(pkg_cls):
            for name in RESERVED_NAMES:
                current_value = cls_item.__dict__.get(name)
                if current_value is None:
@@ -523,7 +527,7 @@ def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
    badname_regex, errors = re.compile(r"[_A-Z]"), []
    for pkg_name in pkgs:
        if badname_regex.search(pkg_name):
            error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
            error_msg = "Package name '{}' is either not lowercase or contains '_'".format(pkg_name)
            errors.append(error_cls(error_msg, []))
    return errors

@@ -683,88 +687,6 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    return errors


class DeprecatedMagicGlobals(ast.NodeVisitor):
    def __init__(self, magic_globals: Iterable[str]):
        super().__init__()

        self.magic_globals: Set[str] = set(magic_globals)

        # State to track whether we're in a class function
        self.depth: int = 0
        self.in_function: bool = False
        self.path = (ast.Module, ast.ClassDef, ast.FunctionDef)

        # Defined locals in the current function (heuristically at least)
        self.locals: Set[str] = set()

        # List of (name, lineno) tuples for references to magic globals
        self.references_to_globals: List[Tuple[str, int]] = []

    def descend_in_function_def(self, node: ast.AST) -> None:
        if not isinstance(node, self.path[self.depth]):
            return
        self.depth += 1
        if self.depth == len(self.path):
            self.in_function = True
        super().generic_visit(node)
        if self.depth == len(self.path):
            self.in_function = False
            self.locals.clear()
        self.depth -= 1

    def generic_visit(self, node: ast.AST) -> None:
        # Recurse into function definitions
        if self.depth < len(self.path):
            return self.descend_in_function_def(node)
        elif not self.in_function:
            return
        elif isinstance(node, ast.Global):
            for name in node.names:
                if name in self.magic_globals:
                    self.references_to_globals.append((name, node.lineno))
        elif isinstance(node, ast.Assign):
            # visit the rhs before lhs
            super().visit(node.value)
            for target in node.targets:
                super().visit(target)
        elif isinstance(node, ast.Name) and node.id in self.magic_globals:
            if isinstance(node.ctx, ast.Load) and node.id not in self.locals:
                self.references_to_globals.append((node.id, node.lineno))
            elif isinstance(node.ctx, ast.Store):
                self.locals.add(node.id)
        else:
            super().generic_visit(node)


@package_properties
def _uses_deprecated_globals(pkgs, error_cls):
    """Ensure that packages do not use deprecated globals"""
    errors = []

    for pkg_name in pkgs:
        # some packages scheduled to be removed in v0.23 are not worth fixing.
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        if all(v.get("deprecated", False) for v in pkg_cls.versions.values()):
            continue

        file = spack.repo.PATH.filename_for_package_name(pkg_name)
        tree = ast.parse(open(file).read())
        visitor = DeprecatedMagicGlobals(("std_cmake_args",))
        visitor.visit(tree)
        if visitor.references_to_globals:
            errors.append(
                error_cls(
                    f"Package '{pkg_name}' uses deprecated globals",
                    [
                        f"{file}:{line} references '{name}'"
                        for name, line in visitor.references_to_globals
                    ],
                )
            )

    return errors

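To see what the `DeprecatedMagicGlobals` visitor above flags, here is a toy run against a made-up package body that reads the deprecated global `std_cmake_args` (this assumes the class from the hunk above is in scope):

import ast

source = """
class Toy:
    def cmake_args(self):
        args = list(std_cmake_args)
        return args
"""
visitor = DeprecatedMagicGlobals(("std_cmake_args",))
visitor.visit(ast.parse(source))
print(visitor.references_to_globals)  # [('std_cmake_args', 4)]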

@package_https_directives
def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
File diff suppressed because it is too large
@@ -9,7 +9,6 @@
    all_core_root_specs,
    ensure_clingo_importable_or_raise,
    ensure_core_dependencies,
    ensure_file_in_path_or_raise,
    ensure_gpg_in_path_or_raise,
    ensure_patchelf_in_path_or_raise,
)
@@ -20,7 +19,6 @@
    "is_bootstrapping",
    "ensure_bootstrap_configuration",
    "ensure_core_dependencies",
    "ensure_file_in_path_or_raise",
    "ensure_gpg_in_path_or_raise",
    "ensure_clingo_importable_or_raise",
    "ensure_patchelf_in_path_or_raise",
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import importlib
import os.path
import re
import sys
@@ -29,7 +28,7 @@

def _python_import(module: str) -> bool:
    try:
        importlib.import_module(module)
        __import__(module)
    except ImportError:
        return False
    return True
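The two import spellings above are near-equivalent; the helper's contract is simply "True iff the module can be imported":

import importlib

def _python_import(module: str) -> bool:
    try:
        importlib.import_module(module)
    except ImportError:
        return False
    return True

assert _python_import("json") is True
assert _python_import("no_such_module_anywhere") is False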
@@ -1,154 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap concrete specs for clingo

Spack uses clingo to concretize specs. When clingo itself needs to be bootstrapped from sources,
we need to rely on another mechanism to get a concrete spec that fits the current host.

This module contains the logic to get a concrete spec for clingo, starting from a prototype
JSON file for a similar platform.
"""
import pathlib
import sys
from typing import Dict, Optional, Tuple

import archspec.cpu

import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse

from .config import spec_for_current_python


class ClingoBootstrapConcretizer:
    def __init__(self, configuration):
        self.host_platform = spack.platforms.host()
        self.host_os = self.host_platform.operating_system("frontend")
        self.host_target = archspec.cpu.host().family
        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
        self.host_architecture.target = str(self.host_target)
        self.host_compiler = self._valid_compiler_or_raise()
        self.host_python = self.python_external_spec()
        if str(self.host_platform) == "linux":
            self.host_libc = self.libc_external_spec()

        self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)

    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
        if str(self.host_platform) == "linux":
            compiler_name = "gcc"
        elif str(self.host_platform) == "darwin":
            compiler_name = "apple-clang"
        elif str(self.host_platform) == "windows":
            compiler_name = "msvc"
        elif str(self.host_platform) == "freebsd":
            compiler_name = "clang"
        else:
            raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
        candidates = spack.compilers.compilers_for_spec(
            compiler_name, arch_spec=self.host_architecture
        )
        if not candidates:
            raise RuntimeError(
                f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
            )
        candidates.sort(key=lambda x: x.spec.version, reverse=True)
        return candidates[0]

    def _externals_from_yaml(
        self, configuration: "spack.config.Configuration"
    ) -> Tuple[Optional["spack.spec.Spec"], Optional["spack.spec.Spec"]]:
        packages_yaml = configuration.get("packages")
        requirements = {"cmake": "@3.20:", "bison": "@2.5:"}
        selected: Dict[str, Optional["spack.spec.Spec"]] = {"cmake": None, "bison": None}
        for pkg_name in ["cmake", "bison"]:
            if pkg_name not in packages_yaml:
                continue

            candidates = packages_yaml[pkg_name].get("externals", [])
            for candidate in candidates:
                s = spack.spec.Spec(candidate["spec"], external_path=candidate["prefix"])
                if not s.satisfies(requirements[pkg_name]):
                    continue

                if not s.intersects(f"%{self.host_compiler.spec}"):
                    continue

                if not s.intersects(f"arch={self.host_architecture}"):
                    continue

                selected[pkg_name] = self._external_spec(s)
                break
        return selected["cmake"], selected["bison"]

    def prototype_path(self) -> pathlib.Path:
        """Path to a prototype concrete specfile for clingo"""
        parent_dir = pathlib.Path(__file__).parent
        result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-{self.host_target}.json"
        if str(self.host_platform) == "linux":
            # Using aarch64 as a fallback, since it has gnuconfig (x86_64 doesn't have it)
            if not result.exists():
                result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-aarch64.json"

        elif str(self.host_platform) == "freebsd":
            result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-amd64.json"

        elif not result.exists():
            raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")

        return result

    def concretize(self) -> "spack.spec.Spec":
        # Read the prototype and mark it NOT concrete
        s = spack.spec.Spec.from_specfile(str(self.prototype_path()))
        s._mark_concrete(False)

        # Tweak it to conform to the host architecture
        for node in s.traverse():
            node.architecture.os = str(self.host_os)
            node.compiler = self.host_compiler.spec
            node.architecture = self.host_architecture

            if node.name == "gcc-runtime":
                node.versions = self.host_compiler.spec.versions

        for edge in spack.traverse.traverse_edges([s], cover="edges"):
            if edge.spec.name == "python":
                edge.spec = self.host_python

            if edge.spec.name == "bison" and self.external_bison:
                edge.spec = self.external_bison

            if edge.spec.name == "cmake" and self.external_cmake:
                edge.spec = self.external_cmake

            if "libc" in edge.virtuals:
                edge.spec = self.host_libc

        s._finalize_concretization()

        # Work around the fact that the installer calls Spec.dependents() and
        # we modified edges inconsistently
        return s.copy()

    def python_external_spec(self) -> "spack.spec.Spec":
        """Python external spec corresponding to the current running interpreter"""
        result = spack.spec.Spec(spec_for_current_python(), external_path=sys.exec_prefix)
        return self._external_spec(result)

    def libc_external_spec(self) -> "spack.spec.Spec":
        result = self.host_compiler.default_libc
        return self._external_spec(result)

    def _external_spec(self, initial_spec) -> "spack.spec.Spec":
        initial_spec.namespace = "builtin"
        initial_spec.compiler = self.host_compiler.spec
        initial_spec.architecture = self.host_architecture
        for flag_type in spack.spec.FlagMap.valid_compiler_flags():
            initial_spec.compiler_flags[flag_type] = []
        return spack.spec.parse_with_version_concrete(initial_spec)
@@ -143,7 +143,11 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.compilers_for_arch(arch):
        spack.compilers.find_compilers()
        new_compilers = spack.compilers.find_new_compilers(
            mixed_toolchain=sys.platform == "darwin"
        )
        if new_compilers:
            spack.compilers.add_compilers_to_config(new_compilers)


@contextlib.contextmanager
@@ -152,7 +156,7 @@ def _ensure_bootstrap_configuration() -> Generator:
    bootstrap_store_path = store_path()
    user_configuration = _read_and_sanitize_configuration()
    with spack.environment.no_active_environment():
        with spack.platforms.use_platform(
        with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
            spack.platforms.real_host()
        ), spack.repo.use_repositories(spack.paths.packages_path):
            # Default configuration scopes excluding command line

@@ -54,7 +54,6 @@
import spack.version

from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python

#: Name of the file containing metadata about the bootstrapping source
@@ -269,13 +268,15 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())

            if module == "clingo":
                bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
                concrete_spec = bootstrapper.concretize()
            else:
                concrete_spec = spack.spec.Spec(
                    abstract_spec_str + " ^" + spec_for_current_python()
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
                concrete_spec._old_concretize(  # pylint: disable=protected-access
                    deprecation_warning=False
                )
            else:
                concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
@@ -302,7 +303,14 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
        concrete_spec = spack.spec.Spec(abstract_spec_str)
        if concrete_spec.name == "patchelf":
            concrete_spec._old_concretize(  # pylint: disable=protected-access
                deprecation_warning=False
            )
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
@@ -472,8 +480,7 @@ def ensure_clingo_importable_or_raise() -> None:

def gnupg_root_spec() -> str:
    """Return the root spec used to bootstrap GnuPG"""
    root_spec_name = "win-gpg" if IS_WINDOWS else "gnupg"
    return _root_spec(f"{root_spec_name}@2.3:")
    return _root_spec("gnupg@2.3:")


def ensure_gpg_in_path_or_raise() -> None:
@@ -483,19 +490,6 @@ def ensure_gpg_in_path_or_raise() -> None:
    )


def file_root_spec() -> str:
    """Return the root spec used to bootstrap file"""
    root_spec_name = "win-file" if IS_WINDOWS else "file"
    return _root_spec(root_spec_name)


def ensure_file_in_path_or_raise() -> None:
    """Ensure file is in the PATH or raise"""
    return ensure_executables_in_path_or_raise(
        executables=["file"], abstract_spec=file_root_spec()
    )


def patchelf_root_spec() -> str:
    """Return the root spec used to bootstrap patchelf"""
    # 0.13.1 is the last version not to require C++17.
@@ -579,15 +573,14 @@ def ensure_core_dependencies() -> None:
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
        ensure_patchelf_in_path_or_raise()
    elif sys.platform == "win32":
        ensure_file_in_path_or_raise()
    ensure_gpg_in_path_or_raise()
    if not IS_WINDOWS:
        ensure_gpg_in_path_or_raise()
    ensure_clingo_importable_or_raise()


def all_core_root_specs() -> List[str]:
    """Return a list of all the core root specs that may be used to bootstrap Spack"""
    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]
    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]


def bootstrapping_sources(scope: Optional[str] = None):

File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -88,7 +88,7 @@ def _core_requirements() -> List[RequiredResponseType]:

def _buildcache_requirements() -> List[RequiredResponseType]:
    _buildcache_exes = {
        "file": _missing("file", "required to analyze files for buildcaches", system_only=False),
        "file": _missing("file", "required to analyze files for buildcaches"),
        ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
    }
    if platform.system().lower() == "darwin":
@@ -124,7 +124,7 @@ def _development_requirements() -> List[RequiredResponseType]:
    # Ensure we trigger environment modifications if we have an environment
    if BootstrapEnvironment.spack_yaml().exists():
        with BootstrapEnvironment() as env:
            env.load()
            env.update_syspath_and_environ()

    return [
        _required_executable(

@@ -457,12 +457,9 @@ def set_wrapper_variables(pkg, env):
    env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
    env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

    # Find ccache binary and hand it to build environment
    if spack.config.get("config:ccache"):
        # Enable ccache in the compiler wrapper
        env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
    else:
        # Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
        env.set("CCACHE_DISABLE", "1")

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -1553,21 +1550,21 @@ class ModuleChangePropagator:

    _PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")

    def __init__(self, package: spack.package_base.PackageBase) -> None:
    def __init__(self, package):
        self._set_self_attributes("package", package)
        self._set_self_attributes("current_module", package.module)

        #: Modules for the classes in the MRO up to PackageBase
        modules_in_mro = []
        for cls in package.__class__.__mro__:
            module = getattr(cls, "module", None)
        for cls in inspect.getmro(type(package)):
            module = cls.module

            if module is None or module is spack.package_base:
                break

            if module is self.current_module:
            if module == self.current_module:
                continue

            if module == spack.package_base:
                break

            modules_in_mro.append(module)
        self._set_self_attributes("modules_in_mro", modules_in_mro)
        self._set_self_attributes("_set_attributes", {})

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
import os.path
import stat
@@ -548,12 +549,13 @@ def autoreconf(self, pkg, spec, prefix):
        tty.warn("* a custom AUTORECONF phase in the package *")
        tty.warn("*********************************************************")
        with fs.working_dir(self.configure_directory):
            m = inspect.getmodule(self.pkg)
            # This line is what is needed most of the time
            # --install, --verbose, --force
            autoreconf_args = ["-ivf"]
            autoreconf_args += self.autoreconf_search_path_args
            autoreconf_args += self.autoreconf_extra_args
            self.pkg.module.autoreconf(*autoreconf_args)
            m.autoreconf(*autoreconf_args)

    @property
    def autoreconf_search_path_args(self):
@@ -577,9 +579,7 @@ def set_configure_or_die(self):
            raise RuntimeError(msg.format(self.configure_directory))

        # Monkey-patch the configure script in the corresponding module
        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
        globals_for_pkg.configure = Executable(self.configure_abs_path)
        globals_for_pkg.propagate_changes_to_mro()
        inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)

    def configure_args(self):
        """Return the list of all the arguments that must be passed to configure,
@@ -596,7 +596,7 @@ def configure(self, pkg, spec, prefix):
        options += self.configure_args()

        with fs.working_dir(self.build_directory, create=True):
            pkg.module.configure(*options)
            inspect.getmodule(self.pkg).configure(*options)

    def build(self, pkg, spec, prefix):
        """Run "make" on the build targets specified by the builder."""
@@ -604,12 +604,12 @@ def build(self, pkg, spec, prefix):
        params = ["V=1"]
        params += self.build_targets
        with fs.working_dir(self.build_directory):
            pkg.module.make(*params)
            inspect.getmodule(self.pkg).make(*params)

    def install(self, pkg, spec, prefix):
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)
            inspect.getmodule(self.pkg).make(*self.install_targets)

    spack.builder.run_after("build")(execute_build_time_tests)

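The change that repeats through the builder files here (autotools, cargo, cmake, make, meson, msbuild, nmake, octave) toggles between `pkg.module.<tool>(...)` and `inspect.getmodule(pkg).<tool>(...)`. Both resolve to the same thing, since Spack injects build tools such as `make` and `configure` as globals into each package's module. A toy sketch of the direct-attribute spelling, with all names being stand-ins:

class FakeModule:
    @staticmethod
    def make(*args):
        print("make", *args)

class FakePkg:
    module = FakeModule  # in Spack, this is the package's own Python module

FakePkg.module.make("-j4")  # -> make -j4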
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import inspect

import llnl.util.filesystem as fs

import spack.builder
@@ -70,7 +72,9 @@ def check_args(self):
    def build(self, pkg, spec, prefix):
        """Runs ``cargo install`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
            inspect.getmodule(pkg).cargo(
                "install", "--root", "out", "--path", ".", *self.build_args
            )

    def install(self, pkg, spec, prefix):
        """Copy build files into package prefix."""
@@ -82,4 +86,4 @@ def install(self, pkg, spec, prefix):
    def check(self):
        """Run "cargo test"."""
        with fs.working_dir(self.build_directory):
            self.pkg.module.cargo("test", *self.check_args)
            inspect.getmodule(self.pkg).cargo("test", *self.check_args)

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import inspect
import os
import pathlib
import platform
@@ -107,11 +108,6 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
    if _supports_compilation_databases(pkg):
        args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

    # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
    # https://cmake.org/cmake/help/latest/policy/CMP0042.html
    if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))


def generator(*names: str, default: Optional[str] = None):
    """The build system generator to use.
@@ -543,24 +539,24 @@ def cmake(self, pkg, spec, prefix):
        options += self.cmake_args()
        options.append(os.path.abspath(self.root_cmakelists_dir))
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.cmake(*options)
            inspect.getmodule(self.pkg).cmake(*options)

    def build(self, pkg, spec, prefix):
        """Make the build targets"""
        with fs.working_dir(self.build_directory):
            if self.generator == "Unix Makefiles":
                pkg.module.make(*self.build_targets)
                inspect.getmodule(self.pkg).make(*self.build_targets)
            elif self.generator == "Ninja":
                self.build_targets.append("-v")
                pkg.module.ninja(*self.build_targets)
                inspect.getmodule(self.pkg).ninja(*self.build_targets)

    def install(self, pkg, spec, prefix):
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
            if self.generator == "Unix Makefiles":
                pkg.module.make(*self.install_targets)
                inspect.getmodule(self.pkg).make(*self.install_targets)
            elif self.generator == "Ninja":
                pkg.module.ninja(*self.install_targets)
                inspect.getmodule(self.pkg).ninja(*self.install_targets)

    spack.builder.run_after("build")(execute_build_time_tests)

@@ -3,9 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import re
from typing import Iterable, List

import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.multimethod import when
@@ -47,7 +44,6 @@ class CudaPackage(PackageBase):
        "87",
        "89",
        "90",
        "90a",
    )

    # FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -74,27 +70,6 @@ def cuda_flags(arch_list):
            for s in arch_list
        ]

    @staticmethod
    def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
        """Adds a decimal place to each CUDA arch.

        >>> compute_capabilities(['90', '90a'])
        ['9.0', '9.0a']

        Args:
            arch_list: A list of integer strings, optionally followed by a suffix.

        Returns:
            A list of float strings, optionally followed by a suffix
        """
        pattern = re.compile(r"(\d+)")
        capabilities = []
        for arch in arch_list:
            _, number, letter = re.split(pattern, arch)
            number = "{0:.1f}".format(float(number) / 10.0)
            capabilities.append(number + letter)
        return capabilities
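The docstring example above can be checked standalone; the helper logic here is copied verbatim from the hunk:

import re

def compute_capabilities(arch_list):
    pattern = re.compile(r"(\d+)")
    capabilities = []
    for arch in arch_list:
        # re.split with a capturing group returns ['', digits, suffix]
        _, number, letter = re.split(pattern, arch)
        number = "{0:.1f}".format(float(number) / 10.0)
        capabilities.append(number + letter)
    return capabilities

assert compute_capabilities(["90", "90a"]) == ["9.0", "9.0a"]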
    depends_on("cuda", when="+cuda")

    # CUDA version vs Architecture
@@ -163,7 +138,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -171,7 +146,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@19:", when="+cuda ^cuda@:12.6")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import inspect

import llnl.util.filesystem as fs

import spack.builder
@@ -80,7 +82,7 @@ def check_args(self):
    def build(self, pkg, spec, prefix):
        """Runs ``go build`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.go("build", *self.build_args)
            inspect.getmodule(pkg).go("build", *self.build_args)

    def install(self, pkg, spec, prefix):
        """Install built binaries into prefix bin."""
@@ -93,4 +95,4 @@ def install(self, pkg, spec, prefix):
    def check(self):
        """Run ``go test .`` in the source directory"""
        with fs.working_dir(self.build_directory):
            self.pkg.module.go("test", *self.check_args)
            inspect.getmodule(self.pkg).go("test", *self.check_args)

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import List

import llnl.util.filesystem as fs
@@ -102,12 +103,12 @@ def edit(self, pkg, spec, prefix):
    def build(self, pkg, spec, prefix):
        """Run "make" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.build_targets)
            inspect.getmodule(self.pkg).make(*self.build_targets)

    def install(self, pkg, spec, prefix):
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)
            inspect.getmodule(self.pkg).make(*self.install_targets)

    spack.builder.run_after("build")(execute_build_time_tests)

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
from typing import List

@@ -194,19 +195,19 @@ def meson(self, pkg, spec, prefix):
        options += self.std_meson_args
        options += self.meson_args()
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.meson(*options)
            inspect.getmodule(self.pkg).meson(*options)

    def build(self, pkg, spec, prefix):
        """Make the build targets"""
        options = ["-v"]
        options += self.build_targets
        with fs.working_dir(self.build_directory):
            pkg.module.ninja(*options)
            inspect.getmodule(self.pkg).ninja(*options)

    def install(self, pkg, spec, prefix):
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
            pkg.module.ninja(*self.install_targets)
            inspect.getmodule(self.pkg).ninja(*self.install_targets)

    spack.builder.run_after("build")(execute_build_time_tests)

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
from typing import List  # novm

import llnl.util.filesystem as fs
@@ -103,7 +104,7 @@ def msbuild_install_args(self):
    def build(self, pkg, spec, prefix):
        """Run "msbuild" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.msbuild(
            inspect.getmodule(self.pkg).msbuild(
                *self.std_msbuild_args,
                *self.msbuild_args(),
                self.define_targets(*self.build_targets),
@@ -113,6 +114,6 @@ def install(self, pkg, spec, prefix):
        """Run "msbuild" on the install targets specified by the builder.
        This is INSTALL by default"""
        with fs.working_dir(self.build_directory):
            pkg.module.msbuild(
            inspect.getmodule(self.pkg).msbuild(
                *self.msbuild_install_args(), self.define_targets(*self.install_targets)
            )

@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 from typing import List  # novm

 import llnl.util.filesystem as fs
@@ -131,7 +132,9 @@ def build(self, pkg, spec, prefix):
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
         with fs.working_dir(self.build_directory):
-            pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
+            inspect.getmodule(self.pkg).nmake(
+                *opts, *self.build_targets, ignore_quotes=self.ignore_quotes
+            )

     def install(self, pkg, spec, prefix):
         """Run "nmake" on the install targets specified by the builder.
@@ -143,4 +146,6 @@ def install(self, pkg, spec, prefix):
             opts.append("/F{}".format(self.makefile_name))
         opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
         with fs.working_dir(self.build_directory):
-            pkg.module.nmake(*opts, *self.install_targets, ignore_quotes=self.ignore_quotes)
+            inspect.getmodule(self.pkg).nmake(
+                *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
+            )
@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+
 import spack.builder
 import spack.package_base
 from spack.directives import build_system, extends
@@ -45,7 +47,7 @@ class OctaveBuilder(BaseBuilder):

     def install(self, pkg, spec, prefix):
         """Install the package from the archive file"""
-        pkg.module.octave(
+        inspect.getmodule(self.pkg).octave(
             "--quiet",
             "--norc",
             "--built-in-docstrings-file=/dev/null",
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Common utilities for managing intel oneapi packages."""
+import getpass
 import os
 import platform
 import shutil
@@ -12,7 +13,6 @@
 from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
 from llnl.util.link_tree import LinkTree

-import spack.util.path
 from spack.build_environment import dso_suffix
 from spack.directives import conflicts, license, redistribute, variant
 from spack.package_base import InstallError
@@ -99,7 +99,7 @@ def install_component(self, installer_path):
         # with other install depends on the userid. For root, we
         # delete the installercache before and after install. For
         # non root we redefine the HOME environment variable.
-        if spack.util.path.get_user() == "root":
+        if getpass.getuser() == "root":
             shutil.rmtree("/var/intel/installercache", ignore_errors=True)

         bash = Executable("bash")
@@ -122,7 +122,7 @@ def install_component(self, installer_path):
             self.prefix,
         )

-        if spack.util.path.get_user() == "root":
+        if getpass.getuser() == "root":
             shutil.rmtree("/var/intel/installercache", ignore_errors=True)

         # Some installers have a bug and do not return an error code when failing
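Both sides of the oneAPI hunks detect a root install before wiping the shared installer cache; only the helper used to get the user name changes. A small sketch of the pattern, using only the standard library (the cache path is the one from the diff):

import getpass
import shutil


def clear_installer_cache_if_root(cache_dir="/var/intel/installercache"):
    # Installs run as root share a cache that can poison later installs,
    # so it is removed; non-root users get a redefined HOME instead.
    if getpass.getuser() == "root":
        shutil.rmtree(cache_dir, ignore_errors=True)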
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 import os
 from typing import Iterable

@@ -133,7 +134,7 @@ def build_method(self):
     def build_executable(self):
         """Returns the executable method to build the perl package"""
         if self.build_method == "Makefile.PL":
-            build_executable = self.pkg.module.make
+            build_executable = inspect.getmodule(self.pkg).make
         elif self.build_method == "Build.PL":
             build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
         return build_executable
@@ -157,7 +158,7 @@ def configure(self, pkg, spec, prefix):
             options = ["Build.PL", "--install_base", prefix]
         options += self.configure_args()

-        pkg.module.perl(*options)
+        inspect.getmodule(self.pkg).perl(*options)

         # It is possible that the shebang in the Build script that is created from
         # Build.PL may be too long causing the build to fail. Patching the shebang
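`build_executable` above picks the driver for the two Perl build conventions: `make` for Makefile.PL distributions, and the generated `./Build` script for Build.PL ones. A standalone sketch of that dispatch, with the real `make` callable and `Executable` class passed in as parameters:

import os


def pick_build_executable(build_method, source_path, make, Executable):
    # Makefile.PL-based distributions are driven by make; Build.PL-based
    # ones ship their own ./Build script generated at configure time.
    if build_method == "Makefile.PL":
        return make
    elif build_method == "Build.PL":
        return Executable(os.path.join(source_path, "Build"))
    raise ValueError(f"unknown Perl build method: {build_method}")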
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import functools
+import inspect
 import operator
 import os
 import re
@@ -16,7 +17,7 @@
 import llnl.util.filesystem as fs
 import llnl.util.lang as lang
 import llnl.util.tty as tty
-from llnl.util.filesystem import HeaderList, LibraryList, join_path
+from llnl.util.filesystem import HeaderList, LibraryList

 import spack.builder
 import spack.config
@@ -119,12 +120,6 @@ def skip_modules(self) -> Iterable[str]:
         """
         return []

-    @property
-    def bindir(self) -> str:
-        """Path to Python package's bindir, bin on unix like OS's Scripts on Windows"""
-        windows = self.spec.satisfies("platform=windows")
-        return join_path(self.spec.prefix, "Scripts" if windows else "bin")
-
     def view_file_conflicts(self, view, merge_map):
         """Report all file conflicts, excepting special cases for python.
         Specifically, this does not report errors for duplicate
@@ -227,7 +222,7 @@ def test_imports(self) -> None:

         # Make sure we are importing the installed modules,
         # not the ones in the source directory
-        python = self.module.python
+        python = inspect.getmodule(self).python  # type: ignore[union-attr]
         for module in self.import_modules:
             with test_part(
                 self,
@@ -314,9 +309,9 @@ def get_external_python_for_prefix(self):
         )

         python_externals_detected = [
-            spec
-            for spec in python_externals_detection.get("python", [])
-            if spec.external_path == self.spec.external_path
+            d.spec
+            for d in python_externals_detection.get("python", [])
+            if d.prefix == self.spec.external_path
         ]
         if python_externals_detected:
             return python_externals_detected[0]
@@ -359,10 +354,18 @@ def homepage(cls) -> Optional[str]:  # type: ignore[override]
         return None

     @lang.classproperty
-    def url(cls) -> Optional[str]:
+    def urls(cls) -> Optional[List[str]]:
         if cls.pypi:
-            return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
-        return None
+            urls = [f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"]
+            assert cls.pypi.count("/") == 1, "PyPI class attribute must include a single slash"
+            name, file = cls.pypi.split("/")
+            name_dash_count = name.count("-")
+            if name_dash_count > 0:
+                # replace all but last dash with underscores for pypi.org listing changes
+                pypi = "/".join([name, file.replace("-", "_", name_dash_count)])
+                urls.append(f"https://files.pythonhosted.org/packages/source/{pypi[0]}/{pypi}")
+            return urls
+        return [None]

     @lang.classproperty
     def list_url(cls) -> Optional[str]:  # type: ignore[override]
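The `urls` classproperty added above also tries the underscore spelling that pypi.org may use for sdists whose project name contains dashes. A self-contained sketch of the derivation:

def pypi_source_urls(pypi: str) -> list:
    # pypi is "name/file", e.g. "my-pkg/my-pkg-1.0.tar.gz"
    assert pypi.count("/") == 1, "expected a single slash"
    urls = [f"https://files.pythonhosted.org/packages/source/{pypi[0]}/{pypi}"]
    name, file = pypi.split("/")
    dashes = name.count("-")
    if dashes:
        # pypi.org may list the sdist with underscores instead of dashes
        alt = "/".join([name, file.replace("-", "_", dashes)])
        urls.append(f"https://files.pythonhosted.org/packages/source/{alt[0]}/{alt}")
    return urls


print(pypi_source_urls("my-pkg/my-pkg-1.0.tar.gz"))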
@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+
 from llnl.util.filesystem import working_dir

 import spack.builder
@@ -64,17 +66,17 @@ def qmake_args(self):
     def qmake(self, pkg, spec, prefix):
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
-            pkg.module.qmake(*self.qmake_args())
+            inspect.getmodule(self.pkg).qmake(*self.qmake_args())

     def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with working_dir(self.build_directory):
-            pkg.module.make()
+            inspect.getmodule(self.pkg).make()

     def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with working_dir(self.build_directory):
-            pkg.module.make("install")
+            inspect.getmodule(self.pkg).make("install")

     def check(self):
         """Search the Makefile for a ``check:`` target and runs it if found."""
@@ -2,10 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 from typing import Optional, Tuple

 import llnl.util.lang as lang
 from llnl.util.filesystem import mkdirp

 from spack.directives import extends
@@ -37,7 +37,6 @@ def configure_vars(self):

     def install(self, pkg, spec, prefix):
         """Installs an R package."""
-        mkdirp(pkg.module.r_lib_dir)

         config_args = self.configure_args()
         config_vars = self.configure_vars()
@@ -45,14 +44,14 @@ def install(self, pkg, spec, prefix):
         args = ["--vanilla", "CMD", "INSTALL"]

         if config_args:
-            args.append(f"--configure-args={' '.join(config_args)}")
+            args.append("--configure-args={0}".format(" ".join(config_args)))

         if config_vars:
-            args.append(f"--configure-vars={' '.join(config_vars)}")
+            args.append("--configure-vars={0}".format(" ".join(config_vars)))

-        args.extend([f"--library={pkg.module.r_lib_dir}", self.stage.source_path])
+        args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path])

-        pkg.module.R(*args)
+        inspect.getmodule(self.pkg).R(*args)


 class RPackage(Package):
@@ -81,21 +80,27 @@ class RPackage(Package):
     @lang.classproperty
     def homepage(cls):
         if cls.cran:
-            return f"https://cloud.r-project.org/package={cls.cran}"
+            return "https://cloud.r-project.org/package=" + cls.cran
         elif cls.bioc:
-            return f"https://bioconductor.org/packages/{cls.bioc}"
+            return "https://bioconductor.org/packages/" + cls.bioc

     @lang.classproperty
     def url(cls):
         if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
+            return (
+                "https://cloud.r-project.org/src/contrib/"
+                + cls.cran
+                + "_"
+                + str(list(cls.versions)[0])
+                + ".tar.gz"
+            )

     @lang.classproperty
     def list_url(cls):
         if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+            return "https://cloud.r-project.org/src/contrib/Archive/" + cls.cran + "/"

     @property
     def git(self):
         if self.bioc:
-            return f"https://git.bioconductor.org/packages/{self.bioc}"
+            return "https://git.bioconductor.org/packages/" + self.bioc
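The `RPackage` classproperties above derive every URL from a single `cran` (or `bioc`) attribute. A quick standalone sketch of the CRAN case (the version argument stands in for `list(cls.versions)[0]`):

def cran_urls(cran: str, newest_version: str) -> dict:
    base = "https://cloud.r-project.org"
    return {
        "homepage": f"{base}/package={cran}",
        # The current release lives in src/contrib; older ones in the Archive
        "url": f"{base}/src/contrib/{cran}_{newest_version}.tar.gz",
        "list_url": f"{base}/src/contrib/Archive/{cran}/",
    }


print(cran_urls("ggplot2", "3.5.1"))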
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import glob
+import inspect

 import spack.builder
 import spack.package_base
@@ -51,10 +52,10 @@ def build(self, pkg, spec, prefix):
         gemspecs = glob.glob("*.gemspec")
         rakefiles = glob.glob("Rakefile")
         if gemspecs:
-            pkg.module.gem("build", "--norc", gemspecs[0])
+            inspect.getmodule(self.pkg).gem("build", "--norc", gemspecs[0])
         elif rakefiles:
-            jobs = pkg.module.make_jobs
-            pkg.module.rake("package", "-j{0}".format(jobs))
+            jobs = inspect.getmodule(self.pkg).make_jobs
+            inspect.getmodule(self.pkg).rake("package", "-j{0}".format(jobs))
         else:
             # Some Ruby packages only ship `*.gem` files, so nothing to build
             pass
@@ -69,6 +70,6 @@ def install(self, pkg, spec, prefix):
         # if --install-dir is not used, GEM_PATH is deleted from the
         # environement, and Gems required to build native extensions will
         # not be found. Those extensions are built during `gem install`.
-        pkg.module.gem(
+        inspect.getmodule(self.pkg).gem(
             "install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0]
         )
@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+
 import spack.builder
 import spack.package_base
 from spack.directives import build_system, depends_on
@@ -61,7 +63,8 @@ def build_args(self, spec, prefix):

     def build(self, pkg, spec, prefix):
         """Build the package."""
-        pkg.module.scons(*self.build_args(spec, prefix))
+        args = self.build_args(spec, prefix)
+        inspect.getmodule(self.pkg).scons(*args)

     def install_args(self, spec, prefix):
         """Arguments to pass to install."""
@@ -69,7 +72,9 @@ def install_args(self, spec, prefix):

     def install(self, pkg, spec, prefix):
         """Install the package."""
-        pkg.module.scons("install", *self.install_args(spec, prefix))
+        args = self.install_args(spec, prefix)
+
+        inspect.getmodule(self.pkg).scons("install", *args)

     def build_test(self):
         """Run unit tests after build.
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 import os
 import re

@@ -85,13 +86,14 @@ def import_modules(self):

     def python(self, *args, **kwargs):
         """The python ``Executable``."""
-        self.pkg.module.python(*args, **kwargs)
+        inspect.getmodule(self).python(*args, **kwargs)

     def test_imports(self):
         """Attempts to import modules of the installed package."""

         # Make sure we are importing the installed modules,
         # not the ones in the source directory
+        python = inspect.getmodule(self).python
         for module in self.import_modules:
             with spack.install_test.test_part(
                 self,
@@ -99,7 +101,7 @@ def test_imports(self):
                 purpose="checking import of {0}".format(module),
                 work_dir="spack-test",
             ):
-                self.python("-c", "import {0}".format(module))
+                python("-c", "import {0}".format(module))


 @spack.builder.builder("sip")
@@ -134,7 +136,7 @@ def configure(self, pkg, spec, prefix):
         """Configure the package."""

         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
-        args = ["--verbose", "--target-dir", pkg.module.python_platlib]
+        args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
         args.extend(self.configure_args())

         # https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766
@@ -153,7 +155,7 @@ def build(self, pkg, spec, prefix):
         args = self.build_args()

         with working_dir(self.build_directory):
-            pkg.module.make(*args)
+            inspect.getmodule(self.pkg).make(*args)

     def build_args(self):
         """Arguments to pass to build."""
@@ -164,7 +166,7 @@ def install(self, pkg, spec, prefix):
         args = self.install_args()

         with working_dir(self.build_directory):
-            pkg.module.make("install", *args)
+            inspect.getmodule(self.pkg).make("install", *args)

     def install_args(self):
         """Arguments to pass to install."""
@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+
 from llnl.util.filesystem import working_dir

 import spack.builder
@@ -88,11 +90,11 @@ def build_directory(self):

     def python(self, *args, **kwargs):
         """The python ``Executable``."""
-        self.pkg.module.python(*args, **kwargs)
+        inspect.getmodule(self.pkg).python(*args, **kwargs)

     def waf(self, *args, **kwargs):
         """Runs the waf ``Executable``."""
-        jobs = self.pkg.module.make_jobs
+        jobs = inspect.getmodule(self.pkg).make_jobs

         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
@@ -6,12 +6,12 @@
 import collections.abc
 import copy
 import functools
+import inspect
 from typing import List, Optional, Tuple

 from llnl.util import lang

 import spack.build_environment
-import spack.multimethod

 #: Builder classes, as registered by the "builder" decorator
 BUILDER_CLS = {}
@@ -96,10 +96,11 @@ class hierarchy (look at AspellDictPackage for an example of that)
     Args:
         pkg (spack.package_base.PackageBase): package object for which we need a builder
     """
+    package_module = inspect.getmodule(pkg)
     package_buildsystem = buildsystem_name(pkg)
     default_builder_cls = BUILDER_CLS[package_buildsystem]
     builder_cls_name = default_builder_cls.__name__
-    builder_cls = getattr(pkg.module, builder_cls_name, None)
+    builder_cls = getattr(package_module, builder_cls_name, None)
    if builder_cls:
        return builder_cls(pkg)

@@ -294,11 +295,7 @@ def _decorator(fn):
     return _decorator


-class BuilderMeta(
-    PhaseCallbacksMeta,
-    spack.multimethod.MultiMethodMeta,
-    type(collections.abc.Sequence),  # type: ignore
-):
+class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)):  # type: ignore
     pass
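The builder lookup above prefers a builder class defined in the package's own module over the registered default. A runnable sketch of that override mechanism (the registry and classes here are hypothetical):

import inspect

BUILDER_CLS = {}  # build-system name -> default builder class (hypothetical registry)


class GenericBuilder:
    def __init__(self, pkg):
        self.pkg = pkg


BUILDER_CLS["generic"] = GenericBuilder


def create_builder(pkg, buildsystem: str):
    default_cls = BUILDER_CLS[buildsystem]
    # A package module may define a class with the same name as the default
    # builder; if it does, that definition takes precedence.
    override = getattr(inspect.getmodule(pkg), default_cls.__name__, None)
    return (override or default_cls)(pkg)


class FakePkg:
    pass


print(type(create_builder(FakePkg(), "generic")).__name__)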
@@ -9,11 +9,11 @@

 import llnl.util.lang
 from llnl.util.filesystem import mkdirp
+from llnl.util.symlink import symlink

 import spack.config
 import spack.error
 import spack.fetch_strategy
 import spack.mirror
 import spack.paths
-import spack.util.file_cache
 import spack.util.path
@@ -74,6 +74,23 @@ def store(self, fetcher, relative_dest):
         mkdirp(os.path.dirname(dst))
         fetcher.archive(dst)

+    def symlink(self, mirror_ref):
+        """Symlink a human readible path in our mirror to the actual
+        storage location."""
+
+        cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
+        storage_path = os.path.join(self.root, mirror_ref.storage_path)
+        relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
+
+        if not os.path.exists(cosmetic_path):
+            if os.path.lexists(cosmetic_path):
+                # In this case the link itself exists but it is broken: remove
+                # it and recreate it (in order to fix any symlinks broken prior
+                # to https://github.com/spack/spack/pull/13908)
+                os.unlink(cosmetic_path)
+            mkdirp(os.path.dirname(cosmetic_path))
+            symlink(relative_dst, cosmetic_path)
+

 #: Spack's local cache for downloaded source archives
 FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
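The restored `symlink` method computes the link target relative to the link's own directory, which keeps the mirror relocatable. A runnable sketch of the path arithmetic (POSIX only, temporary files):

import os
import tempfile

with tempfile.TemporaryDirectory() as root:
    storage_path = os.path.join(root, "_archive", "ab", "abc123.tar.gz")
    cosmetic_path = os.path.join(root, "zlib", "zlib-1.3.tar.gz")

    os.makedirs(os.path.dirname(storage_path))
    open(storage_path, "w").close()
    os.makedirs(os.path.dirname(cosmetic_path))

    # The target is computed relative to the link's directory, so moving
    # the whole mirror root keeps the link valid.
    rel = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
    os.symlink(rel, cosmetic_path)
    print(rel, "->", os.path.realpath(cosmetic_path))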
@@ -38,7 +38,6 @@
 import spack.paths
 import spack.repo
 import spack.spec
-import spack.stage
 import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml
@@ -1108,10 +1107,9 @@ def main_script_replacements(cmd):
     if cdash_handler and cdash_handler.auth_token:
         try:
             cdash_handler.populate_buildgroup(all_job_names)
-        except (SpackError, HTTPError, URLError, TimeoutError) as err:
+        except (SpackError, HTTPError, URLError) as err:
             tty.warn(f"Problem populating buildgroup: {err}")
-    elif cdash_config:
-        # warn only if there was actually a CDash configuration.
+    else:
         tty.warn("Unable to populate buildgroup without CDash credentials")

     service_job_retries = {
@@ -1372,6 +1370,15 @@ def can_verify_binaries():
     return len(gpg_util.public_keys()) >= 1


+def _push_to_build_cache(spec: spack.spec.Spec, sign_binaries: bool, mirror_url: str) -> None:
+    """Unchecked version of the public API, for easier mocking"""
+    bindist.push_or_raise(
+        spec,
+        spack.mirror.Mirror.from_url(mirror_url).push_url,
+        bindist.PushOptions(force=True, unsigned=not sign_binaries),
+    )
+
+
 def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
     """Push one or more binary packages to the mirror.

@@ -1382,15 +1389,20 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
         sign_binaries: If True, spack will attempt to sign binary package before pushing.
     """
     tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
-    signing_key = bindist.select_signing_key() if sign_binaries else None
-    mirror = spack.mirror.Mirror.from_url(mirror_url)
     try:
-        with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
-            uploader.push_or_raise([spec])
+        _push_to_build_cache(spec, sign_binaries, mirror_url)
         return True
     except bindist.PushToBuildCacheError as e:
-        tty.error(f"Problem writing to {mirror_url}: {e}")
+        tty.error(str(e))
         return False
+    except Exception as e:
+        # TODO (zackgalbreath): write an adapter for boto3 exceptions so we can catch a specific
+        # exception instead of parsing str(e)...
+        msg = str(e)
+        if any(x in msg for x in ["Access Denied", "InvalidAccessKeyId"]):
+            tty.error(f"Permission problem writing to {mirror_url}: {msg}")
+            return False
+        raise


 def remove_other_mirrors(mirrors_to_keep, scope=None):
@@ -1436,6 +1448,10 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
         job_log_dir: path into which build log should be copied
     """
     tty.debug(f"job spec: {job_spec}")
+    if not job_spec:
+        msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
+        tty.error(msg)
+        return

     try:
         pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
@@ -2067,7 +2083,7 @@ def read_broken_spec(broken_spec_url):
     """
     try:
         _, _, fs = web_util.read_from_url(broken_spec_url)
-    except web_util.SpackWebError:
+    except (URLError, web_util.SpackWebError, HTTPError):
         tty.warn(f"Unable to read broken spec from {broken_spec_url}")
         return None
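`push_to_build_cache` above is a thin checked wrapper around an "unchecked" helper, which exists mainly so tests can monkeypatch the helper. A generic sketch of the pattern (names are illustrative, not the Spack API):

class PushError(Exception):
    pass


def _do_push(item, destination):
    """Unchecked worker: raises PushError on failure. Tests patch this symbol."""
    raise PushError(f"cannot reach {destination}")


def push(item, destination) -> bool:
    # Checked wrapper: expected failures become a False return value,
    # anything else propagates to the caller.
    try:
        _do_push(item, destination)
        return True
    except PushError as e:
        print(f"push failed: {e}")
        return False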
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import argparse
-import importlib
 import os
 import re
 import sys
@@ -115,8 +114,8 @@ def get_module(cmd_name):

     try:
         # Try to import the command from the built-in directory
-        module_name = f"{__name__}.{pname}"
-        module = importlib.import_module(module_name)
+        module_name = "%s.%s" % (__name__, pname)
+        module = __import__(module_name, fromlist=[pname, SETUP_PARSER, DESCRIPTION], level=0)
         tty.debug("Imported {0} from built-in commands".format(pname))
     except ImportError:
         module = spack.extensions.get_module(cmd_name)
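The hunk above trades `importlib.import_module` for the lower-level `__import__` with a non-empty `fromlist`; both return the leaf submodule rather than the top-level package. A tiny demonstration against the standard library:

import importlib

mod_a = importlib.import_module("os.path")
# A non-empty fromlist makes __import__ return the submodule as well.
mod_b = __import__("os.path", fromlist=["join"], level=0)
assert mod_a is mod_b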
@@ -115,11 +115,15 @@ def audit(parser, args):
 def _process_reports(reports):
     for check, errors in reports:
         if errors:
-            status = f"{len(errors)} issue{'' if len(errors) == 1 else 's'} found"
-            print(cl.colorize(f"{check}: @*r{{{status}}}"))
-            numdigits = len(str(len(errors)))
+            msg = "{0}: {1} issue{2} found".format(
+                check, len(errors), "" if len(errors) == 1 else "s"
+            )
+            header = "@*b{" + msg + "}"
+            print(cl.colorize(header))
             for idx, error in enumerate(errors):
-                print(f"{idx + 1:>{numdigits}}. {error}")
+                print(str(idx + 1) + ". " + str(error))
             raise SystemExit(1)
         else:
-            print(cl.colorize(f"{check}: @*g{{passed}}"))
+            msg = "{0}: 0 issues found.".format(check)
+            header = "@*b{" + msg + "}"
+            print(cl.colorize(header))
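The removed branch right-aligns issue numbers using a field width derived from the total count. A standalone sketch of that formatting trick:

errors = [f"issue {c}" for c in "abcdefghijkl"]  # 12 entries

numdigits = len(str(len(errors)))
for idx, error in enumerate(errors):
    # Width-aware alignment keeps "9." and "10." flush on the right.
    print(f"{idx + 1:>{numdigits}}. {error}")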
@@ -3,24 +3,28 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import argparse
+import copy
 import glob
+import hashlib
 import json
+import multiprocessing
+import multiprocessing.pool
 import os
 import shutil
 import sys
 import tempfile
-from typing import List, Tuple
+from typing import Dict, List, Optional, Tuple, Union

 import llnl.util.tty as tty
 from llnl.string import plural
-from llnl.util.lang import elide_list, stable_partition
+from llnl.util.lang import elide_list

 import spack.binary_distribution as bindist
 import spack.cmd
 import spack.config
-import spack.deptypes as dt
 import spack.environment as ev
 import spack.error
+import spack.hash_types as ht
 import spack.mirror
 import spack.oci.oci
 import spack.oci.opener
@@ -31,12 +35,28 @@
 import spack.store
 import spack.user_environment
-import spack.util.parallel
+import spack.util.crypto
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack import traverse
+from spack.build_environment import determine_number_of_jobs
 from spack.cmd import display_specs
 from spack.cmd.common import arguments
+from spack.oci.image import (
+    Digest,
+    ImageReference,
+    default_config,
+    default_index_tag,
+    default_manifest,
+    default_tag,
+    tag_is_spec,
+)
+from spack.oci.oci import (
+    copy_missing_layers_with_retry,
+    get_manifest_and_config_with_retry,
+    list_tags,
+    upload_blob_with_retry,
+    upload_manifest_with_retry,
+)
 from spack.spec import Spec, save_dependency_specfiles

 description = "create, download and install binary packages"
@@ -92,17 +112,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
         "Alternatively, one can decide to build a cache for only the package or only the "
         "dependencies",
     )
-    with_or_without_build_deps = push.add_mutually_exclusive_group()
-    with_or_without_build_deps.add_argument(
-        "--with-build-dependencies",
-        action="store_true",
-        help="include build dependencies in the buildcache",
-    )
-    with_or_without_build_deps.add_argument(
-        "--without-build-dependencies",
-        action="store_true",
-        help="exclude build dependencies from the buildcache",
-    )
     push.add_argument(
         "--fail-fast",
         action="store_true",
@@ -320,6 +329,39 @@ def _format_spec(spec: Spec) -> str:
     return spec.cformat("{name}{@version}{/hash:7}")


+def _progress(i: int, total: int):
+    if total > 1:
+        digits = len(str(total))
+        return f"[{i+1:{digits}}/{total}] "
+    return ""
+
+
+class NoPool:
+    def map(self, func, args):
+        return [func(a) for a in args]
+
+    def starmap(self, func, args):
+        return [func(*a) for a in args]
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args):
+        pass
+
+
+MaybePool = Union[multiprocessing.pool.Pool, NoPool]
+
+
+def _make_pool() -> MaybePool:
+    """Can't use threading because it's unsafe, and can't use spawned processes because of globals.
+    That leaves only forking"""
+    if multiprocessing.get_start_method() == "fork":
+        return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
+    else:
+        return NoPool()
+
+
 def _skip_no_redistribute_for_public(specs):
     remaining_specs = list()
     removed_specs = list()
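`_make_pool` only forks where the platform start method allows it, and `NoPool` keeps call sites uniform otherwise. A compact runnable sketch of the same duck-typed fallback:

import multiprocessing
import multiprocessing.pool


class NoPool:
    """Serial stand-in exposing the subset of the Pool API that is called."""

    def map(self, func, args):
        return [func(a) for a in args]

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        pass


def make_pool():
    # Forked workers inherit module globals; spawned ones would not, so
    # fall back to serial execution wherever fork is unavailable.
    if multiprocessing.get_start_method() == "fork":
        return multiprocessing.pool.Pool(2)
    return NoPool()


if __name__ == "__main__":
    with make_pool() as pool:
        print(pool.map(abs, [-1, -2, 3]))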
@@ -339,45 +381,6 @@ def _skip_no_redistribute_for_public(specs):
     return remaining_specs


-class PackagesAreNotInstalledError(spack.error.SpackError):
-    """Raised when a list of specs is not installed but picked to be packaged."""
-
-    def __init__(self, specs: List[Spec]):
-        super().__init__(
-            "Cannot push non-installed packages",
-            ", ".join(elide_list([_format_spec(s) for s in specs], 5)),
-        )
-
-
-class PackageNotInstalledError(spack.error.SpackError):
-    """Raised when a spec is not installed but picked to be packaged."""
-
-
-def _specs_to_be_packaged(
-    requested: List[Spec], things_to_install: str, build_deps: bool
-) -> List[Spec]:
-    """Collect all non-external with or without roots and dependencies"""
-    if "dependencies" not in things_to_install:
-        deptype = dt.NONE
-    elif build_deps:
-        deptype = dt.ALL
-    else:
-        deptype = dt.RUN | dt.LINK | dt.TEST
-    specs = [
-        s
-        for s in traverse.traverse_nodes(
-            requested,
-            root="package" in things_to_install,
-            deptype=deptype,
-            order="breadth",
-            key=traverse.by_dag_hash,
-        )
-        if not s.external
-    ]
-    specs.reverse()
-    return specs
-
-
 def push_fn(args):
     """create a binary package and push it to a mirror"""
     if args.spec_file:
@@ -391,8 +394,13 @@ def push_fn(args):
     else:
         roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

-    mirror = args.mirror
-    assert isinstance(mirror, spack.mirror.Mirror)
+    mirror: spack.mirror.Mirror = args.mirror
+
+    # Check if this is an OCI image.
+    try:
+        target_image = spack.oci.oci.image_from_mirror(mirror)
+    except ValueError:
+        target_image = None

     push_url = mirror.push_url

@@ -403,52 +411,92 @@ def push_fn(args):
     unsigned = not (args.key or args.signed)

     # For OCI images, we require dependencies to be pushed for now.
-    if mirror.push_url.startswith("oci://") and not unsigned:
-        tty.warn(
-            "Code signing is currently not supported for OCI images. "
-            "Use --unsigned to silence this warning."
-        )
-        unsigned = True
-
-    # Select a signing key, or None if unsigned.
-    signing_key = None if unsigned else (args.key or bindist.select_signing_key())
-
-    specs = _specs_to_be_packaged(
+    if target_image:
+        if "dependencies" not in args.things_to_install:
+            tty.die("Dependencies must be pushed for OCI images.")
+        if not unsigned:
+            tty.warn(
+                "Code signing is currently not supported for OCI images. "
+                "Use --unsigned to silence this warning."
+            )
+
+    # This is a list of installed, non-external specs.
+    specs = bindist.specs_to_be_packaged(
         roots,
-        things_to_install=args.things_to_install,
-        build_deps=args.with_build_dependencies or not args.without_build_dependencies,
+        root="package" in args.things_to_install,
+        dependencies="dependencies" in args.things_to_install,
     )

     if not args.private:
         specs = _skip_no_redistribute_for_public(specs)

     # When pushing multiple specs, print the url once ahead of time, as well as how
     # many specs are being pushed.
     if len(specs) > 1:
         tty.info(f"Selected {len(specs)} specs to push to {push_url}")

-    # Pushing not installed specs is an error. Either fail fast or populate the error list and
-    # push installed package in best effort mode.
-    failed: List[Tuple[Spec, BaseException]] = []
-    with spack.store.STORE.db.read_transaction():
-        if any(not s.installed for s in specs):
-            specs, not_installed = stable_partition(specs, lambda s: s.installed)
-            if args.fail_fast:
-                raise PackagesAreNotInstalledError(not_installed)
-            else:
-                failed.extend(
-                    (s, PackageNotInstalledError("package not installed")) for s in not_installed
-                )
-
-    with bindist.make_uploader(
-        mirror=mirror,
-        force=args.force,
-        update_index=args.update_index,
-        signing_key=signing_key,
-        base_image=args.base_image,
-    ) as uploader:
-        skipped, upload_errors = uploader.push(specs=specs)
-        failed.extend(upload_errors)
-        if not upload_errors and args.tag:
-            uploader.tag(args.tag, roots)
+    failed = []
+
+    # TODO: unify this logic in the future.
+    if target_image:
+        base_image = ImageReference.from_string(args.base_image) if args.base_image else None
+        with tempfile.TemporaryDirectory(
+            dir=spack.stage.get_stage_root()
+        ) as tmpdir, _make_pool() as pool:
+            skipped, base_images, checksums = _push_oci(
+                target_image=target_image,
+                base_image=base_image,
+                installed_specs_with_deps=specs,
+                force=args.force,
+                tmpdir=tmpdir,
+                pool=pool,
+            )
+
+            # Apart from creating manifests for each individual spec, we allow users to create a
+            # separate image tag for all root specs and their runtime dependencies.
+            if args.tag:
+                tagged_image = target_image.with_tag(args.tag)
+                # _push_oci may not populate base_images if binaries were already in the registry
+                for spec in roots:
+                    _update_base_images(
+                        base_image=base_image,
+                        target_image=target_image,
+                        spec=spec,
+                        base_image_cache=base_images,
+                    )
+                _put_manifest(base_images, checksums, tagged_image, tmpdir, None, None, *roots)
+                tty.info(f"Tagged {tagged_image}")
+
+    else:
+        skipped = []
+
+        for i, spec in enumerate(specs):
+            try:
+                bindist.push_or_raise(
+                    spec,
+                    push_url,
+                    bindist.PushOptions(
+                        force=args.force,
+                        unsigned=unsigned,
+                        key=args.key,
+                        regenerate_index=args.update_index,
+                    ),
+                )
+
+                msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
+                if len(specs) == 1:
+                    msg += f" to {push_url}"
+                tty.info(msg)
+
+            except bindist.NoOverwriteException:
+                skipped.append(_format_spec(spec))
+
+            # Catch any other exception unless the fail fast option is set
+            except Exception as e:
+                if args.fail_fast or isinstance(
+                    e, (bindist.PickKeyException, bindist.NoKeyException)
+                ):
+                    raise
+                failed.append((_format_spec(spec), e))

     if skipped:
         if len(specs) == 1:
@@ -460,7 +508,7 @@ def push_fn(args):
             "The following {} specs were skipped as they already exist in the buildcache:\n"
             "    {}\n"
             "    Use --force to overwrite them.".format(
-                len(skipped), ", ".join(elide_list([_format_spec(s) for s in skipped], 5))
+                len(skipped), ", ".join(elide_list(skipped, 5))
             )
         )

@@ -471,16 +519,390 @@ def push_fn(args):
         raise spack.error.SpackError(
             f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
             "\n".join(
-                elide_list(
-                    [
-                        f"    {_format_spec(spec)}: {e.__class__.__name__}: {e}"
-                        for spec, e in failed
-                    ],
-                    5,
-                )
+                elide_list([f"    {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
             ),
         )

+    # Update the index if requested
+    # TODO: remove update index logic out of bindist; should be once after all specs are pushed
+    # not once per spec.
+    if target_image and len(skipped) < len(specs) and args.update_index:
+        with tempfile.TemporaryDirectory(
+            dir=spack.stage.get_stage_root()
+        ) as tmpdir, _make_pool() as pool:
+            _update_index_oci(target_image, tmpdir, pool)
+
+
+def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
+    """Get the spack tarball layer digests and size if it exists"""
+    try:
+        manifest, config = get_manifest_and_config_with_retry(image_ref)
+
+        return spack.oci.oci.Blob(
+            compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
+            uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
+            size=manifest["layers"][-1]["size"],
+        )
+    except Exception:
+        return None
+
+
+def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
+    filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
+
+    # Create an oci.image.layer aka tarball of the package
+    compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
+
+    blob = spack.oci.oci.Blob(
+        Digest.from_sha256(compressed_tarfile_checksum),
+        Digest.from_sha256(tarfile_checksum),
+        os.path.getsize(filename),
+    )
+
+    # Upload the blob
+    upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)
+
+    # delete the file
+    os.unlink(filename)
+
+    return blob
+
+
+def _retrieve_env_dict_from_config(config: dict) -> dict:
+    """Retrieve the environment variables from the image config file.
+    Sets a default value for PATH if it is not present.
+
+    Args:
+        config (dict): The image config file.
+
+    Returns:
+        dict: The environment variables.
+    """
+    env = {"PATH": "/bin:/usr/bin"}
+
+    if "Env" in config.get("config", {}):
+        for entry in config["config"]["Env"]:
+            key, value = entry.split("=", 1)
+            env[key] = value
+    return env
+
+
+def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
+    name = spec.target.family.name
+    name_map = {"aarch64": "arm64", "x86_64": "amd64"}
+    return name_map.get(name, name)
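`_retrieve_env_dict_from_config` flattens the image config's `Env` list into a dict and defaults `PATH`. A tiny runnable sketch with a made-up config:

def env_from_image_config(config: dict) -> dict:
    env = {"PATH": "/bin:/usr/bin"}
    for entry in config.get("config", {}).get("Env", []):
        # Entries look like "KEY=VALUE"; values may themselves contain '='.
        key, value = entry.split("=", 1)
        env[key] = value
    return env


print(env_from_image_config({"config": {"Env": ["LANG=C.UTF-8", "OPTS=a=b"]}}))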
+def _put_manifest(
+    base_images: Dict[str, Tuple[dict, dict]],
+    checksums: Dict[str, spack.oci.oci.Blob],
+    image_ref: ImageReference,
+    tmpdir: str,
+    extra_config: Optional[dict],
+    annotations: Optional[dict],
+    *specs: spack.spec.Spec,
+):
+    architecture = _archspec_to_gooarch(specs[0])
+
+    dependencies = list(
+        reversed(
+            list(
+                s
+                for s in traverse.traverse_nodes(
+                    specs, order="topo", deptype=("link", "run"), root=True
+                )
+                if not s.external
+            )
+        )
+    )
+
+    base_manifest, base_config = base_images[architecture]
+    env = _retrieve_env_dict_from_config(base_config)
+
+    # If the base image uses `vnd.docker.distribution.manifest.v2+json`, then we use that too.
+    # This is because Singularity / Apptainer is very strict about not mixing them.
+    base_manifest_mediaType = base_manifest.get(
+        "mediaType", "application/vnd.oci.image.manifest.v1+json"
+    )
+    use_docker_format = (
+        base_manifest_mediaType == "application/vnd.docker.distribution.manifest.v2+json"
+    )
+
+    spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)
+
+    # Create an oci.image.config file
+    config = copy.deepcopy(base_config)
+
+    # Add the diff ids of the dependencies
+    for s in dependencies:
+        config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
+
+    # Set the environment variables
+    config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
+
+    if extra_config:
+        # From the OCI v1.0 spec:
+        # > Any extra fields in the Image JSON struct are considered implementation
+        # > specific and MUST be ignored by any implementations which are unable to
+        # > interpret them.
+        config.update(extra_config)
+
+    config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
+
+    with open(config_file, "w") as f:
+        json.dump(config, f, separators=(",", ":"))
+
+    config_file_checksum = Digest.from_sha256(
+        spack.util.crypto.checksum(hashlib.sha256, config_file)
+    )
+
+    # Upload the config file
+    upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
+
+    manifest = {
+        "mediaType": base_manifest_mediaType,
+        "schemaVersion": 2,
+        "config": {
+            "mediaType": base_manifest["config"]["mediaType"],
+            "digest": str(config_file_checksum),
+            "size": os.path.getsize(config_file),
+        },
+        "layers": [
+            *(layer for layer in base_manifest["layers"]),
+            *(
+                {
+                    "mediaType": (
+                        "application/vnd.docker.image.rootfs.diff.tar.gzip"
+                        if use_docker_format
+                        else "application/vnd.oci.image.layer.v1.tar+gzip"
+                    ),
+                    "digest": str(checksums[s.dag_hash()].compressed_digest),
+                    "size": checksums[s.dag_hash()].size,
+                }
+                for s in dependencies
+            ),
+        ],
+    }
+
+    if not use_docker_format and annotations:
+        manifest["annotations"] = annotations
+
+    # Finally upload the manifest
+    upload_manifest_with_retry(image_ref, manifest=manifest)
+
+    # delete the config file
+    os.unlink(config_file)
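`_put_manifest` mirrors the base image's manifest media type because Singularity/Apptainer reject images that mix Docker and OCI media types. A small sketch of that decision:

DOCKER_MANIFEST = "application/vnd.docker.distribution.manifest.v2+json"
OCI_MANIFEST = "application/vnd.oci.image.manifest.v1+json"


def pick_layer_media_type(base_manifest: dict) -> str:
    # Follow the base image: mixing Docker and OCI media types within one
    # image breaks strict consumers such as Singularity / Apptainer.
    media_type = base_manifest.get("mediaType", OCI_MANIFEST)
    if media_type == DOCKER_MANIFEST:
        return "application/vnd.docker.image.rootfs.diff.tar.gzip"
    return "application/vnd.oci.image.layer.v1.tar+gzip"


print(pick_layer_media_type({"mediaType": DOCKER_MANIFEST}))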
+def _update_base_images(
+    *,
+    base_image: Optional[ImageReference],
+    target_image: ImageReference,
+    spec: spack.spec.Spec,
+    base_image_cache: Dict[str, Tuple[dict, dict]],
+):
+    """For a given spec and base image, copy the missing layers of the base image with matching
+    arch to the registry of the target image. If no base image is specified, create a dummy
+    manifest and config file."""
+    architecture = _archspec_to_gooarch(spec)
+    if architecture in base_image_cache:
+        return
+    if base_image is None:
+        base_image_cache[architecture] = (
+            default_manifest(),
+            default_config(architecture, "linux"),
+        )
+    else:
+        base_image_cache[architecture] = copy_missing_layers_with_retry(
+            base_image, target_image, architecture
+        )
+
+
+def _push_oci(
+    *,
+    target_image: ImageReference,
+    base_image: Optional[ImageReference],
+    installed_specs_with_deps: List[Spec],
+    tmpdir: str,
+    pool: MaybePool,
+    force: bool = False,
+) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
+    """Push specs to an OCI registry
+
+    Args:
+        image_ref: The target OCI image
+        base_image: Optional base image, which will be copied to the target registry.
+        installed_specs_with_deps: The installed specs to push, excluding externals,
+            including deps, ordered from roots to leaves.
+        force: Whether to overwrite existing layers and manifests in the buildcache.
+
+    Returns:
+        A tuple consisting of the list of skipped specs already in the build cache,
+        a dictionary mapping architectures to base image manifests and configs,
+        and a dictionary mapping each spec's dag hash to a blob.
+    """
+
+    # Reverse the order
+    installed_specs_with_deps = list(reversed(installed_specs_with_deps))
+
+    # Spec dag hash -> blob
+    checksums: Dict[str, spack.oci.oci.Blob] = {}
+
+    # arch -> (manifest, config)
+    base_images: Dict[str, Tuple[dict, dict]] = {}
+
+    # Specs not uploaded because they already exist
+    skipped = []
+
+    if not force:
+        tty.info("Checking for existing specs in the buildcache")
+        to_be_uploaded = []
+
+        tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
+        available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
+
+        for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
+            if maybe_blob is not None:
+                checksums[spec.dag_hash()] = maybe_blob
+                skipped.append(_format_spec(spec))
+            else:
+                to_be_uploaded.append(spec)
+    else:
+        to_be_uploaded = installed_specs_with_deps
+
+    if not to_be_uploaded:
+        return skipped, base_images, checksums
+
+    tty.info(
+        f"{len(to_be_uploaded)} specs need to be pushed to "
+        f"{target_image.domain}/{target_image.name}"
+    )
+
+    # Upload blobs
+    new_blobs = pool.starmap(
+        _push_single_spack_binary_blob, ((target_image, spec, tmpdir) for spec in to_be_uploaded)
+    )
+
+    # And update the spec to blob mapping
+    for spec, blob in zip(to_be_uploaded, new_blobs):
+        checksums[spec.dag_hash()] = blob
+
+    # Copy base images if necessary
+    for spec in to_be_uploaded:
+        _update_base_images(
+            base_image=base_image,
+            target_image=target_image,
+            spec=spec,
+            base_image_cache=base_images,
+        )
+
+    def extra_config(spec: Spec):
+        spec_dict = spec.to_dict(hash=ht.dag_hash)
+        spec_dict["buildcache_layout_version"] = 1
+        spec_dict["binary_cache_checksum"] = {
+            "hash_algorithm": "sha256",
+            "hash": checksums[spec.dag_hash()].compressed_digest.digest,
+        }
+        return spec_dict
+
+    # Upload manifests
+    tty.info("Uploading manifests")
+    pool.starmap(
+        _put_manifest,
+        (
+            (
+                base_images,
+                checksums,
+                target_image.with_tag(default_tag(spec)),
+                tmpdir,
+                extra_config(spec),
+                {"org.opencontainers.image.description": spec.format()},
+                spec,
+            )
+            for spec in to_be_uploaded
+        ),
+    )
+
+    # Print the image names of the top-level specs
+    for spec in to_be_uploaded:
+        tty.info(f"Pushed {_format_spec(spec)} to {target_image.with_tag(default_tag(spec))}")
+
+    return skipped, base_images, checksums
+
+
+def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
+    # Don't allow recursion here, since Spack itself always uploads
+    # vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
+    _, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)
+
+    # Do very basic validation: if "spec" is a key in the config, it
+    # must be a Spec object too.
+    return config if "spec" in config else None
+
+
+def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
+    tags = list_tags(image_ref)
+
+    # Fetch all image config files in parallel
+    spec_dicts = pool.starmap(
+        _config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
+    )
+
+    # Populate the database
+    db_root_dir = os.path.join(tmpdir, "db_root")
+    db = bindist.BuildCacheDatabase(db_root_dir)
+
+    for spec_dict in spec_dicts:
+        spec = Spec.from_dict(spec_dict)
+        db.add(spec, directory_layout=None)
+        db.mark(spec, "in_buildcache", True)
+
+    # Create the index.json file
+    index_json_path = os.path.join(tmpdir, "index.json")
+    with open(index_json_path, "w") as f:
+        db._write_to_file(f)
+
+    # Create an empty config.json file
+    empty_config_json_path = os.path.join(tmpdir, "config.json")
+    with open(empty_config_json_path, "wb") as f:
+        f.write(b"{}")
+
+    # Upload the index.json file
+    index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
+    upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)
+
+    # Upload the config.json file
+    empty_config_digest = Digest.from_sha256(
+        spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
+    )
+    upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)
+
+    # Push a manifest file that references the index.json file as a layer
+    # Notice that we push this as if it is an image, which it of course is not.
+    # When the ORAS spec becomes official, we can use that instead of a fake image.
+    # For now we just use the OCI image spec, so that we don't run into issues with
+    # automatic garbage collection of blobs that are not referenced by any image manifest.
+    oci_manifest = {
+        "mediaType": "application/vnd.oci.image.manifest.v1+json",
+        "schemaVersion": 2,
+        # Config is just an empty {} file for now, and irrelevant
+        "config": {
+            "mediaType": "application/vnd.oci.image.config.v1+json",
+            "digest": str(empty_config_digest),
+            "size": os.path.getsize(empty_config_json_path),
+        },
+        # The buildcache index is the only layer, and is not a tarball, we lie here.
+        "layers": [
+            {
+                "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
+                "digest": str(index_shasum),
+                "size": os.path.getsize(index_json_path),
+            }
+        ],
+    }
+
+    upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
+
+
 def install_fn(args):
     """install from a binary package"""
@@ -760,15 +1182,14 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
     if image_ref:
         with tempfile.TemporaryDirectory(
             dir=spack.stage.get_stage_root()
-        ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
-            bindist._oci_update_index(image_ref, tmpdir, executor)
+        ) as tmpdir, _make_pool() as pool:
+            _update_index_oci(image_ref, tmpdir, pool)
         return

     # Otherwise, assume a normal mirror.
     url = mirror.push_url

-    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
-        bindist._url_generate_package_index(url, tmpdir)
+    bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))

     if update_keys:
         keys_url = url_util.join(
@@ -776,8 +1197,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
     )

     try:
-        with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
-            bindist.generate_key_index(keys_url, tmpdir)
+        bindist.generate_key_index(keys_url)
     except bindist.CannotListKeys as e:
         # Do not error out if listing keys went wrong. This usually means that the _gpg path
         # does not exist. TODO: distinguish between this and other errors.
@@ -7,7 +7,6 @@
|
||||
import copy
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
import sys
|
||||
from argparse import ArgumentParser, Namespace
|
||||
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
|
||||
@@ -19,7 +18,6 @@
|
||||
import spack.cmd
|
||||
import spack.main
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
from spack.main import section_descriptions
|
||||
|
||||
description = "list available spack commands"
|
||||
@@ -141,7 +139,7 @@ def usage(self, usage: str) -> str:
|
||||
|
||||
cmd = self.parser.prog.replace(" ", "-")
|
||||
if cmd in self.documented:
|
||||
string = f"{string}\n:ref:`More documentation <cmd-{cmd}>`\n"
|
||||
string += "\n:ref:`More documentation <cmd-{0}>`\n".format(cmd)
|
||||
|
||||
return string
|
||||
|
||||
@@ -251,27 +249,33 @@ def body(
|
||||
Function body.
|
||||
"""
|
||||
if positionals:
|
||||
return f"""
|
||||
return """
|
||||
if $list_options
|
||||
then
|
||||
{self.optionals(optionals)}
|
||||
{0}
|
||||
else
|
||||
{self.positionals(positionals)}
|
||||
{1}
|
||||
fi
|
||||
"""
|
||||
""".format(
|
||||
self.optionals(optionals), self.positionals(positionals)
|
||||
)
|
||||
elif subcommands:
|
||||
return f"""
|
||||
return """
|
||||
if $list_options
|
||||
then
|
||||
{self.optionals(optionals)}
|
||||
{0}
|
||||
else
|
||||
{self.subcommands(subcommands)}
|
||||
{1}
|
||||
fi
|
||||
"""
|
||||
""".format(
|
||||
self.optionals(optionals), self.subcommands(subcommands)
|
||||
)
|
||||
else:
|
||||
return f"""
|
||||
{self.optionals(optionals)}
|
||||
"""
|
||||
return """
|
||||
{0}
|
||||
""".format(
|
||||
self.optionals(optionals)
|
||||
)
|
||||
|
||||
def positionals(self, positionals: Sequence[str]) -> str:
|
||||
"""Return the syntax for reporting positional arguments.
|
||||
@@ -300,7 +304,7 @@ def optionals(self, optionals: Sequence[str]) -> str:
|
||||
Returns:
|
||||
Syntax for optional flags.
|
||||
"""
|
||||
return f'SPACK_COMPREPLY="{" ".join(optionals)}"'
|
||||
return 'SPACK_COMPREPLY="{0}"'.format(" ".join(optionals))
|
||||
|
||||
def subcommands(self, subcommands: Sequence[str]) -> str:
|
||||
"""Return the syntax for reporting subcommands.
|
||||
@@ -311,7 +315,7 @@ def subcommands(self, subcommands: Sequence[str]) -> str:
|
||||
Returns:
|
||||
Syntax for subcommand parsers
|
||||
"""
|
||||
return f'SPACK_COMPREPLY="{" ".join(subcommands)}"'
|
||||
return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))
|
||||
|
||||
|
||||
# Map argument destination names to their complete commands
|
||||
@@ -391,7 +395,7 @@ def _fish_dest_get_complete(prog: str, dest: str) -> Optional[str]:
|
||||
subcmd = s[1] if len(s) == 2 else ""
|
||||
|
||||
for (prog_key, pos_key), value in _dest_to_fish_complete.items():
|
||||
if subcmd.startswith(prog_key) and re.match(f"^{pos_key}$", dest):
|
||||
if subcmd.startswith(prog_key) and re.match("^" + pos_key + "$", dest):
|
||||
return value
|
||||
return None
|
||||
|
||||
@@ -423,6 +427,24 @@ def format(self, cmd: Command) -> str:
|
||||
+ self.complete(cmd.prog, positionals, optionals, subcommands)
|
||||
)
|
||||
|
||||
def _quote(self, string: str) -> str:
|
||||
"""Quote string and escape special characters if necessary.
|
||||
|
||||
Args:
|
||||
string: Input string.
|
||||
|
||||
Returns:
|
||||
Quoted string.
|
||||
"""
|
||||
# Goal here is to match fish_indent behavior
|
||||
|
||||
# Strings without spaces (or other special characters) do not need to be escaped
|
||||
if not any([sub in string for sub in [" ", "'", '"']]):
|
||||
return string
|
||||
|
||||
string = string.replace("'", r"\'")
|
||||
return f"'{string}'"
|
||||
|
||||
def optspecs(
|
||||
self,
|
||||
prog: str,
|
||||
@@ -441,7 +463,7 @@ def optspecs(
|
||||
optspec_var = "__fish_spack_optspecs_" + prog.replace(" ", "_").replace("-", "_")
|
||||
|
||||
if optionals is None:
|
||||
return f"set -g {optspec_var}\n"
|
||||
return "set -g %s\n" % optspec_var
|
||||
|
||||
# Build optspec by iterating over options
|
||||
args = []
|
||||
@@ -468,11 +490,11 @@ def optspecs(
|
||||
long = [f[2:] for f in flags if f.startswith("--")]
|
||||
|
||||
while len(short) > 0 and len(long) > 0:
|
||||
arg = f"{short.pop()}/{long.pop()}{required}"
|
||||
arg = "%s/%s%s" % (short.pop(), long.pop(), required)
|
||||
while len(short) > 0:
|
||||
arg = f"{short.pop()}/{required}"
|
||||
arg = "%s/%s" % (short.pop(), required)
|
||||
while len(long) > 0:
|
||||
arg = f"{long.pop()}{required}"
|
||||
arg = "%s%s" % (long.pop(), required)
|
||||
|
||||
args.append(arg)
|
||||
|
||||
@@ -481,7 +503,7 @@ def optspecs(
|
||||
# indicate that such subcommand exists.
|
||||
args = " ".join(args)
|
||||
|
||||
return f"set -g {optspec_var} {args}\n"
|
||||
return "set -g %s %s\n" % (optspec_var, args)
|
||||
|
||||
@staticmethod
def complete_head(
@@ -502,14 +524,12 @@ def complete_head(
subcmd = s[1] if len(s) == 2 else ""

if index is None:
return f"complete -c {s[0]} -n '__fish_spack_using_command {subcmd}'"
return "complete -c %s -n '__fish_spack_using_command %s'" % (s[0], subcmd)
elif nargs in [argparse.ZERO_OR_MORE, argparse.ONE_OR_MORE, argparse.REMAINDER]:
return (
f"complete -c {s[0]} -n '__fish_spack_using_command_pos_remainder "
f"{index} {subcmd}'"
)
head = "complete -c %s -n '__fish_spack_using_command_pos_remainder %d %s'"
else:
return f"complete -c {s[0]} -n '__fish_spack_using_command_pos {index} {subcmd}'"
head = "complete -c %s -n '__fish_spack_using_command_pos %d %s'"
return head % (s[0], index, subcmd)

def complete(
self,
@@ -577,18 +597,25 @@ def positionals(

if choices is not None:
# If there are choices, we provide a completion for all possible values.
commands.append(f"{head} -f -a {shlex.quote(' '.join(choices))}")
commands.append(head + " -f -a %s" % self._quote(" ".join(choices)))
else:
# Otherwise, we try to find a predefined completion for it
value = _fish_dest_get_complete(prog, args)
if value is not None:
commands.append(f"{head} {value}")
commands.append(head + " " + value)

return "\n".join(commands) + "\n"
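Putting complete_head and the choices branch together, a positional with a fixed set of values turns into a single fish "complete" command. An illustrative sketch (the subcommand and choice names are invented, not taken from this diff):

import shlex

# Hypothetical head produced by complete_head for positional 0 of "spack env".
head = "complete -c spack -n '__fish_spack_using_command_pos 0 env'"
choices = ["activate", "deactivate", "status"]
print(f"{head} -f -a {shlex.quote(' '.join(choices))}")
# complete -c spack -n '__fish_spack_using_command_pos 0 env' -f -a 'activate deactivate status'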
def prog_comment(self, prog: str) -> str:
"""Return a comment line for the command."""
return f"\n# {prog}\n"
"""Return a comment line for the command.

Args:
prog: Program name.

Returns:
Comment line.
"""
return "\n# %s\n" % prog

def optionals(
self,
@@ -631,28 +658,28 @@ def optionals(
for f in flags:
if f.startswith("--"):
long = f[2:]
prefix = f"{prefix} -l {long}"
prefix += " -l %s" % long
elif f.startswith("-"):
short = f[1:]
assert len(short) == 1
prefix = f"{prefix} -s {short}"
prefix += " -s %s" % short

# Check if the option requires an argument.
# Currently multi-argument options are not supported, so we treat it like one argument.
if nargs != 0:
prefix = f"{prefix} -r"
prefix += " -r"

if dest is not None:
# If there are choices, we provide a completion for all possible values.
commands.append(f"{prefix} -f -a {shlex.quote(' '.join(dest))}")
commands.append(prefix + " -f -a %s" % self._quote(" ".join(dest)))
else:
# Otherwise, we try to find a predefined completion for it
value = _fish_dest_get_complete(prog, dest)
if value is not None:
commands.append(f"{prefix} {value}")
commands.append(prefix + " " + value)

if help:
commands.append(f"{prefix} -d {shlex.quote(help)}")
commands.append(prefix + " -d %s" % self._quote(help))

return "\n".join(commands) + "\n"

@@ -670,11 +697,11 @@ def subcommands(self, prog: str, subcommands: List[Tuple[ArgumentParser, str, st
head = self.complete_head(prog, 0)

for _, subcommand, help in subcommands:
command = f"{head} -f -a {shlex.quote(subcommand)}"
command = head + " -f -a %s" % self._quote(subcommand)

if help is not None and len(help) > 0:
help = help.split("\n")[0]
command = f"{command} -d {shlex.quote(help)}"
command += " -d %s" % self._quote(help)

commands.append(command)

@@ -720,7 +747,7 @@ def rst_index(out: IO) -> None:

for i, cmd in enumerate(sorted(commands)):
description = description.capitalize() if i == 0 else ""
ref = f":ref:`{cmd} <spack-{cmd}>`"
ref = ":ref:`%s <spack-%s>`" % (cmd, cmd)
comma = "," if i != len(commands) - 1 else ""
bar = "| " if i % 8 == 0 else " "
out.write(line % (description, bar + ref + comma))
@@ -831,10 +858,10 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:

# check header first so we don't open out files unnecessarily
if args.header and not os.path.exists(args.header):
tty.die(f"No such file: '{args.header}'")
tty.die("No such file: '%s'" % args.header)

if args.update:
tty.msg(f"Updating file: {args.update}")
tty.msg("Updating file: %s" % args.update)
with open(args.update, "w") as f:
prepend_header(args, f)
formatter(args, f)
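The rst_index loop above lays commands out in a grid: a new table row starts every eighth entry (the "| " bar) and every entry but the last gets a trailing comma. A compact illustration of just that row logic (command names are placeholders):

commands = [f"cmd{i}" for i in range(10)]
for i, cmd in enumerate(sorted(commands)):
    ref = f":ref:`{cmd} <spack-{cmd}>`"
    comma = "," if i != len(commands) - 1 else ""
    bar = "| " if i % 8 == 0 else "  "
    print(bar + ref + comma)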
@@ -50,7 +50,6 @@ def setup_parser(subparser):
default=lambda: spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
arguments.add_common_arguments(find_parser, ["jobs"])

# Remove
remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove compiler by spec")
@@ -79,21 +78,25 @@ def setup_parser(subparser):
def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration.

"""
# None signals spack.compiler.find_compilers to use its default logic
paths = args.add_paths or None
new_compilers = spack.compilers.find_compilers(
path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,

# Below scope=None because we want new compilers that don't appear
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(
paths, scope=None, mixed_toolchain=args.mixed_toolchain
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
n = len(new_compilers)
s = "s" if n > 1 else ""
filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
tty.msg(f"Added {n:d} new compiler{s} to {filename}")
compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
colify(reversed(compiler_strs), indent=4)

config = spack.config.CONFIG
filename = config.get_config_filename(args.scope, "compilers")
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
else:
tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:")
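On the new-code side of this hunk, detection and configuration update are folded into a single find_compilers call. A hedged usage sketch based only on the signature shown in this diff (argument values are illustrative):

import spack.compilers

new = spack.compilers.find_compilers(
    path_hints=["/usr/bin"],  # or None to fall back to $PATH
    scope="user",             # configuration scope that receives the new entries
    mixed_toolchain=False,
    max_workers=4,            # parallel detection workers
)
for compiler in new:
    print(compiler.spec)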
@@ -6,7 +6,6 @@
import re
import sys
import urllib.parse
from typing import List

import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -15,15 +14,9 @@
import spack.stage
import spack.util.web
from spack.spec import Spec
from spack.url import (
UndetectableNameError,
UndetectableVersionError,
find_versions_of_archive,
parse_name,
parse_version,
)
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.executable import ProcessError, which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name

@@ -96,20 +89,14 @@ class BundlePackageTemplate:
url_def = " # There is no URL since there is no code to download."
body_def = " # There is no need for install() since there is no code."

def __init__(self, name: str, versions, languages: List[str]):
def __init__(self, name, versions):
self.name = name
self.class_name = mod_to_class(name)
self.versions = versions
self.languages = languages

def write(self, pkg_path):
"""Writes the new package file."""

all_deps = [f' depends_on("{lang}", type="build")' for lang in self.languages]
if all_deps and self.dependencies:
all_deps.append("")
all_deps.append(self.dependencies)

# Write out a template for the file
with open(pkg_path, "w") as pkg_file:
pkg_file.write(
@@ -119,7 +106,7 @@ def write(self, pkg_path):
base_class_name=self.base_class_name,
url_def=self.url_def,
versions=self.versions,
dependencies="\n".join(all_deps),
dependencies=self.dependencies,
body_def=self.body_def,
)
)
@@ -138,8 +125,8 @@ def install(self, spec, prefix):

url_line = ' url = "{url}"'

def __init__(self, name, url, versions, languages: List[str]):
super().__init__(name, versions, languages)
def __init__(self, name, url, versions):
super().__init__(name, versions)

self.url_def = self.url_line.format(url=url)

@@ -227,13 +214,13 @@ def luarocks_args(self):
args = []
return args"""

def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
if not name.startswith("lua-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to lua-{0}".format(name))
name = "lua-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)

class MesonPackageTemplate(PackageTemplate):
@@ -334,14 +321,14 @@ class RacketPackageTemplate(PackageTemplate):
# subdirectory = None
"""

def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
if not name.startswith("rkt-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to rkt-{0}".format(name))
name = "rkt-{0}".format(name)
self.body_def = self.body_def.format(name[4:])
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)

class PythonPackageTemplate(PackageTemplate):
@@ -374,7 +361,7 @@ def config_settings(self, spec, prefix):
settings = {}
return settings"""

def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
@@ -428,7 +415,7 @@ def __init__(self, name, url, versions, languages: List[str]):
+ self.url_line
)

super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)

class RPackageTemplate(PackageTemplate):
@@ -447,7 +434,7 @@ def configure_args(self):
args = []
return args"""

def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
if not name.startswith("r-"):
# Make it more obvious that we are renaming the package
@@ -467,7 +454,7 @@ def __init__(self, name, url, versions, languages: List[str]):
if bioc:
self.url_line = ' url = "{0}"\n' ' bioc = "{1}"'.format(url, r_name)

super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)

class PerlmakePackageTemplate(PackageTemplate):
@@ -487,14 +474,14 @@ def configure_args(self):
args = []
return args"""

def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
if not name.startswith("perl-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to perl-{0}".format(name))
name = "perl-{0}".format(name)

super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)

class PerlbuildPackageTemplate(PerlmakePackageTemplate):
@@ -519,7 +506,7 @@ class OctavePackageTemplate(PackageTemplate):
# FIXME: Add additional dependencies if required.
# depends_on("octave-foo", type=("build", "run"))"""

def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name octave-splines`, don't rename it
# octave-octave-splines
if not name.startswith("octave-"):
@@ -527,7 +514,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to octave-{0}".format(name))
name = "octave-{0}".format(name)

super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)

class RubyPackageTemplate(PackageTemplate):
@@ -547,7 +534,7 @@ def build(self, spec, prefix):
# FIXME: If not needed delete this function
pass"""

def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name ruby-numpy`, don't rename it
# ruby-ruby-numpy
if not name.startswith("ruby-"):
@@ -555,7 +542,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to ruby-{0}".format(name))
name = "ruby-{0}".format(name)

super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)

class MakefilePackageTemplate(PackageTemplate):
@@ -593,14 +580,14 @@ def configure_args(self, spec, prefix):
args = []
return args"""

def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)

super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)

templates = {
@@ -671,48 +658,8 @@ def setup_parser(subparser):
)

#: C file extensions
C_EXT = {".c"}

#: C++ file extensions
CXX_EXT = {
".C",
".c++",
".cc",
".ccm",
".cpp",
".CPP",
".cxx",
".h++",
".hh",
".hpp",
".hxx",
".inl",
".ipp",
".ixx",
".tcc",
".tpp",
}

#: Fortran file extensions
FORTRAN_EXT = {
".f77",
".F77",
".f90",
".F90",
".f95",
".F95",
".f",
".F",
".for",
".FOR",
".ftn",
".FTN",
}
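In the new BundlePackageTemplate.write shown above, detected languages become build-type depends_on lines that are prepended to the package's other dependencies. A small sketch of that list construction (the input values are illustrative):

languages = ["c", "cxx", "fortran"]  # illustrative detection result
dependencies = '    depends_on("cmake", type="build")'  # placeholder existing deps

all_deps = [f'    depends_on("{lang}", type="build")' for lang in languages]
if all_deps and dependencies:
    all_deps.append("")
    all_deps.append(dependencies)
print("\n".join(all_deps))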
class BuildSystemAndLanguageGuesser:
"""An instance of BuildSystemAndLanguageGuesser provides a callable object to be used
class BuildSystemGuesser:
"""An instance of BuildSystemGuesser provides a callable object to be used
during ``spack create``. By passing this object to ``spack checksum``, we
can take a peek at the fetched tarball and discern the build system it uses
"""
@@ -720,119 +667,81 @@ class BuildSystemAndLanguageGuesser:
def __init__(self):
"""Sets the default build system."""
self.build_system = "generic"
self._c = False
self._cxx = False
self._fortran = False

# List of files in the archive ordered by their depth in the directory tree.
self._file_entries: List[str] = []

def __call__(self, archive: str, url: str) -> None:
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from."""

if url is not None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
return
if url.endswith(".gem"):
self.build_system = "ruby"
return
if url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
return
if url.endswith(".rock"):
self.build_system = "lua"
return

# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
]

# Peek inside the compressed file.
if archive.endswith(".zip") or ".zip#" in archive:
if stage.archive_file.endswith(".zip") or ".zip#" in stage.archive_file:
try:
unzip = which("unzip")
assert unzip is not None
output = unzip("-lq", archive, output=str)
except Exception:
output = unzip("-lq", stage.archive_file, output=str)
except ProcessError:
output = ""
else:
try:
tar = which("tar")
assert tar is not None
output = tar("tf", archive, output=str)
except Exception:
output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
except ProcessError:
output = ""
self._file_entries[:] = output.splitlines()
lines = output.splitlines()

# Files closest to the root should be considered first when determining build system.
self._file_entries.sort(key=lambda p: p.count("/"))

self._determine_build_system(url)
self._determine_language()

def _determine_build_system(self, url: str) -> None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
elif url.endswith(".gem"):
self.build_system = "ruby"
elif url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
elif url.endswith(".rock"):
self.build_system = "lua"
elif self._file_entries:
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(re.compile(pattern), build_system)
for pattern, build_system in (
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
)
]

# Determine the build system based on the files contained in the archive.
for file in self._file_entries:
for pattern, build_system in clues:
if pattern.search(file):
self.build_system = build_system
return

def _determine_language(self):
for entry in self._file_entries:
_, ext = os.path.splitext(entry)

if not self._c and ext in C_EXT:
self._c = True
elif not self._cxx and ext in CXX_EXT:
self._cxx = True
elif not self._fortran and ext in FORTRAN_EXT:
self._fortran = True

if self._c and self._cxx and self._fortran:
return

@property
def languages(self) -> List[str]:
langs: List[str] = []
if self._c:
langs.append("c")
if self._cxx:
langs.append("cxx")
if self._fortran:
langs.append("fortran")
return langs
# Determine the build system based on the files contained
# in the archive.
for pattern, bs in clues:
if any(re.search(pattern, line) for line in lines):
self.build_system = bs
break
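Two details of the new guesser are worth calling out: archive entries are sorted so files nearest the root are considered first, and the first matching clue decides the build system. A self-contained sketch of that scan (file list and clue table trimmed to a few entries):

import re

entries = ["pkg-1.0/src/Makefile", "pkg-1.0/CMakeLists.txt", "pkg-1.0/setup.py"]
entries.sort(key=lambda p: p.count("/"))  # shallower paths first

clues = [(re.compile(p), bs) for p, bs in (
    (r"/CMakeLists\.txt$", "cmake"),
    (r"/setup\.(py|cfg)$", "python"),
    (r"/(GNU)?[Mm]akefile$", "makefile"),
)]

def guess(files):
    # First file whose name matches a clue wins, mirroring the loop above.
    for f in files:
        for pattern, build_system in clues:
            if pattern.search(f):
                return build_system
    return "generic"

print(guess(entries))  # cmake: the root-level CMakeLists.txt beats the nested Makefile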
def get_name(name, url):
@@ -902,7 +811,7 @@ def get_url(url):
def get_versions(args, name):
"""Returns a list of versions and hashes for a package.

Also returns a BuildSystemAndLanguageGuesser object.
Also returns a BuildSystemGuesser object.

Returns default values if no URL is provided.

@@ -911,7 +820,7 @@ def get_versions(args, name):
name (str): The name of the package

Returns:
tuple: versions and hashes, and a BuildSystemAndLanguageGuesser object
tuple: versions and hashes, and a BuildSystemGuesser object
"""

# Default version with hash
@@ -925,7 +834,7 @@ def get_versions(args, name):
# version("1.2.4")"""

# Default guesser
guesser = BuildSystemAndLanguageGuesser()
guesser = BuildSystemGuesser()

valid_url = True
try:
@@ -938,7 +847,7 @@ def get_versions(args, name):
if args.url is not None and args.template != "bundle" and valid_url:
# Find available versions
try:
url_dict = find_versions_of_archive(args.url)
url_dict = spack.url.find_versions_of_archive(args.url)
if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
if url_dict_filtered is None:
@@ -965,7 +874,7 @@ def get_versions(args, name):
return versions, guesser

def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGuesser) -> str:
def get_build_system(template, url, guesser):
"""Determine the build system template.

If a template is specified, always use that. Otherwise, if a URL
@@ -973,10 +882,11 @@ def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGue
build system it uses. Otherwise, use a generic template by default.

Args:
template: ``--template`` argument given to ``spack create``
url: ``url`` argument given to ``spack create``
guesser: The first_stage_function given to ``spack checksum`` which records the build
system it detects
template (str): ``--template`` argument given to ``spack create``
url (str): ``url`` argument given to ``spack create``
args (argparse.Namespace): The arguments given to ``spack create``
guesser (BuildSystemGuesser): The first_stage_function given to
``spack checksum`` which records the build system it detects

Returns:
str: The name of the build system template to use
@@ -1050,7 +960,7 @@ def create(parser, args):
build_system = get_build_system(args.template, url, guesser)

# Create the package template object
constr_args = {"name": name, "versions": versions, "languages": guesser.languages}
constr_args = {"name": name, "versions": versions}
package_class = templates[build_system]
if package_class != BundlePackageTemplate:
constr_args["url"] = url
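The docstring above spells out a three-step precedence: an explicit --template always wins, a guessed build system from a fetched URL comes next, and "generic" is the fallback. A minimal sketch of just that selection, not the full function:

def pick_build_system(template, url, guessed):
    # --template always wins; otherwise trust the guesser when a URL
    # was fetched; fall back to the generic template.
    if template is not None:
        return template
    if url is not None and guessed is not None:
        return guessed
    return "generic"

print(pick_build_system(None, "https://example.com/pkg-1.0.tar.gz", "cmake"))  # cmake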
@@ -6,7 +6,6 @@
import os
import platform
import re
import sys
from datetime import datetime
from glob import glob

@@ -63,10 +62,9 @@ def create_db_tarball(args):

base = os.path.basename(str(spack.store.STORE.root))
transform_args = []
# Currently --transform and -s are not supported by Windows native tar
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
elif sys.platform != "win32":
else:
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]

wd = os.path.dirname(str(spack.store.STORE.root))
@@ -92,6 +90,7 @@ def report(args):
print("* **Spack:**", get_version())
print("* **Python:**", platform.python_version())
print("* **Platform:**", architecture)
print("* **Concretizer:**", spack.config.get("config:concretizer"))

def debug(parser, args):
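The hunk above picks the path-rewriting flag by tar flavor: GNU tar spells it --transform, BSD tar spells it -s, and Windows native tar supports neither. A standalone sketch of that dispatch (the version string is illustrative):

import sys

def transform_args(version_output, base, tarball_name):
    if "GNU" in version_output:
        return ["--transform", "s/^%s/%s/" % (base, tarball_name)]
    if sys.platform != "win32":  # BSD tar
        return ["-s", "/^%s/%s/" % (base, tarball_name)]
    return []  # Windows native tar: no rename support

print(transform_args("tar (GNU tar) 1.34", "opt", "spack-db"))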
@@ -14,6 +14,7 @@
installation and its deprecator.
"""
import argparse
import os

import llnl.util.tty as tty
from llnl.util.symlink import symlink
@@ -75,7 +76,12 @@ def setup_parser(sp):
)

sp.add_argument(
"-l", "--link-type", type=str, default=None, choices=["soft", "hard"], help="(deprecated)"
"-l",
"--link-type",
type=str,
default="soft",
choices=["soft", "hard"],
help="type of filesystem link to use for deprecation (default soft)",
)

sp.add_argument(
@@ -85,9 +91,6 @@ def setup_parser(sp):

def deprecate(parser, args):
"""Deprecate one spec in favor of another"""
if args.link_type is not None:
tty.warn("The --link-type option is deprecated and will be removed in a future release.")

env = ev.active_environment()
specs = spack.cmd.parse_specs(args.specs)

@@ -141,5 +144,7 @@ def deprecate(parser, args):
if not answer:
tty.die("Will not deprecate any packages.")

link_fn = os.link if args.link_type == "hard" else symlink

for dcate, dcator in zip(all_deprecate, all_deprecators):
dcate.package.do_deprecate(dcator, symlink)
dcate.package.do_deprecate(dcator, link_fn)
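On the old-code side the link function is chosen from the flag; the new side always symlinks and merely warns when the now-deprecated flag is passed. The selection being removed, in isolation (Spack uses its own llnl.util.symlink wrapper; os.symlink stands in here):

import os

link_type = "hard"  # illustrative value of args.link_type
link_fn = os.link if link_type == "hard" else os.symlink
print(link_fn.__name__)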
@@ -10,10 +10,8 @@
import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.stage
import spack.util.path
import spack.version
from spack.cmd.common import arguments
@@ -64,7 +62,7 @@ def change_fn(section):
spack.config.change_or_add("develop", find_fn, change_fn)

def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
def _retrieve_develop_source(spec, abspath):
# "steal" the source code via staging API. We ask for a stage
# to be created, then copy it afterwards somewhere else. It would be
# better if we can create the `source_path` directly into its final
@@ -73,13 +71,13 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
# We construct a package class ourselves, rather than asking for
# Spec.package, since Spec only allows this when it is concrete
package = pkg_cls(spec)
source_stage: spack.stage.Stage = package.stage[0]
source_stage = package.stage[0]
if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
source_stage.fetcher.get_full_repo = True
# If we retrieved this version before and cached it, we may have
# done so without cloning the full git repo; likewise, any
# mirror might store an instance with truncated history.
source_stage.default_fetcher_only = True
source_stage.disable_mirrors()

source_stage.fetcher.set_package(package)
package.stage.steal_source(abspath)
@@ -468,30 +468,32 @@ def env_remove(args):
This removes an environment managed by Spack. Directory environments
and manifests embedded in repositories should be removed manually.
"""
remove_envs = []
read_envs = []
valid_envs = []
bad_envs = []
invalid_envs = []

for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
valid_envs.append(env)
valid_envs.append(env_name)

if env_name in args.rm_env:
remove_envs.append(env)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
invalid_envs.append(env_name)

if env_name in args.rm_env:
bad_envs.append(env_name)

# Check if remove_env is included from another env before trying to remove
for env in valid_envs:
for remove_env in remove_envs:
# Check if env is linked to another before trying to remove
for name in valid_envs:
# don't check if environment is included to itself
if env.name == remove_env.name:
if name == env_name:
continue

if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
environ = ev.Environment(ev.root(name))
if ev.root(env_name) in environ.included_concrete_envs:
msg = f'Environment "{env_name}" is being used by environment "{name}"'
if args.force:
tty.warn(msg)
else:
@@ -504,7 +506,7 @@ def env_remove(args):
if not answer:
tty.die("Will not remove any environments")

for env in remove_envs:
for env in read_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")
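The guard above refuses to delete an environment that another valid environment still includes, unless --force downgrades the error to a warning. The shape of that check, reduced to plain data (the include graph is hypothetical):

# Hypothetical include graph: environment name -> environments it includes.
included = {"ci": ["base"], "dev": []}
to_remove, force = "base", False

for name, includes in included.items():
    if name == to_remove:
        continue  # don't check an environment against itself
    if to_remove in includes:
        msg = f'Environment "{to_remove}" is being used by environment "{name}"'
        if force:
            print("warning:", msg)
        else:
            raise SystemExit(msg)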
@@ -5,12 +5,10 @@

import argparse
import os
import tempfile

import spack.binary_distribution
import spack.mirror
import spack.paths
import spack.stage
import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments
@@ -117,7 +115,6 @@ def setup_parser(subparser):
help="URL of the mirror where keys will be published",
)
publish.add_argument(
"--update-index",
"--rebuild-index",
action="store_true",
default=False,
@@ -223,10 +220,9 @@ def gpg_publish(args):
elif args.mirror_url:
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)

with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution._url_push_keys(
mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
)
spack.binary_distribution.push_keys(
mirror, keys=args.keys, regenerate_index=args.rebuild_index
)

def gpg(parser, args):
@@ -48,7 +48,6 @@ def setup_parser(subparser):
options = [
("--detectable", print_detectable.__doc__),
("--maintainers", print_maintainers.__doc__),
("--namespace", print_namespace.__doc__),
("--no-dependencies", "do not " + print_dependencies.__doc__),
("--no-variants", "do not " + print_variants.__doc__),
("--no-versions", "do not " + print_versions.__doc__),
@@ -190,15 +189,6 @@ def print_maintainers(pkg, args):
color.cprint(section_title("Maintainers: ") + mnt)

def print_namespace(pkg, args):
"""output package namespace"""

repo = spack.repo.PATH.get_repo(pkg.namespace)
color.cprint("")
color.cprint(section_title("Namespace:"))
color.cprint(f" @c{{{repo.namespace}}} at {repo.root}")

def print_phases(pkg, args):
"""output installation phases"""

@@ -512,7 +502,7 @@ def print_licenses(pkg, args):

def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
pkg = pkg_cls(spec)

# Output core package information
@@ -532,7 +522,6 @@ def info(parser, args):
# Now output optional information in expected order
sections = [
(args.all or args.maintainers, print_maintainers),
(args.all or args.namespace, print_namespace),
(args.all or args.detectable, print_detectable),
(args.all or args.tags, print_tags),
(args.all or not args.no_versions, print_versions),
@@ -23,6 +23,11 @@ def setup_parser(subparser):
output.add_argument(
"-s", "--safe", action="store_true", help="only list safe versions of the package"
)
output.add_argument(
"--safe-only",
action="store_true",
help="[deprecated] only list safe versions of the package",
)
output.add_argument(
"-r", "--remote", action="store_true", help="only list remote versions of the package"
)
@@ -42,13 +47,17 @@ def versions(parser, args):

safe_versions = pkg.versions

if args.safe_only:
tty.warn('"--safe-only" is deprecated. Use "--safe" instead.')
args.safe = args.safe_only

if not (args.remote or args.new):
if sys.stdout.isatty():
tty.msg("Safe versions (already checksummed):")

if not safe_versions:
if sys.stdout.isatty():
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug("Manually add versions to the package.")
else:
colify(sorted(safe_versions, reverse=True), indent=2)
@@ -74,12 +83,12 @@ def versions(parser, args):
if not remote_versions:
if sys.stdout.isatty():
if not fetched_versions:
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug(
"Check the list_url and list_depth attributes of "
"the package to help Spack find versions."
)
else:
tty.warn(f"Found no unchecksummed versions for {pkg.name}")
tty.warn("Found no unchecksummed versions for {0}".format(pkg.name))
else:
colify(sorted(remote_versions, reverse=True), indent=2)
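Keeping a deprecated flag alive is done here by aliasing: --safe-only still parses, but it warns and simply copies its value onto args.safe, so the rest of the command only consults one attribute. In miniature:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-s", "--safe", action="store_true")
parser.add_argument("--safe-only", action="store_true")  # deprecated spelling
args = parser.parse_args(["--safe-only"])

if args.safe_only:
    print('warning: "--safe-only" is deprecated. Use "--safe" instead.')
    args.safe = args.safe_only
print(args.safe)  # True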
@@ -29,9 +29,6 @@

__all__ = ["Compiler"]

PATH_INSTANCE_VARS = ["cc", "cxx", "f77", "fc"]
FLAG_INSTANCE_VARS = ["cflags", "cppflags", "cxxflags", "fflags"]

@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
@@ -703,30 +700,6 @@ def compiler_environment(self):
os.environ.clear()
os.environ.update(backup_env)

def to_dict(self):
flags_dict = {fname: " ".join(fvals) for fname, fvals in self.flags.items()}
flags_dict.update(
{attr: getattr(self, attr, None) for attr in FLAG_INSTANCE_VARS if hasattr(self, attr)}
)
result = {
"spec": str(self.spec),
"paths": {attr: getattr(self, attr, None) for attr in PATH_INSTANCE_VARS},
"flags": flags_dict,
"operating_system": str(self.operating_system),
"target": str(self.target),
"modules": self.modules or [],
"environment": self.environment or {},
"extra_rpaths": self.extra_rpaths or [],
}

if self.enable_implicit_rpaths is not None:
result["implicit_rpaths"] = self.enable_implicit_rpaths

if self.alias:
result["alias"] = self.alias

return result

class CompilerAccessError(spack.error.SpackError):
def __init__(self, compiler, paths):
@@ -6,11 +6,12 @@
"""This module contains functions related to finding compilers on the
system and configuring Spack to use multiple compilers.
"""
import importlib
import collections
import itertools
import multiprocessing.pool
import os
import sys
import warnings
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Tuple

import archspec.cpu

@@ -21,15 +22,16 @@
import spack.compiler
import spack.config
import spack.error
import spack.operating_systems
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.version
from spack.operating_systems import windows_os
from spack.util.environment import get_path
from spack.util.naming import mod_to_class

_path_instance_vars = ["cc", "cxx", "f77", "fc"]
_flags_instance_vars = ["cflags", "cppflags", "cxxflags", "fflags"]
_other_instance_vars = [
"modules",
"operating_system",
@@ -61,10 +63,6 @@
}

#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"

def pkg_spec_for_compiler(cspec):
"""Return the spec of the package that provides the compiler."""
for spec, package in _compiler_to_pkg.items():
@@ -87,7 +85,29 @@ def converter(cspec_like, *args, **kwargs):

def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
return {"compiler": compiler.to_dict()}
d = {}
d["spec"] = str(compiler.spec)
d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
d["flags"].update(
dict(
(attr, getattr(compiler, attr, None))
for attr in _flags_instance_vars
if hasattr(compiler, attr)
)
)
d["operating_system"] = str(compiler.operating_system)
d["target"] = str(compiler.target)
d["modules"] = compiler.modules or []
d["environment"] = compiler.environment or {}
d["extra_rpaths"] = compiler.extra_rpaths or []
if compiler.enable_implicit_rpaths is not None:
d["implicit_rpaths"] = compiler.enable_implicit_rpaths

if compiler.alias:
d["alias"] = compiler.alias

return {"compiler": d}

def get_compiler_config(
@@ -107,7 +127,7 @@ def get_compiler_config(
# Do not init config because there is a non-empty scope
return config

find_compilers(scope=scope)
_init_compiler_config(configuration, scope=scope)
config = configuration.get("compilers", scope=scope)
return config

@@ -116,8 +136,125 @@ def get_compiler_config_from_packages(
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
"""Return the compiler configuration from packages.yaml"""
packages_yaml = configuration.get("packages", scope=scope)
return CompilerConfigFactory.from_packages_yaml(packages_yaml)
config = configuration.get("packages", scope=scope)
if not config:
return []

packages = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in config.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
packages.extend(_compiler_config_from_package_config(externals_config))

return packages

def _compiler_config_from_package_config(config):
compilers = []
for entry in config:
compiler = _compiler_config_from_external(entry)
if compiler:
compilers.append(compiler)

return compilers
def _compiler_config_from_external(config):
extra_attributes_key = "extra_attributes"
compilers_key = "compilers"
c_key, cxx_key, fortran_key = "c", "cxx", "fortran"

# Allow `@x.y.z` instead of `@=x.y.z`
spec = spack.spec.parse_with_version_concrete(config["spec"])

compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)

err_header = f"The external spec '{spec}' cannot be used as a compiler"

# If extra_attributes is not there I might not want to use this entry as a compiler,
# therefore just leave a debug message, but don't be loud with a warning.
if extra_attributes_key not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
return None
extra_attributes = config[extra_attributes_key]

# If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
if compilers_key not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
)
return None
attribute_compilers = extra_attributes[compilers_key]

if c_key not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{extra_attributes_key}:{compilers_key}'"
)
return None
c_compiler = attribute_compilers[c_key]

# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if cxx_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")

if fortran_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")

# compilers format has cc/fc/f77, externals format has "c/fortran"
paths = {
"cc": c_compiler,
"cxx": attribute_compilers.get(cxx_key, None),
"fc": attribute_compilers.get(fortran_key, None),
"f77": attribute_compilers.get(fortran_key, None),
}

if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture

operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")

compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": config.get("modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
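_compiler_config_from_external expects a packages.yaml external entry whose extra_attributes:compilers block names at least a C compiler. A minimal input that would pass its checks, written as the parsed dict (all paths and versions illustrative):

external_entry = {
    "spec": "gcc@12.3.0",
    "prefix": "/usr",
    "extra_attributes": {
        "compilers": {
            "c": "/usr/bin/gcc",             # required: the entry is rejected without it
            "cxx": "/usr/bin/g++",           # optional
            "fortran": "/usr/bin/gfortran",  # optional, reused for both fc and f77
        }
    },
}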
def _init_compiler_config(
configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
configuration.set("compilers", compilers_dict, scope=scope)

def compiler_config_files():
@@ -141,7 +278,9 @@ def add_compilers_to_config(compilers, scope=None):
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -190,7 +329,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
@@ -239,77 +380,79 @@ def all_compiler_specs(scope=None, init_config=True):

def find_compilers(
path_hints: Optional[List[str]] = None,
*,
scope: Optional[str] = None,
mixed_toolchain: bool = False,
max_workers: Optional[int] = None,
path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
) -> List["spack.compiler.Compiler"]:
"""Searches for compiler in the paths given as argument. If any new compiler is found, the
configuration is updated, and the list of new compiler objects is returned.
"""Return the list of compilers found in the paths given as arguments.

Args:
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: configuration scope to modify
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
max_workers: number of processes used to search for compilers
"""
import spack.detection

known_compilers = set(all_compilers(init_config=False))

if path_hints is None:
path_hints = get_path("PATH")
default_paths = fs.search_paths_for_executables(*path_hints)
if sys.platform == "win32":
default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)

detected_packages = spack.detection.by_path(
compiler_pkgs, path_hints=default_paths, max_workers=max_workers
# To detect the version of the compilers, we dispatch a certain number
# of function calls to different workers. Here we construct the list
# of arguments for each call.
arguments = []
for o in all_os_classes():
search_paths = getattr(o, "compiler_search_paths", default_paths)
arguments.extend(arguments_to_detect_version_fn(o, search_paths))

# Here we map the function arguments to the corresponding calls
tp = multiprocessing.pool.ThreadPool()
try:
detected_versions = tp.map(detect_version, arguments)
finally:
tp.close()

def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
value, error = item
if error is None:
return True
try:
# This will fail on Python 2.6 if a non ascii
# character is in the error
tty.debug(error)
except UnicodeEncodeError:
pass
return False

def remove_errors(
item: Tuple[Optional[DetectVersionArgs], Optional[str]]
) -> DetectVersionArgs:
value, _ = item
assert value is not None
return value

return make_compiler_list(
[remove_errors(detected) for detected in detected_versions if valid_version(detected)],
mixed_toolchain=mixed_toolchain,
)

valid_compilers = {}
for name, detected in detected_packages.items():
compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x)]
if not compilers:
continue
valid_compilers[name] = compilers

def _has_fortran_compilers(x):
if "compilers" not in x.extra_attributes:
return False
def find_new_compilers(
path_hints: Optional[List[str]] = None,
scope: Optional[str] = None,
*,
mixed_toolchain: bool = False,
):
"""Same as ``find_compilers`` but return only the compilers that are not
already in compilers.yaml.

return "fortran" in x.extra_attributes["compilers"]
Args:
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: scope to look for a compiler. If None consider the merged configuration.
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
"""
compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)

if mixed_toolchain:
gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
if gccs:
best_gcc = sorted(
gccs, key=lambda x: spack.spec.parse_with_version_concrete(x).version
)[-1]
gfortran = best_gcc.extra_attributes["compilers"]["fortran"]
for name in ("llvm", "apple-clang"):
if name not in valid_compilers:
continue
candidates = valid_compilers[name]
for candidate in candidates:
if _has_fortran_compilers(candidate):
continue
candidate.extra_attributes["compilers"]["fortran"] = gfortran

new_compilers = []
for name, detected in valid_compilers.items():
for config in CompilerConfigFactory.from_specs(detected):
c = _compiler_from_config_entry(config["compiler"])
if c in known_compilers:
continue
new_compilers.append(c)

add_compilers_to_config(new_compilers, scope=scope)
return new_compilers
return select_new_compilers(compilers, scope)
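The mixed_toolchain branch above backfills a Fortran frontend for clang-based toolchains: it picks the newest gcc that ships gfortran and grafts that gfortran path onto any llvm or apple-clang entry that lacks one. The core of that backfill in isolation (the data structures are simplified stand-ins, not Spack's):

detected = {
    "gcc": [{"version": (12, 3), "compilers": {"c": "gcc", "fortran": "gfortran-12"}}],
    "llvm": [{"version": (17, 0), "compilers": {"c": "clang"}}],
}

gccs = [g for g in detected.get("gcc", []) if "fortran" in g["compilers"]]
if gccs:
    best_gcc = sorted(gccs, key=lambda g: g["version"])[-1]
    gfortran = best_gcc["compilers"]["fortran"]
    for name in ("llvm", "apple-clang"):
        for candidate in detected.get(name, []):
            candidate["compilers"].setdefault("fortran", gfortran)

print(detected["llvm"][0]["compilers"])  # clang now paired with gfortran-12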
def select_new_compilers(compilers, scope=None):
@@ -319,9 +462,7 @@ def select_new_compilers(compilers, scope=None):
compilers_not_in_config = []
for c in compilers:
arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
same_specs = compilers_for_spec(
c.spec, arch_spec=arch_spec, scope=scope, init_config=False
)
same_specs = compilers_for_spec(c.spec, arch_spec, scope=scope, init_config=False)
if not same_specs:
compilers_not_in_config.append(c)

@@ -369,9 +510,8 @@ def replace_apple_clang(name):
return [replace_apple_clang(name) for name in all_compiler_module_names()]

@llnl.util.lang.memoized
def all_compiler_module_names() -> List[str]:
return list(llnl.util.lang.list_modules(spack.paths.compilers_path))
return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]

@_auto_compiler_spec
@@ -391,12 +531,7 @@ def find(compiler_spec, scope=None, init_config=True):
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
"""Return specs of available compilers that match the supplied
compiler spec. Return an empty list if nothing found."""
return [
c.spec
for c in compilers_for_spec(
compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
)
]
return [c.spec for c in compilers_for_spec(compiler_spec, arch_spec, scope, True, init_config)]

def all_compilers(scope=None, init_config=True):
@@ -418,11 +553,14 @@ def all_compilers_from(configuration, scope=None, init_config=True):

@_auto_compiler_spec
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
def compilers_for_spec(
compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)

matches = set(find(compiler_spec, scope, init_config))
compilers = []
for cspec in matches:
@@ -431,7 +569,7 @@ def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config

def compilers_for_arch(arch_spec, scope=None):
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
config = all_compilers_config(spack.config.CONFIG, scope=scope)
return list(get_compilers(config, arch_spec=arch_spec))
@@ -463,15 +601,13 @@ def compiler_from_dict(items):
os = items.get("operating_system", None)
target = items.get("target", None)

if not (
"paths" in items and all(n in items["paths"] for n in spack.compiler.PATH_INSTANCE_VARS)
):
if not ("paths" in items and all(n in items["paths"] for n in _path_instance_vars)):
raise InvalidCompilerConfigurationError(cspec)

cls = class_for_compiler_name(cspec.name)

compiler_paths = []
for c in spack.compiler.PATH_INSTANCE_VARS:
for c in _path_instance_vars:
compiler_path = items["paths"][c]
if compiler_path != "None":
compiler_paths.append(compiler_path)
@@ -599,6 +735,24 @@ def compiler_for_spec(compiler_spec, arch_spec):
return compilers[0]

@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
config = spack.config.CONFIG

scope_to_compilers = {}
for scope in config.scopes:
compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec, scope=scope)
if compilers:
scope_to_compilers[scope] = compilers

cfg_file_to_duplicates = {}
for scope, compilers in scope_to_compilers.items():
config_file = config.get_config_filename(scope, "compilers")
cfg_file_to_duplicates[config_file] = compilers

return cfg_file_to_duplicates
@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class."""
@@ -612,7 +766,7 @@ def class_for_compiler_name(compiler_name):
    submodule_name = compiler_name.replace("-", "_")

    module_name = ".".join(["spack", "compilers", submodule_name])
    module_obj = importlib.import_module(module_name)
    module_obj = __import__(module_name, fromlist=[None])
    cls = getattr(module_obj, mod_to_class(compiler_name))

    # make a note of the name in the module so we can get to it easily.
@@ -621,10 +775,272 @@ def class_for_compiler_name(compiler_name):
    return cls


def all_os_classes():
    """
    Return the list of classes for all operating systems available on
    this platform
    """
    classes = []

    platform = spack.platforms.host()
    for os_class in platform.operating_sys.values():
        classes.append(os_class)

    return classes


def all_compiler_types():
    return [class_for_compiler_name(c) for c in supported_compilers()]


#: Gathers the attribute values by which a detected compiler is considered
#: unique in Spack.
#:
#: - os: the operating system
#: - compiler_name: the name of the compiler (e.g. 'gcc', 'clang', etc.)
#: - version: the version of the compiler
#:
CompilerID = collections.namedtuple("CompilerID", ["os", "compiler_name", "version"])

#: Variations on a matched compiler name
NameVariation = collections.namedtuple("NameVariation", ["prefix", "suffix"])

#: Groups together the arguments needed by `detect_version`. The four entries
#: in the tuple are:
#:
#: - id: An instance of the CompilerID named tuple (version can be set to None
#:   as it will be detected later)
#: - variation: a NameVariation for file being tested
#: - language: compiler language being tested (one of 'cc', 'cxx', 'fc', 'f77')
#: - path: full path to the executable being tested
#:
DetectVersionArgs = collections.namedtuple(
    "DetectVersionArgs", ["id", "variation", "language", "path"]
)
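The three namedtuples above are the data model for compiler detection. A hand-built example clarifies the shape; all concrete values here are hypothetical (a real `CompilerID.os` is an `OperatingSystem` instance, and `version` stays `None` until `detect_version` fills it in):

# Hypothetical values for illustration only.
example_id = CompilerID(os="ubuntu22.04", compiler_name="gcc", version=None)
example_args = DetectVersionArgs(
    id=example_id,
    variation=NameVariation(prefix=None, suffix="-12"),  # as if "gcc-12" matched
    language="cc",
    path="/usr/bin/gcc-12",
)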
def arguments_to_detect_version_fn(
    operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
) -> List[DetectVersionArgs]:
    """Returns a list of DetectVersionArgs tuples to be used in a
    corresponding function to detect compiler versions.

    The ``operating_system`` instance can customize the behavior of this
    function by providing a method called with the same name.

    Args:
        operating_system: the operating system on which we are looking for compilers
        paths: paths to search for compilers

    Returns:
        List of DetectVersionArgs tuples. Each item in the list will be later
        mapped to the corresponding function call to detect the version of the
        compilers in this OS.
    """

    def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
        command_arguments: List[DetectVersionArgs] = []
        files_to_be_tested = fs.files_in(*search_paths)
        for compiler_name in supported_compilers_for_host_platform():
            compiler_cls = class_for_compiler_name(compiler_name)

            for language in ("cc", "cxx", "f77", "fc"):
                # Select only the files matching a regexp
                for (file, full_path), regexp in itertools.product(
                    files_to_be_tested, compiler_cls.search_regexps(language)
                ):
                    match = regexp.match(file)
                    if match:
                        compiler_id = CompilerID(operating_system, compiler_name, None)
                        detect_version_args = DetectVersionArgs(
                            id=compiler_id,
                            variation=NameVariation(*match.groups()),
                            language=language,
                            path=full_path,
                        )
                        command_arguments.append(detect_version_args)

        return command_arguments

    fn = getattr(operating_system, "arguments_to_detect_version_fn", _default)
    return fn(paths)
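As the docstring notes, an `OperatingSystem` can replace the default search by defining a method of the same name; the `getattr` above dispatches to it. A sketch of such an override, using a hypothetical OS class rather than an actual Spack one:

import spack.operating_systems


class HypotheticalOS(spack.operating_systems.OperatingSystem):
    # Hypothetical override: hand back a fixed candidate list instead of
    # scanning `paths`. The bound method is what getattr() finds and calls.
    def arguments_to_detect_version_fn(self, paths):
        compiler_id = CompilerID(self, "gcc", None)
        return [
            DetectVersionArgs(
                id=compiler_id,
                variation=NameVariation(None, None),
                language="cc",
                path="/opt/vendor/bin/gcc",  # made-up path
            )
        ]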
def detect_version(
    detect_version_args: DetectVersionArgs,
) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
    """Computes the version of a compiler and adds it to the information
    passed as input.

    As this function is meant to be executed by worker processes it won't
    raise any exception but instead will return a (value, error) tuple that
    needs to be checked by the code dispatching the calls.

    Args:
        detect_version_args: information on the compiler for which we should detect the version.

    Returns:
        A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
        version of the compiler was computed correctly and the first argument
        of the tuple will contain it. Otherwise ``error`` is a string
        containing an explanation on why the version couldn't be computed.
    """

    def _default(fn_args):
        compiler_id = fn_args.id
        language = fn_args.language
        compiler_cls = class_for_compiler_name(compiler_id.compiler_name)
        path = fn_args.path

        # Get compiler names and the callback to detect their versions
        callback = getattr(compiler_cls, f"{language}_version")

        try:
            version = callback(path)
            if version and str(version).strip() and version != "unknown":
                value = fn_args._replace(id=compiler_id._replace(version=version))
                return value, None

            error = f"Couldn't get version for compiler {path}"
        except spack.util.executable.ProcessError as e:
            error = f"Couldn't get version for compiler {path}\n" + str(e)
        except spack.util.executable.ProcessTimeoutError as e:
            error = f"Couldn't get version for compiler {path}\n" + str(e)
        except Exception as e:
            # Catching "Exception" here is fine because it just
            # means something went wrong running a candidate executable.
            error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
                path, e.__class__.__name__, str(e)
            )
        return None, error

    operating_system = detect_version_args.id.os
    fn = getattr(operating_system, "detect_version", _default)
    return fn(detect_version_args)
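Because `detect_version` returns a `(value, error)` pair instead of raising, callers can map it over worker processes and sort the results afterwards. A minimal dispatch sketch; the helper name is ours, not Spack's:

from typing import List, Tuple


def collect_versions(candidates: List[DetectVersionArgs]) -> Tuple[list, list]:
    # Split results per the (value, error) contract; map() could be swapped
    # for a process pool's map without changing the logic.
    detected, errors = [], []
    for value, error in map(detect_version, candidates):
        if error is None:
            detected.append(value)
        else:
            errors.append(error)
    return detected, errors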
def make_compiler_list(
    detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
) -> List["spack.compiler.Compiler"]:
    """Process a list of detected versions and turn them into a list of
    compiler specs.

    Args:
        detected_versions: list of DetectVersionArgs containing a valid version
        mixed_toolchain: allow mixing compilers from different toolchains if language is missing

    Returns:
        list: list of Compiler objects
    """
    group_fn = lambda x: (x.id, x.variation, x.language)
    sorted_compilers = sorted(detected_versions, key=group_fn)

    # Gather items in a dictionary by the id, name variation and language
    compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
    for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
        compiler_id, name_variation, language = sort_key
        by_compiler_id = compilers_d.setdefault(compiler_id, {})
        by_name_variation = by_compiler_id.setdefault(name_variation, {})
        by_name_variation[language] = next(x.path for x in group)

    def _default_make_compilers(cmp_id, paths):
        operating_system, compiler_name, version = cmp_id
        compiler_cls = class_for_compiler_name(compiler_name)
        spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
        paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
        # TODO: johnwparent - revisit the following line as per discussion at:
        # https://github.com/spack/spack/pull/33385/files#r1040036318
        target = archspec.cpu.host()
        compiler = compiler_cls(spec, operating_system, str(target.family), paths)
        return [compiler]

    # For compilers with the same compiler id:
    #
    # - Prefer with C compiler to without
    # - Prefer with C++ compiler to without
    # - Prefer no variations to variations (e.g., clang to clang-gpu)
    #
    sort_fn = lambda variation: (
        "cc" not in by_compiler_id[variation],  # None last
        "cxx" not in by_compiler_id[variation],  # None last
        getattr(variation, "prefix", None),
        getattr(variation, "suffix", None),
    )

    # Flatten to a list of compiler id, primary variation and compiler dictionary
    flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
    for compiler_id, by_compiler_id in compilers_d.items():
        ordered = sorted(by_compiler_id, key=sort_fn)
        selected_variation = ordered[0]
        selected = by_compiler_id[selected_variation]

        # Fill any missing parts from subsequent entries (without mixing toolchains)
        for lang in ["cxx", "f77", "fc"]:
            if lang not in selected:
                next_lang = next(
                    (by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None
                )
                if next_lang:
                    selected[lang] = next_lang

        flat_compilers.append((compiler_id, selected_variation, selected))

    # Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
    if mixed_toolchain:
        make_mixed_toolchain(flat_compilers)

    # Finally, create the compiler list
    compilers: List["spack.compiler.Compiler"] = []
    for compiler_id, _, compiler in flat_compilers:
        make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
        candidates = make_compilers(compiler_id, compiler)
        compilers.extend(x for x in candidates if x.cc is not None)

    return compilers
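The `sort_fn` preference order above (C compiler first, then C++, then unadorned names) can be seen on toy data; the variations and paths below are hypothetical:

# A plain "clang" with cc and cxx should outrank a "clang-gpu" suffix
# variation that only provides cc.
by_compiler_id = {
    NameVariation(prefix=None, suffix="-gpu"): {"cc": "/usr/bin/clang-gpu"},
    NameVariation(prefix=None, suffix=None): {
        "cc": "/usr/bin/clang",
        "cxx": "/usr/bin/clang++",
    },
}
ordered = sorted(
    by_compiler_id,
    key=lambda variation: (
        "cc" not in by_compiler_id[variation],   # has a C compiler first
        "cxx" not in by_compiler_id[variation],  # then a C++ compiler
        getattr(variation, "prefix", None),
        getattr(variation, "suffix", None),
    ),
)
assert ordered[0] == NameVariation(None, None)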
def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
    """Add missing compilers across toolchains when they are missing for a particular language.
    This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
    fortran compiler (no flang)."""

    # First collect the clangs that are missing a fortran compiler
    clangs_without_flang = [
        (id, variation, compiler)
        for id, variation, compiler in compilers
        if id.compiler_name in ("clang", "apple-clang")
        and "f77" not in compiler
        and "fc" not in compiler
    ]
    if not clangs_without_flang:
        return

    # Filter on GCCs with fortran compiler
    gccs_with_fortran = [
        (id, variation, compiler)
        for id, variation, compiler in compilers
        if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
    ]

    # Sort these GCCs by "best variation" (no prefix / suffix first)
    gccs_with_fortran.sort(
        key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
    )

    # Attach the optimal GCC fortran compiler to the clangs that don't have one
    for clang_id, _, clang_compiler in clangs_without_flang:
        gcc_compiler = next(
            (gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
        )

        if not gcc_compiler:
            continue

        # Update the fc / f77 entries
        clang_compiler["f77"] = gcc_compiler["f77"]
        clang_compiler["fc"] = gcc_compiler["fc"]
def is_mixed_toolchain(compiler):
    """Returns True if the current compiler is a mixed toolchain,
    False otherwise.
@@ -671,164 +1087,20 @@ def name_matches(name, name_list):
    return False


_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"


class CompilerConfigFactory:
    """Class aggregating all ways of constructing a list of compiler config entries."""

    @staticmethod
    def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
        result = []
        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
        for s in specs:
            if s.name not in compiler_package_names:
                continue

            candidate = CompilerConfigFactory.from_external_spec(s)
            if candidate is None:
                continue

            result.append(candidate)
        return result

    @staticmethod
    def from_packages_yaml(packages_yaml) -> List[dict]:
        compiler_specs = []
        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
        for name, entry in packages_yaml.items():
            if name not in compiler_package_names:
                continue

            externals_config = entry.get("externals", None)
            if not externals_config:
                continue

            current_specs = []
            for current_external in externals_config:
                compiler = CompilerConfigFactory._spec_from_external_config(current_external)
                if compiler:
                    current_specs.append(compiler)
            compiler_specs.extend(current_specs)

        return CompilerConfigFactory.from_specs(compiler_specs)

    @staticmethod
    def _spec_from_external_config(config):
        # Allow `@x.y.z` instead of `@=x.y.z`
        err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
        # If extra_attributes is not there I might not want to use this entry as a compiler,
        # therefore just leave a debug message, but don't be loud with a warning.
        if _EXTRA_ATTRIBUTES_KEY not in config:
            tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
            return None
        extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
        result = spack.spec.Spec(
            str(spack.spec.parse_with_version_concrete(config["spec"])),
            external_modules=config.get("modules"),
        )
        result.extra_attributes = extra_attributes
        return result

    @staticmethod
    def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
        spec = spack.spec.parse_with_version_concrete(spec)
        extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
        if extra_attributes is None:
            return None

        paths = CompilerConfigFactory._extract_compiler_paths(spec)
        if paths is None:
            return None

        compiler_spec = spack.spec.CompilerSpec(
            package_name_to_compiler_name.get(spec.name, spec.name), spec.version
        )

        operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)

        compiler_entry = {
            "compiler": {
                "spec": str(compiler_spec),
                "paths": paths,
                "flags": extra_attributes.get("flags", {}),
                "operating_system": str(operating_system),
                "target": str(target.family),
                "modules": getattr(spec, "external_modules", []),
                "environment": extra_attributes.get("environment", {}),
                "extra_rpaths": extra_attributes.get("extra_rpaths", []),
                "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
            }
        }
        return compiler_entry

    @staticmethod
    def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
        err_header = f"The external spec '{spec}' cannot be used as a compiler"
        extra_attributes = spec.extra_attributes
        # If I have 'extra_attributes' warn if 'compilers' is missing,
        # or we don't have a C compiler
        if _COMPILERS_KEY not in extra_attributes:
            warnings.warn(
                f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
            )
            return None
        attribute_compilers = extra_attributes[_COMPILERS_KEY]

        if _C_KEY not in attribute_compilers:
            warnings.warn(
                f"{err_header}: missing the C compiler path under "
                f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
            )
            return None
        c_compiler = attribute_compilers[_C_KEY]

        # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
        if _CXX_KEY not in attribute_compilers:
            tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")

        if _FORTRAN_KEY not in attribute_compilers:
            tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")

        # compilers format has cc/fc/f77, externals format has "c/fortran"
        return {
            "cc": c_compiler,
            "cxx": attribute_compilers.get(_CXX_KEY, None),
            "fc": attribute_compilers.get(_FORTRAN_KEY, None),
            "f77": attribute_compilers.get(_FORTRAN_KEY, None),
        }

    @staticmethod
    def _extract_os_and_target(spec: "spack.spec.Spec"):
        if not spec.architecture:
            host_platform = spack.platforms.host()
            operating_system = host_platform.operating_system("default_os")
            target = host_platform.target("default_target").microarchitecture
        else:
            target = spec.architecture.target
            if not target:
                target = spack.platforms.host().target("default_target")
            target = target.microarchitecture

            operating_system = spec.os
            if not operating_system:
                host_platform = spack.platforms.host()
                operating_system = host_platform.operating_system("default_os")
        return operating_system, target
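For reference, the `packages.yaml` shape that `from_packages_yaml` consumes, written as the equivalent Python dict. The prefix, paths, and version are hypothetical, `llvm` is assumed to be among the recognized compiler package names, and `extra_attributes:compilers` must at least provide the `c` entry or the record is skipped with a warning:

packages_yaml = {
    "llvm": {
        "externals": [
            {
                "spec": "llvm@17.0.6",
                "prefix": "/opt/llvm-17",  # made-up install prefix
                "extra_attributes": {
                    "compilers": {
                        "c": "/opt/llvm-17/bin/clang",
                        "cxx": "/opt/llvm-17/bin/clang++",
                        # "fortran" is optional; "c" maps to the cc entry
                    }
                },
            }
        ]
    }
}
entries = CompilerConfigFactory.from_packages_yaml(packages_yaml)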
class InvalidCompilerConfigurationError(spack.error.SpackError):
    def __init__(self, compiler_spec):
        super().__init__(
            f'Invalid configuration for [compiler "{compiler_spec}"]: ',
            f"Compiler configuration must contain entries for "
            f"all compilers: {spack.compiler.PATH_INSTANCE_VARS}",
            'Invalid configuration for [compiler "%s"]: ' % compiler_spec,
            "Compiler configuration must contain entries for all compilers: %s"
            % _path_instance_vars,
        )


class NoCompilersError(spack.error.SpackError):
    def __init__(self):
        super().__init__("Spack could not find any compilers!")


class UnknownCompilerError(spack.error.SpackError):
    def __init__(self, compiler_name):
        super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))
@@ -839,3 +1111,25 @@ def __init__(self, compiler_spec, target):
        super().__init__(
            "No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
        )


class CompilerDuplicateError(spack.error.SpackError):
    def __init__(self, compiler_spec, arch_spec):
        config_file_to_duplicates = get_compiler_duplicates(compiler_spec, arch_spec)
        duplicate_table = list((x, len(y)) for x, y in config_file_to_duplicates.items())
        descriptor = lambda num: "time" if num == 1 else "times"
        duplicate_msg = lambda cfgfile, count: "{0}: {1} {2}".format(
            cfgfile, str(count), descriptor(count)
        )
        msg = (
            "Compiler configuration contains entries with duplicate"
            + " specification ({0}, {1})".format(compiler_spec, arch_spec)
            + " in the following files:\n\t"
            + "\n\t".join(duplicate_msg(x, y) for x, y in duplicate_table)
        )
        super().__init__(msg)


class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
    def __init__(self, compiler_spec):
        super().__init__("Multiple compilers satisfy spec %s" % compiler_spec)
@@ -223,30 +223,6 @@ def get_oneapi_root(pth: str):
        )
        self.msvc_compiler_environment = CmdCall(*env_cmds)

    @property
    def cxx11_flag(self):
        return "/std:c++11"

    @property
    def cxx14_flag(self):
        return "/std:c++14"

    @property
    def cxx17_flag(self):
        return "/std:c++17"

    @property
    def cxx20_flag(self):
        return "/std:c++20"

    @property
    def c11_flag(self):
        return "/std:c11"

    @property
    def c17_flag(self):
        return "/std:c17"

    @property
    def msvc_version(self):
        """This is the VCToolset version *NOT* the actual version of the cl compiler
@@ -2,12 +2,31 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""
(DEPRECATED) Used to contain the code for the original concretizer
Functions here are used to take abstract specs and make them concrete.
For example, if a spec asks for a version between 1.8 and 1.9, these
functions will take the most recent 1.9 version of the
package available. Or, if the user didn't specify a compiler for a
spec, then this will assign a compiler to the spec based on defaults
or user preferences.

TODO: make this customizable and allow users to configure
      concretization policies.
"""
import functools
import platform
import tempfile
from contextlib import contextmanager
from itertools import chain
from typing import Union

import archspec.cpu

import llnl.util.lang
import llnl.util.tty as tty

import spack.abi
import spack.compilers
import spack.config
import spack.environment
@@ -18,24 +37,655 @@
import spack.target
import spack.tengine
import spack.util.path
import spack.variant as vt
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
from spack.version import ClosedOpenRange, VersionList, ver

CHECK_COMPILER_EXISTENCE = True
#: implements rudimentary logic for ABI compatibility
_abi: Union[spack.abi.ABI, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
    lambda: spack.abi.ABI()
)
@functools.total_ordering
class reverse_order:
    """Helper for creating key functions.

    This is a wrapper that inverts the sense of the natural
    comparisons on the object.
    """

    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        return other.value == self.value

    def __lt__(self, other):
        return other.value < self.value
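`reverse_order` inverts one component of a composite sort key, letting a single ascending sort mix ascending and descending fields. A toy example with made-up data:

rows = [("b", 1), ("a", 2), ("a", 3)]
# Sort by name ascending, value descending.
rows.sort(key=lambda r: (r[0], reverse_order(r[1])))
assert rows == [("a", 3), ("a", 2), ("b", 1)]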
class Concretizer:
    """You can subclass this class to override some of the default
    concretization strategies, or you can override all of them.
    """

    #: Controls whether we check that compiler versions actually exist
    #: during concretization. Used for testing and for mirror creation
    check_for_compiler_existence = None

    #: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
    #: Those will not be considered as providers for virtuals.
    non_buildable_packages = {"glibc", "musl"}

    def __init__(self, abstract_spec=None):
        if Concretizer.check_for_compiler_existence is None:
            Concretizer.check_for_compiler_existence = not spack.config.get(
                "config:install_missing_compilers", False
            )
        self.abstract_spec = abstract_spec
        self._adjust_target_answer_generator = None

    def concretize_develop(self, spec):
        """
        Add ``dev_path=*`` variant to packages built from local source.
        """
        env = spack.environment.active_environment()
        dev_info = env.dev_specs.get(spec.name, {}) if env else {}
        if not dev_info:
            return False

        path = spack.util.path.canonicalize_path(dev_info["path"], default_wd=env.path)

        if "dev_path" in spec.variants:
            assert spec.variants["dev_path"].value == path
            changed = False
        else:
            spec.variants.setdefault("dev_path", vt.SingleValuedVariant("dev_path", path))
            changed = True
        changed |= spec.constrain(dev_info["spec"])
        return changed
    def _valid_virtuals_and_externals(self, spec):
        """Returns a list of candidate virtual dep providers and external
        packages that could be used to concretize a spec.

        Preferred specs come first in the list.
        """
        # First construct a list of concrete candidates to replace spec with.
        candidates = [spec]
        pref_key = lambda spec: 0  # no-op pref key

        if spec.virtual:
            candidates = [
                s
                for s in spack.repo.PATH.providers_for(spec)
                if s.name not in self.non_buildable_packages
            ]
            if not candidates:
                raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)

            # Find nearest spec in the DAG (up then down) that has prefs.
            spec_w_prefs = find_spec(
                spec, lambda p: PackagePrefs.has_preferred_providers(p.name, spec.name), spec
            )  # default to spec itself.

            # Create a key to sort candidates by the prefs we found
            pref_key = PackagePrefs(spec_w_prefs.name, "providers", spec.name)

        # For each candidate package, if it has externals, add those
        # to the usable list.  if it's not buildable, then *only* add
        # the externals.
        usable = []
        for cspec in candidates:
            if is_spec_buildable(cspec):
                usable.append(cspec)

            externals = spec_externals(cspec)
            for ext in externals:
                if ext.intersects(spec):
                    usable.append(ext)

        # If nothing is in the usable list now, it's because we aren't
        # allowed to build anything.
        if not usable:
            raise NoBuildError(spec)

        # Use a sort key to order the results
        return sorted(
            usable,
            key=lambda spec: (
                not spec.external,  # prefer externals
                pref_key(spec),  # respect prefs
                spec.name,  # group by name
                reverse_order(spec.versions),  # latest version
                spec,  # natural order
            ),
        )
    def choose_virtual_or_external(self, spec: spack.spec.Spec):
        """Given a list of candidate virtual and external packages, try to
        find one that is most ABI compatible.
        """
        candidates = self._valid_virtuals_and_externals(spec)
        if not candidates:
            return candidates

        # Find the nearest spec in the dag that has a compiler.  We'll
        # use that spec to calibrate compiler compatibility.
        abi_exemplar = find_spec(spec, lambda x: x.compiler)
        if abi_exemplar is None:
            abi_exemplar = spec.root

        # Sort candidates from most to least compatibility.
        #   We reverse because True > False.
        #   Sort is stable, so candidates keep their order.
        return sorted(
            candidates,
            reverse=True,
            key=lambda spec: (
                _abi.compatible(spec, abi_exemplar, loose=True),
                _abi.compatible(spec, abi_exemplar),
            ),
        )
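The double-boolean key with `reverse=True` relies on `True > False`, so strict compatibility outranks loose compatibility and the stable sort keeps the preference order computed earlier. Illustrated with hypothetical compatibility results:

candidates = ["loose-only", "exact", "incompatible"]
compat = {
    "exact": (True, True),        # (loose, strict) both hold
    "loose-only": (True, False),
    "incompatible": (False, False),
}
ranked = sorted(candidates, key=lambda c: compat[c], reverse=True)
assert ranked == ["exact", "loose-only", "incompatible"]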
    def concretize_version(self, spec):
        """If the spec is already concrete, return.  Otherwise take
        the preferred version from spackconfig, and default to the package's
        version if there are no available versions.

        TODO: In many cases we probably want to look for installed
              versions of each package and use an installed version
              if we can link to it.  The policy implemented here will
              tend to rebuild a lot of stuff because it will prefer
              a compiler in the spec to any compiler already-
              installed things were built with.  There is likely
              some better policy that finds some middle ground
              between these two extremes.
        """
        # return if already concrete.
        if spec.versions.concrete:
            return False

        # List of versions we could consider, in sorted order
        pkg_versions = spec.package_class.versions
        usable = [v for v in pkg_versions if any(v.intersects(sv) for sv in spec.versions)]

        yaml_prefs = PackagePrefs(spec.name, "version")

        # The keys below show the order of precedence of factors used
        # to select a version when concretizing.  The item with
        # the "largest" key will be selected.
        #
        # NOTE: When COMPARING VERSIONS, the '@develop' version is always
        #       larger than other versions.  BUT when CONCRETIZING,
        #       the largest NON-develop version is selected by default.
        keyfn = lambda v: (
            # ------- Special direction from the user
            # Respect order listed in packages.yaml
            -yaml_prefs(v),
            # The preferred=True flag (packages or packages.yaml or both?)
            pkg_versions.get(v).get("preferred", False),
            # ------- Regular case: use latest non-develop version by default.
            # Avoid @develop version, which would otherwise be the "largest"
            # in straight version comparisons
            not v.isdevelop(),
            # Compare the version itself
            # This includes the logic:
            #       a) develop > everything (disabled by "not v.isdevelop() above)
            #       b) numeric > non-numeric
            #       c) Numeric or string comparison
            v,
        )
        usable.sort(key=keyfn, reverse=True)

        if usable:
            spec.versions = ver([usable[0]])
        else:
            # We don't know of any SAFE versions that match the given
            # spec.  Grab the spec's versions and grab the highest
            # *non-open* part of the range of versions it specifies.
            # Someone else can raise an error if this happens,
            # e.g. when we go to fetch it and don't know how.  But it
            # *might* work.
            if not spec.versions or spec.versions == VersionList([":"]):
                raise NoValidVersionError(spec)
            else:
                last = spec.versions[-1]
                if isinstance(last, ClosedOpenRange):
                    range_as_version = VersionList([last]).concrete_range_as_version
                    if range_as_version:
                        spec.versions = ver([range_as_version])
                    else:
                        raise NoValidVersionError(spec)
                else:
                    spec.versions = ver([last])

        return True  # Things changed
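The precedence encoded by `keyfn` can be checked on toy tuples shaped like its output: packages.yaml rank, the `preferred` flag, `not isdevelop()`, then the version itself. With hypothetical data, `develop` loses to the newest numbered release even though it compares as the largest version:

toy_keys = {
    "develop": (0, False, False, (9999,)),  # not v.isdevelop() -> False
    "1.9.1": (0, False, True, (1, 9, 1)),
    "1.8.0": (0, False, True, (1, 8, 0)),
}
best = max(toy_keys, key=toy_keys.get)
assert best == "1.9.1"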
    def concretize_architecture(self, spec):
        """If the spec is empty provide the defaults of the platform. If the
        architecture is not a string type, then check if either the platform,
        target or operating system are concretized. If any of the fields are
        changed then return True. If everything is concretized (i.e the
        architecture attribute is a namedtuple of classes) then return False.
        If the target is a string type, then convert the string into a
        concretized architecture. If it has no architecture and the root of the
        DAG has an architecture, then use the root otherwise use the defaults
        on the platform.
        """
        # ensure type safety for the architecture
        if spec.architecture is None:
            spec.architecture = spack.spec.ArchSpec()

        if spec.architecture.concrete:
            return False

        # Get platform of nearest spec with a platform, including spec
        # If spec has a platform, easy
        if spec.architecture.platform:
            new_plat = spack.platforms.by_name(spec.architecture.platform)
        else:
            # Else if anyone else has a platform, take the closest one
            # Search up, then down, along build/link deps first
            # Then any nearest. Algorithm from compilerspec search
            platform_spec = find_spec(spec, lambda x: x.architecture and x.architecture.platform)
            if platform_spec:
                new_plat = spack.platforms.by_name(platform_spec.architecture.platform)
            else:
                # If no platform anywhere in this spec, grab the default
                new_plat = spack.platforms.host()

        # Get nearest spec with relevant platform and an os
        # Generally, same algorithm as finding platform, except we only
        # consider specs that have a platform
        if spec.architecture.os:
            new_os = spec.architecture.os
        else:
            new_os_spec = find_spec(
                spec,
                lambda x: (
                    x.architecture
                    and x.architecture.platform == str(new_plat)
                    and x.architecture.os
                ),
            )
            if new_os_spec:
                new_os = new_os_spec.architecture.os
            else:
                new_os = new_plat.operating_system("default_os")

        # Get the nearest spec with relevant platform and a target
        # Generally, same algorithm as finding os
        curr_target = None
        if spec.architecture.target:
            curr_target = spec.architecture.target
        if spec.architecture.target and spec.architecture.target_concrete:
            new_target = spec.architecture.target
        else:
            new_target_spec = find_spec(
                spec,
                lambda x: (
                    x.architecture
                    and x.architecture.platform == str(new_plat)
                    and x.architecture.target
                    and x.architecture.target != curr_target
                ),
            )
            if new_target_spec:
                if curr_target:
                    # constrain one target by the other
                    new_target_arch = spack.spec.ArchSpec(
                        (None, None, new_target_spec.architecture.target)
                    )
                    curr_target_arch = spack.spec.ArchSpec((None, None, curr_target))
                    curr_target_arch.constrain(new_target_arch)
                    new_target = curr_target_arch.target
                else:
                    new_target = new_target_spec.architecture.target
            else:
                # To get default platform, consider package prefs
                if PackagePrefs.has_preferred_targets(spec.name):
                    new_target = self.target_from_package_preferences(spec)
                else:
                    new_target = new_plat.target("default_target")
                if curr_target:
                    # convert to ArchSpec to compare satisfaction
                    new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
                    curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))

                    if not new_target_arch.intersects(curr_target_arch):
                        # new_target is an incorrect guess based on preferences
                        # and/or default
                        valid_target_ranges = str(curr_target).split(",")
                        for target_range in valid_target_ranges:
                            t_min, t_sep, t_max = target_range.partition(":")
                            if not t_sep:
                                new_target = t_min
                                break
                            elif t_max:
                                new_target = t_max
                                break
                            elif t_min:
                                # TODO: something better than picking first
                                new_target = t_min
                                break

        # Construct new architecture, compute whether spec changed
        arch_spec = (str(new_plat), str(new_os), str(new_target))
        new_arch = spack.spec.ArchSpec(arch_spec)
        spec_changed = new_arch != spec.architecture
        spec.architecture = new_arch
        return spec_changed
    def target_from_package_preferences(self, spec):
        """Returns the preferred target from the package preferences if
        there's any.

        Args:
            spec: abstract spec to be concretized
        """
        target_prefs = PackagePrefs(spec.name, "target")
        target_specs = [spack.spec.Spec("target=%s" % tname) for tname in archspec.cpu.TARGETS]

        def tspec_filter(s):
            # Filter target specs by whether the architecture
            # family is the current machine type. This ensures
            # we only consider x86_64 targets when on an
            # x86_64 machine, etc. This may need to change to
            # enable setting cross compiling as a default
            target = archspec.cpu.TARGETS[str(s.architecture.target)]
            arch_family_name = target.family.name
            return arch_family_name == platform.machine()

        # Sort filtered targets by package prefs
        target_specs = list(filter(tspec_filter, target_specs))
        target_specs.sort(key=target_prefs)
        new_target = target_specs[0].architecture.target
        return new_target
    def concretize_variants(self, spec):
        """If the spec already has variants filled in, return.  Otherwise, add
        the user preferences from packages.yaml or the default variants from
        the package specification.
        """
        changed = False
        preferred_variants = PackagePrefs.preferred_variants(spec.name)
        pkg_cls = spec.package_class
        for name, entry in pkg_cls.variants.items():
            variant, when = entry
            var = spec.variants.get(name, None)
            if var and "*" in var:
                # remove variant wildcard before concretizing
                # wildcard cannot be combined with other variables in a
                # multivalue variant, a concrete variant cannot have the value
                # wildcard, and a wildcard does not constrain a variant
                spec.variants.pop(name)
            if name not in spec.variants and any(spec.satisfies(w) for w in when):
                changed = True
                if name in preferred_variants:
                    spec.variants[name] = preferred_variants.get(name)
                else:
                    spec.variants[name] = variant.make_default()
            if name in spec.variants and not any(spec.satisfies(w) for w in when):
                raise vt.InvalidVariantForSpecError(name, when, spec)

        return changed
    def concretize_compiler(self, spec):
        """If the spec already has a compiler, we're done.  If not, then take
        the compiler used for the nearest ancestor with a compiler
        spec and use that.  If the ancestor's compiler is not
        concrete, then use the preferred compiler as specified in
        spackconfig.

        Intuition: Use the spackconfig default if no package that depends on
        this one has a strict compiler requirement.  Otherwise, try to
        build with the compiler that will be used by libraries that
        link to this one, to maximize compatibility.
        """
        # Pass on concretizing the compiler if the target or operating system
        # is not yet determined
        if not spec.architecture.concrete:
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        # Only use a matching compiler if it is of the proper style
        # Takes advantage of the proper logic already existing in
        # compiler_for_spec Should think whether this can be more
        # efficient
        def _proper_compiler_style(cspec, aspec):
            compilers = spack.compilers.compilers_for_spec(cspec, arch_spec=aspec)
            # If the spec passed as argument is concrete we want to check
            # the versions match exactly
            if (
                cspec.concrete
                and compilers
                and cspec.version not in [c.version for c in compilers]
            ):
                return []

            return compilers

        if spec.compiler and spec.compiler.concrete:
            if self.check_for_compiler_existence and not _proper_compiler_style(
                spec.compiler, spec.architecture
            ):
                _compiler_concretization_failure(spec.compiler, spec.architecture)
            return False

        # Find another spec that has a compiler, or the root if none do
        other_spec = spec if spec.compiler else find_spec(spec, lambda x: x.compiler, spec.root)
        other_compiler = other_spec.compiler
        assert other_spec

        # Check if the compiler is already fully specified
        if other_compiler and other_compiler.concrete:
            if self.check_for_compiler_existence and not _proper_compiler_style(
                other_compiler, spec.architecture
            ):
                _compiler_concretization_failure(other_compiler, spec.architecture)
            spec.compiler = other_compiler
            return True

        if other_compiler:  # Another node has abstract compiler information
            compiler_list = spack.compilers.find_specs_by_arch(other_compiler, spec.architecture)
            if not compiler_list:
                # We don't have a matching compiler installed
                if not self.check_for_compiler_existence:
                    # Concretize compiler spec versions as a package to build
                    cpkg_spec = spack.compilers.pkg_spec_for_compiler(other_compiler)
                    self.concretize_version(cpkg_spec)
                    spec.compiler = spack.spec.CompilerSpec(
                        other_compiler.name, cpkg_spec.versions
                    )
                    return True
                else:
                    # No compiler with a satisfactory spec was found
                    raise UnavailableCompilerVersionError(other_compiler, spec.architecture)
        else:
            # We have no hints to go by, grab any compiler
            compiler_list = spack.compilers.all_compiler_specs()
            if not compiler_list:
                # Spack has no compilers.
                raise spack.compilers.NoCompilersError()

        # By default, prefer later versions of compilers
        compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
        ppk = PackagePrefs(other_spec.name, "compiler")
        matches = sorted(compiler_list, key=ppk)

        # copy concrete version into other_compiler
        try:
            spec.compiler = next(
                c for c in matches if _proper_compiler_style(c, spec.architecture)
            ).copy()
        except StopIteration:
            # No compiler with a satisfactory spec has a suitable arch
            _compiler_concretization_failure(other_compiler, spec.architecture)

        assert spec.compiler.concrete
        return True  # things changed.
    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.
        """
        # Pass on concretizing the compiler flags if the target or operating
        # system is not set.
        if not spec.architecture.concrete:
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        compiler_match = lambda other: (
            spec.compiler == other.compiler and spec.architecture == other.architecture
        )

        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = list()
            try:
                nearest = next(
                    p
                    for p in spec.traverse(direction="parents")
                    if (compiler_match(p) and (p is not spec) and flag in p.compiler_flags)
                )
                nearest_flags = nearest.compiler_flags.get(flag, [])
                flags = spec.compiler_flags.get(flag, [])
                if set(nearest_flags) - set(flags):
                    spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(nearest_flags + flags))
                    ret = True
            except StopIteration:
                pass

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        try:
            compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture)
        except spack.compilers.NoCompilerForSpecError:
            if self.check_for_compiler_existence:
                raise
            return ret
        for flag in compiler.flags:
            config_flags = compiler.flags.get(flag, [])
            flags = spec.compiler_flags.get(flag, [])
            spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(config_flags + flags))
            if set(config_flags) - set(flags):
                ret = True

        return ret
    def adjust_target(self, spec):
        """Adjusts the target microarchitecture if the compiler is too old
        to support the default one.

        Args:
            spec: spec to be concretized

        Returns:
            True if spec was modified, False otherwise
        """
        # To minimize the impact on performance this function will attempt
        # to adjust the target only at the very first call once necessary
        # information is set. It will just return False on subsequent calls.
        # The way this is achieved is by initializing a generator and making
        # this function return the next answer.
        if not (spec.architecture and spec.architecture.concrete):
            # Not ready, but keep going because we have work to do later
            return True

        def _make_only_one_call(spec):
            yield self._adjust_target(spec)
            while True:
                yield False

        if self._adjust_target_answer_generator is None:
            self._adjust_target_answer_generator = _make_only_one_call(spec)

        return next(self._adjust_target_answer_generator)
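The generator trick in `adjust_target` computes an answer exactly once and then answers `False` forever, so repeated concretization rounds pay nothing after the first. The same pattern isolated from Spack:

def make_only_one_call(work):
    yield work()  # first next(): do the real work
    while True:
        yield False  # every later next(): cheap no-op


gen = make_only_one_call(lambda: True)
assert next(gen) is True
assert next(gen) is False
assert next(gen) is False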
    def _adjust_target(self, spec):
        """Assumes that the architecture and the compiler have been
        set already and checks if the current target microarchitecture
        is the default and can be optimized by the compiler.

        If not, downgrades the microarchitecture until a suitable one
        is found. If none can be found raise an error.

        Args:
            spec: spec to be concretized

        Returns:
            True if any modification happened, False otherwise
        """
        import archspec.cpu

        # Try to adjust the target only if it is the default
        # target for this platform
        current_target = spec.architecture.target
        current_platform = spack.platforms.by_name(spec.architecture.platform)

        default_target = current_platform.target("default_target")
        if PackagePrefs.has_preferred_targets(spec.name):
            default_target = self.target_from_package_preferences(spec)

        if current_target != default_target or (
            self.abstract_spec
            and self.abstract_spec.architecture
            and self.abstract_spec.architecture.concrete
        ):
            return False

        try:
            current_target.optimization_flags(spec.compiler)
        except archspec.cpu.UnsupportedMicroarchitecture:
            microarchitecture = current_target.microarchitecture
            for ancestor in microarchitecture.ancestors:
                candidate = None
                try:
                    candidate = spack.target.Target(ancestor)
                    candidate.optimization_flags(spec.compiler)
                except archspec.cpu.UnsupportedMicroarchitecture:
                    continue

                if candidate is not None:
                    msg = (
                        "{0.name}@{0.version} cannot build optimized "
                        'binaries for "{1}". Using best target possible: '
                        '"{2}"'
                    )
                    msg = msg.format(spec.compiler, current_target, candidate)
                    tty.warn(msg)
                    spec.architecture.target = candidate
                    return True
                else:
                    raise

        return False
@contextmanager
def disable_compiler_existence_check():
    global CHECK_COMPILER_EXISTENCE
    CHECK_COMPILER_EXISTENCE, saved = False, CHECK_COMPILER_EXISTENCE
    saved = Concretizer.check_for_compiler_existence
    Concretizer.check_for_compiler_existence = False
    yield
    CHECK_COMPILER_EXISTENCE = saved
    Concretizer.check_for_compiler_existence = saved


@contextmanager
def enable_compiler_existence_check():
    global CHECK_COMPILER_EXISTENCE
    CHECK_COMPILER_EXISTENCE, saved = True, CHECK_COMPILER_EXISTENCE
    saved = Concretizer.check_for_compiler_existence
    Concretizer.check_for_compiler_existence = True
    yield
    CHECK_COMPILER_EXISTENCE = saved
    Concretizer.check_for_compiler_existence = saved
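Typical use of the pair of context managers above, e.g. when concretizing specs for a mirror whose compilers are not installed locally. The spec is hypothetical and the call assumes the old-concretizer code path where `Spec.concretize()` is available:

with disable_compiler_existence_check():
    spec = spack.spec.Spec("hypothetical-package%gcc@99.9")
    spec.concretize()  # does not require gcc@99.9 to be configured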
def find_spec(spec, condition, default=None):
@@ -69,6 +719,19 @@ def find_spec(spec, condition, default=None):
    return default  # Nothing matched the condition; return default.


def _compiler_concretization_failure(compiler_spec, arch):
    # Distinguish between the case that there are compilers for
    # the arch but not with the given compiler spec and the case that
    # there are no compilers for the arch at all
    if not spack.compilers.compilers_for_arch(arch):
        available_os_targets = set(
            (c.operating_system, c.target) for c in spack.compilers.all_compilers()
        )
        raise NoCompilersForArchError(arch, available_os_targets)
    else:
        raise UnavailableCompilerVersionError(compiler_spec, arch)


def concretize_specs_together(*abstract_specs, **kwargs):
    """Given a number of specs as input, tries to concretize them together.

@@ -81,6 +744,12 @@ def concretize_specs_together(*abstract_specs, **kwargs):
    Returns:
        List of concretized specs
    """
    if spack.config.get("config:concretizer", "clingo") == "original":
        return _concretize_specs_together_original(*abstract_specs, **kwargs)
    return _concretize_specs_together_new(*abstract_specs, **kwargs)


def _concretize_specs_together_new(*abstract_specs, **kwargs):
    import spack.solver.asp

    allow_deprecated = spack.config.get("config:deprecated", False)
@@ -91,6 +760,51 @@ def concretize_specs_together(*abstract_specs, **kwargs):
    return [s.copy() for s in result.specs]


def _concretize_specs_together_original(*abstract_specs, **kwargs):
    abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
    tmpdir = tempfile.mkdtemp()
    builder = spack.repo.MockRepositoryBuilder(tmpdir)
    # Split recursive specs, as it seems the concretizer has issue
    # respecting conditions on dependents expressed like
    # depends_on('foo ^bar@1.0'), see issue #11160
    split_specs = [
        dep.copy(deps=False) for spec1 in abstract_specs for dep in spec1.traverse(root=True)
    ]
    builder.add_package(
        "concretizationroot", dependencies=[(str(x), None, None) for x in split_specs]
    )

    with spack.repo.use_repositories(builder.root, override=False):
        # Spec from a helper package that depends on all the abstract_specs
        concretization_root = spack.spec.Spec("concretizationroot")
        concretization_root.concretize(tests=kwargs.get("tests", False))
        # Retrieve the direct dependencies
        concrete_specs = [concretization_root[spec.name].copy() for spec in abstract_specs]

    return concrete_specs
class NoCompilersForArchError(spack.error.SpackError):
    def __init__(self, arch, available_os_targets):
        err_msg = (
            "No compilers found"
            " for operating system %s and target %s."
            "\nIf previous installations have succeeded, the"
            " operating system may have been updated." % (arch.os, arch.target)
        )

        available_os_target_strs = list()
        for operating_system, t in available_os_targets:
            os_target_str = "%s-%s" % (operating_system, t) if t else operating_system
            available_os_target_strs.append(os_target_str)
        err_msg += (
            "\nCompilers are defined for the following"
            " operating systems and targets:\n\t" + "\n\t".join(available_os_target_strs)
        )

        super().__init__(err_msg, "Run 'spack compiler find' to add compilers.")


class UnavailableCompilerVersionError(spack.error.SpackError):
    """Raised when there is no available compiler that satisfies a
    compiler spec."""
@@ -106,3 +820,37 @@ def __init__(self, compiler_spec, arch=None):
            "'spack compilers' to see which compilers are already recognized"
            " by spack.",
        )


class NoValidVersionError(spack.error.SpackError):
    """Raised when there is no way to have a concrete version for a
    particular spec."""

    def __init__(self, spec):
        super().__init__(
            "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)
        )


class InsufficientArchitectureInfoError(spack.error.SpackError):
    """Raised when details on architecture cannot be collected from the
    system"""

    def __init__(self, spec, archs):
        super().__init__(
            "Cannot determine necessary architecture information for '%s': %s"
            % (spec.name, str(archs))
        )


class NoBuildError(spack.error.SpecError):
    """Raised when a package is configured with the buildable option False, but
    no satisfactory external versions can be found
    """

    def __init__(self, spec):
        msg = (
            "The spec\n    '%s'\n    is configured as not buildable, "
            "and no matching external installs were found"
        )
        super().__init__(msg % spec)
@@ -99,6 +99,7 @@
        "dirty": False,
        "build_jobs": min(16, cpus_available()),
        "build_stage": "$tempdir/spack-stage",
        "concretizer": "clingo",
        "license_dir": spack.paths.default_license_dir,
    }
}
@@ -1090,7 +1091,7 @@ def validate(


def read_config_file(
    path: str, schema: Optional[YamlConfigDict] = None
    filename: str, schema: Optional[YamlConfigDict] = None
) -> Optional[YamlConfigDict]:
    """Read a YAML configuration file.

@@ -1100,9 +1101,21 @@ def read_config_file(
    # to preserve flexibility in calling convention (don't need to provide
    # schema when it's not necessary) while allowing us to validate against a
    # known schema when the top-level key could be incorrect.

    if not os.path.exists(filename):
        # Ignore nonexistent files.
        tty.debug(f"Skipping nonexistent config path {filename}", level=3)
        return None

    elif not os.path.isfile(filename):
        raise ConfigFileError(f"Invalid configuration. {filename} exists but is not a file.")

    elif not os.access(filename, os.R_OK):
        raise ConfigFileError(f"Config file is not readable: {filename}")

    try:
        with open(path) as f:
            tty.debug(f"Reading config from file {path}")
        tty.debug(f"Reading config from file {filename}")
        with open(filename) as f:
            data = syaml.load_config(f)

        if data:
@@ -1113,20 +1126,15 @@ def read_config_file(

        return data

    except FileNotFoundError:
        # Ignore nonexistent files.
        tty.debug(f"Skipping nonexistent config path {path}", level=3)
        return None

    except OSError as e:
        raise ConfigFileError(f"Path is not a file or is not readable: {path}: {str(e)}") from e

    except StopIteration as e:
        raise ConfigFileError(f"Config file is empty or is not a valid YAML dict: {path}") from e
    except StopIteration:
        raise ConfigFileError(f"Config file is empty or is not a valid YAML dict: {filename}")

    except syaml.SpackYAMLError as e:
        raise ConfigFileError(str(e)) from e

    except OSError as e:
        raise ConfigFileError(f"Error reading configuration file {filename}: {str(e)}") from e


def _override(string: str) -> bool:
    """Test if a spack YAML string is an override.
@@ -308,7 +308,8 @@ def __call__(self):
        return t.render(**self.to_dict())


import spack.container.writers.docker  # noqa: E402

# Import after function definition all the modules in this package,
# so that registration of writers will happen automatically
from . import docker  # noqa: F401 E402
from . import singularity  # noqa: F401 E402
import spack.container.writers.singularity  # noqa: E402
@@ -14,14 +14,12 @@
import llnl.util.tty as tty

import spack.cmd
import spack.compilers
import spack.deptypes as dt
import spack.error
import spack.hash_types as hash_types
import spack.platforms
import spack.repo
import spack.spec
import spack.store
from spack.schema.cray_manifest import schema as manifest_schema

#: Cray systems can store a Spack-compatible description of system
@@ -239,7 +237,7 @@ def read(path, apply_updates):
        tty.debug(f"Include this\n{traceback.format_exc()}")
    if apply_updates:
        for spec in specs.values():
            spack.store.STORE.db.add(spec)
            spack.store.STORE.db.add(spec, directory_layout=None)


class ManifestValidationError(spack.error.SpackError):
@@ -59,11 +59,7 @@
import spack.util.lock as lk
import spack.util.spack_json as sjson
import spack.version as vn
from spack.directory_layout import (
    DirectoryLayout,
    DirectoryLayoutError,
    InconsistentInstallDirectoryError,
)
from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
from spack.error import SpackError
from spack.util.crypto import bit_length

@@ -207,12 +203,12 @@ class InstallRecord:
    def __init__(
        self,
        spec: "spack.spec.Spec",
        path: Optional[str],
        path: str,
        installed: bool,
        ref_count: int = 0,
        explicit: bool = False,
        installation_time: Optional[float] = None,
        deprecated_for: Optional[str] = None,
        deprecated_for: Optional["spack.spec.Spec"] = None,
        in_buildcache: bool = False,
        origin=None,
    ):
@@ -599,11 +595,9 @@ class Database:
    def __init__(
        self,
        root: str,
        *,
        upstream_dbs: Optional[List["Database"]] = None,
        is_upstream: bool = False,
        lock_cfg: LockConfiguration = DEFAULT_LOCK_CFG,
        layout: Optional[DirectoryLayout] = None,
    ) -> None:
        """Database for Spack installations.

@@ -626,7 +620,6 @@ def __init__(
        """
        self.root = root
        self.database_directory = os.path.join(self.root, _DB_DIRNAME)
        self.layout = layout

        # Set up layout of database files within the db dir
        self._index_path = os.path.join(self.database_directory, "index.json")
@@ -671,6 +664,14 @@ def __init__(

        self.upstream_dbs = list(upstream_dbs) if upstream_dbs else []

        # whether there was an error at the start of a read transaction
        self._error = None

        # For testing: if this is true, an exception is thrown when missing
        # dependencies are detected (rather than just printing a warning
        # message)
        self._fail_when_missing_deps = False

        self._write_transaction_impl = lk.WriteTransaction
        self._read_transaction_impl = lk.ReadTransaction
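The two ``*_transaction_impl`` attributes select lock-backed transaction types: reads of the index happen inside a read transaction and mutations inside a write transaction, so concurrent Spack processes see a consistent database. A minimal sketch of the write-transaction shape, with a ``threading`` lock standing in for Spack's file locks (``lk.WriteTransaction`` additionally takes acquire/release callbacks, which this sketch omits):

.. code-block:: python

    import contextlib
    import threading


    class TinyDB:
        def __init__(self):
            self._lock = threading.RLock()  # stand-in for a file lock
            self._data = {}

        @contextlib.contextmanager
        def write_transaction(self):
            # Hold the lock for the whole read-modify-write cycle
            with self._lock:
                yield self._data
                # a real implementation would persist self._data here


    db = TinyDB()
    with db.write_transaction() as data:
        data["abc1234"] = "installed"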
@@ -773,13 +774,7 @@ def query_local_by_spec_hash(self, hash_key):
        with self.read_transaction():
            return self._data.get(hash_key, None)

    def _assign_dependencies(
        self,
        spec_reader: Type["spack.spec.SpecfileReaderBase"],
        hash_key: str,
        installs: dict,
        data: Dict[str, InstallRecord],
    ):
    def _assign_dependencies(self, spec_reader, hash_key, installs, data):
        # Add dependencies from other records in the install DB to
        # form a full spec.
        spec = data[hash_key].spec
@@ -792,20 +787,26 @@ def _assign_dependencies(
            for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
                yaml_deps
            ):
                # It is important that we always check upstream installations in the same order,
                # and that we always check the local installation first: if a downstream Spack
                # installs a package then dependents in that installation could be using it. If a
                # hash is installed locally and upstream, there isn't enough information to
                # determine which one a local package depends on, so the convention ensures that
                # this isn't an issue.
                _, record = self.query_by_spec_hash(dhash, data=data)
                # It is important that we always check upstream installations
                # in the same order, and that we always check the local
                # installation first: if a downstream Spack installs a package
                # then dependents in that installation could be using it.
                # If a hash is installed locally and upstream, there isn't
                # enough information to determine which one a local package
                # depends on, so the convention ensures that this isn't an
                # issue.
                upstream, record = self.query_by_spec_hash(dhash, data=data)
                child = record.spec if record else None

                if not child:
                    tty.warn(
                        f"Missing dependency not in database: "
                        f"{spec.cformat('{name}{/hash:7}')} needs {dname}-{dhash[:7]}"
                    msg = "Missing dependency not in database: " "%s needs %s-%s" % (
                        spec.cformat("{name}{/hash:7}"),
                        dname,
                        dhash[:7],
                    )
                    if self._fail_when_missing_deps:
                        raise MissingDependenciesError(msg)
                    tty.warn(msg)
                    continue

                spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)
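The comment block above encodes a lookup convention: the local database is consulted first, then upstream databases in a fixed order, so a hash installed both locally and upstream always resolves the same way. A minimal sketch of that convention, with plain dicts standing in for database instances:

.. code-block:: python

    def query_by_hash(local, upstreams, dag_hash):
        """Return (is_upstream, record); check local first, then upstreams in order."""
        if dag_hash in local:
            return False, local[dag_hash]
        for upstream in upstreams:  # iteration order is fixed across calls
            if dag_hash in upstream:
                return True, upstream[dag_hash]
        return False, None


    local = {"abc1234": "local record"}
    upstreams = [{"abc1234": "upstream record"}]
    # The local record wins no matter what the upstreams contain
    assert query_by_hash(local, upstreams, "abc1234") == (False, "local record")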
@@ -845,7 +846,7 @@ def check(cond, msg):
        ):
            tty.warn(f"Spack database version changed from {version} to {_DB_VERSION}. Upgrading.")

            self.reindex()
            self.reindex(spack.store.STORE.layout)
            installs = dict(
                (k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
            )
@@ -872,8 +873,8 @@ def invalid_record(hash_key, error):
        # (i.e., its specs are a true Merkle DAG, unlike most specs.)

        # Pass 1: Iterate through database and build specs w/o dependencies
        data: Dict[str, InstallRecord] = {}
        installed_prefixes: Set[str] = set()
        data = {}
        installed_prefixes = set()
        for hash_key, rec in installs.items():
            try:
                # This constructs a spec DAG from the list of all installs
@@ -910,7 +911,7 @@ def invalid_record(hash_key, error):
        self._data = data
        self._installed_prefixes = installed_prefixes

    def reindex(self):
    def reindex(self, directory_layout):
        """Build database index from scratch based on a directory layout.

        Locks the DB if it isn't locked already.
@@ -925,116 +926,105 @@ def _read_suppress_error():
                if os.path.isfile(self._index_path):
                    self._read_from_file(self._index_path)
            except CorruptDatabaseError as e:
                tty.warn(f"Reindexing corrupt database, error was: {e}")
                self._error = e
                self._data = {}
                self._installed_prefixes = set()

        with lk.WriteTransaction(self.lock, acquire=_read_suppress_error, release=self._write):
            old_installed_prefixes, self._installed_prefixes = self._installed_prefixes, set()
            old_data, self._data = self._data, {}
        transaction = lk.WriteTransaction(
            self.lock, acquire=_read_suppress_error, release=self._write
        )

        with transaction:
            if self._error:
                tty.warn("Spack database was corrupt. Will rebuild. Error was:", str(self._error))
                self._error = None

            old_data = self._data
            old_installed_prefixes = self._installed_prefixes
            try:
                self._reindex(old_data)
                self._construct_from_directory_layout(directory_layout, old_data)
            except BaseException:
                # If anything explodes, restore old data, skip write.
                self._data = old_data
                self._installed_prefixes = old_installed_prefixes
                raise

    def _reindex(self, old_data: Dict[str, InstallRecord]):
        # Specs on the file system are the source of truth for record.spec. The old database values
        # if available are the source of truth for the rest of the record.
        assert self.layout, "Database layout must be set to reindex"
    def _construct_entry_from_directory_layout(
        self, directory_layout, old_data, spec, deprecator=None
    ):
        # Try to recover explicit value from old DB, but
        # default it to True if DB was corrupt. This is
        # just to be conservative in case a command like
        # "autoremove" is run by the user after a reindex.
        tty.debug("RECONSTRUCTING FROM SPEC.YAML: {0}".format(spec))
        explicit = True
        inst_time = os.stat(spec.prefix).st_ctime
        if old_data is not None:
            old_info = old_data.get(spec.dag_hash())
            if old_info is not None:
                explicit = old_info.explicit
                inst_time = old_info.installation_time

        specs_from_fs = self.layout.all_specs()
        deprecated_for = self.layout.deprecated_for(specs_from_fs)
        extra_args = {"explicit": explicit, "installation_time": inst_time}
        self._add(spec, directory_layout, **extra_args)
        if deprecator:
            self._deprecate(spec, deprecator)

        known_specs: List[spack.spec.Spec] = [
            *specs_from_fs,
            *(deprecated for _, deprecated in deprecated_for),
            *(rec.spec for rec in old_data.values()),
        ]
    def _construct_from_directory_layout(self, directory_layout, old_data):
        # Read first the `spec.yaml` files in the prefixes. They should be
        # considered authoritative with respect to DB reindexing, as
        # entries in the DB may be corrupted in a way that still makes
        # them readable. If we considered DB entries authoritative
        # instead, we would perpetuate errors over a reindex.
        with directory_layout.disable_upstream_check():
            # Initialize data in the reconstructed DB
            self._data = {}
            self._installed_prefixes = set()

        upstream_hashes = {
            dag_hash for upstream in self.upstream_dbs for dag_hash in upstream._data
        }
        upstream_hashes.difference_update(spec.dag_hash() for spec in known_specs)
            # Start inspecting the installed prefixes
            processed_specs = set()

        def create_node(edge: spack.spec.DependencySpec, is_upstream: bool):
            if is_upstream:
                return
            for spec in directory_layout.all_specs():
                self._construct_entry_from_directory_layout(directory_layout, old_data, spec)
                processed_specs.add(spec)

            self._data[edge.spec.dag_hash()] = InstallRecord(
                spec=edge.spec.copy(deps=False),
                path=edge.spec.external_path if edge.spec.external else None,
                installed=edge.spec.external,
            )

        # Store all nodes of known specs, excluding ones found in upstreams
        tr.traverse_breadth_first_with_visitor(
            known_specs,
            tr.CoverNodesVisitor(
                NoUpstreamVisitor(upstream_hashes, create_node), key=tr.by_dag_hash
            ),
        )

        # Store the prefix and other information for specs that were found on the file system
        for s in specs_from_fs:
            record = self._data[s.dag_hash()]
            record.path = s.prefix
            record.installed = True
            record.explicit = True  # conservative assumption
            record.installation_time = os.stat(s.prefix).st_ctime

        # Deprecate specs
        for new, old in deprecated_for:
            self._data[old.dag_hash()].deprecated_for = new.dag_hash()

        # Copy data we have from the old database
        for old_record in old_data.values():
            record = self._data[old_record.spec.dag_hash()]
            record.explicit = old_record.explicit
            record.installation_time = old_record.installation_time
            record.origin = old_record.origin
            record.deprecated_for = old_record.deprecated_for

            # Warn when the spec has been removed from the file system (i.e. it was not detected)
            if not record.installed and old_record.installed:
                tty.warn(
                    f"Spec {old_record.spec.short_spec} was marked installed in the database "
                    "but was not found on the file system. It is now marked as missing."
            for spec, deprecator in directory_layout.all_deprecated_specs():
                self._construct_entry_from_directory_layout(
                    directory_layout, old_data, spec, deprecator
                )
                processed_specs.add(spec)

        def create_edge(edge: spack.spec.DependencySpec, is_upstream: bool):
            if not edge.parent:
                return
            parent_record = self._data[edge.parent.dag_hash()]
            if is_upstream:
                upstream, child_record = self.query_by_spec_hash(edge.spec.dag_hash())
                assert upstream and child_record, "Internal error: upstream spec not found"
            else:
                child_record = self._data[edge.spec.dag_hash()]
            parent_record.spec._add_dependency(
                child_record.spec, depflag=edge.depflag, virtuals=edge.virtuals
            )
            for key, entry in old_data.items():
                # We already took care of this spec using
                # `spec.yaml` from its prefix.
                if entry.spec in processed_specs:
                    msg = "SKIPPING RECONSTRUCTION FROM OLD DB: {0}"
                    msg += " [already reconstructed from spec.yaml]"
                    tty.debug(msg.format(entry.spec))
                    continue

        # Then store edges
        tr.traverse_breadth_first_with_visitor(
            known_specs,
            tr.CoverEdgesVisitor(
                NoUpstreamVisitor(upstream_hashes, create_edge), key=tr.by_dag_hash
            ),
        )
                # If we arrived here it very likely means that
                # we have external specs that are not dependencies
                # of other specs. This may be the case for externally
                # installed compilers or externally installed
                # applications.
                tty.debug("RECONSTRUCTING FROM OLD DB: {0}".format(entry.spec))
                try:
                    layout = None if entry.spec.external else directory_layout
                    kwargs = {
                        "spec": entry.spec,
                        "directory_layout": layout,
                        "explicit": entry.explicit,
                        "installation_time": entry.installation_time,
                    }
                    self._add(**kwargs)
                    processed_specs.add(entry.spec)
                except Exception as e:
                    # Something went wrong, so the spec was not restored
                    # from old data
                    tty.debug(e)

        # Finally update the ref counts
        for record in self._data.values():
            for dep in record.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
                dep_record = self._data.get(dep.dag_hash())
                if dep_record:  # dep might be upstream
                    dep_record.ref_count += 1
            if record.deprecated_for:
                self._data[record.deprecated_for].ref_count += 1

        self._check_ref_counts()
            self._check_ref_counts()

    def _check_ref_counts(self):
        """Ensure consistency of reference counts in the DB.
@@ -1043,7 +1033,7 @@ def _check_ref_counts(self):

        Does no locking.
        """
        counts: Dict[str, int] = {}
        counts = {}
        for key, rec in self._data.items():
            counts.setdefault(key, 0)
            for dep in rec.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
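``_check_ref_counts`` recomputes every reference count from the dependency edges and compares the result with the stored values, catching drift introduced by an inconsistent add or remove. A minimal sketch of the recompute-and-compare idea over an illustrative data layout:

.. code-block:: python

    def check_ref_counts(stored_counts, deps_of):
        """stored_counts: hash -> ref_count; deps_of: hash -> dependency hashes."""
        counts = {key: 0 for key in stored_counts}
        for key in stored_counts:
            for dep in deps_of.get(key, []):
                if dep in counts:  # dependencies outside the DB are not tracked
                    counts[dep] += 1
        for key, stored in stored_counts.items():
            if counts[key] != stored:
                raise AssertionError(f"ref count mismatch for {key}: {counts[key]} != {stored}")


    check_ref_counts({"app": 0, "lib": 1}, {"app": ["lib"]})  # consistent, no error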
@@ -1127,23 +1117,29 @@ def _read(self):

    def _add(
        self,
        spec: "spack.spec.Spec",
        explicit: bool = False,
        installation_time: Optional[float] = None,
        allow_missing: bool = False,
        spec,
        directory_layout=None,
        explicit=False,
        installation_time=None,
        allow_missing=False,
    ):
        """Add an install record for this spec to the database.

        Also ensures dependencies are present and updated in the DB as either installed or missing.
        Assumes spec is installed in ``directory_layout.path_for_spec(spec)``.

        Also ensures dependencies are present and updated in the DB as
        either installed or missing.

        Args:
            spec: spec to be added
            spec (spack.spec.Spec): spec to be added
            directory_layout: layout of the spec installation
            explicit:
                Possible values: True, False, any

                A spec that was installed following a specific user request is marked as explicit.
                If instead it was pulled-in as a dependency of a user requested spec it's
                considered implicit.
                A spec that was installed following a specific user
                request is marked as explicit. If instead it was
                pulled-in as a dependency of a user requested spec
                it's considered implicit.

            installation_time:
                Date and time of installation
@@ -1154,42 +1150,48 @@ def _add(
            raise NonConcreteSpecAddError("Specs added to DB must be concrete.")

        key = spec.dag_hash()
        spec_pkg_hash = spec._package_hash  # type: ignore[attr-defined]
        spec_pkg_hash = spec._package_hash
        upstream, record = self.query_by_spec_hash(key)
        if upstream:
            return

        # Retrieve optional arguments
        installation_time = installation_time or _now()

        for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
            if edge.spec.dag_hash() in self._data:
                continue
            # allow missing build-only deps. This prevents excessive
            # warnings when a spec is installed, and its build dep
            # is missing a build dep; there's no need to install the
            # build dep's build dep first, and there's no need to warn
            # about it missing.
            dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
            self._add(
                edge.spec,
                directory_layout,
                explicit=False,
                installation_time=installation_time,
                # allow missing build-only deps. This prevents excessive warnings when a spec is
                # installed, and its build dep is missing a build dep; there's no need to install
                # the build dep's build dep first, and there's no need to warn about it missing.
                allow_missing=allow_missing or edge.depflag == dt.BUILD,
                allow_missing=dep_allow_missing,
            )

        # Make sure the directory layout agrees whether the spec is installed
        if not spec.external and self.layout:
            path = self.layout.path_for_spec(spec)
        if not spec.external and directory_layout:
            path = directory_layout.path_for_spec(spec)
            installed = False
            try:
                self.layout.ensure_installed(spec)
                directory_layout.ensure_installed(spec)
                installed = True
                self._installed_prefixes.add(path)
            except DirectoryLayoutError as e:
                if not (allow_missing and isinstance(e, InconsistentInstallDirectoryError)):
                    action = "updated" if key in self._data else "registered"
                    tty.warn(
                        f"{spec.short_spec} is being {action} in the database with prefix {path}, "
                    msg = (
                        "{0} is being {1} in the database with prefix {2}, "
                        "but this directory does not contain an installation of "
                        f"the spec, due to: {e}"
                        "the spec, due to: {3}"
                    )
                    action = "updated" if key in self._data else "registered"
                    tty.warn(msg.format(spec.short_spec, action, path, str(e)))
        elif spec.external_path:
            path = spec.external_path
            installed = True
@@ -1200,27 +1202,23 @@ def _add(
        if key not in self._data:
            # Create a new install record with no deps initially.
            new_spec = spec.copy(deps=False)
            self._data[key] = InstallRecord(
                new_spec,
                path=path,
                installed=installed,
                ref_count=0,
                explicit=explicit,
                installation_time=installation_time,
                origin=None if not hasattr(spec, "origin") else spec.origin,
            )
            extra_args = {"explicit": explicit, "installation_time": installation_time}
            # Commands other than 'spack install' may add specs to the DB,
            # we can record the source of an installed Spec with 'origin'
            if hasattr(spec, "origin"):
                extra_args["origin"] = spec.origin
            self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)

            # Connect dependencies from the DB to the new copy.
            for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
                dkey = dep.spec.dag_hash()
                upstream, record = self.query_by_spec_hash(dkey)
                assert record, f"Missing dependency {dep.spec.short_spec} in DB"
                new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
                if not upstream:
                    record.ref_count += 1

            # Mark concrete once everything is built, and preserve the original hashes of concrete
            # specs.
            # Mark concrete once everything is built, and preserve
            # the original hashes of concrete specs.
            new_spec._mark_concrete()
            new_spec._hash = key
            new_spec._package_hash = spec_pkg_hash
@@ -1233,7 +1231,7 @@ def _add(
            self._data[key].explicit = explicit

    @_autospec
    def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
    def add(self, spec, directory_layout, explicit=False):
        """Add spec at path to database, locking and reading DB to sync.

        ``add()`` will lock and read from the DB on disk.
@@ -1242,9 +1240,9 @@ def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
        # TODO: ensure that spec is concrete?
        # Entire add is transactional.
        with self.write_transaction():
            self._add(spec, explicit=explicit)
            self._add(spec, directory_layout, explicit=explicit)

    def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
    def _get_matching_spec_key(self, spec, **kwargs):
        """Get the exact spec OR get a single spec that matches."""
        key = spec.dag_hash()
        upstream, record = self.query_by_spec_hash(key)
@@ -1256,12 +1254,12 @@ def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
        return key

    @_autospec
    def get_record(self, spec: "spack.spec.Spec", **kwargs) -> Optional[InstallRecord]:
    def get_record(self, spec, **kwargs):
        key = self._get_matching_spec_key(spec, **kwargs)
        upstream, record = self.query_by_spec_hash(key)
        return record

    def _decrement_ref_count(self, spec: "spack.spec.Spec") -> None:
    def _decrement_ref_count(self, spec):
        key = spec.dag_hash()

        if key not in self._data:
@@ -1278,7 +1276,7 @@ def _decrement_ref_count(self, spec: "spack.spec.Spec") -> None:
        for dep in spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
            self._decrement_ref_count(dep)

    def _increment_ref_count(self, spec: "spack.spec.Spec") -> None:
    def _increment_ref_count(self, spec):
        key = spec.dag_hash()

        if key not in self._data:
@@ -1287,14 +1285,14 @@ def _increment_ref_count(self, spec: "spack.spec.Spec") -> None:
        rec = self._data[key]
        rec.ref_count += 1

    def _remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
    def _remove(self, spec):
        """Non-locking version of remove(); does real work."""
        key = self._get_matching_spec_key(spec)
        rec = self._data[key]

        # This install prefix is now free for other specs to use, even if the
        # spec is only marked uninstalled.
        if not rec.spec.external and rec.installed and rec.path:
        if not rec.spec.external and rec.installed:
            self._installed_prefixes.remove(rec.path)

        if rec.ref_count > 0:
@@ -1318,7 +1316,7 @@ def _remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
        return rec.spec

    @_autospec
    def remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
    def remove(self, spec):
        """Removes a spec from the database. To be called on uninstall.

        Reads the database, then:
@@ -1333,7 +1331,7 @@ def remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
        with self.write_transaction():
            return self._remove(spec)

    def deprecator(self, spec: "spack.spec.Spec") -> Optional["spack.spec.Spec"]:
    def deprecator(self, spec):
        """Return the spec that the given spec is deprecated for, or None"""
        with self.read_transaction():
            spec_key = self._get_matching_spec_key(spec)
@@ -1344,14 +1342,14 @@ def deprecator(self, spec: "spack.spec.Spec") -> Optional["spack.spec.Spec"]:
        else:
            return None

    def specs_deprecated_by(self, spec: "spack.spec.Spec") -> List["spack.spec.Spec"]:
    def specs_deprecated_by(self, spec):
        """Return all specs deprecated in favor of the given spec"""
        with self.read_transaction():
            return [
                rec.spec for rec in self._data.values() if rec.deprecated_for == spec.dag_hash()
            ]

    def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> None:
    def _deprecate(self, spec, deprecator):
        spec_key = self._get_matching_spec_key(spec)
        spec_rec = self._data[spec_key]

@@ -1369,17 +1367,17 @@ def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") ->
        self._data[spec_key] = spec_rec

    @_autospec
    def mark(self, spec: "spack.spec.Spec", key, value) -> None:
    def mark(self, spec, key, value):
        """Mark an arbitrary record on a spec."""
        with self.write_transaction():
            return self._mark(spec, key, value)

    def _mark(self, spec: "spack.spec.Spec", key, value) -> None:
    def _mark(self, spec, key, value):
        record = self._data[self._get_matching_spec_key(spec)]
        setattr(record, key, value)

    @_autospec
    def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> None:
    def deprecate(self, spec, deprecator):
        """Marks a spec as deprecated in favor of its deprecator"""
        with self.write_transaction():
            return self._deprecate(spec, deprecator)
@@ -1387,16 +1385,16 @@ def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> N
    @_autospec
    def installed_relatives(
        self,
        spec: "spack.spec.Spec",
        direction: str = "children",
        transitive: bool = True,
        spec,
        direction="children",
        transitive=True,
        deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
    ) -> Set["spack.spec.Spec"]:
    ):
        """Return installed specs related to this one."""
        if direction not in ("parents", "children"):
            raise ValueError("Invalid direction: %s" % direction)

        relatives: Set[spack.spec.Spec] = set()
        relatives = set()
        for spec in self.query(spec):
            if transitive:
                to_add = spec.traverse(direction=direction, root=False, deptype=deptype)
@@ -1407,13 +1405,17 @@ def installed_relatives(

            for relative in to_add:
                hash_key = relative.dag_hash()
                _, record = self.query_by_spec_hash(hash_key)
                upstream, record = self.query_by_spec_hash(hash_key)
                if not record:
                    tty.warn(
                        f"Inconsistent state: "
                        f"{'dependent' if direction == 'parents' else 'dependency'} {hash_key} of "
                        f"{spec.dag_hash()} not in DB"
                    reltype = "Dependent" if direction == "parents" else "Dependency"
                    msg = "Inconsistent state! %s %s of %s not in DB" % (
                        reltype,
                        hash_key,
                        spec.dag_hash(),
                    )
                    if self._fail_when_missing_deps:
                        raise MissingDependenciesError(msg)
                    tty.warn(msg)
                    continue

                if not record.installed:
@@ -1423,7 +1425,7 @@ def installed_relatives(
        return relatives

    @_autospec
    def installed_extensions_for(self, extendee_spec: "spack.spec.Spec"):
    def installed_extensions_for(self, extendee_spec):
        """Returns the specs of all packages that extend the given spec"""
        for spec in self.query():
            if spec.package.extends(extendee_spec):
@@ -1682,7 +1684,7 @@ def unused_specs(
        self,
        root_hashes: Optional[Container[str]] = None,
        deptype: Union[dt.DepFlag, dt.DepTypes] = dt.LINK | dt.RUN,
    ) -> List["spack.spec.Spec"]:
    ) -> "List[spack.spec.Spec]":
        """Return all specs that are currently installed but not needed by root specs.

        By default, roots are all explicit specs in the database. If a set of root
@@ -1726,33 +1728,6 @@ def update_explicit(self, spec, explicit):
            rec.explicit = explicit


class NoUpstreamVisitor:
    """Gives edges to upstream specs, but does not follow edges from upstream specs."""

    def __init__(
        self,
        upstream_hashes: Set[str],
        on_visit: Callable[["spack.spec.DependencySpec", bool], None],
    ):
        self.upstream_hashes = upstream_hashes
        self.on_visit = on_visit

    def accept(self, item: tr.EdgeAndDepth) -> bool:
        self.on_visit(item.edge, self.is_upstream(item))
        return True

    def is_upstream(self, item: tr.EdgeAndDepth) -> bool:
        return item.edge.spec.dag_hash() in self.upstream_hashes

    def neighbors(self, item: tr.EdgeAndDepth):
        # Prune edges from upstream nodes, only follow database tracked dependencies
        return (
            []
            if self.is_upstream(item)
            else item.edge.spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES)
        )


class UpstreamDatabaseLockingError(SpackError):
    """Raised when an operation would need to lock an upstream database"""
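``NoUpstreamVisitor`` reports every edge it reaches but refuses to expand nodes that live upstream, so upstream subgraphs are referenced without being traversed. A minimal sketch of the same pruning over a plain adjacency dict (the graph layout is illustrative):

.. code-block:: python

    from collections import deque


    def bfs_prune(graph, roots, upstream):
        """Visit nodes breadth-first, but do not expand nodes in ``upstream``."""
        seen, order, queue = set(roots), [], deque(roots)
        while queue:
            node = queue.popleft()
            order.append(node)
            if node in upstream:
                continue  # record the upstream node, but prune its outgoing edges
            for child in graph.get(node, []):
                if child not in seen:
                    seen.add(child)
                    queue.append(child)
        return order


    graph = {"app": ["mpi"], "mpi": ["libfabric"]}
    # "mpi" is upstream: it appears in the result, but "libfabric" is never reached
    assert bfs_prune(graph, ["app"], upstream={"mpi"}) == ["app", "mpi"]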
@@ -2,11 +2,17 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import executable_prefix, set_virtuals_nonbuildable, update_configuration
from .common import (
    DetectedPackage,
    executable_prefix,
    set_virtuals_nonbuildable,
    update_configuration,
)
from .path import by_path, executables_in_path
from .test import detection_tests

__all__ = [
    "DetectedPackage",
    "by_path",
    "executables_in_path",
    "executable_prefix",

@@ -6,9 +6,9 @@
function to update packages.yaml given a list of detected packages.

Ideally, each detection method should be placed in a specific subpackage
and implement at least a function that returns a list of specs.

The update in packages.yaml can then be done using the function provided here.
and implement at least a function that returns a list of DetectedPackage
objects. The update in packages.yaml can then be done using the function
provided here.

The module also contains other functions that might be useful across different
detection mechanisms.
@@ -17,10 +17,9 @@
import itertools
import os
import os.path
import pathlib
import re
import sys
from typing import Dict, List, Optional, Set, Tuple, Union
from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union

import llnl.util.tty

@@ -31,6 +30,25 @@
import spack.util.windows_registry


class DetectedPackage(NamedTuple):
    """Information on a package that has been detected."""

    #: Spec that was detected
    spec: spack.spec.Spec
    #: Prefix of the spec
    prefix: str

    def __reduce__(self):
        return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)

    @staticmethod
    def restore(
        spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
    ) -> "DetectedPackage":
        spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
        return DetectedPackage(spec=spec, prefix=prefix)


def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
    """Returns all the specs mentioned as externals in packages.yaml"""
    packages_yaml = spack.config.get("packages")
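``DetectedPackage.__reduce__`` exists because these tuples cross process boundaries: detection runs in a ``ProcessPoolExecutor`` and results are pickled back to the parent, so the tuple reduces itself to a constructor plus plain-string arguments. A minimal sketch of the mechanism, with a string standing in for the rich spec object:

.. code-block:: python

    import pickle
    from typing import NamedTuple


    class Detected(NamedTuple):
        spec: str  # stand-in for a full spec object
        prefix: str

        def __reduce__(self):
            # Pickle as (callable, args): unpickling calls Detected.restore(*args)
            return Detected.restore, (self.spec, self.prefix)

        @staticmethod
        def restore(spec: str, prefix: str) -> "Detected":
            return Detected(spec=spec, prefix=prefix)


    original = Detected(spec="gcc@12.3.0", prefix="/usr")
    assert pickle.loads(pickle.dumps(original)) == original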
@@ -45,7 +63,7 @@ def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:


def _pkg_config_dict(
    external_pkg_entries: List["spack.spec.Spec"],
    external_pkg_entries: List[DetectedPackage],
) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
    """Generate a package specific config dict according to the packages.yaml schema.

@@ -65,19 +83,22 @@ def _pkg_config_dict(
    pkg_dict = spack.util.spack_yaml.syaml_dict()
    pkg_dict["externals"] = []
    for e in external_pkg_entries:
        if not _spec_is_valid(e):
        if not _spec_is_valid(e.spec):
            continue

        external_items: List[Tuple[str, ExternalEntryType]] = [
            ("spec", str(e)),
            ("prefix", pathlib.Path(e.external_path).as_posix()),
            ("spec", str(e.spec)),
            ("prefix", e.prefix),
        ]
        if e.external_modules:
            external_items.append(("modules", e.external_modules))
        if e.spec.external_modules:
            external_items.append(("modules", e.spec.external_modules))

        if e.extra_attributes:
        if e.spec.extra_attributes:
            external_items.append(
                ("extra_attributes", spack.util.spack_yaml.syaml_dict(e.extra_attributes.items()))
                (
                    "extra_attributes",
                    spack.util.spack_yaml.syaml_dict(e.spec.extra_attributes.items()),
                )
            )

        # external_items.extend(e.spec.extra_attributes.items())
@@ -115,10 +136,10 @@ def path_to_dict(search_paths: List[str]):
    # entry overrides later entries
    for search_path in reversed(search_paths):
        try:
            with os.scandir(search_path) as entries:
                path_to_lib.update(
                    {entry.path: entry.name for entry in entries if entry.is_file()}
                )
            for lib in os.listdir(search_path):
                lib_path = os.path.join(search_path, lib)
                if llnl.util.filesystem.is_readable_file(lib_path):
                    path_to_lib[lib_path] = lib
        except OSError as e:
            msg = f"cannot scan '{search_path}' for external software: {str(e)}"
            llnl.util.tty.debug(msg)
@@ -198,32 +219,33 @@ def library_prefix(library_dir: str) -> str:


def update_configuration(
    detected_packages: Dict[str, List["spack.spec.Spec"]],
    detected_packages: Dict[str, List[DetectedPackage]],
    scope: Optional[str] = None,
    buildable: bool = True,
) -> List[spack.spec.Spec]:
    """Add the packages passed as arguments to packages.yaml

    Args:
        detected_packages: list of specs to be added
        detected_packages: list of DetectedPackage objects to be added
        scope: configuration scope where to add the detected packages
        buildable: whether the detected packages are buildable or not
    """
    predefined_external_specs = _externals_in_packages_yaml()
    pkg_to_cfg, all_new_specs = {}, []
    for package_name, entries in detected_packages.items():
        new_entries = [s for s in entries if s not in predefined_external_specs]
        new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]

        pkg_config = _pkg_config_dict(new_entries)
        external_entries = pkg_config.get("externals", [])
        assert not isinstance(external_entries, bool), "unexpected value for external entry"

        all_new_specs.extend(new_entries)
        all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
        if buildable is False:
            pkg_config["buildable"] = False
        pkg_to_cfg[package_name] = pkg_config

    pkgs_cfg = spack.config.get("packages", scope=scope)

    pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
    spack.config.set("packages", pkgs_cfg, scope=scope)

@@ -12,7 +12,7 @@
import re
import sys
import warnings
from typing import Dict, Iterable, List, Optional, Set, Tuple, Type
from typing import Dict, List, Optional, Set, Tuple, Type

import llnl.util.filesystem
import llnl.util.lang
@@ -24,6 +24,7 @@
import spack.util.ld_so_conf

from .common import (
    DetectedPackage,
    WindowsCompilerExternalPaths,
    WindowsKitExternalPaths,
    _convert_to_iterable,
@@ -61,7 +62,7 @@ def common_windows_package_paths(pkg_cls=None) -> List[str]:

def file_identifier(path):
    s = os.stat(path)
    return s.st_dev, s.st_ino
    return (s.st_dev, s.st_ino)


def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
@@ -79,8 +80,6 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
        constructed based on the PATH environment variable.
    """
    search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
    # Make sure we don't doubly list /usr/lib and /lib etc
    search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
    return path_to_dict(search_paths)


@@ -188,7 +187,7 @@ def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[s
    return path_to_dict(search_paths)


def _group_by_prefix(paths: List[str]) -> Dict[str, Set[str]]:
def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
    groups = collections.defaultdict(set)
    for p in paths:
        groups[os.path.dirname(p)].add(p)
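``_group_by_prefix`` buckets candidate files by their containing directory, so that everything detected under one prefix is handed to the package implementation together. A short worked example of the dirname-keyed grouping:

.. code-block:: python

    import collections
    import os.path


    def group_by_prefix(paths):
        groups = collections.defaultdict(set)
        for p in paths:
            groups[os.path.dirname(p)].add(p)
        return groups


    grouped = group_by_prefix({"/usr/bin/gcc", "/usr/bin/g++", "/opt/bin/gcc"})
    assert grouped["/usr/bin"] == {"/usr/bin/gcc", "/usr/bin/g++"}
    assert grouped["/opt/bin"] == {"/opt/bin/gcc"}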
@@ -228,7 +227,7 @@ def prefix_from_path(self, *, path: str) -> str:

    def detect_specs(
        self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
    ) -> List["spack.spec.Spec"]:
    ) -> List[DetectedPackage]:
        """Given a list of files matching the search patterns, returns a list of detected specs.

        Args:
@@ -244,9 +243,7 @@ def detect_specs(
            return []

        result = []
        for candidate_path, items_in_prefix in _group_by_prefix(
            llnl.util.lang.dedupe(paths)
        ).items():
        for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
            # TODO: multiple instances of a package can live in the same
            # prefix, and a package implementation can return multiple specs
            # for one prefix, but without additional details (e.g. about the
@@ -294,25 +291,27 @@ def detect_specs(
                warnings.warn(msg)
                continue

            if not spec.external_path:
                spec.external_path = prefix
            if spec.external_path:
                prefix = spec.external_path

            result.append(spec)
            result.append(DetectedPackage(spec=spec, prefix=prefix))

        return result

    def find(
        self, *, pkg_name: str, repository, initial_guess: Optional[List[str]] = None
    ) -> List["spack.spec.Spec"]:
        self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
    ) -> List[DetectedPackage]:
        """For a given package, returns a list of detected specs.

        Args:
            pkg_name: package being detected
            repository: repository to retrieve the package
            initial_guess: initial list of paths to search from the caller; if None, default paths
                are searched. If this is an empty list, nothing will be searched.
            initial_guess: initial list of paths to search from the caller;
                if None, default paths are searched. If this
                is an empty list, nothing will be searched.
        """
        pkg_cls = repository.get_pkg_class(pkg_name)
        import spack.repo

        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        patterns = self.search_patterns(pkg=pkg_cls)
        if not patterns:
            return []
@@ -336,10 +335,13 @@ def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> Lis

    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
        executables_by_path = executables_in_path(path_hints=paths)
        joined_pattern = re.compile(r"|".join(patterns))
        result = [path for path, exe in executables_by_path.items() if joined_pattern.search(exe)]
        result.sort()
        return result
        patterns = [re.compile(x) for x in patterns]
        result = []
        for compiled_re in patterns:
            for path, exe in executables_by_path.items():
                if compiled_re.search(exe):
                    result.append(path)
        return list(sorted(set(result)))

    def prefix_from_path(self, *, path: str) -> str:
        result = executable_prefix(path)
@@ -383,11 +385,11 @@ def prefix_from_path(self, *, path: str) -> str:


def by_path(
    packages_to_search: Iterable[str],
    packages_to_search: List[str],
    *,
    path_hints: Optional[List[str]] = None,
    max_workers: Optional[int] = None,
) -> Dict[str, List["spack.spec.Spec"]]:
) -> Dict[str, List[DetectedPackage]]:
    """Return the list of packages that have been detected on the system, keyed by
    unqualified package name.

@@ -397,28 +399,19 @@ def by_path(
        path_hints: initial list of paths to be searched
        max_workers: maximum number of workers to search for packages in parallel
    """
    import spack.repo

    # TODO: Packages should be able to define both .libraries and .executables in the future
    # TODO: determine_spec_details should get all relevant libraries and executables in one call
    executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
    detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}

    result = collections.defaultdict(list)
    repository = spack.repo.PATH.ensure_unwrapped()
    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
        for pkg in packages_to_search:
            executable_future = executor.submit(
                executables_finder.find,
                pkg_name=pkg,
                initial_guess=path_hints,
                repository=repository,
                executables_finder.find, pkg_name=pkg, initial_guess=path_hints
            )
            library_future = executor.submit(
                libraries_finder.find,
                pkg_name=pkg,
                initial_guess=path_hints,
                repository=repository,
                libraries_finder.find, pkg_name=pkg, initial_guess=path_hints
            )
            detected_specs_by_package[pkg] = executable_future, library_future
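``by_path`` fans detection out over a process pool: two futures are submitted per package (one executable search, one library search) and the results are collected afterwards. A minimal, self-contained sketch of that fan-out/collect shape, with a module-level ``find_package`` function standing in for the finder objects:

.. code-block:: python

    import concurrent.futures


    def find_package(pkg_name: str, kind: str) -> list:
        # Stand-in for ExecutablesFinder.find / LibrariesFinder.find
        return [f"{pkg_name} ({kind})"]


    def detect_all(packages, max_workers=None):
        futures = {}
        with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
            for pkg in packages:
                futures[pkg] = (
                    executor.submit(find_package, pkg, "executables"),
                    executor.submit(find_package, pkg, "libraries"),
                )
            return {pkg: [x for f in pair for x in f.result()] for pkg, pair in futures.items()}


    if __name__ == "__main__":  # process pools need an importable __main__
        print(detect_all(["cmake", "gcc"]))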
@@ -68,7 +68,7 @@ def execute(self) -> List[spack.spec.Spec]:
        with self._mock_layout() as path_hints:
            entries = by_path([self.test.pkg_name], path_hints=path_hints)
            _, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
            specs = set(entries[unqualified_name])
            specs = set(x.spec for x in entries[unqualified_name])
        return list(specs)

    @contextlib.contextmanager
@@ -104,9 +104,7 @@ def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[
    @property
    def expected_specs(self) -> List[spack.spec.Spec]:
        return [
            spack.spec.Spec.from_detection(
                item.spec, external_path=self.tmpdir.name, extra_attributes=item.extra_attributes
            )
            spack.spec.Spec.from_detection(item.spec, extra_attributes=item.extra_attributes)
            for item in self.test.results
        ]
@@ -32,9 +32,10 @@ class OpenMpi(Package):
"""
import collections
import collections.abc
import functools
import os.path
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Set, Tuple, Union

import llnl.util.lang
import llnl.util.tty.color
@@ -47,7 +48,6 @@ class OpenMpi(Package):
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency
from spack.directives_meta import DirectiveError, DirectiveMeta
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -80,6 +80,22 @@ class OpenMpi(Package):
    "redistribute",
]

#: These are variant names used by Spack internally; packages can't use them
reserved_names = [
    "arch",
    "architecture",
    "dev_path",
    "namespace",
    "operating_system",
    "os",
    "patches",
    "platform",
    "target",
]

#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]

_patch_order_index = 0

@@ -139,6 +155,219 @@ def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
    return spack.spec.Spec(value)


class DirectiveMeta(type):
    """Flushes the directives that were temporarily stored in the staging
    area into the package.
    """

    # Set of all known directives
    _directive_dict_names: Set[str] = set()
    _directives_to_be_executed: List[str] = []
    _when_constraints_from_context: List[str] = []
    _default_args: List[dict] = []

    def __new__(cls, name, bases, attr_dict):
        # Initialize the attribute containing the list of directives
        # to be executed. Here we go reversed because we want to execute
        # commands:
        # 1. in the order they were defined
        # 2. following the MRO
        attr_dict["_directives_to_be_executed"] = []
        for base in reversed(bases):
            try:
                directive_from_base = base._directives_to_be_executed
                attr_dict["_directives_to_be_executed"].extend(directive_from_base)
            except AttributeError:
                # The base class didn't have the required attribute.
                # Continue searching
                pass

        # De-duplicates directives from base classes
        attr_dict["_directives_to_be_executed"] = [
            x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
        ]

        # Move things to be executed from module scope (where they
        # are collected first) to class scope
        if DirectiveMeta._directives_to_be_executed:
            attr_dict["_directives_to_be_executed"].extend(
                DirectiveMeta._directives_to_be_executed
            )
            DirectiveMeta._directives_to_be_executed = []

        return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)

    def __init__(cls, name, bases, attr_dict):
        # The instance is being initialized: if it is a package we must ensure
        # that the directives are called to set it up.

        if "spack.pkg" in cls.__module__:
            # Ensure the presence of the dictionaries associated with the directives.
            # All dictionaries are defaultdicts that create lists for missing keys.
            for d in DirectiveMeta._directive_dict_names:
                setattr(cls, d, {})

            # Lazily execute directives
            for directive in cls._directives_to_be_executed:
                directive(cls)

            # Ignore any directives executed *within* top-level
            # directives by clearing out the queue they're appended to
            DirectiveMeta._directives_to_be_executed = []

        super(DirectiveMeta, cls).__init__(name, bases, attr_dict)

    @staticmethod
    def push_to_context(when_spec):
        """Add a spec to the context constraints."""
        DirectiveMeta._when_constraints_from_context.append(when_spec)

    @staticmethod
    def pop_from_context():
        """Pop the last constraint from the context"""
        return DirectiveMeta._when_constraints_from_context.pop()

    @staticmethod
    def push_default_args(default_args):
        """Push default arguments"""
        DirectiveMeta._default_args.append(default_args)

    @staticmethod
    def pop_default_args():
        """Pop default arguments"""
        return DirectiveMeta._default_args.pop()

    @staticmethod
    def directive(dicts=None):
        """Decorator for Spack directives.

        Spack directives allow you to modify a package while it is being
        defined, e.g. to add version or dependency information. Directives
        are one of the key pieces of Spack's package "language", which is
        embedded in python.

        Here's an example directive:

        .. code-block:: python

            @directive(dicts='versions')
            version(pkg, ...):
                ...

        This directive allows you to write:

        .. code-block:: python

            class Foo(Package):
                version(...)

        The ``@directive`` decorator handles a couple things for you:

        1. Adds the class scope (pkg) as an initial parameter when
           called, like a class method would. This allows you to modify
           a package from within a directive, while the package is still
           being defined.

        2. It automatically adds a dictionary called "versions" to the
           package so that you can refer to pkg.versions.

        The ``(dicts='versions')`` part ensures that ALL packages in Spack
        will have a ``versions`` attribute after they're constructed, and
        that if no directive actually modified it, it will just be an
        empty dict.

        This is just a modular way to add storage attributes to the
        Package class, and it's how Spack gets information from the
        packages to the core.
        """
        global directive_names

        if isinstance(dicts, str):
            dicts = (dicts,)

        if not isinstance(dicts, collections.abc.Sequence):
            message = "dicts arg must be list, tuple, or string. Found {0}"
            raise TypeError(message.format(type(dicts)))

        # Add the dictionary names if not already there
        DirectiveMeta._directive_dict_names |= set(dicts)

        # This decorator just returns the directive functions
        def _decorator(decorated_function):
            directive_names.append(decorated_function.__name__)

            @functools.wraps(decorated_function)
            def _wrapper(*args, **_kwargs):
                # First merge default args with kwargs
                kwargs = dict()
                for default_args in DirectiveMeta._default_args:
                    kwargs.update(default_args)
                kwargs.update(_kwargs)

                # Inject when arguments from the context
                if DirectiveMeta._when_constraints_from_context:
                    # Check that directives not yet supporting the when= argument
                    # are not used inside the context manager
                    if decorated_function.__name__ == "version":
                        msg = (
                            'directive "{0}" cannot be used within a "when"'
                            ' context since it does not support a "when=" '
                            "argument"
                        )
                        msg = msg.format(decorated_function.__name__)
                        raise DirectiveError(msg)

                    when_constraints = [
                        spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
                    ]
                    if kwargs.get("when"):
                        when_constraints.append(spack.spec.Spec(kwargs["when"]))
                    when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)

                    kwargs["when"] = when_spec

                # If any of the arguments are executors returned by a
                # directive passed as an argument, don't execute them
                # lazily. Instead, let the called directive handle them.
                # This allows nested directive calls in packages. The
                # caller can return the directive if it should be queued.
                def remove_directives(arg):
                    directives = DirectiveMeta._directives_to_be_executed
                    if isinstance(arg, (list, tuple)):
                        # Descend into args that are lists or tuples
                        for a in arg:
                            remove_directives(a)
                    else:
                        # Remove directives args from the exec queue
                        remove = next((d for d in directives if d is arg), None)
                        if remove is not None:
                            directives.remove(remove)

                # Nasty, but it's the best way I can think of to avoid
                # side effects if directive results are passed as args
                remove_directives(args)
                remove_directives(list(kwargs.values()))

                # A directive returns either something that is callable on a
                # package or a sequence of them
                result = decorated_function(*args, **kwargs)

                # ...so if it is not a sequence make it so
                values = result
                if not isinstance(values, collections.abc.Sequence):
                    values = (values,)

                DirectiveMeta._directives_to_be_executed.extend(values)

                # wrapped function returns same result as original so
                # that we can nest directives
                return result

            return _wrapper

        return _decorator


SubmoduleCallback = Callable[["spack.package_base.PackageBase"], Union[str, List[str], bool]]
directive = DirectiveMeta.directive

@@ -617,7 +846,7 @@ def format_error(msg, pkg):
        msg += " @*r{{[{0}, variant '{1}']}}"
        return llnl.util.tty.color.colorize(msg.format(pkg.name, name))

    if name in spack.variant.reserved_names:
    if name in reserved_names:

        def _raise_reserved_name(pkg):
            msg = "The name '%s' is reserved by Spack" % name
@@ -881,6 +1110,10 @@ def _execute_languages(pkg: "spack.package_base.PackageBase"):
    return _execute_languages


class DirectiveError(spack.error.SpackError):
    """This is raised when something is wrong with a package directive."""


class DependencyError(DirectiveError):
    """This is raised when a dependency specification is invalid."""
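The ``DirectiveMeta``/``@directive`` machinery above defers calls made inside a class body: each directive call is queued at module scope while the body executes, and the metaclass flushes the queue onto the class once it exists. A minimal sketch of that deferred-execution idea, stripped of Spack's when-contexts and default arguments:

.. code-block:: python

    _queue = []


    class CollectMeta(type):
        def __init__(cls, name, bases, attr_dict):
            super().__init__(name, bases, attr_dict)
            cls.versions = {}
            # Flush callbacks queued while the class body was executing
            while _queue:
                _queue.pop(0)(cls)


    def version(ver):
        """Directive: queue a callback that records ``ver`` on the class."""
        _queue.append(lambda cls: cls.versions.setdefault(ver, {}))


    class Package(metaclass=CollectMeta):
        pass


    class Foo(Package):
        version("1.0")
        version("2.0")


    assert set(Foo.versions) == {"1.0", "2.0"}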
@@ -1,234 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import collections.abc
import functools
from typing import List, Set

import llnl.util.lang

import spack.error
import spack.spec

#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]


class DirectiveMeta(type):
    """Flushes the directives that were temporarily stored in the staging
    area into the package.
    """

    # Set of all known directives
    _directive_dict_names: Set[str] = set()
    _directives_to_be_executed: List[str] = []
    _when_constraints_from_context: List[str] = []
    _default_args: List[dict] = []

    def __new__(cls, name, bases, attr_dict):
        # Initialize the attribute containing the list of directives
        # to be executed. Here we go reversed because we want to execute
        # commands:
        # 1. in the order they were defined
        # 2. following the MRO
        attr_dict["_directives_to_be_executed"] = []
        for base in reversed(bases):
            try:
                directive_from_base = base._directives_to_be_executed
                attr_dict["_directives_to_be_executed"].extend(directive_from_base)
            except AttributeError:
                # The base class didn't have the required attribute.
                # Continue searching
                pass

        # De-duplicates directives from base classes
        attr_dict["_directives_to_be_executed"] = [
            x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
        ]

        # Move things to be executed from module scope (where they
        # are collected first) to class scope
        if DirectiveMeta._directives_to_be_executed:
            attr_dict["_directives_to_be_executed"].extend(
                DirectiveMeta._directives_to_be_executed
            )
            DirectiveMeta._directives_to_be_executed = []

        return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)

    def __init__(cls, name, bases, attr_dict):
        # The instance is being initialized: if it is a package we must ensure
        # that the directives are called to set it up.

        if "spack.pkg" in cls.__module__:
            # Ensure the presence of the dictionaries associated with the directives.
            # All dictionaries are defaultdicts that create lists for missing keys.
            for d in DirectiveMeta._directive_dict_names:
                setattr(cls, d, {})

            # Lazily execute directives
            for directive in cls._directives_to_be_executed:
                directive(cls)

            # Ignore any directives executed *within* top-level
            # directives by clearing out the queue they're appended to
            DirectiveMeta._directives_to_be_executed = []

        super(DirectiveMeta, cls).__init__(name, bases, attr_dict)

    @staticmethod
    def push_to_context(when_spec):
        """Add a spec to the context constraints."""
        DirectiveMeta._when_constraints_from_context.append(when_spec)

    @staticmethod
    def pop_from_context():
        """Pop the last constraint from the context"""
        return DirectiveMeta._when_constraints_from_context.pop()

    @staticmethod
    def push_default_args(default_args):
        """Push default arguments"""
        DirectiveMeta._default_args.append(default_args)

    @staticmethod
    def pop_default_args():
        """Pop default arguments"""
        return DirectiveMeta._default_args.pop()

    @staticmethod
    def directive(dicts=None):
        """Decorator for Spack directives.

        Spack directives allow you to modify a package while it is being
        defined, e.g. to add version or dependency information. Directives
        are one of the key pieces of Spack's package "language", which is
        embedded in python.

        Here's an example directive:

        .. code-block:: python

            @directive(dicts='versions')
            version(pkg, ...):
                ...

        This directive allows you to write:

        .. code-block:: python

            class Foo(Package):
                version(...)

        The ``@directive`` decorator handles a couple things for you:

        1. Adds the class scope (pkg) as an initial parameter when
           called, like a class method would. This allows you to modify
           a package from within a directive, while the package is still
           being defined.

        2. It automatically adds a dictionary called "versions" to the
           package so that you can refer to pkg.versions.

        The ``(dicts='versions')`` part ensures that ALL packages in Spack
        will have a ``versions`` attribute after they're constructed, and
        that if no directive actually modified it, it will just be an
        empty dict.

        This is just a modular way to add storage attributes to the
        Package class, and it's how Spack gets information from the
        packages to the core.
        """
        global directive_names

        if isinstance(dicts, str):
            dicts = (dicts,)

        if not isinstance(dicts, collections.abc.Sequence):
            message = "dicts arg must be list, tuple, or string. Found {0}"
            raise TypeError(message.format(type(dicts)))

        # Add the dictionary names if not already there
        DirectiveMeta._directive_dict_names |= set(dicts)

        # This decorator just returns the directive functions
        def _decorator(decorated_function):
            directive_names.append(decorated_function.__name__)

            @functools.wraps(decorated_function)
            def _wrapper(*args, **_kwargs):
                # First merge default args with kwargs
                kwargs = dict()
                for default_args in DirectiveMeta._default_args:
                    kwargs.update(default_args)
                kwargs.update(_kwargs)

                # Inject when arguments from the context
                if DirectiveMeta._when_constraints_from_context:
                    # Check that directives not yet supporting the when= argument
                    # are not used inside the context manager
                    if decorated_function.__name__ == "version":
                        msg = (
                            'directive "{0}" cannot be used within a "when"'
                            ' context since it does not support a "when=" '
                            "argument"
                        )
                        msg = msg.format(decorated_function.__name__)
                        raise DirectiveError(msg)

                    when_constraints = [
                        spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
                    ]
                    if kwargs.get("when"):
                        when_constraints.append(spack.spec.Spec(kwargs["when"]))
                    when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)

                    kwargs["when"] = when_spec

                # If any of the arguments are executors returned by a
                # directive passed as an argument, don't execute them
                # lazily. Instead, let the called directive handle them.
                # This allows nested directive calls in packages. The
                # caller can return the directive if it should be queued.
                def remove_directives(arg):
                    directives = DirectiveMeta._directives_to_be_executed
                    if isinstance(arg, (list, tuple)):
                        # Descend into args that are lists or tuples
                        for a in arg:
                            remove_directives(a)
                    else:
                        # Remove directives args from the exec queue
                        remove = next((d for d in directives if d is arg), None)
                        if remove is not None:
                            directives.remove(remove)

                # Nasty, but it's the best way I can think of to avoid
                # side effects if directive results are passed as args
                remove_directives(args)
                remove_directives(list(kwargs.values()))

                # A directive returns either something that is callable on a
|
||||
# package or a sequence of them
|
||||
result = decorated_function(*args, **kwargs)
|
||||
|
||||
# ...so if it is not a sequence make it so
|
||||
values = result
|
||||
if not isinstance(values, collections.abc.Sequence):
|
||||
values = (values,)
|
||||
|
||||
DirectiveMeta._directives_to_be_executed.extend(values)
|
||||
|
||||
# wrapped function returns same result as original so
|
||||
# that we can nest directives
|
||||
return result
|
||||
|
||||
return _wrapper
|
||||
|
||||
return _decorator
|
||||
|
||||
|
||||
class DirectiveError(spack.error.SpackError):
|
||||
"""This is raised when something is wrong with a package directive."""
|
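For readers unfamiliar with the directive machinery above, here is a minimal, self-contained sketch of the same queue-and-drain pattern (plain Python, no Spack imports; every name here is illustrative, not Spack's actual API). A directive called in a class body returns an "executor" that is queued on a module-level list; the metaclass then drains the queue against the class being defined:

import collections.abc

_queue = []  # stands in for DirectiveMeta._directives_to_be_executed


def directive(dicts):
    """Toy version of DirectiveMeta.directive: queue each call's executor."""
    def _decorator(fn):
        def _wrapper(*args, **kwargs):
            result = fn(*args, **kwargs)
            values = result if isinstance(result, collections.abc.Sequence) else (result,)
            _queue.extend(values)
            return result
        return _wrapper
    return _decorator


@directive(dicts="versions")
def version(ver):
    # The directive returns a callable that will later run against the class.
    def _execute(cls):
        cls.versions[ver] = {"version": ver}
    return _execute


class PackageMeta(type):
    def __init__(cls, name, bases, attr_dict):
        cls.versions = {}
        for callback in _queue:
            callback(cls)  # lazily execute the queued directives
        _queue.clear()
        super().__init__(name, bases, attr_dict)


class Foo(metaclass=PackageMeta):
    version("1.0")
    version("2.0")


print(sorted(Foo.versions))  # ['1.0', '2.0']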
@@ -4,14 +4,17 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import errno
import glob
import os
import posixpath
import re
import shutil
import sys
from contextlib import contextmanager
from pathlib import Path
from typing import List, Optional, Tuple

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.symlink import readlink

import spack.config
@@ -20,8 +23,13 @@
import spack.util.spack_json as sjson
from spack.error import SpackError

# Note: posixpath is used here as opposed to os.path.join
# because spack.spec.Spec.format requires forward slash path
# separators at this stage
default_projections = {
    "all": "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
    "all": posixpath.join(
        "{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}"
    )
}


@@ -31,42 +39,6 @@ def _check_concrete(spec):
        raise ValueError("Specs passed to a DirectoryLayout must be concrete!")


def _get_spec(prefix: str) -> Optional["spack.spec.Spec"]:
    """Returns a spec if the prefix contains a spec file in the .spack subdir."""
    for f in ("spec.json", "spec.yaml"):
        try:
            return spack.spec.Spec.from_specfile(os.path.join(prefix, ".spack", f))
        except Exception:
            continue
    return None


def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
    stack = [root]
    specs = []

    while stack:
        prefix = stack.pop()

        spec = _get_spec(prefix)

        if spec:
            spec.prefix = prefix
            specs.append(spec)
            continue

        try:
            scandir = os.scandir(prefix)
        except OSError:
            continue

        with scandir as entries:
            for entry in entries:
                if entry.is_dir(follow_symlinks=False):
                    stack.append(entry.path)
    return specs


class DirectoryLayout:
    """A directory layout is used to associate unique paths with specs.
    Different installations are going to want different layouts for their
@@ -180,9 +152,20 @@ def read_spec(self, path):
    def spec_file_path(self, spec):
        """Gets the full path to the spec file."""
        _check_concrete(spec)
        # Attempts to convert to JSON if possible.
        # Otherwise just returns the YAML.
        yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
        json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
        return yaml_path if os.path.exists(yaml_path) else json_path
        if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
            self.write_spec(spec, json_path)
            try:
                os.remove(yaml_path)
            except OSError as err:
                tty.debug("Could not remove deprecated {0}".format(yaml_path))
                tty.debug(err)
        elif os.path.exists(yaml_path):
            return yaml_path
        return json_path

    def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
        """Gets the full path to the spec file for a deprecated spec.
@@ -216,7 +199,23 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
            deprecated_spec.dag_hash() + "_" + self.spec_file_name,
        )

        return yaml_path if os.path.exists(yaml_path) else json_path
        if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
            self.write_spec(deprecated_spec, json_path)
            try:
                os.remove(yaml_path)
            except (IOError, OSError) as err:
                tty.debug("Could not remove deprecated {0}".format(yaml_path))
                tty.debug(err)
        elif os.path.exists(yaml_path):
            return yaml_path

        return json_path

    @contextmanager
    def disable_upstream_check(self):
        self.check_upstream = False
        yield
        self.check_upstream = True

    def metadata_path(self, spec):
        return os.path.join(spec.prefix, self.metadata_dir)
@@ -272,6 +271,53 @@ def ensure_installed(self, spec):
                "Spec file in %s does not match hash!" % spec_file_path
            )

    def all_specs(self):
        if not os.path.isdir(self.root):
            return []

        specs = []
        for _, path_scheme in self.projections.items():
            path_elems = ["*"] * len(path_scheme.split(posixpath.sep))
            # NOTE: Does not validate filename extension; should happen later
            path_elems += [self.metadata_dir, "spec.json"]
            pattern = os.path.join(self.root, *path_elems)
            spec_files = glob.glob(pattern)
            if not spec_files:  # we're probably looking at legacy yaml...
                path_elems += [self.metadata_dir, "spec.yaml"]
                pattern = os.path.join(self.root, *path_elems)
                spec_files = glob.glob(pattern)
            specs.extend([self.read_spec(s) for s in spec_files])
        return specs

    def all_deprecated_specs(self):
        if not os.path.isdir(self.root):
            return []

        deprecated_specs = set()
        for _, path_scheme in self.projections.items():
            path_elems = ["*"] * len(path_scheme.split(posixpath.sep))
            # NOTE: Does not validate filename extension; should happen later
            path_elems += [
                self.metadata_dir,
                self.deprecated_dir,
                "*_spec.*",
            ]  # + self.spec_file_name]
            pattern = os.path.join(self.root, *path_elems)
            spec_files = glob.glob(pattern)
            get_depr_spec_file = lambda x: os.path.join(
                os.path.dirname(os.path.dirname(x)), self.spec_file_name
            )
            deprecated_specs |= set(
                (self.read_spec(s), self.read_spec(get_depr_spec_file(s))) for s in spec_files
            )
        return deprecated_specs

    def specs_by_hash(self):
        by_hash = {}
        for spec in self.all_specs():
            by_hash[spec.dag_hash()] = spec
        return by_hash

    def path_for_spec(self, spec):
        """Return the absolute path from the root to a directory for the spec."""
        _check_concrete(spec)
@@ -337,35 +383,6 @@ def remove_install_directory(self, spec, deprecated=False):
                raise e
            path = os.path.dirname(path)

    def all_specs(self) -> List["spack.spec.Spec"]:
        """Returns a list of all specs detected in self.root, identified by `.spack` directories.
        Their prefix is set to the directory containing the `.spack` directory. Note that these
        specs may follow a different layout than the current layout if it was changed after
        installation."""
        return specs_from_metadata_dirs(self.root)

    def deprecated_for(
        self, specs: List["spack.spec.Spec"]
    ) -> List[Tuple["spack.spec.Spec", "spack.spec.Spec"]]:
        """Returns a list of tuples of specs (new, old) where new is deprecated for old."""
        spec_with_deprecated = []
        for spec in specs:
            try:
                deprecated = os.scandir(
                    os.path.join(str(spec.prefix), self.metadata_dir, self.deprecated_dir)
                )
            except OSError:
                continue

            with deprecated as entries:
                for entry in entries:
                    try:
                        deprecated_spec = spack.spec.Spec.from_specfile(entry.path)
                        spec_with_deprecated.append((spec, deprecated_spec))
                    except Exception:
                        continue
        return spec_with_deprecated


class DirectoryLayoutError(SpackError):
    """Superclass for directory layout errors."""
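As a rough illustration of why default_projections above is assembled with posixpath.join: the template is later expanded by spack.spec.Spec.format, which expects forward-slash separators even on Windows. The expander below is a simplified stand-in (illustrative only, not Spack's actual formatter), and the field values are hypothetical:

import posixpath

# Same shape as default_projections above; forward slashes survive on any OS.
projection = posixpath.join(
    "{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}"
)

# Hypothetical values standing in for a concrete spec's attributes.
fields = {
    "architecture": "linux-ubuntu22.04-x86_64",
    "compiler.name": "gcc",
    "compiler.version": "11.4.0",
    "name": "zlib",
    "version": "1.3",
    "hash": "abc123",
}


def expand(template: str, values: dict) -> str:
    # Minimal stand-in for Spec.format: replace each {key} token literally.
    for key, val in values.items():
        template = template.replace("{%s}" % key, val)
    return template


print(expand(projection, fields))
# linux-ubuntu22.04-x86_64/gcc-11.4.0/zlib-1.3-abc123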
@@ -58,8 +58,9 @@
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import SpecList
from spack.spec_list import InvalidSpecConstraintError, SpecList
from spack.util.path import substitute_path_variables
from spack.variant import UnknownVariantError

#: environment variable used to indicate the active environment
spack_env_var = "SPACK_ENV"
@@ -1213,6 +1214,7 @@ def scope_name(self):
    def include_concrete_envs(self):
        """Copy and save the included envs' specs internally."""

        lockfile_meta = None
        root_hash_seen = set()
        concrete_hash_seen = set()
        self.included_concrete_spec_data = {}
@@ -1223,26 +1225,37 @@ def include_concrete_envs(self):
                raise SpackEnvironmentError(f"Unable to find env at {env_path}")

            env = Environment(env_path)
            self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}

            with open(env.lock_path) as f:
                lockfile_as_dict = env._read_lockfile(f)

            # lockfile_meta must match for each env and use at least format version 5
            if lockfile_meta is None:
                lockfile_meta = lockfile_as_dict["_meta"]
            elif lockfile_meta != lockfile_as_dict["_meta"]:
                raise SpackEnvironmentError("All lockfile _meta values must match")
            elif lockfile_meta["lockfile-version"] < 5:
                raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")

            # Copy unique root specs from env
            for root_dict in env._concrete_roots_dict():
            self.included_concrete_spec_data[env_path] = {"roots": []}
            for root_dict in lockfile_as_dict["roots"]:
                if root_dict["hash"] not in root_hash_seen:
                    self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
                    root_hash_seen.add(root_dict["hash"])

            # Copy unique concrete specs from env
            for dag_hash, spec_details in env._concrete_specs_dict().items():
                if dag_hash not in concrete_hash_seen:
                    self.included_concrete_spec_data[env_path]["concrete_specs"].update(
                        {dag_hash: spec_details}
            for concrete_spec in lockfile_as_dict["concrete_specs"]:
                if concrete_spec not in concrete_hash_seen:
                    self.included_concrete_spec_data[env_path].update(
                        {"concrete_specs": lockfile_as_dict["concrete_specs"]}
                    )
                    concrete_hash_seen.add(dag_hash)
                    concrete_hash_seen.add(concrete_spec)

            # Copy transitive include data
            transitive = env.included_concrete_spec_data
            if transitive:
                self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
            if "include_concrete" in lockfile_as_dict.keys():
                self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
                    "include_concrete"
                ]

        self._read_lockfile_dict(self._to_lockfile_dict())
        self.write()
@@ -1624,15 +1637,16 @@ def _concretize_separately(self, tests=False):

        # Concretize any new user specs that we haven't concretized yet
        args, root_specs, i = [], [], 0
        for uspec in self.user_specs:
        for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
            if uspec not in old_concretized_user_specs:
                root_specs.append(uspec)
                args.append((i, str(uspec), tests))
                args.append((i, [str(x) for x in uspec_constraints], tests))
                i += 1

        # Ensure we don't try to bootstrap clingo in parallel
        with spack.bootstrap.ensure_bootstrap_configuration():
            spack.bootstrap.ensure_clingo_importable_or_raise()
        if spack.config.get("config:concretizer", "clingo") == "clingo":
            with spack.bootstrap.ensure_bootstrap_configuration():
                spack.bootstrap.ensure_clingo_importable_or_raise()

        # Ensure all the indexes have been built or updated, since
        # otherwise the processes in the pool may timeout on waiting
@@ -1643,7 +1657,7 @@ def _concretize_separately(self, tests=False):

        # Ensure we have compilers in compilers.yaml to avoid that
        # processes try to write the config file in parallel
        _ = spack.compilers.all_compilers_config(spack.config.CONFIG)
        _ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)

        # Early return if there is nothing to do
        if len(args) == 0:
@@ -2160,23 +2174,16 @@ def _get_environment_specs(self, recurse_dependencies=True):

        return specs

    def _concrete_specs_dict(self):
    def _to_lockfile_dict(self):
        """Create a dictionary to store a lockfile for this environment."""
        concrete_specs = {}
        for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
            spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
            # Assumes no legacy formats, since this was just created.
            spec_dict[ht.dag_hash.name] = s.dag_hash()
            concrete_specs[s.dag_hash()] = spec_dict
        return concrete_specs

    def _concrete_roots_dict(self):
        hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
        return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]

    def _to_lockfile_dict(self):
        """Create a dictionary to store a lockfile for this environment."""
        concrete_specs = self._concrete_specs_dict()
        root_specs = self._concrete_roots_dict()

        spack_dict = {"version": spack.spack_version}
        spack_commit = spack.main.get_spack_commit()
@@ -2197,7 +2204,7 @@ def _to_lockfile_dict(self):
            # spack version information
            "spack": spack_dict,
            # user specs + hashes are the 'roots' of the environment
            "roots": root_specs,
            "roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
            # Concrete specs by hash, including dependencies
            "concrete_specs": concrete_specs,
        }
@@ -2507,11 +2514,52 @@ def display_specs(specs):
    print(tree_string)


def _concretize_from_constraints(spec_constraints, tests=False):
    # Accept only valid constraints from the list and concretize the spec
    # Get the named spec even if out of order
    root_spec = [s for s in spec_constraints if s.name]
    if len(root_spec) != 1:
        m = "The constraints %s are not a valid spec " % spec_constraints
        m += "concretization target. All specs must have a single name "
        m += "constraint for concretization."
        raise InvalidSpecConstraintError(m)
    spec_constraints.remove(root_spec[0])

    invalid_constraints = []
    while True:
        # Attach all anonymous constraints to one named spec
        s = root_spec[0].copy()
        for c in spec_constraints:
            if c not in invalid_constraints:
                s.constrain(c)
        try:
            return s.concretized(tests=tests)
        except spack.spec.InvalidDependencyError as e:
            invalid_deps_string = ["^" + d for d in e.invalid_deps]
            invalid_deps = [
                c
                for c in spec_constraints
                if any(c.satisfies(invd) for invd in invalid_deps_string)
            ]
            if len(invalid_deps) != len(invalid_deps_string):
                raise e
            invalid_constraints.extend(invalid_deps)
        except UnknownVariantError as e:
            invalid_variants = e.unknown_variants
            inv_variant_constraints = [
                c for c in spec_constraints if any(name in c.variants for name in invalid_variants)
            ]
            if len(inv_variant_constraints) != len(invalid_variants):
                raise e
            invalid_constraints.extend(inv_variant_constraints)


def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
    index, spec_str, tests = packed_arguments
    index, spec_constraints, tests = packed_arguments
    spec_constraints = [Spec(x) for x in spec_constraints]
    with tty.SuppressOutput(msg_enabled=False):
        start = time.time()
        spec = Spec(spec_str).concretized(tests=tests)
        spec = _concretize_from_constraints(spec_constraints, tests)
        return index, spec, time.time() - start
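The restored _concretize_from_constraints helper implements a retry loop: constrain a copy of the single named root spec with every remaining anonymous constraint, and on a recognized failure drop the offending constraints and try again. A generic, Spack-free sketch of the same control flow (all types and the toy checker below are illustrative):

class InvalidConstraint(Exception):
    def __init__(self, bad):
        super().__init__(f"invalid constraint: {bad}")
        self.bad = bad


def solve(root, constraints, check):
    """Retry check(root, kept) while peeling off constraints it rejects."""
    invalid = set()
    while True:
        kept = [c for c in constraints if c not in invalid]
        try:
            return check(root, kept)
        except InvalidConstraint as e:
            if e.bad in invalid:  # no progress: re-raise instead of looping
                raise
            invalid.add(e.bad)


# A toy checker that rejects any constraint mentioning "^unknown".
def check(root, kept):
    for c in kept:
        if "^unknown" in c:
            raise InvalidConstraint(c)
    return f"{root} {' '.join(kept)}".strip()


print(solve("hdf5", ["+mpi", "^unknown-dep"], check))  # hdf5 +mpi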
@@ -24,14 +24,12 @@
"""
import copy
import functools
import http.client
import os
import os.path
import re
import shutil
import urllib.error
import urllib.parse
import urllib.request
from pathlib import PurePath
from typing import List, Optional

@@ -55,11 +53,24 @@
import spack.version
import spack.version.git_ref_lookup
from spack.util.compression import decompressor_for
from spack.util.executable import CommandNotFoundError, Executable, which
from spack.util.executable import CommandNotFoundError, which

#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []

CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE = (
    "The contents of {subject} look like {content_type}. Either the URL"
    " you are trying to use does not exist or you have an internet gateway"
    " issue. You can remove the bad archive using 'spack clean"
    " <package>', then try again using the correct URL."
)


def warn_content_type_mismatch(subject, content_type="HTML"):
    tty.warn(
        CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE.format(subject=subject, content_type=content_type)
    )


def _needs_stage(fun):
    """Many methods on fetch strategies require a stage to be set
@@ -234,31 +245,38 @@ class URLFetchStrategy(FetchStrategy):

    # these are checksum types. The generic 'checksum' is deprecated for
    # specific hash names, but we need it for backward compatibility
    optional_attrs = [*crypto.hashes.keys(), "checksum"]
    optional_attrs = list(crypto.hashes.keys()) + ["checksum"]

    def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs) -> None:
    def __init__(self, url=None, checksum=None, **kwargs):
        super().__init__(**kwargs)

        self.url = url
        # Prefer values in kwargs to the positionals.
        self.url = kwargs.get("url", url)
        self.mirrors = kwargs.get("mirrors", [])

        # digest can be set as the first argument, or from an explicit
        # kwarg by the hash name.
        self.digest: Optional[str] = checksum
        self.digest = kwargs.get("checksum", checksum)
        for h in self.optional_attrs:
            if h in kwargs:
                self.digest = kwargs[h]

        self.expand_archive: bool = kwargs.get("expand", True)
        self.extra_options: dict = kwargs.get("fetch_options", {})
        self._curl: Optional[Executable] = None
        self.extension: Optional[str] = kwargs.get("extension", None)
        self._effective_url: Optional[str] = None
        self.expand_archive = kwargs.get("expand", True)
        self.extra_options = kwargs.get("fetch_options", {})
        self._curl = None

        self.extension = kwargs.get("extension", None)

        if not self.url:
            raise ValueError("URLFetchStrategy requires a url for fetching.")

    @property
    def curl(self) -> Executable:
    def curl(self):
        if not self._curl:
            self._curl = web_util.require_curl()
            try:
                self._curl = which("curl", required=True)
            except CommandNotFoundError as exc:
                tty.error(str(exc))
        return self._curl

    def source_id(self):
@@ -279,23 +297,27 @@ def candidate_urls(self):
    @_needs_stage
    def fetch(self):
        if self.archive_file:
            tty.debug(f"Already downloaded {self.archive_file}")
            tty.debug("Already downloaded {0}".format(self.archive_file))
            return

        errors: List[Exception] = []
        url = None
        errors = []
        for url in self.candidate_urls:
            if not web_util.url_exists(url):
                tty.debug("URL does not exist: " + url)
                continue

            try:
                self._fetch_from_url(url)
                break
            except FailedDownloadError as e:
                errors.extend(e.exceptions)
        else:
            raise FailedDownloadError(*errors)
                errors.append(str(e))

        for msg in errors:
            tty.debug(msg)

        if not self.archive_file:
            raise FailedDownloadError(
                RuntimeError(f"Missing archive {self.archive_file} after fetching")
            )
            raise FailedDownloadError(url)

    def _fetch_from_url(self, url):
        if spack.config.get("config:url_fetch_method") == "curl":
@@ -309,45 +331,32 @@ def _check_headers(self, headers):
        # redirects properly.
        content_types = re.findall(r"Content-Type:[^\r\n]+", headers, flags=re.IGNORECASE)
        if content_types and "text/html" in content_types[-1]:
            msg = (
                f"The contents of {self.archive_file or 'the archive'} fetched from {self.url} "
                "looks like HTML. This can indicate a broken URL, or an internet gateway issue."
            )
            if self._effective_url != self.url:
                msg += f" The URL redirected to {self._effective_url}."
            tty.warn(msg)
            warn_content_type_mismatch(self.archive_file or "the archive")

    @_needs_stage
    def _fetch_urllib(self, url):
        save_file = self.stage.save_filename
        tty.msg("Fetching {0}".format(url))

        request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})

        # Run urllib but grab the mime type from the http headers
        try:
            response = web_util.urlopen(request)
        except (TimeoutError, urllib.error.URLError) as e:
            url, headers, response = web_util.read_from_url(url)
        except web_util.SpackWebError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
            if os.path.lexists(save_file):
                os.remove(save_file)
            raise FailedDownloadError(e) from e

        tty.msg(f"Fetching {url}")
            msg = "urllib failed to fetch with error {0}".format(e)
            raise FailedDownloadError(url, msg)

        if os.path.lexists(save_file):
            os.remove(save_file)

        with open(save_file, "wb") as f:
            shutil.copyfileobj(response, f)
        with open(save_file, "wb") as _open_file:
            shutil.copyfileobj(response, _open_file)

        # Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
        # mirror that is broken, leading to spurious download failures. In that case it's helpful
        # for users to know which URL was actually fetched.
        if isinstance(response, http.client.HTTPResponse):
            self._effective_url = response.geturl()

        self._check_headers(str(response.headers))
        self._check_headers(str(headers))

    @_needs_stage
    def _fetch_curl(self, url):
@@ -356,7 +365,7 @@ def _fetch_curl(self, url):
        if self.stage.save_filename:
            save_file = self.stage.save_filename
            partial_file = self.stage.save_filename + ".part"
        tty.msg(f"Fetching {url}")
        tty.msg("Fetching {0}".format(url))
        if partial_file:
            save_args = [
                "-C",
@@ -396,8 +405,8 @@ def _fetch_curl(self, url):

        try:
            web_util.check_curl_code(curl.returncode)
        except spack.error.FetchError as e:
            raise FailedDownloadError(e) from e
        except spack.error.FetchError as err:
            raise spack.fetch_strategy.FailedDownloadError(url, str(err))

        self._check_headers(headers)

@@ -464,9 +473,9 @@ def check(self):
        """Check the downloaded archive against a checksum digest.
        No-op if this stage checks code out of a repository."""
        if not self.digest:
            raise NoDigestError(f"Attempt to check {self.__class__.__name__} with no digest.")
            raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")

        verify_checksum(self.archive_file, self.digest, self.url, self._effective_url)
        verify_checksum(self.archive_file, self.digest)

    @_needs_stage
    def reset(self):
@@ -475,8 +484,8 @@ def reset(self):
        """
        if not self.archive_file:
            raise NoArchiveFileError(
                f"Tried to reset {self.__class__.__name__} before fetching",
                f"Failed on reset() for URL {self.url}",
                "Tried to reset URLFetchStrategy before fetching",
                "Failed on reset() for URL %s" % self.url,
            )

        # Remove everything but the archive from the stage
@@ -489,10 +498,14 @@ def reset(self):
        self.expand()

    def __repr__(self):
        return f"{self.__class__.__name__}<{self.url}>"
        url = self.url if self.url else "no url"
        return "%s<%s>" % (self.__class__.__name__, url)

    def __str__(self):
        return self.url
        if self.url:
            return self.url
        else:
            return "[no url]"


@fetcher
@@ -505,7 +518,7 @@ def fetch(self):

        # check whether the cache file exists.
        if not os.path.isfile(path):
            raise NoCacheError(f"No cache of {path}")
            raise NoCacheError("No cache of %s" % path)

        # remove old symlink if one is there.
        filename = self.stage.save_filename
@@ -515,8 +528,8 @@ def fetch(self):
        # Symlink to local cached archive.
        symlink(path, filename)

        # Remove link if checksum fails, or subsequent fetchers will assume they don't need to
        # download.
        # Remove link if checksum fails, or subsequent fetchers
        # will assume they don't need to download.
        if self.digest:
            try:
                self.check()
@@ -525,12 +538,12 @@ def fetch(self):
                raise

        # Notify the user how we fetched.
        tty.msg(f"Using cached archive: {path}")
        tty.msg("Using cached archive: {0}".format(path))


class OCIRegistryFetchStrategy(URLFetchStrategy):
    def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
        super().__init__(url=url, checksum=checksum, **kwargs)
    def __init__(self, url=None, checksum=None, **kwargs):
        super().__init__(url, checksum, **kwargs)

        self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)

@@ -541,13 +554,13 @@ def fetch(self):

        try:
            response = self._urlopen(self.url)
        except (TimeoutError, urllib.error.URLError) as e:
        except urllib.error.URLError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
            if os.path.lexists(file):
                os.remove(file)
            raise FailedDownloadError(e) from e
            raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e

        if os.path.lexists(file):
            os.remove(file)
@@ -575,18 +588,18 @@ def __init__(self, **kwargs):
        # Set a URL based on the type of fetch strategy.
        self.url = kwargs.get(self.url_attr, None)
        if not self.url:
            raise ValueError(f"{self.__class__} requires {self.url_attr} argument.")
            raise ValueError("%s requires %s argument." % (self.__class__, self.url_attr))

        for attr in self.optional_attrs:
            setattr(self, attr, kwargs.get(attr, None))

    @_needs_stage
    def check(self):
        tty.debug(f"No checksum needed when fetching with {self.url_attr}")
        tty.debug("No checksum needed when fetching with {0}".format(self.url_attr))

    @_needs_stage
    def expand(self):
        tty.debug(f"Source fetched with {self.url_attr} is already expanded.")
        tty.debug("Source fetched with %s is already expanded." % self.url_attr)

    @_needs_stage
    def archive(self, destination, *, exclude: Optional[str] = None):
@@ -606,10 +619,10 @@ def archive(self, destination, *, exclude: Optional[str] = None):
        )

    def __str__(self):
        return f"VCS: {self.url}"
        return "VCS: %s" % self.url

    def __repr__(self):
        return f"{self.__class__}<{self.url}>"
        return "%s<%s>" % (self.__class__, self.url)


@fetcher
@@ -712,17 +725,11 @@ class GitFetchStrategy(VCSFetchStrategy):
        "submodules",
        "get_full_repo",
        "submodules_delete",
        "git_sparse_paths",
    ]

    git_version_re = r"git version (\S+)"

    def __init__(self, **kwargs):
        self.commit: Optional[str] = None
        self.tag: Optional[str] = None
        self.branch: Optional[str] = None

        # Discards the keywords in kwargs that may conflict with the next call
        # to __init__
        forwarded_args = copy.copy(kwargs)
@@ -733,7 +740,6 @@ def __init__(self, **kwargs):
        self.submodules = kwargs.get("submodules", False)
        self.submodules_delete = kwargs.get("submodules_delete", False)
        self.get_full_repo = kwargs.get("get_full_repo", False)
        self.git_sparse_paths = kwargs.get("git_sparse_paths", None)

    @property
    def git_version(self):
@@ -771,71 +777,68 @@ def git(self):

    @property
    def cachable(self):
        return self.cache_enabled and bool(self.commit)
        return self.cache_enabled and bool(self.commit or self.tag)

    def source_id(self):
        # TODO: tree-hash would secure download cache and mirrors, commit only secures checkouts.
        return self.commit
        return self.commit or self.tag

    def mirror_id(self):
        if self.commit:
        repo_ref = self.commit or self.tag or self.branch
        if repo_ref:
            repo_path = urllib.parse.urlparse(self.url).path
            result = os.path.sep.join(["git", repo_path, self.commit])
            result = os.path.sep.join(["git", repo_path, repo_ref])
            return result

    def _repo_info(self):
        args = ""
        if self.commit:
            args = f" at commit {self.commit}"
        elif self.tag:
            args = f" at tag {self.tag}"
        elif self.branch:
            args = f" on branch {self.branch}"

        return f"{self.url}{args}"
        if self.commit:
            args = " at commit {0}".format(self.commit)
        elif self.tag:
            args = " at tag {0}".format(self.tag)
        elif self.branch:
            args = " on branch {0}".format(self.branch)

        return "{0}{1}".format(self.url, args)

    @_needs_stage
    def fetch(self):
        if self.stage.expanded:
            tty.debug(f"Already fetched {self.stage.source_path}")
            tty.debug("Already fetched {0}".format(self.stage.source_path))
            return

        if self.git_sparse_paths:
            self._sparse_clone_src()
        else:
            self._clone_src()
        self.submodule_operations()
        self.clone(commit=self.commit, branch=self.branch, tag=self.tag)

    def bare_clone(self, dest: str) -> None:
    def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
        """
        Execute a bare clone for metadata only
        Clone a repository to a path.

        Requires a destination since bare cloning does not provide source
        and shouldn't be used for staging.
        This method handles cloning from git, but does not require a stage.

        Arguments:
            dest (str or None): The path into which the code is cloned. If None,
                requires a stage and uses the stage's source path.
            commit (str or None): A commit to fetch from the remote. Only one of
                commit, branch, and tag may be non-None.
            branch (str or None): A branch to fetch from the remote.
            tag (str or None): A tag to fetch from the remote.
            bare (bool): Execute a "bare" git clone (--bare option to git)
        """
        # Default to spack source path
        tty.debug(f"Cloning git repository: {self._repo_info()}")
        dest = dest or self.stage.source_path
        tty.debug("Cloning git repository: {0}".format(self._repo_info()))

        git = self.git
        debug = spack.config.get("config:debug")

        # We don't need to worry about which commit/branch/tag is checked out
        clone_args = ["clone", "--bare"]
        if not debug:
            clone_args.append("--quiet")
        clone_args.extend([self.url, dest])
        git(*clone_args)

    def _clone_src(self) -> None:
        """Clone a repository to a path using git."""
        # Default to spack source path
        dest = self.stage.source_path
        tty.debug(f"Cloning git repository: {self._repo_info()}")

        git = self.git
        debug = spack.config.get("config:debug")

        if self.commit:
        if bare:
            # We don't need to worry about which commit/branch/tag is checked out
            clone_args = ["clone", "--bare"]
            if not debug:
                clone_args.append("--quiet")
            clone_args.extend([self.url, dest])
            git(*clone_args)
        elif commit:
            # Need to do a regular clone and check out everything if
            # they asked for a particular commit.
            clone_args = ["clone", self.url]
@@ -854,7 +857,7 @@ def _clone_src(self) -> None:
            )

            with working_dir(dest):
                checkout_args = ["checkout", self.commit]
                checkout_args = ["checkout", commit]
                if not debug:
                    checkout_args.insert(1, "--quiet")
                git(*checkout_args)
@@ -866,10 +869,10 @@ def _clone_src(self) -> None:
                args.append("--quiet")

            # If we want a particular branch ask for it.
            if self.branch:
                args.extend(["--branch", self.branch])
            elif self.tag and self.git_version >= spack.version.Version("1.8.5.2"):
                args.extend(["--branch", self.tag])
            if branch:
                args.extend(["--branch", branch])
            elif tag and self.git_version >= spack.version.Version("1.8.5.2"):
                args.extend(["--branch", tag])

            # Try to be efficient if we're using a new enough git.
            # This checks out only one branch's history
@@ -901,7 +904,7 @@ def _clone_src(self) -> None:
            # For tags, be conservative and check them out AFTER
            # cloning. Later git versions can do this with clone
            # --branch, but older ones fail.
            if self.tag and self.git_version < spack.version.Version("1.8.5.2"):
            if tag and self.git_version < spack.version.Version("1.8.5.2"):
                # pull --tags returns a "special" error code of 1 in
                # older versions that we have to ignore.
                # see: https://github.com/git/git/commit/19d122b
@@ -914,79 +917,6 @@ def _clone_src(self) -> None:
                git(*pull_args, ignore_errors=1)
                git(*co_args)

    def _sparse_clone_src(self, **kwargs):
        """Use git's sparse checkout feature to clone portions of a git repository"""
        dest = self.stage.source_path
        git = self.git

        if self.git_version < spack.version.Version("2.26.0"):
            # technically this should be supported for 2.25, but bumping for OS issues
            # see https://github.com/spack/spack/issues/45771
            # code paths exist where the package is not set. Ensure some identifier for
            # the package that was configured for sparse checkout exists in the error message
            identifier = str(self.url)
            if self.package:
                identifier += f" ({self.package.name})"
            tty.warn(
                (
                    f"{identifier} is configured for git sparse-checkout "
                    "but the git version is too old to support sparse cloning. "
                    "Cloning the full repository instead."
                )
            )
            self._clone_src()
        else:
            # default to depth=2 to allow for retention of some git properties
            depth = kwargs.get("depth", 2)
            needs_fetch = self.branch or self.tag
            git_ref = self.branch or self.tag or self.commit

            assert git_ref

            clone_args = ["clone"]

            if needs_fetch:
                clone_args.extend(["--branch", git_ref])

            if self.get_full_repo:
                clone_args.append("--no-single-branch")
            else:
                clone_args.append("--single-branch")

            clone_args.extend(
                [f"--depth={depth}", "--no-checkout", "--filter=blob:none", self.url]
            )

            sparse_args = ["sparse-checkout", "set"]

            if callable(self.git_sparse_paths):
                sparse_args.extend(self.git_sparse_paths())
            else:
                sparse_args.extend([p for p in self.git_sparse_paths])

            sparse_args.append("--cone")

            checkout_args = ["checkout", git_ref]

            if not spack.config.get("config:debug"):
                clone_args.insert(1, "--quiet")
                checkout_args.insert(1, "--quiet")

            with temp_cwd():
                git(*clone_args)
                repo_name = get_single_file(".")
                if self.stage:
                    self.stage.srcdir = repo_name
                shutil.move(repo_name, dest)

            with working_dir(dest):
                git(*sparse_args)
                git(*checkout_args)

    def submodule_operations(self):
        dest = self.stage.source_path
        git = self.git

        if self.submodules_delete:
            with working_dir(dest):
                for submodule_to_delete in self.submodules_delete:
@@ -1039,7 +969,7 @@ def protocol_supports_shallow_clone(self):
        return not (self.url.startswith("http://") or self.url.startswith("/"))

    def __str__(self):
        return f"[git] {self._repo_info()}"
        return "[git] {0}".format(self._repo_info())


@fetcher
@@ -1363,7 +1293,7 @@ def reset(self):
        shutil.move(scrubbed, source_path)

    def __str__(self):
        return f"[hg] {self.url}"
        return "[hg] %s" % self.url


@fetcher
@@ -1372,21 +1302,46 @@ class S3FetchStrategy(URLFetchStrategy):

    url_attr = "s3"

    def __init__(self, *args, **kwargs):
        try:
            super().__init__(*args, **kwargs)
        except ValueError:
            if not kwargs.get("url"):
                raise ValueError("S3FetchStrategy requires a url for fetching.")

    @_needs_stage
    def fetch(self):
        if not self.url.startswith("s3://"):
            raise spack.error.FetchError(
                f"{self.__class__.__name__} can only fetch from s3:// urls."
            )
        if self.archive_file:
            tty.debug(f"Already downloaded {self.archive_file}")
            tty.debug("Already downloaded {0}".format(self.archive_file))
            return
        self._fetch_urllib(self.url)
        if not self.archive_file:
            raise FailedDownloadError(
                RuntimeError(f"Missing archive {self.archive_file} after fetching")
            )

        parsed_url = urllib.parse.urlparse(self.url)
        if parsed_url.scheme != "s3":
            raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

        tty.debug("Fetching {0}".format(self.url))

        basename = os.path.basename(parsed_url.path)

        with working_dir(self.stage.path):
            _, headers, stream = web_util.read_from_url(self.url)

            with open(basename, "wb") as f:
                shutil.copyfileobj(stream, f)

            content_type = web_util.get_header(headers, "Content-type")

        if content_type == "text/html":
            warn_content_type_mismatch(self.archive_file or "the archive")

        if self.stage.save_filename:
            llnl.util.filesystem.rename(
                os.path.join(self.stage.path, basename), self.stage.save_filename
            )

        if not self.archive_file:
            raise FailedDownloadError(self.url)


@fetcher
class GCSFetchStrategy(URLFetchStrategy):
@@ -1394,22 +1349,43 @@ class GCSFetchStrategy(URLFetchStrategy):

    url_attr = "gs"

    def __init__(self, *args, **kwargs):
        try:
            super().__init__(*args, **kwargs)
        except ValueError:
            if not kwargs.get("url"):
                raise ValueError("GCSFetchStrategy requires a url for fetching.")

    @_needs_stage
    def fetch(self):
        if not self.url.startswith("gs"):
            raise spack.error.FetchError(
                f"{self.__class__.__name__} can only fetch from gs:// urls."
            )
        if self.archive_file:
            tty.debug(f"Already downloaded {self.archive_file}")
            tty.debug("Already downloaded {0}".format(self.archive_file))
            return

        self._fetch_urllib(self.url)
        parsed_url = urllib.parse.urlparse(self.url)
        if parsed_url.scheme != "gs":
            raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

        tty.debug("Fetching {0}".format(self.url))

        basename = os.path.basename(parsed_url.path)

        with working_dir(self.stage.path):
            _, headers, stream = web_util.read_from_url(self.url)

            with open(basename, "wb") as f:
                shutil.copyfileobj(stream, f)

            content_type = web_util.get_header(headers, "Content-type")

        if content_type == "text/html":
            warn_content_type_mismatch(self.archive_file or "the archive")

        if self.stage.save_filename:
            os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)

        if not self.archive_file:
            raise FailedDownloadError(
                RuntimeError(f"Missing archive {self.archive_file} after fetching")
            )
            raise FailedDownloadError(self.url)


@fetcher
@@ -1418,7 +1394,7 @@ class FetchAndVerifyExpandedFile(URLFetchStrategy):
    as well as after expanding it."""

    def __init__(self, url, archive_sha256: str, expanded_sha256: str):
        super().__init__(url=url, checksum=archive_sha256)
        super().__init__(url, archive_sha256)
        self.expanded_sha256 = expanded_sha256

    def expand(self):
@@ -1434,26 +1410,21 @@ def expand(self):
        if len(files) != 1:
            raise ChecksumError(self, f"Expected a single file in {src_dir}.")

        verify_checksum(
            os.path.join(src_dir, files[0]), self.expanded_sha256, self.url, self._effective_url
        )
        verify_checksum(os.path.join(src_dir, files[0]), self.expanded_sha256)


def verify_checksum(file: str, digest: str, url: str, effective_url: Optional[str]) -> None:
def verify_checksum(file, digest):
    checker = crypto.Checker(digest)
    if not checker.check(file):
        # On failure, provide some information about the file size and
        # contents, so that we can quickly see what the issue is (redirect
        # was not followed, empty file, text instead of binary, ...)
        size, contents = fs.filesummary(file)
        long_msg = (
        raise ChecksumError(
            f"{checker.hash_name} checksum failed for {file}",
            f"Expected {digest} but got {checker.sum}. "
            f"File size = {size} bytes. Contents = {contents!r}. "
            f"URL = {url}"
            f"File size = {size} bytes. Contents = {contents!r}",
        )
        if effective_url and effective_url != url:
            long_msg += f", redirected to = {effective_url}"
        raise ChecksumError(f"{checker.hash_name} checksum failed for {file}", long_msg)


def stable_target(fetcher):
@@ -1465,14 +1436,14 @@ def stable_target(fetcher):
    return False


def from_url(url: str) -> URLFetchStrategy:
def from_url(url):
    """Given a URL, find an appropriate fetch strategy for it.
    Currently just gives you a URLFetchStrategy that uses curl.

    TODO: make this return appropriate fetch strategies for other
    types of URLs.
    """
    return URLFetchStrategy(url=url)
    return URLFetchStrategy(url)


def from_kwargs(**kwargs):
@@ -1541,12 +1512,10 @@ def _check_version_attributes(fetcher, pkg, version):
def _extrapolate(pkg, version):
    """Create a fetcher from an extrapolated URL for this version."""
    try:
        return URLFetchStrategy(url=pkg.url_for_version(version), fetch_options=pkg.fetch_options)
        return URLFetchStrategy(pkg.url_for_version(version), fetch_options=pkg.fetch_options)
    except spack.package_base.NoURLError:
        raise ExtrapolationError(
            f"Can't extrapolate a URL for version {version} because "
            f"package {pkg.name} defines no URLs"
        )
        msg = "Can't extrapolate a URL for version %s because package %s defines no URLs"
        raise ExtrapolationError(msg % (version, pkg.name))


def _from_merged_attrs(fetcher, pkg, version):
@@ -1563,11 +1532,8 @@ def _from_merged_attrs(fetcher, pkg, version):
        attrs["fetch_options"] = pkg.fetch_options
    attrs.update(pkg.versions[version])

    if fetcher.url_attr == "git":
        pkg_attr_list = ["submodules", "git_sparse_paths"]
        for pkg_attr in pkg_attr_list:
            if hasattr(pkg, pkg_attr):
                attrs.setdefault(pkg_attr, getattr(pkg, pkg_attr))
    if fetcher.url_attr == "git" and hasattr(pkg, "submodules"):
        attrs.setdefault("submodules", pkg.submodules)

    return fetcher(**attrs)

@@ -1662,9 +1628,11 @@ def for_package_version(pkg, version=None):
    raise InvalidArgsError(pkg, version, **args)


def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
def from_url_scheme(url, *args, **kwargs):
    """Finds a suitable FetchStrategy by matching its url_attr with the scheme
    in the given url."""

    url = kwargs.get("url", url)
    parsed_url = urllib.parse.urlparse(url, scheme="file")

    scheme_mapping = kwargs.get("scheme_mapping") or {
@@ -1681,9 +1649,11 @@ def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
    for fetcher in all_strategies:
        url_attr = getattr(fetcher, "url_attr", None)
        if url_attr and url_attr == scheme:
            return fetcher(url=url, **kwargs)
            return fetcher(url, *args, **kwargs)

    raise ValueError(f'No FetchStrategy found for url with scheme: "{parsed_url.scheme}"')
    raise ValueError(
        'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(SCHEME=parsed_url.scheme)
    )


def from_list_url(pkg):
@@ -1708,9 +1678,7 @@ def from_list_url(pkg):
            )

            # construct a fetcher
            return URLFetchStrategy(
                url=url_from_list, checksum=checksum, fetch_options=pkg.fetch_options
            )
            return URLFetchStrategy(url_from_list, checksum, fetch_options=pkg.fetch_options)
        except KeyError as e:
            tty.debug(e)
            tty.msg("Cannot find version %s in url_list" % pkg.version)
@@ -1738,10 +1706,10 @@ def store(self, fetcher, relative_dest):
        mkdirp(os.path.dirname(dst))
        fetcher.archive(dst)

    def fetcher(self, target_path: str, digest: Optional[str], **kwargs) -> CacheURLFetchStrategy:
    def fetcher(self, target_path, digest, **kwargs):
        path = os.path.join(self.root, target_path)
        url = url_util.path_to_file_url(path)
        return CacheURLFetchStrategy(url=url, checksum=digest, **kwargs)
        return CacheURLFetchStrategy(url, digest, **kwargs)

    def destroy(self):
        shutil.rmtree(self.root, ignore_errors=True)
@@ -1754,9 +1722,9 @@ class NoCacheError(spack.error.FetchError):
class FailedDownloadError(spack.error.FetchError):
    """Raised when a download fails."""

    def __init__(self, *exceptions: Exception):
        super().__init__("Failed to download")
        self.exceptions = exceptions
    def __init__(self, url, msg=""):
        super().__init__("Failed to fetch file from URL: %s" % url, msg)
        self.url = url


class NoArchiveFileError(spack.error.FetchError):
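The restored URLFetchStrategy constructor accepts the URL and checksum positionally but lets explicit keyword arguments (including any hash name such as sha256) take precedence. A standalone sketch of that precedence logic, with a toy hash list standing in for crypto.hashes and all names illustrative:

HASHES = ("md5", "sha1", "sha256", "sha512")  # stand-in for crypto.hashes.keys()
OPTIONAL_ATTRS = list(HASHES) + ["checksum"]


class ToyURLFetch:
    def __init__(self, url=None, checksum=None, **kwargs):
        # Prefer values in kwargs to the positionals, as in the diff above.
        self.url = kwargs.get("url", url)
        self.digest = kwargs.get("checksum", checksum)
        # An explicit hash-name kwarg (e.g. sha256=...) wins over both.
        for h in OPTIONAL_ATTRS:
            if h in kwargs:
                self.digest = kwargs[h]
        if not self.url:
            raise ValueError("ToyURLFetch requires a url for fetching.")


a = ToyURLFetch("https://example.com/x.tar.gz", "deadbeef")
b = ToyURLFetch(url="https://example.com/x.tar.gz", sha256="deadbeef")
print(a.digest == b.digest)  # True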
@@ -37,12 +37,6 @@ def __call__(self, spec):
        """Run this hash on the provided spec."""
        return spec.spec_hash(self)

    def __repr__(self):
        return (
            f"SpecHashDescriptor(depflag={self.depflag!r}, "
            f"package_hash={self.package_hash!r}, name={self.name!r}, override={self.override!r})"
        )


#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")
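For context on the hunk above: SpecHashDescriptor is callable (see __call__), so applying a descriptor such as dag_hash to a spec delegates to spec.spec_hash(descriptor). A minimal stand-in showing the same dispatch, with purely illustrative names:

class ToyHashDescriptor:
    def __init__(self, name):
        self.name = name

    def __call__(self, spec):
        # Same shape as SpecHashDescriptor.__call__: delegate to the spec.
        return spec.spec_hash(self)


class ToySpec:
    def spec_hash(self, descriptor):
        return f"{descriptor.name}:0123abcd"


print(ToyHashDescriptor("hash")(ToySpec()))  # hash:0123abcd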