Compare commits
249 Commits
develop-20
...
hs/ci/gha-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1d78283946 | ||
|
|
5a8a7b83f6 | ||
|
|
70de20eaa2 | ||
|
|
d8172e2c29 | ||
|
|
3371cc55ed | ||
|
|
35ee3706bb | ||
|
|
9493cf016b | ||
|
|
f6caa1c824 | ||
|
|
2c9f94e6a5 | ||
|
|
2504dcf4f8 | ||
|
|
82baa658a4 | ||
|
|
21384be78d | ||
|
|
acd47147a5 | ||
|
|
3ed6736b2c | ||
|
|
842954f6a4 | ||
|
|
5c918e40a6 | ||
|
|
98570929aa | ||
|
|
75bcf58b30 | ||
|
|
4a03cac6cc | ||
|
|
75faab206b | ||
|
|
c9ab0d8fcb | ||
|
|
c45e02d58f | ||
|
|
33c8f518ae | ||
|
|
2491a9abff | ||
|
|
1a26ec7b8b | ||
|
|
89a79d3df0 | ||
|
|
ce700d69d7 | ||
|
|
a505fb1f37 | ||
|
|
f039b22093 | ||
|
|
18ea8f813e | ||
|
|
d7e740defa | ||
|
|
c21dc1a27a | ||
|
|
f30d8ea2a5 | ||
|
|
03cb30cb96 | ||
|
|
f6da037129 | ||
|
|
31c2897fd8 | ||
|
|
1a379215da | ||
|
|
0f7c1b5e38 | ||
|
|
7e3af5d42d | ||
|
|
f45e312f81 | ||
|
|
a82e21e82f | ||
|
|
1ba40b99ee | ||
|
|
60f2698a4a | ||
|
|
b3772f8bb6 | ||
|
|
cd75e52ba2 | ||
|
|
b0b316c646 | ||
|
|
7bbf581169 | ||
|
|
7b93d01a68 | ||
|
|
a95fa26857 | ||
|
|
6f2393a345 | ||
|
|
9fa2bb375c | ||
|
|
98c08ce5c6 | ||
|
|
83f115894b | ||
|
|
59339be48f | ||
|
|
ef0599b53c | ||
|
|
9c4207a551 | ||
|
|
eb95390ce7 | ||
|
|
527d723db0 | ||
|
|
63fe6fc893 | ||
|
|
4f2a1806f9 | ||
|
|
12a7e8d73a | ||
|
|
21d8c09c5e | ||
|
|
43596b4e23 | ||
|
|
97edcb5acc | ||
|
|
fc268b0945 | ||
|
|
0b4477c0df | ||
|
|
eff4c14a09 | ||
|
|
f485a622c8 | ||
|
|
f151bc65f7 | ||
|
|
99d849b2e6 | ||
|
|
3d8f9a7b22 | ||
|
|
c88e7bc492 | ||
|
|
931d034da4 | ||
|
|
a3a49daf8f | ||
|
|
2c05ce3607 | ||
|
|
6587b2a231 | ||
|
|
f1c743e235 | ||
|
|
b932c14008 | ||
|
|
285f95a4d8 | ||
|
|
3de68ef976 | ||
|
|
5c7fe24bec | ||
|
|
ecb122f4c1 | ||
|
|
6219780691 | ||
|
|
8ec1369d2b | ||
|
|
e3fcc41162 | ||
|
|
ae582c45c3 | ||
|
|
252a4d1076 | ||
|
|
df37a8ba76 | ||
|
|
99d06b95a3 | ||
|
|
38829b01df | ||
|
|
2a6a6602da | ||
|
|
1527e9703d | ||
|
|
4a22df5477 | ||
|
|
2b4f2daa73 | ||
|
|
02501bc4af | ||
|
|
7cd039d022 | ||
|
|
1ff81c1c88 | ||
|
|
3e3cb73446 | ||
|
|
8e948c03fc | ||
|
|
572e790b3d | ||
|
|
1873d6909a | ||
|
|
4a24ab53df | ||
|
|
671c394d32 | ||
|
|
ce3b511f59 | ||
|
|
03073a5fed | ||
|
|
787bff0d6a | ||
|
|
2504a76079 | ||
|
|
f665f4c41b | ||
|
|
4cab31323c | ||
|
|
fcbe8c50cd | ||
|
|
37de90c98c | ||
|
|
5ccd9dc64b | ||
|
|
1f59ada2c2 | ||
|
|
a8a402115b | ||
|
|
c2f3539a5e | ||
|
|
cdeb67ec02 | ||
|
|
2ddd8cd1aa | ||
|
|
5b352c3088 | ||
|
|
95c26245c1 | ||
|
|
6a0e03b81c | ||
|
|
858f70bf6f | ||
|
|
123c26c22d | ||
|
|
b42ef1e7b8 | ||
|
|
2f2c65f56b | ||
|
|
883d0739e6 | ||
|
|
f1a31fe5f7 | ||
|
|
c3785f4d30 | ||
|
|
cc8983cf82 | ||
|
|
30cea3ce8a | ||
|
|
1252bd975c | ||
|
|
6547758b2f | ||
|
|
c633149874 | ||
|
|
d640ce74e0 | ||
|
|
6d2cc2d27a | ||
|
|
43f180c2c5 | ||
|
|
0685c6277e | ||
|
|
eaabde6ee9 | ||
|
|
87505fc2fc | ||
|
|
d7d886e3b5 | ||
|
|
0b3bd1e294 | ||
|
|
b9b7450f60 | ||
|
|
a6b0dfbd53 | ||
|
|
ecc3752ee9 | ||
|
|
8e2caa2b83 | ||
|
|
25af7a36aa | ||
|
|
38daed0a78 | ||
|
|
fc3b732b14 | ||
|
|
382847976f | ||
|
|
c1b423849c | ||
|
|
45ea09a79f | ||
|
|
b96af088d1 | ||
|
|
d47478d7b6 | ||
|
|
4763581642 | ||
|
|
d264094fdc | ||
|
|
3c8c7ef341 | ||
|
|
f83beb09ba | ||
|
|
3604e5bffc | ||
|
|
7fba228cf3 | ||
|
|
1d379d96ab | ||
|
|
f3edc33a07 | ||
|
|
8d4ea9dbd3 | ||
|
|
742d313ba8 | ||
|
|
70407e8970 | ||
|
|
2d42675035 | ||
|
|
4c50915d81 | ||
|
|
3f8d5fed39 | ||
|
|
66c1c213b1 | ||
|
|
f46528ec6b | ||
|
|
41489efa4c | ||
|
|
3df5a85237 | ||
|
|
8921612f6a | ||
|
|
e6a0a6c145 | ||
|
|
104d6b4484 | ||
|
|
cba9436cf4 | ||
|
|
9dc3ad4db7 | ||
|
|
4bfd7aeb25 | ||
|
|
fcf615b53e | ||
|
|
1155318534 | ||
|
|
a3c430e810 | ||
|
|
41ff0500f9 | ||
|
|
059a4a58e2 | ||
|
|
14513ba76f | ||
|
|
21da90e062 | ||
|
|
a3c7e97463 | ||
|
|
f7ed3ce4ae | ||
|
|
36caa6158a | ||
|
|
1904d99fd0 | ||
|
|
de0b17c07f | ||
|
|
5d695623db | ||
|
|
3f063ace1d | ||
|
|
47b71ba8ca | ||
|
|
67eb9cfccb | ||
|
|
dddbd944a4 | ||
|
|
b7d85e7694 | ||
|
|
f4c4b06a46 | ||
|
|
6995010bab | ||
|
|
2d212561fb | ||
|
|
7cab3e2383 | ||
|
|
48ca9a5f3c | ||
|
|
1934c8cf73 | ||
|
|
42cd7c4f89 | ||
|
|
ce654b6882 | ||
|
|
94719a55b4 | ||
|
|
76168292c3 | ||
|
|
3fd6066e54 | ||
|
|
c62cc6a45d | ||
|
|
423548fc90 | ||
|
|
9010e6f556 | ||
|
|
6085586407 | ||
|
|
dbd745bdab | ||
|
|
31c5c0b423 | ||
|
|
41f99f8131 | ||
|
|
441ade5809 | ||
|
|
60f6f8d836 | ||
|
|
5e7925c502 | ||
|
|
d39382bec8 | ||
|
|
be270f2311 | ||
|
|
c500200952 | ||
|
|
71b110e6c7 | ||
|
|
7b877ec9e2 | ||
|
|
a74ac87d34 | ||
|
|
796adb6b9b | ||
|
|
2967bb5540 | ||
|
|
9f4c677e46 | ||
|
|
1d369ba02d | ||
|
|
dcde4f9d5a | ||
|
|
3c576ca8c2 | ||
|
|
a89c89a23e | ||
|
|
aee7455568 | ||
|
|
69edcc6d2f | ||
|
|
46263a493e | ||
|
|
b24f2875e6 | ||
|
|
18eebce04d | ||
|
|
f5c6e10e08 | ||
|
|
e7c17f7ed8 | ||
|
|
a284cbf256 | ||
|
|
8cbf067455 | ||
|
|
875397cf16 | ||
|
|
a38045f77e | ||
|
|
31ce23f3fc | ||
|
|
9e65bd5837 | ||
|
|
2c1a3eff74 | ||
|
|
1d81ceb101 | ||
|
|
044c37372a | ||
|
|
8f40889a46 | ||
|
|
0a0282163b | ||
|
|
54f4530df4 | ||
|
|
193f3b3c5a | ||
|
|
34b0e8ebce |
2
.flake8
2
.flake8
@@ -28,7 +28,7 @@ max-line-length = 99
|
||||
# - F821: undefined name `name`
|
||||
#
|
||||
per-file-ignores =
|
||||
var/spack/repos/*/package.py:F403,F405,F821
|
||||
var/spack/*/package.py:F403,F405,F821
|
||||
*-ci-package.py:F403,F405,F821
|
||||
|
||||
# exclude things we usually do not want linting for.
|
||||
|
||||
73
.github/workflows/audit.yaml
vendored
73
.github/workflows/audit.yaml
vendored
@@ -1,73 +0,0 @@
|
||||
name: audit
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
with_coverage:
|
||||
required: true
|
||||
type: string
|
||||
python_version:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
concurrency:
|
||||
group: audit-${{inputs.python_version}}-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# Run audits on all the packages in the built-in repository
|
||||
package-audits:
|
||||
runs-on: ${{ matrix.system.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
system:
|
||||
- { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
|
||||
- { os: ubuntu-latest, shell: bash }
|
||||
- { os: macos-latest, shell: bash }
|
||||
defaults:
|
||||
run:
|
||||
shell: ${{ matrix.system.shell }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: ${{inputs.python_version}}
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip setuptools pytest coverage[toml]
|
||||
- name: Setup for Windows run
|
||||
if: runner.os == 'Windows'
|
||||
run: |
|
||||
python -m pip install --upgrade pywin32
|
||||
- name: Package audits (with coverage)
|
||||
env:
|
||||
COVERAGE_FILE: coverage/.coverage-audits-${{ matrix.system.os }}
|
||||
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
coverage run $(which spack) audit packages
|
||||
coverage run $(which spack) audit configs
|
||||
coverage run $(which spack) -d audit externals
|
||||
coverage combine
|
||||
- name: Package audits (without coverage)
|
||||
if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
spack -d audit packages
|
||||
spack -d audit configs
|
||||
spack -d audit externals
|
||||
- name: Package audits (without coverage)
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
run: |
|
||||
spack -d audit packages
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
spack -d audit configs
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
spack -d audit externals
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
|
||||
with:
|
||||
name: coverage-audits-${{ matrix.system.os }}
|
||||
path: coverage
|
||||
include-hidden-files: true
|
||||
194
.github/workflows/bootstrap.yml
vendored
194
.github/workflows/bootstrap.yml
vendored
@@ -1,194 +0,0 @@
|
||||
name: Bootstrapping
|
||||
|
||||
on:
|
||||
# This Workflow can be triggered manually
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
schedule:
|
||||
# nightly at 2:16 AM
|
||||
- cron: '16 2 * * *'
|
||||
|
||||
concurrency:
|
||||
group: bootstrap-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
distros-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: ${{ matrix.image }}
|
||||
strategy:
|
||||
matrix:
|
||||
image: ["fedora:latest", "opensuse/leap:latest"]
|
||||
steps:
|
||||
- name: Setup Fedora
|
||||
if: ${{ matrix.image == 'fedora:latest' }}
|
||||
run: |
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static gawk
|
||||
- name: Setup OpenSUSE
|
||||
if: ${{ matrix.image == 'opensuse/leap:latest' }}
|
||||
run: |
|
||||
# Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
|
||||
zypper update -y || zypper update -y
|
||||
zypper install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
clingo-sources:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
|
||||
steps:
|
||||
- name: Setup macOS
|
||||
if: ${{ matrix.runner != 'ubuntu-latest' }}
|
||||
run: |
|
||||
brew install cmake bison tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack external find --not-buildable cmake bison
|
||||
spack -d solve zlib
|
||||
tree $HOME/.spack/bootstrap/store/
|
||||
|
||||
gnupg-sources:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
|
||||
steps:
|
||||
- name: Setup macOS
|
||||
if: ${{ matrix.runner != 'ubuntu-latest' }}
|
||||
run: brew install tree gawk
|
||||
- name: Remove system executables
|
||||
run: |
|
||||
while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
|
||||
sudo rm $(command -v gpg gpg2 patchelf)
|
||||
done
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
from-binaries:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
|
||||
steps:
|
||||
- name: Setup macOS
|
||||
if: ${{ matrix.runner != 'ubuntu-latest' }}
|
||||
run: brew install tree
|
||||
- name: Remove system executables
|
||||
run: |
|
||||
while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
|
||||
sudo rm $(command -v gpg gpg2 patchelf)
|
||||
done
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: |
|
||||
3.8
|
||||
3.9
|
||||
3.10
|
||||
3.11
|
||||
3.12
|
||||
3.13
|
||||
- name: Set bootstrap sources
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable spack-install
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
set -e
|
||||
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' '3.13'; do
|
||||
not_found=1
|
||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||
if [[ -d "$ver_dir" ]] ; then
|
||||
echo "Testing $ver_dir"
|
||||
if $ver_dir/python --version ; then
|
||||
export PYTHON="$ver_dir/python"
|
||||
not_found=0
|
||||
old_path="$PATH"
|
||||
export PATH="$ver_dir:$PATH"
|
||||
./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
|
||||
export PATH="$old_path"
|
||||
fi
|
||||
fi
|
||||
if (($not_found)) ; then
|
||||
echo Required python version $ver not found in runner!
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack -d gpg list
|
||||
tree $HOME/.spack/bootstrap/store/
|
||||
|
||||
|
||||
windows:
|
||||
runs-on: "windows-latest"
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Setup Windows
|
||||
run: |
|
||||
Remove-Item -Path (Get-Command gpg).Path
|
||||
Remove-Item -Path (Get-Command file).Path
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
./share/spack/setup-env.ps1
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack external find --not-buildable cmake bison
|
||||
spack -d solve zlib
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
tree $env:userprofile/.spack/bootstrap/store/
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
./share/spack/setup-env.ps1
|
||||
spack -d gpg list
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
tree $env:userprofile/.spack/bootstrap/store/
|
||||
140
.github/workflows/build-containers.yml
vendored
140
.github/workflows/build-containers.yml
vendored
@@ -1,140 +0,0 @@
|
||||
name: Containers
|
||||
|
||||
on:
|
||||
# This Workflow can be triggered manually
|
||||
workflow_dispatch:
|
||||
# Build new Spack develop containers nightly.
|
||||
schedule:
|
||||
- cron: '34 0 * * *'
|
||||
# Run on pull requests that modify this file
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- '.github/workflows/build-containers.yml'
|
||||
- 'share/spack/docker/*'
|
||||
- 'share/spack/templates/container/*'
|
||||
- 'lib/spack/spack/container/*'
|
||||
# Let's also build & tag Spack containers on releases.
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
concurrency:
|
||||
group: build_containers-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
deploy-images:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
packages: write
|
||||
strategy:
|
||||
# Even if one container fails to build we still want the others
|
||||
# to continue their builds.
|
||||
fail-fast: false
|
||||
# A matrix of Dockerfile paths, associated tags, and which architectures
|
||||
# they support.
|
||||
matrix:
|
||||
# Meaning of the various items in the matrix list
|
||||
# 0: Container name (e.g. ubuntu-bionic)
|
||||
# 1: Platforms to build for
|
||||
# 2: Base image (e.g. ubuntu:22.04)
|
||||
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
|
||||
[centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
|
||||
[leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
|
||||
[ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
|
||||
[ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
|
||||
[ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
|
||||
[almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
|
||||
[almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
|
||||
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
|
||||
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
|
||||
[fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
|
||||
[fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
|
||||
- name: Determine latest release tag
|
||||
id: latest
|
||||
run: |
|
||||
git fetch --quiet --tags
|
||||
echo "tag=$(git tag --list --sort=-v:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)" | tee -a $GITHUB_OUTPUT
|
||||
|
||||
- uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96
|
||||
id: docker_meta
|
||||
with:
|
||||
images: |
|
||||
ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
|
||||
${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
|
||||
tags: |
|
||||
type=schedule,pattern=nightly
|
||||
type=schedule,pattern=develop
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=raw,value=latest,enable=${{ github.ref == format('refs/tags/{0}', steps.latest.outputs.tag) }}
|
||||
|
||||
- name: Generate the Dockerfile
|
||||
env:
|
||||
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
|
||||
run: |
|
||||
.github/workflows/bin/generate_spack_yaml_containerize.sh
|
||||
. share/spack/setup-env.sh
|
||||
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
|
||||
printf "Preparing to build ${{ env.container }} from dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile"
|
||||
if [ ! -f "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile" ]; then
|
||||
printf "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile does not exist"
|
||||
exit 1;
|
||||
fi
|
||||
|
||||
- name: Upload Dockerfile
|
||||
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||
with:
|
||||
name: dockerfiles_${{ matrix.dockerfile[0] }}
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.docker_meta.outputs.tags }}
|
||||
labels: ${{ steps.docker_meta.outputs.labels }}
|
||||
|
||||
merge-dockerfiles:
|
||||
runs-on: ubuntu-latest
|
||||
needs: deploy-images
|
||||
steps:
|
||||
- name: Merge Artifacts
|
||||
uses: actions/upload-artifact/merge@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||
with:
|
||||
name: dockerfiles
|
||||
pattern: dockerfiles_*
|
||||
delete-merged: true
|
||||
130
.github/workflows/ci.yaml
vendored
130
.github/workflows/ci.yaml
vendored
@@ -1,130 +0,0 @@
|
||||
name: ci
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
pull_request:
|
||||
branches:
|
||||
- develop
|
||||
- releases/**
|
||||
merge_group:
|
||||
|
||||
concurrency:
|
||||
group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# Check which files have been updated by the PR
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
# Set job outputs to values from filter step
|
||||
outputs:
|
||||
bootstrap: ${{ steps.filter.outputs.bootstrap }}
|
||||
core: ${{ steps.filter.outputs.core }}
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# For pull requests it's not necessary to checkout the code
|
||||
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
|
||||
id: filter
|
||||
with:
|
||||
# For merge group events, compare against the target branch (main)
|
||||
base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
|
||||
# For merge group events, use the merge group head ref
|
||||
ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
|
||||
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
|
||||
# Don't run if we only modified packages in the
|
||||
# built-in repository or documentation
|
||||
filters: |
|
||||
bootstrap:
|
||||
- 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
|
||||
- 'var/spack/repos/builtin/packages/clingo/**'
|
||||
- 'var/spack/repos/builtin/packages/python/**'
|
||||
- 'var/spack/repos/builtin/packages/re2c/**'
|
||||
- 'var/spack/repos/builtin/packages/gnupg/**'
|
||||
- 'var/spack/repos/builtin/packages/libassuan/**'
|
||||
- 'var/spack/repos/builtin/packages/libgcrypt/**'
|
||||
- 'var/spack/repos/builtin/packages/libgpg-error/**'
|
||||
- 'var/spack/repos/builtin/packages/libksba/**'
|
||||
- 'var/spack/repos/builtin/packages/npth/**'
|
||||
- 'var/spack/repos/builtin/packages/pinentry/**'
|
||||
- 'lib/spack/**'
|
||||
- 'share/spack/**'
|
||||
- '.github/workflows/bootstrap.yml'
|
||||
- '.github/workflows/ci.yaml'
|
||||
core:
|
||||
- './!(var/**)/**'
|
||||
packages:
|
||||
- 'var/**'
|
||||
# Some links for easier reference:
|
||||
#
|
||||
# "github" context: https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
|
||||
# job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
|
||||
# setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
|
||||
#
|
||||
bootstrap:
|
||||
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
|
||||
needs: [ prechecks, changes ]
|
||||
uses: ./.github/workflows/bootstrap.yml
|
||||
secrets: inherit
|
||||
|
||||
unit-tests:
|
||||
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
|
||||
needs: [ prechecks, changes ]
|
||||
uses: ./.github/workflows/unit_tests.yaml
|
||||
secrets: inherit
|
||||
|
||||
prechecks:
|
||||
needs: [ changes ]
|
||||
uses: ./.github/workflows/prechecks.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
with_coverage: ${{ needs.changes.outputs.core }}
|
||||
with_packages: ${{ needs.changes.outputs.packages }}
|
||||
|
||||
import-check:
|
||||
needs: [ changes ]
|
||||
uses: ./.github/workflows/import-check.yaml
|
||||
|
||||
all-prechecks:
|
||||
needs: [ prechecks ]
|
||||
if: ${{ always() }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Success
|
||||
run: |
|
||||
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
|
||||
echo "Unit tests failed."
|
||||
exit 1
|
||||
else
|
||||
exit 0
|
||||
fi
|
||||
|
||||
coverage:
|
||||
needs: [ unit-tests, prechecks ]
|
||||
if: ${{ needs.changes.outputs.core }}
|
||||
uses: ./.github/workflows/coverage.yml
|
||||
secrets: inherit
|
||||
|
||||
all:
|
||||
needs: [ unit-tests, coverage, bootstrap ]
|
||||
if: ${{ always() }}
|
||||
runs-on: ubuntu-latest
|
||||
# See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context
|
||||
steps:
|
||||
- name: Status summary
|
||||
run: |
|
||||
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
|
||||
echo "Unit tests failed."
|
||||
exit 1
|
||||
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
|
||||
echo "Bootstrap tests failed."
|
||||
exit 1
|
||||
else
|
||||
exit 0
|
||||
fi
|
||||
36
.github/workflows/coverage.yml
vendored
36
.github/workflows/coverage.yml
vendored
@@ -1,36 +0,0 @@
|
||||
name: coverage
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
# Upload coverage reports to codecov once as a single bundle
|
||||
upload:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
|
||||
- name: Install python dependencies
|
||||
run: pip install -r .github/workflows/requirements/coverage/requirements.txt
|
||||
|
||||
- name: Download coverage artifact files
|
||||
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
|
||||
with:
|
||||
pattern: coverage-*
|
||||
path: coverage
|
||||
merge-multiple: true
|
||||
|
||||
- run: ls -la coverage
|
||||
- run: coverage combine -a coverage/.coverage*
|
||||
- run: coverage xml
|
||||
|
||||
- name: "Upload coverage report to CodeCov"
|
||||
uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303
|
||||
with:
|
||||
verbose: true
|
||||
fail_ci_if_error: false
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
49
.github/workflows/import-check.yaml
vendored
49
.github/workflows/import-check.yaml
vendored
@@ -1,49 +0,0 @@
|
||||
name: import-check
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
# Check we don't make the situation with circular imports worse
|
||||
import-check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: julia-actions/setup-julia@v2
|
||||
with:
|
||||
version: '1.10'
|
||||
- uses: julia-actions/cache@v2
|
||||
|
||||
# PR: use the base of the PR as the old commit
|
||||
- name: Checkout PR base commit
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.base.sha }}
|
||||
path: old
|
||||
# not a PR: use the previous commit as the old commit
|
||||
- name: Checkout previous commit
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 2
|
||||
path: old
|
||||
- name: Checkout previous commit
|
||||
if: github.event_name != 'pull_request'
|
||||
run: git -C old reset --hard HEAD^
|
||||
|
||||
- name: Checkout new commit
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
path: new
|
||||
- name: Install circular import checker
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
repository: haampie/circular-import-fighter
|
||||
ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
|
||||
path: circular-import-fighter
|
||||
- name: Install dependencies
|
||||
working-directory: circular-import-fighter
|
||||
run: make -j dependencies
|
||||
- name: Circular import check
|
||||
working-directory: circular-import-fighter
|
||||
run: make -j compare "SPACK_ROOT=../old ../new"
|
||||
31
.github/workflows/nightly-win-builds.yml
vendored
31
.github/workflows/nightly-win-builds.yml
vendored
@@ -1,31 +0,0 @@
|
||||
name: Windows Paraview Nightly
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 2 * * *' # Run at 2 am
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell:
|
||||
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
|
||||
|
||||
|
||||
jobs:
|
||||
build-paraview-deps:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python -m pip install --upgrade pip six pywin32 setuptools coverage
|
||||
- name: Build Test
|
||||
run: |
|
||||
spack compiler find
|
||||
spack external find cmake ninja win-sdk win-wdk wgl msmpi
|
||||
spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
|
||||
exit 0
|
||||
104
.github/workflows/prechecks.yml
vendored
104
.github/workflows/prechecks.yml
vendored
@@ -1,104 +0,0 @@
|
||||
name: prechecks
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
with_coverage:
|
||||
required: true
|
||||
type: string
|
||||
with_packages:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
concurrency:
|
||||
group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
|
||||
|
||||
jobs:
|
||||
# Validate that the code can be run on all the Python versions supported by Spack
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: '3.13'
|
||||
cache: 'pip'
|
||||
cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
pip install -r .github/workflows/requirements/style/requirements.txt
|
||||
- name: vermin (Spack's Core)
|
||||
run: |
|
||||
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
||||
- name: vermin (Repositories)
|
||||
run: |
|
||||
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
|
||||
|
||||
# Run style checks on the files that have been changed
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 2
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: '3.13'
|
||||
cache: 'pip'
|
||||
cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install -r .github/workflows/requirements/style/requirements.txt
|
||||
- name: Run style tests
|
||||
run: |
|
||||
bin/spack style --base HEAD^1
|
||||
bin/spack license verify
|
||||
pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
|
||||
|
||||
audit:
|
||||
uses: ./.github/workflows/audit.yaml
|
||||
secrets: inherit
|
||||
with:
|
||||
with_coverage: ${{ inputs.with_coverage }}
|
||||
python_version: '3.13'
|
||||
|
||||
verify-checksums:
|
||||
if: ${{ inputs.with_packages == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||
with:
|
||||
fetch-depth: 2
|
||||
- name: Verify Added Checksums
|
||||
run: |
|
||||
bin/spack ci verify-versions HEAD^1 HEAD
|
||||
|
||||
# Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
|
||||
bootstrap-dev-rhel8:
|
||||
runs-on: ubuntu-latest
|
||||
container: registry.access.redhat.com/ubi8/ubi
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git config --global --add safe.directory '*'
|
||||
git fetch --unshallow
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
useradd spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap Spack development environment
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack debug report
|
||||
spack -d bootstrap now --dev
|
||||
spack -d style -t black
|
||||
spack unit-test -V
|
||||
@@ -5,4 +5,4 @@ isort==6.0.1
|
||||
mypy==1.15.0
|
||||
types-six==1.17.0.20250403
|
||||
vermin==1.6.0
|
||||
pylint==3.3.6
|
||||
pylint==3.3.7
|
||||
|
||||
152
.github/workflows/unit_tests.yaml
vendored
152
.github/workflows/unit_tests.yaml
vendored
@@ -1,12 +1,7 @@
|
||||
name: unit tests
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
|
||||
concurrency:
|
||||
group: unit_tests-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
|
||||
cancel-in-progress: true
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
# Run unit tests with different configurations on linux
|
||||
@@ -88,146 +83,10 @@ jobs:
|
||||
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
|
||||
path: coverage
|
||||
include-hidden-files: true
|
||||
# Test shell integration
|
||||
shell:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Install System packages
|
||||
run: |
|
||||
sudo apt-get -y update
|
||||
# Needed for shell tests
|
||||
sudo apt-get install -y coreutils csh zsh tcsh fish dash bash subversion
|
||||
# On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
|
||||
- name: Set up Homebrew
|
||||
id: set-up-homebrew
|
||||
uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
|
||||
- name: Install kcov with brew
|
||||
run: "brew install kcov"
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
git --version
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
- name: Run shell tests
|
||||
env:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||
with:
|
||||
name: coverage-shell
|
||||
path: coverage
|
||||
include-hidden-files: true
|
||||
- name: Setup tmate session
|
||||
if: ${{ failure() }}
|
||||
uses: mxschmitt/action-tmate@v3
|
||||
|
||||
# Test RHEL8 UBI with platform Python. This job is run
|
||||
# only on PRs modifying core Spack
|
||||
rhel8-platform-python:
|
||||
runs-on: ubuntu-latest
|
||||
container: registry.access.redhat.com/ubi8/ubi
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
dnf install -y \
|
||||
bzip2 curl gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
git config --global --add safe.directory '*'
|
||||
git fetch --unshallow
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
useradd spack-test
|
||||
chown -R spack-test .
|
||||
- name: Run unit tests
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack -d bootstrap now --dev
|
||||
spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
|
||||
# Test for the clingo based solver (using clingo-cffi)
|
||||
clingo-cffi:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: '3.13'
|
||||
- name: Install System packages
|
||||
run: |
|
||||
sudo apt-get -y update
|
||||
sudo apt-get -y install coreutils gfortran graphviz gnupg2
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo
|
||||
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
|
||||
- name: Run unit tests (full suite with coverage)
|
||||
env:
|
||||
COVERAGE: true
|
||||
COVERAGE_FILE: coverage/.coverage-clingo-cffi
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
spack bootstrap disable spack-install
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.6
|
||||
spack bootstrap status
|
||||
spack solve zlib
|
||||
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
|
||||
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||
with:
|
||||
name: coverage-clingo-cffi
|
||||
path: coverage
|
||||
include-hidden-files: true
|
||||
# Run unit tests on MacOS
|
||||
macos:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [macos-13, macos-14]
|
||||
python-version: ["3.11"]
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip setuptools
|
||||
pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
|
||||
- name: Setup Homebrew packages
|
||||
run: |
|
||||
brew install dash fish gcc gnupg kcov
|
||||
- name: Run unit tests
|
||||
env:
|
||||
SPACK_TEST_PARALLEL: 4
|
||||
COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/bin/setup_git.sh
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) bootstrap disable spack-install
|
||||
$(which spack) solve zlib
|
||||
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
|
||||
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
|
||||
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||
with:
|
||||
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
|
||||
path: coverage
|
||||
include-hidden-files: true
|
||||
# Run unit tests on Windows
|
||||
windows:
|
||||
defaults:
|
||||
run:
|
||||
@@ -258,3 +117,6 @@ jobs:
|
||||
name: coverage-windows
|
||||
path: coverage
|
||||
include-hidden-files: true
|
||||
- name: Setup tmate session
|
||||
if: ${{ failure() }}
|
||||
uses: mxschmitt/action-tmate@v3
|
||||
@@ -25,6 +25,8 @@ packages:
|
||||
glu: [apple-glu]
|
||||
unwind: [apple-libunwind]
|
||||
uuid: [apple-libuuid]
|
||||
apple-clang:
|
||||
buildable: false
|
||||
apple-gl:
|
||||
buildable: false
|
||||
externals:
|
||||
|
||||
@@ -72,6 +72,8 @@ packages:
|
||||
permissions:
|
||||
read: world
|
||||
write: user
|
||||
cce:
|
||||
buildable: false
|
||||
cray-fftw:
|
||||
buildable: false
|
||||
cray-libsci:
|
||||
@@ -86,6 +88,8 @@ packages:
|
||||
buildable: false
|
||||
essl:
|
||||
buildable: false
|
||||
fj:
|
||||
buildable: false
|
||||
fujitsu-mpi:
|
||||
buildable: false
|
||||
fujitsu-ssl2:
|
||||
@@ -102,3 +106,5 @@ packages:
|
||||
buildable: false
|
||||
spectrum-mpi:
|
||||
buildable: false
|
||||
xl:
|
||||
buildable: false
|
||||
|
||||
@@ -11,4 +11,4 @@
|
||||
# ~/.spack/repos.yaml
|
||||
# -------------------------------------------------------------------------
|
||||
repos:
|
||||
- $spack/var/spack/repos/builtin
|
||||
- $spack/var/spack/repos/spack_repo/builtin
|
||||
|
||||
@@ -23,3 +23,5 @@ packages:
|
||||
mpi:
|
||||
require:
|
||||
- one_of: [msmpi]
|
||||
msvc:
|
||||
buildable: false
|
||||
|
||||
@@ -1916,7 +1916,7 @@ diagnostics. Issues, if found, are reported to stdout:
|
||||
PKG-DIRECTIVES: 1 issue found
|
||||
1. lammps: wrong variant in "conflicts" directive
|
||||
the variant 'adios' does not exist
|
||||
in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
|
||||
in /home/spack/spack/var/spack/repos/spack_repo/builtin/packages/lammps/package.py
|
||||
|
||||
|
||||
------------
|
||||
|
||||
@@ -45,10 +45,14 @@ provided binary cache, which can be a local directory or a remote URL.
|
||||
Here is an example where a build cache is created in a local directory named
|
||||
"spack-cache", to which we push the "ninja" spec:
|
||||
|
||||
ninja-1.12.1-vmvycib6vmiofkdqgrblo7zsvp7odwut
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack buildcache push ./spack-cache ninja
|
||||
==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache
|
||||
==> Selected 30 specs to push to file:///home/spackuser/spack/spack-cache
|
||||
...
|
||||
==> [30/30] Pushed ninja@1.12.1/ngldn2k
|
||||
|
||||
Note that ``ninja`` must be installed locally for this to work.
|
||||
|
||||
@@ -98,9 +102,10 @@ Now you can use list:
|
||||
.. code-block:: console
|
||||
|
||||
$ spack buildcache list
|
||||
==> 1 cached build.
|
||||
-- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
|
||||
ninja@1.10.2
|
||||
==> 24 cached builds.
|
||||
-- linux-ubuntu22.04-sapphirerapids / gcc@12.3.0 ----------------
|
||||
[ ... ]
|
||||
ninja@1.12.1
|
||||
|
||||
With ``mymirror`` configured and an index available, Spack will automatically
|
||||
use it during concretization and installation. That means that you can expect
|
||||
@@ -111,17 +116,17 @@ verify by re-installing ninja:
|
||||
|
||||
$ spack uninstall ninja
|
||||
$ spack install ninja
|
||||
==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
|
||||
==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig
|
||||
gpg: Signature made Do 12 Jan 2023 16:01:04 CET
|
||||
gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6
|
||||
[ ... ]
|
||||
==> Installing ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh [24/24]
|
||||
gpg: Signature made Thu 06 Mar 2025 10:03:38 AM MST
|
||||
gpg: using RSA key 75BC0528114909C076E2607418010FFAD73C9B07
|
||||
gpg: Good signature from "example (GPG created for Spack) <example@example.com>" [ultimate]
|
||||
==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
|
||||
==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache
|
||||
==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
|
||||
Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s
|
||||
[+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
|
||||
|
||||
==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/f0/f08eb62661ad159d2d258890127fc6053f5302a2f490c1c7f7bd677721010ee0
|
||||
==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/c7/c79ac6e40dfdd01ac499b020e52e57aa91151febaea3ad183f90c0f78b64a31a
|
||||
==> Extracting ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh from binary cache
|
||||
==> ninja: Successfully installed ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh
|
||||
Search: 0.00s. Fetch: 0.11s. Install: 0.11s. Extract: 0.10s. Relocate: 0.00s. Total: 0.22s
|
||||
[+] /home/spackuser/spack/opt/spack/linux-ubuntu22.04-sapphirerapids/gcc-12.3.0/ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh
|
||||
|
||||
It worked! You've just completed a full example of creating a build cache with
|
||||
a spec of interest, adding it as a mirror, updating its index, listing the contents,
|
||||
@@ -344,19 +349,18 @@ which lets you get started quickly. See the following resources for more informa
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Create tarball of installed Spack package and all dependencies.
|
||||
Tarballs are checksummed and signed if gpg2 is available.
|
||||
Places them in a directory ``build_cache`` that can be copied to a mirror.
|
||||
Commands like ``spack buildcache install`` will search Spack mirrors for build_cache to get the list of build caches.
|
||||
Tarballs and specfiles are compressed and checksummed, manifests are signed if gpg2 is available.
|
||||
Commands like ``spack buildcache install`` will search Spack mirrors to get the list of build caches.
|
||||
|
||||
============== ========================================================================================================================
|
||||
Arguments Description
|
||||
============== ========================================================================================================================
|
||||
``<specs>`` list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches
|
||||
``-d <path>`` directory in which ``build_cache`` directory is created, defaults to ``.``
|
||||
``-f`` overwrite ``.spack`` file in ``build_cache`` directory if it exists
|
||||
``-d <path>`` directory in which ``v3`` and ``blobs`` directories are created, defaults to ``.``
|
||||
``-f`` overwrite compressed tarball and spec metadata files if they already exist
|
||||
``-k <key>`` the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used.
|
||||
``-r`` make paths in binaries relative before creating tarball
|
||||
``-y`` answer yes to all create unsigned ``build_cache`` questions
|
||||
``-y`` answer yes to all questions about creating unsigned build caches
|
||||
============== ========================================================================================================================
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -397,6 +401,165 @@ List public keys available on Spack mirror.
|
||||
========= ==============================================
|
||||
Arguments Description
|
||||
========= ==============================================
|
||||
``-i`` trust the keys downloaded with prompt for each
|
||||
``-it`` trust the keys downloaded with prompt for each
|
||||
``-y`` answer yes to all trust all keys downloaded
|
||||
========= ==============================================
|
||||
|
||||
.. _build_cache_layout:
|
||||
|
||||
------------------
|
||||
Build Cache Layout
|
||||
------------------
|
||||
|
||||
This section describes the structure and content of URL-style build caches, as
|
||||
distinguished from OCI-style build caches.
|
||||
|
||||
The entry point for a binary package is a manifest json file that points to at
|
||||
least two other files stored as content-addressed blobs. These files include a spec
|
||||
metadata file, as well as the installation directory of the package stored as
|
||||
a compressed archive file. Binary package manifest files are named to indicate
|
||||
the package name and version, as well as the hash of the concrete spec. For
|
||||
example::
|
||||
|
||||
gcc-runtime-12.3.0-qyu2lvgt3nxh7izxycugdbgf5gsdpkjt.spec.manifest.json
|
||||
|
||||
would contain the manifest for a binary package of ``gcc-runtime@12.3.0``.
|
||||
The id of the built package is defined to be the DAG hash of the concrete spec,
|
||||
and exists in the name of the file as well. The id distinguishes a particular
|
||||
binary package from all other binary packages with the same package name and
|
||||
version. Below is an example binary package manifest file. Such a file would
|
||||
live in the versioned spec manifests directory of a binary mirror, for example
|
||||
``v3/manifests/spec/``::
|
||||
|
||||
{
|
||||
"version": 3,
|
||||
"data": [
|
||||
{
|
||||
"contentLength": 10731083,
|
||||
"mediaType": "application/vnd.spack.install.v2.tar+gzip",
|
||||
"compression": "gzip",
|
||||
"checksumAlgorithm": "sha256",
|
||||
"checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
|
||||
},
|
||||
{
|
||||
"contentLength": 1000,
|
||||
"mediaType": "application/vnd.spack.spec.v5+json",
|
||||
"compression": "gzip",
|
||||
"checksumAlgorithm": "sha256",
|
||||
"checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
The manifest points to both the compressed tar file as well as the compressed
|
||||
spec metadata file, and contains the checksum of each. This checksum
|
||||
is also used as the address of the associated file, and hence, must be
|
||||
known in order to locate the tarball or spec file within the mirror. Once the
|
||||
tarball or spec metadata file is downloaded, the checksum should be computed locally
|
||||
and compared to the checksum in the manifest to ensure the contents have not changed
|
||||
since the binary package was pushed. Spack stores all data files (including compressed
|
||||
tar files, spec metadata, indices, public keys, etc) within a ``blobs/<hash-algorithm>/``
|
||||
directory, using the first two characters of the checksum as a sub-directory
|
||||
to reduce the number files in a single folder. Here is a depiction of the
|
||||
organization of binary mirror contents::
|
||||
|
||||
mirror_directory/
|
||||
v3/
|
||||
layout.json
|
||||
manifests/
|
||||
spec/
|
||||
gcc-runtime/
|
||||
gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json
|
||||
gmake/
|
||||
gmake-4.4.1-lpr4j77rcgkg5536tmiuzwzlcjsiomph.spec.manifest.json
|
||||
compiler-wrapper/
|
||||
compiler-wrapper-1.0-s7ieuyievp57vwhthczhaq2ogowf3ohe.spec.manifest.json
|
||||
index/
|
||||
index.manifest.json
|
||||
key/
|
||||
75BC0528114909C076E2607418010FFAD73C9B07.key.manifest.json
|
||||
keys.manifest.json
|
||||
blobs/
|
||||
sha256/
|
||||
0f/
|
||||
0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210
|
||||
fb/
|
||||
fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041
|
||||
2a/
|
||||
2a21836d206ccf0df780ab0be63fdf76d24501375306a35daa6683c409b7922f
|
||||
...
|
||||
|
||||
Files within the ``manifests`` directory are organized into subdirectories by
|
||||
the type of entity they represent. Binary package manifests live in the ``spec/``
|
||||
directory, binary cache index manifests live in the ``index/`` directory, and
|
||||
manifests for public keys and their indices live in the ``key/`` subdirectory.
|
||||
Regardless of the type of entity they represent, all manifest files are named
|
||||
with an extension ``.manifest.json``.
|
||||
|
||||
Every manifest contains a ``data`` array, each element of which refers to an
|
||||
associated file stored a content-addressed blob. Considering the example spec
|
||||
manifest shown above, the compressed installation archive can be found by
|
||||
picking out the data blob with the appropriate ``mediaType``, which in this
|
||||
case would be ``application/vnd.spack.install.v1.tar+gzip``. The associated
|
||||
file is found by looking in the blobs directory under ``blobs/sha256/fb/`` for
|
||||
the file named with the complete checksum value.
|
||||
|
||||
As mentioned above, every entity in a binary mirror (aka build cache) is stored
|
||||
as a content-addressed blob pointed to by a manifest. While an example spec
|
||||
manifest (i.e. a manifest for a binary package) is shown above, here is what
|
||||
the manifest of a build cache index looks like::
|
||||
|
||||
{
|
||||
"version": 3,
|
||||
"data": [
|
||||
{
|
||||
"contentLength": 6411,
|
||||
"mediaType": "application/vnd.spack.db.v8+json",
|
||||
"compression": "none",
|
||||
"checksumAlgorithm": "sha256",
|
||||
"checksum": "225a3e9da24d201fdf9d8247d66217f5b3f4d0fc160db1498afd998bfd115234"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Some things to note about this manifest are that it points to a blob that is not
|
||||
compressed (``compression: "none"``), and that the ``mediaType`` is one we have
|
||||
not seen yet, ``application/vnd.spack.db.v8+json``. The decision not to compress
|
||||
build cache indices stems from the fact that spack does not yet sign build cache
|
||||
index manifests. Once that changes, you may start to see these indices stored as
|
||||
compressed blobs.
|
||||
|
||||
For completeness, here are examples of manifests for the other two types of entities
|
||||
you might find in a spack build cache. First a public key manifest::
|
||||
|
||||
{
|
||||
"version": 3,
|
||||
"data": [
|
||||
{
|
||||
"contentLength": 2472,
|
||||
"mediaType": "application/pgp-keys",
|
||||
"compression": "none",
|
||||
"checksumAlgorithm": "sha256",
|
||||
"checksum": "9fc18374aebc84deb2f27898da77d4d4410e5fb44c60c6238cb57fb36147e5c7"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Note the ``mediaType`` of ``application/pgp-keys``. Finally, a public key index manifest::
|
||||
|
||||
{
|
||||
"version": 3,
|
||||
"data": [
|
||||
{
|
||||
"contentLength": 56,
|
||||
"mediaType": "application/vnd.spack.keyindex.v1+json",
|
||||
"compression": "none",
|
||||
"checksumAlgorithm": "sha256",
|
||||
"checksum": "29b3a0eb6064fd588543bc43ac7d42d708a69058dafe4be0859e3200091a9a1c"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Again note the ``mediaType`` of ``application/vnd.spack.keyindex.v1+json``. Also note
|
||||
that both the above manifest examples refer to uncompressed blobs, this is for the same
|
||||
reason spack does not yet compress build cache index blobs.
|
||||
|
||||
@@ -83,7 +83,7 @@ packages. You can quickly find examples by running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd var/spack/repos/builtin/packages
|
||||
$ cd var/spack/repos/spack_repo/builtin/packages
|
||||
$ grep -l QMakePackage */package.py
|
||||
|
||||
|
||||
|
||||
@@ -27,10 +27,10 @@ it could use the ``require`` directive as follows:
|
||||
|
||||
Spack has a number of built-in bundle packages, such as:
|
||||
|
||||
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
|
||||
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
|
||||
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
|
||||
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
|
||||
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_
|
||||
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_
|
||||
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/libc/package.py>`_
|
||||
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/xsdk/package.py>`_
|
||||
|
||||
where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
|
||||
``Libc`` is a virtual bundle package for the C standard library.
|
||||
|
||||
@@ -199,7 +199,7 @@ a variant to control this:

However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
missing or replaced. For example, the
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/dealii/package.py>`_
package overrides the default variant with:

.. code-block:: python

@@ -20,8 +20,8 @@ start is to look at the definitions of other build systems. This guide
focuses mostly on how Spack's build systems work.

In this guide, we will be using the
`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/perl/package.py>`_ and
`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cmake/package.py>`_
`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/perl/package.py>`_ and
`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cmake/package.py>`_
packages as examples. ``perl``'s build system is a hand-written
``Configure`` shell script, while ``cmake`` bootstraps itself during
installation. Both of these packages require custom build systems.

@@ -91,14 +91,14 @@ there are any other variables you need to set, you can do this in the

.. code-block:: python

   def setup_build_environment(self, env):
   def setup_build_environment(self, env: EnvironmentModifications) -> None:
       env.set("PREFIX", prefix)
       env.set("BLASLIB", spec["blas"].libs.ld_flags)


`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cbench/package.py>`_
is a good example of a simple package that does this, while
`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/esmf/package.py>`_
`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/esmf/package.py>`_
is a good example of a more complex package.

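For orientation, here is a hedged, self-contained sketch of a complete override in
the spirit of the snippet above; the ``MySolver`` name and the ``blas`` dependency
wiring are invented for illustration:

.. code-block:: python

   from spack.package import *

   class MySolver(MakefilePackage):
       """Hypothetical Makefile-based package that exports build variables."""

       depends_on("blas")

       def setup_build_environment(self, env: EnvironmentModifications) -> None:
           # Equivalent to the example above, spelled with explicit self.
           env.set("PREFIX", self.prefix)
           env.set("BLASLIB", self.spec["blas"].libs.ld_flags)
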
""""""""""""""""""""""
|
||||
@@ -129,7 +129,7 @@ If you do need access to the spec, you can create a property like so:
|
||||
]
|
||||
|
||||
|
||||
`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cloverleaf/package.py>`_
|
||||
`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cloverleaf/package.py>`_
|
||||
is a good example of a package that uses this strategy.
|
||||
|
||||
"""""""""""""
|
||||
@@ -152,7 +152,7 @@ and a ``filter`` method to help with this. For example:
|
||||
makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
|
||||
|
||||
|
||||
`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
|
||||
`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/stream/package.py>`_
|
||||
is a good example of a package that involves editing a Makefile to set
|
||||
the appropriate variables.
|
||||
|
||||
@@ -192,7 +192,7 @@ well for storing variables:

   inc.write(f"{key} = {config[key]}\n")


`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/elk/package.py>`_
is a good example of a package that uses a dictionary to store
configuration variables.

@@ -213,7 +213,7 @@ them in a list:

   inc.write(f"{var}\n")


`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/hpl/package.py>`_
is a good example of a package that uses a list to store
configuration variables.

@@ -39,7 +39,7 @@ for "CRAN <package-name>" and you should quickly find what you want.
If it isn't on CRAN, try Bioconductor, another common R repository.

For the purposes of this tutorial, we will be walking through
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-caret/package.py>`_
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_caret/package.py>`_
as an example. If you search for "CRAN caret", you will quickly find what
you are looking for at https://cran.r-project.org/package=caret.
https://cran.r-project.org is the main CRAN website. However, CRAN also
@@ -337,7 +337,7 @@ Non-R dependencies
^^^^^^^^^^^^^^^^^^

Some packages depend on non-R libraries for linking. Check out the
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-stringi/package.py>`_
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_stringi/package.py>`_
package for an example: https://cloud.r-project.org/package=stringi.
If you search for the text "SystemRequirements", you will see:

@@ -352,7 +352,7 @@ Passing arguments to the installation

Some R packages provide additional flags that can be passed to
``R CMD INSTALL``, often to locate non-R dependencies.
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-rmpi/package.py>`_
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_rmpi/package.py>`_
is an example of this, and provides flags for linking to an MPI library. To pass
these to the installation command, you can override ``configure_args``
like so:

@@ -104,10 +104,10 @@ Finding available options

The first place to start when looking for a list of valid options to
build a package is ``scons --help``. Some packages like
`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/kahip/package.py>`_
`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/kahip/package.py>`_
don't bother overwriting the default SCons help message, so this isn't
very useful, but other packages like
`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/serf/package.py>`_
`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/serf/package.py>`_
print a list of valid command-line variables:

.. code-block:: console

@@ -177,7 +177,7 @@ print a list of valid command-line variables:

More advanced packages like
`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cantera/package.py>`_
`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cantera/package.py>`_
use ``scons --help`` to print a list of subcommands:

.. code-block:: console

@@ -225,8 +225,14 @@ def setup(sphinx):
    ("py:class", "llnl.util.lang.T"),
    ("py:class", "llnl.util.lang.KT"),
    ("py:class", "llnl.util.lang.VT"),
    ("py:class", "llnl.util.lang.K"),
    ("py:class", "llnl.util.lang.V"),
    ("py:class", "llnl.util.lang.ClassPropertyType"),
    ("py:obj", "llnl.util.lang.KT"),
    ("py:obj", "llnl.util.lang.VT"),
    ("py:obj", "llnl.util.lang.ClassPropertyType"),
    ("py:obj", "llnl.util.lang.K"),
    ("py:obj", "llnl.util.lang.V"),
]

# The reST default role (used for this markup: `text`) to use for all documents.

@@ -226,9 +226,9 @@ If all is well, you'll see something like this:

   Modified files:

     var/spack/repos/builtin/packages/hdf5/package.py
     var/spack/repos/builtin/packages/hdf/package.py
     var/spack/repos/builtin/packages/netcdf/package.py
     var/spack/repos/spack_repo/builtin/packages/hdf5/package.py
     var/spack/repos/spack_repo/builtin/packages/hdf/package.py
     var/spack/repos/spack_repo/builtin/packages/netcdf/package.py
   =======================================================
   Flake8 checks were clean.

@@ -236,9 +236,9 @@ However, if you aren't compliant with PEP 8, flake8 will complain:

.. code-block:: console

   var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
   var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
   var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
   var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
   Flake8 found errors.

Most of the error messages are straightforward, but if you don't understand what
@@ -280,7 +280,7 @@ All of these can be installed with Spack, e.g.

.. warning::

   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
   Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/py-sphinx/package.py>`_.
   If you're using a ``python`` from Spack and you installed
   ``py-sphinx`` and friends, you need to make them available to your
   ``python``. The easiest way to do this is to run:

@@ -154,9 +154,7 @@ Package-related modules

:mod:`spack.util.naming`
  Contains functions for mapping between Spack package names,
  Python module names, and Python class names. Functions like
  :func:`~spack.util.naming.mod_to_class` handle mapping package
  module names to class names.
  Python module names, and Python class names.

:mod:`spack.directives`
  *Directives* are functions that can be called inside a package definition

lib/spack/docs/env_vars_yaml.rst (new file, 34 lines)
@@ -0,0 +1,34 @@
.. Copyright Spack Project Developers. See COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _env-vars-yaml:

=============================================
Environment Variable Settings (env_vars.yaml)
=============================================

Spack allows you to add shell environment variable modifications to a
spack environment by including an ``env_vars.yaml``. Environment
variables can be modified by setting, unsetting, appending, and prepending
variables in the shell environment.
The changes to the shell environment will take effect when the spack
environment is activated.

For example:

.. code-block:: yaml

   env_vars:
     set:
       ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
     unset:
       ENVAR_TO_UNSET_IN_ENV_LOAD:
     prepend_path:
       PATH_LIST: "path/to/prepend"
     append_path:
       PATH_LIST: "path/to/append"
     remove_path:
       PATH_LIST: "path/to/remove"

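As a rough sketch of what the five operations amount to, this illustrative Python
(not Spack's activation code; the function is invented for the example) applies
such a mapping to a process environment:

.. code-block:: python

   import os

   def apply_env_vars(env_vars: dict) -> None:
       # set / unset replace or drop a variable outright.
       for key, value in env_vars.get("set", {}).items():
           os.environ[key] = value
       for key in env_vars.get("unset", {}) or {}:
           os.environ.pop(key, None)
       # The *_path operations edit os.pathsep-separated lists.
       for key, path in env_vars.get("prepend_path", {}).items():
           old = os.environ.get(key, "")
           os.environ[key] = path + (os.pathsep + old if old else "")
       for key, path in env_vars.get("append_path", {}).items():
           old = os.environ.get(key, "")
           os.environ[key] = (old + os.pathsep if old else "") + path
       for key, path in env_vars.get("remove_path", {}).items():
           parts = os.environ.get(key, "").split(os.pathsep)
           os.environ[key] = os.pathsep.join(p for p in parts if p and p != path)
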
@@ -539,7 +539,9 @@ from the command line.

You can also include an environment directly in the ``spack.yaml`` file. It
involves adding the ``include_concrete`` heading in the yaml followed by the
absolute path to the independent environments.
absolute path to the independent environments. Note that you may use Spack
config variables such as ``$spack`` or environment variables as long as the
expression expands to an absolute path.

.. code-block:: yaml

@@ -549,7 +551,7 @@ absolute path to the independent environments.
     unify: true
   include_concrete:
   - /absolute/path/to/environment1
   - /absolute/path/to/environment2
   - $spack/../path/to/environment2

Once the ``spack.yaml`` has been updated you must concretize the environment to
@@ -1000,6 +1002,28 @@ For example, the following environment has three root packages:
This allows for a much-needed reduction in redundancy between packages
and constraints.

-------------------------------
Modifying Environment Variables
-------------------------------

Spack Environments can modify the active shell's environment variables when activated.
The environment can be configured to set, unset, prepend, or append variables using
the ``env_vars`` section in the ``spack.yaml`` or through configuration scope files:

.. code-block:: yaml

   spack:
     env_vars:
       set:
         ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
       unset:
         ENVAR_TO_UNSET_IN_ENV_LOAD:
       prepend_path:
         PATH_LIST: "path/to/prepend"
       append_path:
         PATH_LIST: "path/to/append"
       remove_path:
         PATH_LIST: "path/to/remove"

-----------------
Environment Views

@@ -131,7 +131,7 @@ creates a simple python file:

It doesn't take much python coding to get from there to a working
package:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
   :lines: 5-

Spack also provides wrapper functions around common commands like

@@ -75,6 +75,7 @@ or refer to the full manual below.

   packages_yaml
   build_settings
   environments
   env_vars_yaml
   containers
   mirrors
   module_file_support

@@ -128,7 +128,7 @@ depend on the spec:

.. code-block:: python

   def setup_run_environment(self, env):
   def setup_run_environment(self, env: EnvironmentModifications) -> None:
       if self.spec.satisfies("+foo"):
           env.set("FOO", "bar")

@@ -142,7 +142,7 @@ For example, a simplified version of the ``python`` package could look like this

.. code-block:: python

   def setup_dependent_run_environment(self, env, dependent_spec):
   def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None:
       if dependent_spec.package.extends(self.spec):
           env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)

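Putting the two hooks together, a hedged, self-contained sketch of a hypothetical
package (the ``Example`` name, variant, and plugin path are invented) could read:

.. code-block:: python

   from spack.package import *

   class Example(Package):
       """Hypothetical package demonstrating run-environment hooks."""

       variant("foo", default=False, description="Enable FOO at runtime")

       def setup_run_environment(self, env: EnvironmentModifications) -> None:
           if self.spec.satisfies("+foo"):
               env.set("FOO", "bar")

       def setup_dependent_run_environment(
           self, env: EnvironmentModifications, dependent_spec: Spec
       ) -> None:
           # Runs once per dependent; paths come from the dependent's prefix.
           env.prepend_path("EXAMPLE_PLUGIN_PATH", dependent_spec.prefix.lib)
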
@@ -369,9 +369,9 @@ If you have a collection of software expected to work well together with
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
Examples where bundle packages can be useful include defining suites of
applications (e.g., `EcpProxyApps
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_data_vis_sdk/package.py>`_).

These versioned packages primarily consist of dependencies on the associated
software packages. They can include :ref:`variants <variants>` to ensure
@@ -443,7 +443,7 @@ lives in:

.. code-block:: console

   $ spack location -p gmp
   ${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
   ${SPACK_ROOT}/var/spack/repos/spack_repo/builtin/packages/gmp/package.py

but ``spack edit`` provides a much simpler shortcut and saves you the
trouble of typing the full path.
@@ -457,19 +457,19 @@ live in Spack's directory structure. In general, :ref:`cmd-spack-create`
handles creating package files for you, so you can skip most of the
details here.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``var/spack/repos/builtin/packages``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``var/spack/repos/spack_repo/builtin/packages``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A Spack installation directory is structured like a standard UNIX
install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
Packages themselves live in ``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages``.

If you ``cd`` to that directory, you will see directories for each
package:

.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
.. command-output:: cd $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages && ls
   :shell:
   :ellipsis: 10

@@ -479,7 +479,7 @@ package lives in:

.. code-block:: none

   $SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
   $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py

Alongside the ``package.py`` file, a package may contain extra
directories or files (like patches) that it needs to build.
@@ -492,12 +492,12 @@ Packages are named after the directory containing ``package.py``. So,
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
The ``package.py`` file defines a class called ``Libelf``, which
extends Spack's ``Package`` class. For example, here is
``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py``:

.. code-block:: python
   :linenos:

   from spack import *
   from spack.package import *

   class Libelf(Package):
       """ ... description ... """
@@ -520,7 +520,7 @@ these:

   $ spack install libelf@0.8.13

Spack sees the package name in the spec and looks for
``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
``libelf/package.py`` in ``var/spack/repos/spack_repo/builtin/packages``.
Likewise, if you run ``spack install py-numpy``, Spack looks for
``py-numpy/package.py``.

@@ -686,7 +686,7 @@ https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
In order to handle this, you can define a ``url_for_version()`` function
like so:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
   :pyobject: Openmpi.url_for_version

With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``
@@ -787,7 +787,7 @@ of GNU. For that, Spack goes a step further and defines a mixin class that
takes care of all of the plumbing and requires packagers to just define a proper
``gnu_mirror_path`` attribute:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/autoconf/package.py
   :lines: 9-18

^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1089,7 +1089,7 @@ You've already seen the ``homepage`` and ``url`` package attributes:

.. code-block:: python
   :linenos:

   from spack import *
   from spack.package import *


   class Mpich(Package):
@@ -1995,7 +1995,7 @@ structure like this:

.. code-block:: none

   $SPACK_ROOT/var/spack/repos/builtin/packages/
   $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/
       mvapich2/
           package.py
           ad_lustre_rwcontig_open_source.patch
@@ -2133,7 +2133,7 @@ handles ``RPATH``:

.. _pyside-patch:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/py-pyside/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/py_pyside/package.py
   :pyobject: PyPyside.patch
   :linenos:

@@ -2201,7 +2201,7 @@ using the ``spack resource show`` command::

   $ spack resource show 3877ab54
   3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
      path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/m4/gnulib-pgi.patch
      path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/m4/gnulib-pgi.patch
      applies to: builtin.m4

``spack resource show`` looks up downloadable resources from package
@@ -2219,7 +2219,7 @@ wonder where the extra boost patches are coming from::

   ^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
   $ spack resource show b37164268
   b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
      path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
      path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/dealii/boost_1.68.0.patch
      applies to: builtin.boost
      patched by: builtin.dealii

@@ -2930,7 +2930,7 @@ this, Spack provides four different methods that can be overridden in a package:

The Qt package, for instance, uses this call:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/qt/package.py
   :pyobject: Qt.setup_dependent_build_environment
   :linenos:

@@ -2958,7 +2958,7 @@ variables to be used by the dependent. This is done by implementing
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
example of this can be found in the ``Python`` package:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_package
   :linenos:

@@ -3785,7 +3785,7 @@ It is usually sufficient for a packager to override a few
build system specific helper methods or attributes to provide, for instance,
configure arguments:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/m4/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/m4/package.py
   :pyobject: M4.configure_args
   :linenos:

@@ -4110,7 +4110,7 @@ Shell command functions

Recall the install method from ``libelf``:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py
   :pyobject: Libelf.install
   :linenos:

@@ -4901,7 +4901,7 @@ the one passed to install, only the MPI implementations all set some
additional properties on it to help you out. E.g., in openmpi, you'll
find this:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py
   :pyobject: Openmpi.setup_dependent_package

That code allows the ``openmpi`` package to associate an ``mpicc`` property
@@ -6001,16 +6001,16 @@ with those implemented in the package itself.

   * - Parent/Provider Package
     - Stand-alone Tests
   * - `C
       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/c>`_
       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/c>`_
     - Compiles ``hello.c`` and runs it
   * - `Cxx
       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cxx>`_
     - Compiles and runs several ``hello`` programs
   * - `Fortran
       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/fortran>`_
     - Compiles and runs ``hello`` programs (``F`` and ``f90``)
   * - `Mpi
       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/mpi>`_
       <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/mpi>`_
     - Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
   * - :ref:`PythonPackage <pythonpackage>`
     - Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
@@ -6031,7 +6031,7 @@ maintainers provide additional stand-alone tests customized to the package.
One example of a package that adds its own stand-alone tests to those
"inherited" by the virtual package it provides an implementation for is
the `Openmpi package
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/openmpi/package.py>`_.
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py>`_.

Below are snippets from running and viewing the stand-alone test results
for ``openmpi``:

@@ -6183,7 +6183,7 @@ running:

.. code-block:: python

   from spack import *
   from spack.package import *

This is already part of the boilerplate for packages created with
``spack create``.

@@ -9,7 +9,7 @@ Package Repositories (repos.yaml)
=================================

Spack comes with thousands of built-in package recipes in
``var/spack/repos/builtin/``. This is a **package repository** -- a
``var/spack/repos/spack_repo/builtin/``. This is a **package repository** -- a
directory that Spack searches when it needs to find a package by name.
You may need to maintain packages for restricted, proprietary or
experimental software separately from the built-in repository. Spack
@@ -69,7 +69,7 @@ The default ``etc/spack/defaults/repos.yaml`` file looks like this:

.. code-block:: yaml

   repos:
   - $spack/var/spack/repos/builtin
   - $spack/var/spack/repos/spack_repo/builtin

The file starts with ``repos:`` and contains a single ordered list of
paths to repositories. Each path is on a separate line starting with
@@ -78,16 +78,16 @@ paths to repositories. Each path is on a separate line starting with

.. code-block:: yaml

   repos:
   - /opt/local-repo
   - $spack/var/spack/repos/builtin
   - /opt/repos/spack_repo/local_repo
   - $spack/var/spack/repos/spack_repo/builtin

When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
it searches these repositories in order (first to last) to resolve each
package name. In this example, Spack will look for the following
packages and use the first valid file:

1. ``/opt/local-repo/packages/mpich/package.py``
2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
1. ``/opt/repos/spack_repo/local_repo/packages/mpich/package.py``
2. ``$spack/var/spack/repos/spack_repo/builtin/packages/mpich/package.py``

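The first-match rule can be pictured with a short sketch (illustrative only;
Spack's real resolution lives in ``spack.repo``). The paths are the hypothetical
ones from the example above:

.. code-block:: python

   import os

   repos = [
       "/opt/repos/spack_repo/local_repo",
       "/spack/var/spack/repos/spack_repo/builtin",  # assumes $spack expands here
   ]

   def find_package(name: str):
       for root in repos:
           candidate = os.path.join(root, "packages", name, "package.py")
           if os.path.exists(candidate):
               return candidate  # first valid file wins
       return None
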
.. note::

@@ -101,14 +101,15 @@ Namespaces

Every repository in Spack has an associated **namespace** defined in its
top-level ``repo.yaml`` file. If you look at
``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
``var/spack/repos/spack_repo/builtin/repo.yaml`` in the built-in repository, you'll
see that its namespace is ``builtin``:

.. code-block:: console

   $ cat var/spack/repos/builtin/repo.yaml
   $ cat var/spack/repos/spack_repo/builtin/repo.yaml
   repo:
     namespace: builtin
     api: v2.0

Spack records the repository namespace of each installed package. For
example, if you install the ``mpich`` package from the ``builtin`` repo,
@@ -217,15 +218,15 @@ Suppose you have three repositories: the builtin Spack repo
repo containing your own prototype packages (``proto``). Suppose they
contain packages as follows:

+--------------+------------------------------------+-----------------------------+
| Namespace    | Path to repo                       | Packages                    |
+==============+====================================+=============================+
| ``proto``    | ``~/proto``                        | ``mpich``                   |
+--------------+------------------------------------+-----------------------------+
| ``llnl``     | ``/usr/local/llnl``                | ``hdf5``                    |
+--------------+------------------------------------+-----------------------------+
| ``builtin``  | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
+--------------+------------------------------------+-----------------------------+
+--------------+-----------------------------------------------+-----------------------------+
| Namespace    | Path to repo                                  | Packages                    |
+==============+===============================================+=============================+
| ``proto``    | ``~/my_spack_repos/spack_repo/proto``         | ``mpich``                   |
+--------------+-----------------------------------------------+-----------------------------+
| ``llnl``     | ``/usr/local/repos/spack_repo/llnl``          | ``hdf5``                    |
+--------------+-----------------------------------------------+-----------------------------+
| ``builtin``  | ``$spack/var/spack/repos/spack_repo/builtin`` | ``mpich``, ``hdf5``, others |
+--------------+-----------------------------------------------+-----------------------------+

Suppose that ``hdf5`` depends on ``mpich``. You can override the
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
@@ -233,8 +234,8 @@ built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:

.. code-block:: yaml

   repos:
   - /usr/local/llnl
   - $spack/var/spack/repos/builtin
   - /usr/local/repos/spack_repo/llnl
   - $spack/var/spack/repos/spack_repo/builtin

``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.

@@ -243,9 +244,9 @@ If, instead, ``repos.yaml`` looks like this:

.. code-block:: yaml

   repos:
   - ~/proto
   - /usr/local/llnl
   - $spack/var/spack/repos/builtin
   - ~/my_spack_repos/spack_repo/proto
   - /usr/local/repos/spack_repo/llnl
   - $spack/var/spack/repos/spack_repo/builtin

``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.

@@ -326,8 +327,8 @@ files, use ``spack repo list``.

   $ spack repo list
   ==> 2 package repositories.
   myrepo     ~/myrepo
   builtin    ~/spack/var/spack/repos/builtin
   myrepo     v2.0    ~/my_spack_repos/spack_repo/myrepo
   builtin    v2.0    ~/spack/var/spack/repos/spack_repo/builtin

Each repository is listed with its associated namespace. To get the raw,
merged YAML from all configuration files, use ``spack config get repos``:
@@ -335,9 +336,9 @@ merged YAML from all configuration files, use ``spack config get repos``:

.. code-block:: console

   $ spack config get repos
   repos:
   - ~/myrepo
   - $spack/var/spack/repos/builtin
   repos:
   - ~/my_spack_repos/spack_repo/myrepo
   - $spack/var/spack/repos/spack_repo/builtin

Note that, unlike ``spack repo list``, this does not include the
namespace, which is read from each repo's ``repo.yaml``.
@@ -351,66 +352,54 @@ yourself; you can use the ``spack repo create`` command.

.. code-block:: console

   $ spack repo create myrepo
   $ spack repo create ~/my_spack_repos myrepo
   ==> Created repo with namespace 'myrepo'.
   ==> To register it with spack, run this command:
     spack repo add ~/myrepo
     spack repo add ~/my_spack_repos/spack_repo/myrepo

   $ ls myrepo
   $ ls ~/my_spack_repos/spack_repo/myrepo
   packages/  repo.yaml

   $ cat myrepo/repo.yaml
   $ cat ~/my_spack_repos/spack_repo/myrepo/repo.yaml
   repo:
     namespace: 'myrepo'
     api: v2.0

By default, the namespace of a new repo matches its directory's name.
You can supply a custom namespace with a second argument, e.g.:
Namespaces can also be nested, which can be useful if you have
multiple package repositories for an organization. Spack will
create the corresponding directory structure for you:

.. code-block:: console

   $ spack repo create myrepo llnl.comp
   $ spack repo create ~/my_spack_repos llnl.comp
   ==> Created repo with namespace 'llnl.comp'.
   ==> To register it with spack, run this command:
     spack repo add ~/myrepo
     spack repo add ~/my_spack_repos/spack_repo/llnl/comp

   $ cat myrepo/repo.yaml
   $ cat ~/my_spack_repos/spack_repo/llnl/comp/repo.yaml
   repo:
     namespace: 'llnl.comp'

You can also create repositories with custom structure with the ``-d/--subdirectory``
argument, e.g.:

.. code-block:: console

   $ spack repo create -d applications myrepo apps
   ==> Created repo with namespace 'apps'.
   ==> To register it with Spack, run this command:
     spack repo add ~/myrepo

   $ ls myrepo
   applications/  repo.yaml

   $ cat myrepo/repo.yaml
   repo:
     namespace: apps
     subdirectory: applications
     api: v2.0

^^^^^^^^^^^^^^^^^^
``spack repo add``
^^^^^^^^^^^^^^^^^^

Once your repository is created, you can register it with Spack with
``spack repo add``:
``spack repo add``. You need to specify the path to the directory that
contains the ``repo.yaml`` file.

.. code-block:: console

   $ spack repo add ./myrepo
   $ spack repo add ~/my_spack_repos/spack_repo/llnl/comp
   ==> Added repo with namespace 'llnl.comp'.

   $ spack repo list
   ==> 2 package repositories.
   llnl.comp    ~/myrepo
   builtin      ~/spack/var/spack/repos/builtin
   llnl.comp    v2.0    ~/my_spack_repos/spack_repo/llnl/comp
   builtin      v2.0    ~/spack/var/spack/repos/spack_repo/builtin

This simply adds the repo to your ``repos.yaml`` file.

@@ -432,46 +421,43 @@ By namespace:

.. code-block:: console

   $ spack repo rm llnl.comp
   ==> Removed repository ~/myrepo with namespace 'llnl.comp'.
   ==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp with namespace 'llnl.comp'.

   $ spack repo list
   ==> 1 package repository.
   builtin    ~/spack/var/spack/repos/builtin
   builtin    ~/spack/var/spack/repos/spack_repo/builtin

By path:

.. code-block:: console

   $ spack repo rm ~/myrepo
   ==> Removed repository ~/myrepo
   $ spack repo rm ~/my_spack_repos/spack_repo/llnl/comp
   ==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp

   $ spack repo list
   ==> 1 package repository.
   builtin    ~/spack/var/spack/repos/builtin
   builtin    ~/spack/var/spack/repos/spack_repo/builtin

--------------------------------
Repo namespaces and Python
--------------------------------

You may have noticed that namespace notation for repositories is similar
to the notation for namespaces in Python. As it turns out, you *can*
treat Spack repositories like Python packages; this is how they are
implemented.
Package repositories are implemented as Python packages. To be precise,
they are `namespace packages
<https://packaging.python.org/en/latest/guides/packaging-namespace-packages/>`_
with ``spack_repo`` the top-level namespace, followed by the repository
namespace as submodules. For example, the builtin repository corresponds
to the Python module ``spack_repo.builtin.packages``.

You could, for example, extend a ``builtin`` package in your own
This structure allows you to extend a ``builtin`` package in your own
repository:

.. code-block:: python

   from spack.pkg.builtin.mpich import Mpich
   from spack_repo.builtin.packages.mpich.package import Mpich

   class MyPackage(Mpich):
       ...

Spack repo namespaces are actually Python namespaces tacked on under
``spack.pkg``. The search semantics of ``repos.yaml`` are actually
implemented using Python's built-in `sys.path
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.

Spack populates ``sys.path`` at runtime with the path to the root of your
package repository's ``spack_repo`` directory.

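Fleshing out the snippet above, a derived package in a custom repo might look like
this hedged sketch (the class name and the overridden argument are invented):

.. code-block:: python

   from spack_repo.builtin.packages.mpich.package import Mpich

   class MyMpich(Mpich):
       """Hypothetical derivative that only tweaks configure arguments."""

       def configure_args(self):
           args = super().configure_args()
           args.append("--enable-error-checking=no")  # invented example flag
           return args
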
@@ -176,92 +176,72 @@ community without needing deep familiarity with GnuPG or Public Key
Infrastructure.

.. _build_cache_format:
.. _build_cache_signing:

------------------
Build Cache Format
------------------
-------------------
Build Cache Signing
-------------------

A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built as well as the compiler
used to build it and the package's name and version. For example::
For an in-depth description of the layout of a binary mirror, see
the :ref:`documentation<build_cache_layout>` covering binary caches. The
key takeaway from that discussion that applies here is that the entry point
to a binary package is its manifest. The manifest refers unambiguously to the
spec metadata and compressed archive, which are stored as content-addressed
blobs.

   linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::
The manifest files can either be signed or unsigned, but are always given
a name ending with ``.spec.manifest.json`` regardless. The difference between
signed and unsigned manifests is simply that the signed version is wrapped in
a gpg cleartext signature, as illustrated below::

   -----BEGIN PGP SIGNED MESSAGE-----
   Hash: SHA512

   {
     "spec": {
       <concrete-spec-contents-omitted>
     },

     "buildcache_layout_version": 1,
     "binary_cache_checksum": {
       "hash_algorithm": "sha256",
       "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
     }
     "version": 3,
     "data": [
       {
         "contentLength": 10731083,
         "mediaType": "application/vnd.spack.install.v2.tar+gzip",
         "compression": "gzip",
         "checksumAlgorithm": "sha256",
         "checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
       },
       {
         "contentLength": 1000,
         "mediaType": "application/vnd.spack.spec.v5+json",
         "compression": "gzip",
         "checksumAlgorithm": "sha256",
         "checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
       }
     ]
   }

   -----BEGIN PGP SIGNATURE-----
   iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
   ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
   4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
   QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
   887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
   4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
   qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
   QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
   Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
   j3DXty57
   =3gvm

   iQGzBAEBCgAdFiEEdbwFKBFJCcB24mB0GAEP+tc8mwcFAmf2rr4ACgkQGAEP+tc8
   mwfefwv+KJs8MsQ5ovFaBdmyx5H/3k4rO4QHBzuSPOB6UaxErA9IyOB31iP6vNTU
   HzYpxz6F5dJCJWmmNEMN/0+vjhMHEOkqd7M1l5reVcxduTF2yc4tBZUO2gienEHL
   W0e+SnUznl1yc/aVpChUiahO2zToCsI8HZRNT4tu6iCnE/OpghqjsSdBOZHmSNDD
   5wuuCxfDUyWI6ZlLclaaB7RdbCUUJf/iqi711J+wubvnDFhc6Ynwm1xai5laJ1bD
   ev3NrSb2AAroeNFVo4iECA0fZC1OZQYzaRmAEhBXtCideGJ5Zf2Cp9hmCwNK8Hq6
   bNt94JP9LqC3FCCJJOMsPyOOhMSA5MU44zyyzloRwEQpHHLuFzVdbTHA3dmTc18n
   HxNLkZoEMYRc8zNr40g0yb2lCbc+P11TtL1E+5NlE34MX15mPewRCiIFTMwhCnE3
   gFSKtW1MKustZE35/RUwd2mpJRf+mSRVCl1f1RiFjktLjz7vWQq7imIUSam0fPDr
   XD4aDogm
   =RrFX
   -----END PGP SIGNATURE-----

If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
used to sign the above manifest file, the signature can be verified with
gpg, as follows::

   $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
   $ gpg --verify gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json

The metadata (regardless of whether signed or unsigned) contains the checksum
of the ``.spack`` file containing the actual installation. The checksum should
be compared to a checksum computed locally on the ``.spack`` file to ensure the
contents have not changed since the binary spec plus metadata were signed. The
``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::

   build_cache/
      # unsigned metadata (for indexing, contains sha256 of .spack file)
      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
      # clearsigned metadata (same as above, but signed)
      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
      <arch>/
         <compiler>/
            <name>-<ver>/
               # tar.gz-compressed prefix (may support more compression formats later)
               <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack

Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.
When attempting to install a binary package that has been signed, spack will
attempt to verify the signature with one of the trusted keys in its keyring,
and will fail if unable to do so. While not recommended, it is possible to
force installation of a signed package without verification by providing the
``--no-check-signature`` argument to ``spack install ...``.

.. _internal_implementation:

@@ -320,10 +300,10 @@ the following way:
   Reputational Public Key are imported into a keyring by the ``spack gpg …``
   sub-command. This is initiated by the job's build script which is created by
   the generate job at the beginning of the pipeline.
4. Assuming the package has dependencies, those specs are verified using
4. Assuming the package has dependencies, those spec manifests are verified using
   the keyring.
5. The package is built and the spec.json is generated
6. The spec.json is signed by the keyring and uploaded to the mirror's
5. The package is built and the spec manifest is generated
6. The spec manifest is signed by the keyring and uploaded to the mirror's
   build cache.

**Reputational Key**
@@ -376,24 +356,24 @@ following way:
4. In addition to the secret, the runner creates a tmpfs memory mounted
   directory where the GnuPG keyring will be created to verify, and
   then resign the package specs.
5. The job script syncs all spec.json.sig files from the build cache to
5. The job script syncs all spec manifest files from the build cache to
   a working directory in the job's execution environment.
6. The job script then runs the ``sign.sh`` script built into the
   notary Docker image.
7. The ``sign.sh`` script imports the public components of the
   Reputational and Intermediate CI Keys and uses them to verify good
   signatures on the spec.json.sig files. If any signed spec does not
   verify the job immediately fails.
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
   the spec json data from the signed file in preparation for being
   signatures on the spec.manifest.json files. If any signed manifest
   does not verify, the job immediately fails.
8. Assuming all manifests are verified, the ``sign.sh`` script then unpacks
   the manifest json data from the signed file in preparation for being
   re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to
   standard out using ``aws-encryption-cli`` directly into a ``gpg
   --import …`` statement which imports the key into the
   keyring mounted in-memory.
10. The private key is then used to sign each of the json specs and the
10. The private key is then used to sign each of the manifests and the
    keyring is removed from disk.
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
11. The re-signed manifests are resynced to the AWS S3 Mirror and the
    public signing of the packages for the develop or release pipeline
    that created them is complete.

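Step 8's "unpacking" amounts to stripping the cleartext-signature armor to recover
the JSON body. A minimal illustrative sketch (not the actual ``sign.sh`` logic):

.. code-block:: python

   def unwrap_clearsigned(text: str) -> str:
       lines = text.splitlines()
       start = lines.index("-----BEGIN PGP SIGNED MESSAGE-----")
       end = lines.index("-----BEGIN PGP SIGNATURE-----")
       body = lines[start + 1 : end]
       # Drop armor headers such as "Hash: SHA512" and the blank separator.
       while body and body[0].strip():
           body.pop(0)
       return "\n".join(body).strip()
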
lib/spack/external/__init__.py (vendored, 13 lines changed)
@@ -11,6 +11,7 @@
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
* Usage: dependency of macholib
* Version: 0.17.3
* License: MIT

archspec
--------
@@ -18,6 +19,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
* License: Apache-2.0 or MIT

astunparse
----------------
@@ -25,6 +27,7 @@
* Homepage: https://github.com/simonpercivall/astunparse
* Usage: Unparsing Python ASTs for package hashes in Spack
* Version: 1.6.3 (plus modifications)
* License: PSF-2.0
* Note: This is in ``spack.util.unparse`` because it's very heavily
  modified, and we want to track coverage for it.
  Specifically, we have modified this library to generate consistent unparsed ASTs
@@ -41,6 +44,7 @@
* Homepage: https://github.com/python-attrs/attrs
* Usage: Needed by jsonschema.
* Version: 22.1.0
* License: MIT

ctest_log_parser
----------------
@@ -48,6 +52,7 @@
* Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
* Usage: Functions to parse build logs and extract error messages.
* Version: Unversioned
* License: BSD-3-Clause
* Note: This is a homemade port of Kitware's CTest build handler.

distro
@@ -56,6 +61,7 @@
* Homepage: https://pypi.python.org/pypi/distro
* Usage: Provides a more stable linux distribution detection.
* Version: 1.8.0
* License: Apache-2.0

jinja2
------
@@ -63,6 +69,7 @@
* Homepage: https://pypi.python.org/pypi/Jinja2
* Usage: A modern and designer-friendly templating language for Python.
* Version: 3.0.3 (last version supporting Python 3.6)
* License: BSD-3-Clause

jsonschema
----------
@@ -70,6 +77,7 @@
* Homepage: https://pypi.python.org/pypi/jsonschema
* Usage: An implementation of JSON Schema for Python.
* Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
* License: MIT
* Note: We don't include tests or benchmarks; just what Spack needs.

macholib
@@ -78,6 +86,7 @@
* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
* Version: 1.16.2
* License: MIT

markupsafe
----------
@@ -85,6 +94,7 @@
* Homepage: https://pypi.python.org/pypi/MarkupSafe
* Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
* Version: 2.0.1 (last version supporting Python 3.6)
* License: BSD-3-Clause

pyrsistent
----------
@@ -92,6 +102,7 @@
* Homepage: http://github.com/tobgu/pyrsistent/
* Usage: Needed by `jsonschema`
* Version: 0.18.0
* License: MIT

ruamel.yaml
------
@@ -101,6 +112,7 @@
  actively maintained and has more features, including round-tripping
  comments read from config files.
* Version: 0.17.21
* License: MIT

six
---
@@ -108,5 +120,6 @@
* Homepage: https://pypi.python.org/pypi/six
* Usage: Python 2 and 3 compatibility utilities.
* Version: 1.16.0
* License: MIT

"""

@@ -764,7 +764,7 @@ def copy_tree(

    files = glob.glob(src)
    if not files:
        raise OSError("No such file or directory: '{0}'".format(src))
        raise OSError("No such file or directory: '{0}'".format(src), errno.ENOENT)

    # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
    # all symlinks to this list while traversing the tree, then when finished, make all

@@ -15,7 +15,20 @@
import typing
import warnings
from datetime import datetime, timedelta
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
from typing import (
    Any,
    Callable,
    Dict,
    Generic,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Tuple,
    TypeVar,
    Union,
)

# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
@@ -424,46 +437,39 @@ def add_func_to_class(name, func):
    return cls


K = TypeVar("K")
V = TypeVar("V")


@lazy_lexicographic_ordering
class HashableMap(collections.abc.MutableMapping):
class HashableMap(typing.MutableMapping[K, V]):
    """This is a hashable, comparable dictionary. Hash is performed on
    a tuple of the values in the dictionary."""

    __slots__ = ("dict",)

    def __init__(self):
        self.dict = {}
        self.dict: Dict[K, V] = {}

    def __getitem__(self, key):
    def __getitem__(self, key: K) -> V:
        return self.dict[key]

    def __setitem__(self, key, value):
    def __setitem__(self, key: K, value: V) -> None:
        self.dict[key] = value

    def __iter__(self):
    def __iter__(self) -> Iterator[K]:
        return iter(self.dict)

    def __len__(self):
    def __len__(self) -> int:
        return len(self.dict)

    def __delitem__(self, key):
    def __delitem__(self, key: K) -> None:
        del self.dict[key]

    def _cmp_iter(self):
        for _, v in sorted(self.items()):
            yield v

    def copy(self):
        """Type-agnostic clone method. Preserves subclass type."""
        # Construct a new dict of my type
        self_type = type(self)
        clone = self_type()

        # Copy everything from this dict into it.
        for key in self:
            clone[key] = self[key].copy()
        return clone

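# Aside, not part of the original file: a hypothetical usage sketch showing
# how the now-generic mapping above binds its K/V type parameters.
class VersionMap(HashableMap[str, int]):
    pass

_m = VersionMap()
_m["major"] = 2
_m["minor"] = 0
assert len(_m) == 2 and _m["major"] == 2
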
def match_predicate(*args):
    """Utility function for making string matching predicates.
@@ -1047,19 +1053,28 @@ def __exit__(self, exc_type, exc_value, tb):
        return True


class classproperty:
ClassPropertyType = TypeVar("ClassPropertyType")


class classproperty(Generic[ClassPropertyType]):
    """Non-data descriptor to evaluate a class-level property. The function that performs
    the evaluation is injected at creation time and take an instance (could be None) and
    an owner (i.e. the class that originated the instance)
    the evaluation is injected at creation time and takes an owner (i.e., the class that
    originated the instance).
    """

    def __init__(self, callback):
    def __init__(self, callback: Callable[[Any], ClassPropertyType]) -> None:
        self.callback = callback

    def __get__(self, instance, owner):
    def __get__(self, instance, owner) -> ClassPropertyType:
        return self.callback(owner)


#: A type alias that represents either a classproperty descriptor or a constant value of the same
#: type. This allows derived classes to override a computed class-level property with a constant
#: value while retaining type compatibility.
ClassProperty = Union[ClassPropertyType, classproperty[ClassPropertyType]]

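# Aside, not part of the original file: a hedged usage sketch. The descriptor
# computes a value from the owning class, and the ClassProperty alias lets a
# subclass override it with a plain constant.
class _Base:
    @classproperty
    def tag(cls) -> str:
        return cls.__name__.lower()

class _Child(_Base):
    tag = "fixed"  # constant override, type-compatible via ClassProperty

assert _Base.tag == "base" and _Child.tag == "fixed"
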
class DeprecatedProperty:
    """Data descriptor to error or warn when a deprecated property is accessed.

@@ -18,7 +18,7 @@
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (1, 0)
package_api_version = (2, 0)

#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
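A hedged sketch of the compatibility rule the surrounding comments describe (the exact check in Spack may differ; plain tuple comparison is assumed here):

min_package_api_version = (1, 0)
package_api_version = (2, 0)

def supports(repo_api_version: tuple) -> bool:
    # Lexicographic tuple comparison: (1, 5) sits between (1, 0) and (2, 0)
    return min_package_api_version <= repo_api_version <= package_api_version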
@@ -350,7 +350,7 @@ def _ensure_no_folders_without_package_py(error_cls):
    for repository in spack.repo.PATH.repos:
        missing = []
        for entry in os.scandir(repository.packages_path):
            if not entry.is_dir():
            if not entry.is_dir() or entry.name == "__pycache__":
                continue
            package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
            if not package_py.exists():
File diff suppressed because it is too large
@@ -16,6 +16,7 @@
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when

@@ -846,7 +847,9 @@ def _remove_libtool_archives(self) -> None:
        with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
            f.write("\n".join(libtool_files))

    def setup_build_environment(self, env):
    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        if self.spec.platform == "darwin" and macos_version() >= Version("11"):
            # Many configure files rely on matching '10.*' for macOS version
            # detection and fail to add flags if it shows as version 11.
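The same typed signature recurs in the builders below, so here is one hedged sketch of what an overriding package now looks like (package and variable names are hypothetical):

import spack.util.environment
from spack.package import AutotoolsPackage  # assumed import path

class MyAutotoolsPackage(AutotoolsPackage):
    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        # env records set/unset operations that Spack replays at build time
        env.set("MY_CONFIGURE_FLAG", "1")
        env.unset("UNWANTED_VAR")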
@@ -8,6 +8,7 @@
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when

@@ -86,7 +87,9 @@ def check_args(self):
        """Argument for ``cargo test`` during check phase"""
        return []

    def setup_build_environment(self, env):
    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        env.set("CARGO_HOME", self.stage.path)

    def build(
@@ -36,7 +36,7 @@ class CompilerPackage(spack.package_base.PackageBase):

    #: Compiler argument(s) that produces version information
    #: If multiple arguments, the earlier arguments must produce errors when invalid
    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
    compiler_version_argument: Union[str, Tuple[str, ...]] = "-dumpversion"

    #: Regex used to extract version from compiler's output
    compiler_version_regex: str = "(.*)"

@@ -47,6 +47,11 @@ class CompilerPackage(spack.package_base.PackageBase):
    #: Relative path to compiler wrappers
    compiler_wrapper_link_paths: Dict[str, str] = {}

    #: Optimization flags
    opt_flags: Sequence[str] = []
    #: Flags for generating debug information
    debug_flags: Sequence[str] = []

    def __init__(self, spec: "spack.spec.Spec"):
        super().__init__(spec)
        msg = f"Supported languages for {spec} are not a subset of possible supported languages"

@@ -185,7 +190,7 @@ def archspec_name(self) -> str:
    def cc(self) -> Optional[str]:
        assert self.spec.concrete, "cannot retrieve C compiler, spec is not concrete"
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("c", None)
            return self.spec.extra_attributes.get("compilers", {}).get("c", None)
        return self._cc_path()

    def _cc_path(self) -> Optional[str]:

@@ -196,7 +201,7 @@ def _cc_path(self) -> Optional[str]:
    def cxx(self) -> Optional[str]:
        assert self.spec.concrete, "cannot retrieve C++ compiler, spec is not concrete"
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("cxx", None)
            return self.spec.extra_attributes.get("compilers", {}).get("cxx", None)
        return self._cxx_path()

    def _cxx_path(self) -> Optional[str]:

@@ -207,7 +212,7 @@ def _cxx_path(self) -> Optional[str]:
    def fortran(self):
        assert self.spec.concrete, "cannot retrieve Fortran compiler, spec is not concrete"
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("fortran", None)
            return self.spec.extra_attributes.get("compilers", {}).get("fortran", None)
        return self._fortran_path()

    def _fortran_path(self) -> Optional[str]:
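The typing fix above matters because ``Tuple[str]`` denotes a one-element tuple, while ``Tuple[str, ...]`` denotes a tuple of any length; a sketch:

from typing import Tuple, Union

version_argument: Union[str, Tuple[str, ...]] = ("--version", "-dumpversion")
# Under Tuple[str], a type checker rejects this two-element tuple;
# Tuple[str, ...] accepts tuples of any arity, matching the docstring above.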
@@ -8,6 +8,7 @@
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when

@@ -68,7 +69,9 @@ class GoBuilder(BuilderWithDefaults):
    #: Callback names for install-time test
    install_time_test_callbacks = ["check"]

    def setup_build_environment(self, env):
    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        env.set("GO111MODULE", "on")
        env.set("GOTOOLCHAIN", "local")
        env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))
@@ -23,6 +23,7 @@

import spack.error
import spack.phase_callbacks
import spack.spec
from spack.build_environment import dso_suffix
from spack.error import InstallError
from spack.util.environment import EnvironmentModifications

@@ -1016,7 +1017,7 @@ def libs(self):
        debug_print(result)
        return result

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        """Adds environment variables to the generated module file.

        These environment variables come from running:

@@ -1049,11 +1050,13 @@ def setup_run_environment(self, env):
            env.set("F77", self.prefix.bin.ifort)
            env.set("F90", self.prefix.bin.ifort)

    def setup_dependent_build_environment(self, env, dependent_spec):
    def setup_dependent_build_environment(
        self, env: EnvironmentModifications, dependent_spec: spack.spec.Spec
    ) -> None:
        # NB: This function is overwritten by 'mpi' provider packages:
        #
        # var/spack/repos/builtin/packages/intel-mpi/package.py
        # var/spack/repos/builtin/packages/intel-parallel-studio/package.py
        # var/spack/repos/spack_repo/builtin/packages/intel_mpi/package.py
        # var/spack/repos/spack_repo/builtin/packages/intel_parallel_studio/package.py
        #
        # They call _setup_dependent_env_callback() as well, but with the
        # dictionary kwarg compilers_of_client{} present and populated.

@@ -1061,7 +1064,12 @@ def setup_dependent_build_environment(self, env, dependent_spec):
        # Handle everything in a callback version.
        self._setup_dependent_env_callback(env, dependent_spec)

    def _setup_dependent_env_callback(self, env, dependent_spec, compilers_of_client={}):
    def _setup_dependent_env_callback(
        self,
        env: EnvironmentModifications,
        dependent_spec: spack.spec.Spec,
        compilers_of_client={},
    ) -> None:
        # Expected to be called from a client's
        # setup_dependent_build_environment(),
        # with args extended to convey the client's compilers as needed.
@@ -8,6 +8,7 @@
import spack.builder
import spack.package_base
import spack.spec
import spack.util.environment
import spack.util.executable
import spack.util.prefix
from spack.directives import build_system, depends_on, extends

@@ -114,5 +115,7 @@ def install(
    def _luarocks_config_path(self):
        return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")

    def setup_build_environment(self, env):
    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
@@ -4,6 +4,7 @@
import spack.builder
import spack.package_base
import spack.spec
import spack.util.environment
import spack.util.prefix
from spack.directives import build_system, extends
from spack.multimethod import when

@@ -57,7 +58,9 @@ def install(
            "pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
        )

    def setup_build_environment(self, env):
    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        # octave does not like those environment variables to be set:
        env.unset("CC")
        env.unset("CXX")
@@ -106,8 +106,8 @@ def install_component(self, installer_path):

        bash = Executable("bash")

        # Installer writes files in ~/intel set HOME so it goes to prefix
        bash.add_default_env("HOME", self.prefix)
        # Installer writes files in ~/intel set HOME so it goes to staging directory
        bash.add_default_env("HOME", join_path(self.stage.path, "home"))
        # Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
        bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))

@@ -132,7 +132,7 @@ def install_component(self, installer_path):
        if not isdir(install_dir):
            raise RuntimeError("install failed to directory: {0}".format(install_dir))

    def setup_run_environment(self, env):
    def setup_run_environment(self, env: EnvironmentModifications) -> None:
        """Adds environment variables to the generated module file.

        These environment variables come from running:
@@ -13,9 +13,9 @@
import archspec

import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import HeaderList, LibraryList, join_path
from llnl.util.lang import ClassProperty, classproperty, match_predicate

import spack.builder
import spack.config

@@ -139,7 +139,7 @@ def view_file_conflicts(self, view, merge_map):
        ext_map = view.extensions_layout.extension_map(self.extendee_spec)
        namespaces = set(x.package.py_namespace for x in ext_map.values())
        namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
        find_namespace = lang.match_predicate(namespace_re)
        find_namespace = match_predicate(namespace_re)
        if self.py_namespace in namespaces:
            conflicts = list(x for x in conflicts if not find_namespace(x))

@@ -206,7 +206,7 @@ def remove_files_from_view(self, view, merge_map):
            spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
        )
        if self.py_namespace in remaining_namespaces:
            namespace_init = lang.match_predicate(
            namespace_init = match_predicate(
                r"site-packages/{0}/__init__.py".format(self.py_namespace)
            )
            ignore_namespace = True

@@ -324,6 +324,27 @@ def get_external_python_for_prefix(self):
    raise StopIteration("No external python could be detected for %s to depend on" % self.spec)


def _homepage(cls: "PythonPackage") -> Optional[str]:
    """Get the homepage from PyPI if available."""
    if cls.pypi:
        name = cls.pypi.split("/")[0]
        return f"https://pypi.org/project/{name}/"
    return None


def _url(cls: "PythonPackage") -> Optional[str]:
    if cls.pypi:
        return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
    return None


def _list_url(cls: "PythonPackage") -> Optional[str]:
    if cls.pypi:
        name = cls.pypi.split("/")[0]
        return f"https://pypi.org/simple/{name}/"
    return None


class PythonPackage(PythonExtension):
    """Specialized class for packages that are built using pip."""

@@ -351,25 +372,9 @@ class PythonPackage(PythonExtension):

    py_namespace: Optional[str] = None

    @lang.classproperty
    def homepage(cls) -> Optional[str]:  # type: ignore[override]
        if cls.pypi:
            name = cls.pypi.split("/")[0]
            return f"https://pypi.org/project/{name}/"
        return None

    @lang.classproperty
    def url(cls) -> Optional[str]:
        if cls.pypi:
            return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
        return None

    @lang.classproperty
    def list_url(cls) -> Optional[str]:  # type: ignore[override]
        if cls.pypi:
            name = cls.pypi.split("/")[0]
            return f"https://pypi.org/simple/{name}/"
        return None
    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
    url: ClassProperty[Optional[str]] = classproperty(_url)
    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)

    @property
    def python_spec(self) -> Spec:
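The net effect of moving the bodies to module level is unchanged behavior with a typed descriptor; a hedged sketch (package names hypothetical, URLs follow the functions above):

class PyExample(PythonPackage):
    pypi = "example/example-1.0.tar.gz"

# The descriptor computes values from the class attributes:
assert PyExample.homepage == "https://pypi.org/project/example/"
assert PyExample.list_url == "https://pypi.org/simple/example/"

class PyPinned(PyExample):
    homepage = "https://example.readthedocs.io"  # constant override stays valid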
@@ -3,8 +3,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Optional, Tuple

import llnl.util.lang as lang
from llnl.util.filesystem import mkdirp
from llnl.util.lang import ClassProperty, classproperty

from spack.directives import extends

@@ -54,6 +54,32 @@ def install(self, pkg, spec, prefix):
        pkg.module.R(*args)


def _homepage(cls: "RPackage") -> Optional[str]:
    if cls.cran:
        return f"https://cloud.r-project.org/package={cls.cran}"
    elif cls.bioc:
        return f"https://bioconductor.org/packages/{cls.bioc}"
    return None


def _url(cls: "RPackage") -> Optional[str]:
    if cls.cran:
        return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
    return None


def _list_url(cls: "RPackage") -> Optional[str]:
    if cls.cran:
        return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
    return None


def _git(cls: "RPackage") -> Optional[str]:
    if cls.bioc:
        return f"https://git.bioconductor.org/packages/{cls.bioc}"
    return None


class RPackage(Package):
    """Specialized class for packages that are built using R.

@@ -77,24 +103,7 @@ class RPackage(Package):

    extends("r")

    @lang.classproperty
    def homepage(cls):
        if cls.cran:
            return f"https://cloud.r-project.org/package={cls.cran}"
        elif cls.bioc:
            return f"https://bioconductor.org/packages/{cls.bioc}"

    @lang.classproperty
    def url(cls):
        if cls.cran:
            return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"

    @lang.classproperty
    def list_url(cls):
        if cls.cran:
            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"

    @lang.classproperty
    def git(cls):
        if cls.bioc:
            return f"https://git.bioconductor.org/packages/{cls.bioc}"
    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)
    url: ClassProperty[Optional[str]] = classproperty(_url)
    list_url: ClassProperty[Optional[str]] = classproperty(_list_url)
    git: ClassProperty[Optional[str]] = classproperty(_git)
@@ -5,8 +5,8 @@
from typing import Optional, Tuple

import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.lang import ClassProperty, classproperty

import spack.builder
import spack.spec

@@ -19,6 +19,12 @@
from spack.util.executable import Executable, ProcessError


def _homepage(cls: "RacketPackage") -> Optional[str]:
    if cls.racket_name:
        return f"https://pkgs.racket-lang.org/package/{cls.racket_name}"
    return None


class RacketPackage(PackageBase):
    """Specialized class for packages that are built using Racket's
    `raco pkg install` and `raco setup` commands.

@@ -37,13 +43,7 @@ class RacketPackage(PackageBase):
    extends("racket", when="build_system=racket")

    racket_name: Optional[str] = None
    parallel = True

    @lang.classproperty
    def homepage(cls):
        if cls.racket_name:
            return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
        return None
    homepage: ClassProperty[Optional[str]] = classproperty(_homepage)


@spack.builder.builder("racket")
351
lib/spack/spack/buildcache_migrate.py
Normal file
@@ -0,0 +1,351 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import codecs
import json
import os
import pathlib
import tempfile
from typing import NamedTuple

import llnl.util.tty as tty

import spack.binary_distribution as bindist
import spack.database as spack_db
import spack.error
import spack.mirrors.mirror
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util

from .enums import InstallRecordStatus
from .url_buildcache import (
    BlobRecord,
    BuildcacheComponent,
    compressed_json_from_dict,
    get_url_buildcache_class,
    sign_file,
    try_verify,
)


def v2_tarball_directory_name(spec):
    """
    Return name of the tarball directory according to the convention
    <os>-<architecture>/<compiler>/<package>-<version>/
    """
    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")


def v2_tarball_name(spec, ext):
    """
    Return the name of the tarfile according to the convention
    <os>-<architecture>-<package>-<dag_hash><ext>
    """
    spec_formatted = spec.format_path(
        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
    )
    return f"{spec_formatted}{ext}"


def v2_tarball_path_name(spec, ext):
    """
    Return the full path+name for a given spec according to the convention
    <tarball_directory_name>/<tarball_name>
    """
    return os.path.join(v2_tarball_directory_name(spec), v2_tarball_name(spec, ext))
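To make the v2 convention concrete, a hypothetical spec might map to names like the following (values invented for illustration):

# v2_tarball_directory_name(spec)
#   linux-ubuntu22.04-x86_64/gcc-12.3.0/zlib-1.3.1
# v2_tarball_name(spec, ".spack")
#   linux-ubuntu22.04-x86_64-gcc-12.3.0-zlib-1.3.1-<dag_hash>.spack
# v2_tarball_path_name(spec, ".spack") joins the two with os.path.join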
class MigrateSpecResult(NamedTuple):
    success: bool
    message: str


class MigrationException(spack.error.SpackError):
    """
    Raised when migration fails irrevocably
    """

    def __init__(self, msg):
        super().__init__(msg)


def _migrate_spec(
    s: spack.spec.Spec, mirror_url: str, tmpdir: str, unsigned: bool = False, signing_key: str = ""
) -> MigrateSpecResult:
    """Parallelizable function to migrate a single spec"""
    print_spec = f"{s.name}/{s.dag_hash()[:7]}"

    # Check if the spec file exists in the new location and exit early if so

    v3_cache_class = get_url_buildcache_class(layout_version=3)
    v3_cache_entry = v3_cache_class(mirror_url, s, allow_unsigned=unsigned)
    exists = v3_cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
    v3_cache_entry.destroy()

    if exists:
        msg = f"No need to migrate {print_spec}"
        return MigrateSpecResult(True, msg)

    # Try to fetch the spec metadata
    v2_metadata_urls = [
        url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json.sig"))
    ]

    if unsigned:
        v2_metadata_urls.append(
            url_util.join(mirror_url, "build_cache", v2_tarball_name(s, ".spec.json"))
        )

    spec_contents = None

    for meta_url in v2_metadata_urls:
        try:
            _, _, meta_file = web_util.read_from_url(meta_url)
            spec_contents = codecs.getreader("utf-8")(meta_file).read()
            v2_spec_url = meta_url
            break
        except (web_util.SpackWebError, OSError):
            pass
    else:
        msg = f"Unable to read metadata for {print_spec}"
        return MigrateSpecResult(False, msg)

    spec_dict = {}

    if unsigned:
        # User asked for unsigned, if we found a signed specfile, just ignore
        # the signature
        if v2_spec_url.endswith(".sig"):
            spec_dict = spack.spec.Spec.extract_json_from_clearsig(spec_contents)
        else:
            spec_dict = json.loads(spec_contents)
    else:
        # User asked for signed, we must successfully verify the signature
        local_signed_pre_verify = os.path.join(
            tmpdir, f"{s.name}_{s.dag_hash()}_verify.spec.json.sig"
        )
        with open(local_signed_pre_verify, "w", encoding="utf-8") as fd:
            fd.write(spec_contents)
        if not try_verify(local_signed_pre_verify):
            return MigrateSpecResult(False, f"Failed to verify signature of {print_spec}")
        with open(local_signed_pre_verify, encoding="utf-8") as fd:
            spec_dict = spack.spec.Spec.extract_json_from_clearsig(fd.read())
    # Read out and remove the bits needed to rename and position the archive
    bcc = spec_dict.pop("binary_cache_checksum", None)
    if not bcc:
        msg = "Cannot migrate a spec that does not have 'binary_cache_checksum'"
        return MigrateSpecResult(False, msg)

    algorithm = bcc["hash_algorithm"]
    checksum = bcc["hash"]

    # TODO: Remove this key once oci buildcache no longer uses it
    spec_dict["buildcache_layout_version"] = 2

    v2_archive_url = url_util.join(mirror_url, "build_cache", v2_tarball_path_name(s, ".spack"))

    # Spack's web utilities do not include direct copying of s3 objects, so we
    # need to download the archive locally, and then push it back to the target
    # location
    archive_stage_path = os.path.join(tmpdir, f"archive_stage_{s.name}_{s.dag_hash()}")
    archive_stage = spack.stage.Stage(v2_archive_url, path=archive_stage_path)

    try:
        archive_stage.create()
        archive_stage.fetch()
    except spack.error.FetchError:
        return MigrateSpecResult(False, f"Unable to fetch archive for {print_spec}")

    local_tarfile_path = archive_stage.save_filename

    # As long as we have to download the tarball anyway, we might as well compute the
    # checksum locally and check it against the expected value
    local_checksum = spack.util.crypto.checksum(
        spack.util.crypto.hash_fun_for_algo(algorithm), local_tarfile_path
    )

    if local_checksum != checksum:
        return MigrateSpecResult(
            False, f"Checksum mismatch for {print_spec}: expected {checksum}, got {local_checksum}"
        )

    spec_dict["archive_size"] = os.stat(local_tarfile_path).st_size

    # Compress the spec dict and compute its checksum
    metadata_checksum_algo = "sha256"
    spec_json_path = os.path.join(tmpdir, f"{s.name}_{s.dag_hash()}.spec.json")
    metadata_checksum, metadata_size = compressed_json_from_dict(
        spec_json_path, spec_dict, metadata_checksum_algo
    )

    tarball_blob_record = BlobRecord(
        spec_dict["archive_size"], v3_cache_class.TARBALL_MEDIATYPE, "gzip", algorithm, checksum
    )

    metadata_blob_record = BlobRecord(
        metadata_size,
        v3_cache_class.SPEC_MEDIATYPE,
        "gzip",
        metadata_checksum_algo,
        metadata_checksum,
    )

    # Compute the urls to the new blobs
    v3_archive_url = v3_cache_class.get_blob_url(mirror_url, tarball_blob_record)
    v3_spec_url = v3_cache_class.get_blob_url(mirror_url, metadata_blob_record)

    # First push the tarball
    tty.debug(f"Pushing {local_tarfile_path} to {v3_archive_url}")

    try:
        web_util.push_to_url(local_tarfile_path, v3_archive_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push archive for {print_spec}")

    # Then push the spec file
    tty.debug(f"Pushing {spec_json_path} to {v3_spec_url}")

    try:
        web_util.push_to_url(spec_json_path, v3_spec_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push spec metadata for {print_spec}")

    # Generate the manifest and write it to a temporary location
    manifest = {
        "version": v3_cache_class.get_layout_version(),
        "data": [tarball_blob_record.to_dict(), metadata_blob_record.to_dict()],
    }

    manifest_path = os.path.join(tmpdir, f"{s.dag_hash()}.manifest.json")
    with open(manifest_path, "w", encoding="utf-8") as f:
        json.dump(manifest, f, indent=0, separators=(",", ":"))
        # Note: when using gpg clear sign, we need to avoid long lines (19995
        # chars). If lines are longer, they are truncated without error. So,
        # here we still add newlines, but no indent, to save on file size and
        # line length.

    # Possibly sign the manifest
    if not unsigned:
        manifest_path = sign_file(signing_key, manifest_path)

    v3_manifest_url = v3_cache_class.get_manifest_url(s, mirror_url)

    # Push the manifest
    try:
        web_util.push_to_url(manifest_path, v3_manifest_url, keep_original=True)
    except Exception:
        return MigrateSpecResult(False, f"Failed to push manifest for {print_spec}")

    return MigrateSpecResult(True, f"Successfully migrated {print_spec}")
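For orientation, the manifest pushed above is a compact JSON document shaped roughly like this (the layout is schematic; the exact BlobRecord field names are not shown in this diff):

# {"version": 3,
#  "data": [
#    <tarball blob record: content length, media type, "gzip", checksum algorithm, checksum>,
#    <spec metadata blob record: content length, media type, "gzip", checksum algorithm, checksum>
#  ]}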
def migrate(
    mirror: spack.mirrors.mirror.Mirror, unsigned: bool = False, delete_existing: bool = False
) -> None:
    """Perform migration of the given mirror

    If unsigned is True, signatures on signed specs will be ignored, and specs
    will not be re-signed before pushing to the new location. Otherwise, spack
    will attempt to verify signatures and re-sign specs, and will fail if not
    able to do so. If delete_existing is True, spack will delete the original
    contents of the mirror once the migration is complete."""
    signing_key = ""
    if not unsigned:
        try:
            signing_key = bindist.select_signing_key()
        except (bindist.NoKeyException, bindist.PickKeyException):
            raise MigrationException(
                "Signed migration requires exactly one secret key in keychain"
            )

    delete_action = "deleting" if delete_existing else "keeping"
    sign_action = "an unsigned" if unsigned else "a signed"
    mirror_url = mirror.fetch_url

    tty.msg(
        f"Performing {sign_action} migration of {mirror.push_url} "
        f"and {delete_action} existing contents"
    )

    index_url = url_util.join(mirror_url, "build_cache", spack_db.INDEX_JSON_FILE)
    contents = None

    try:
        _, _, index_file = web_util.read_from_url(index_url)
        contents = codecs.getreader("utf-8")(index_file).read()
    except (web_util.SpackWebError, OSError):
        raise MigrationException("Buildcache migration requires a buildcache index")

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        index_path = os.path.join(tmpdir, "_tmp_index.json")
        with open(index_path, "w", encoding="utf-8") as fd:
            fd.write(contents)

        db = bindist.BuildCacheDatabase(tmpdir)
        db._read_from_file(pathlib.Path(index_path))

        specs_to_migrate = [
            s
            for s in db.query_local(installed=InstallRecordStatus.ANY)
            if not s.external and db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
        ]

        # Run the tasks in parallel if possible
        executor = spack.util.parallel.make_concurrent_executor()
        migrate_futures = [
            executor.submit(_migrate_spec, spec, mirror_url, tmpdir, unsigned, signing_key)
            for spec in specs_to_migrate
        ]

        success_count = 0

        tty.msg("Migration summary:")
        for spec, migrate_future in zip(specs_to_migrate, migrate_futures):
            result = migrate_future.result()
            msg = f"  {spec.name}/{spec.dag_hash()[:7]}: {result.message}"
            if result.success:
                success_count += 1
                tty.msg(msg)
            else:
                tty.error(msg)
            # The migrated index should have the same specs as the original index,
            # modulo any specs that we failed to migrate for whatever reason. So
            # to avoid having to re-fetch all the spec files now, just mark them
            # appropriately in the existing database and push that.
            db.mark(spec, "in_buildcache", result.success)

        if success_count > 0:
            tty.msg("Updating index and pushing keys")

            # If the layout.json doesn't yet exist on this mirror, push it
            v3_cache_class = get_url_buildcache_class(layout_version=3)
            v3_cache_class.maybe_push_layout_json(mirror_url)

            # Push the migrated mirror index
            index_tmpdir = os.path.join(tmpdir, "rebuild_index")
            os.mkdir(index_tmpdir)
            bindist._push_index(db, index_tmpdir, mirror_url)

            # Push the public part of the signing key
            if not unsigned:
                keys_tmpdir = os.path.join(tmpdir, "keys")
                os.mkdir(keys_tmpdir)
                bindist._url_push_keys(
                    mirror_url, keys=[signing_key], update_index=True, tmpdir=keys_tmpdir
                )
        else:
            tty.warn("No specs migrated, did you mean to perform an unsigned migration instead?")

        # Delete the old layout if the user requested it
        if delete_existing:
            delete_prefix = url_util.join(mirror_url, "build_cache")
            tty.msg(f"Recursively deleting {delete_prefix}")
            web_util.remove_url(delete_prefix, recursive=True)

        tty.msg("Migration complete")
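A minimal, hedged sketch of calling this entry point directly (mirror name hypothetical; the CLI front-end added later in this diff wraps the same function):

import spack.mirrors.mirror
from spack.buildcache_migrate import migrate

mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup("my-mirror")
migrate(mirror, unsigned=True, delete_existing=False)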
@@ -59,7 +59,7 @@ def __call__(self, spec, prefix):
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
    """Return the builder class if a package module defines it."""
    cls = getattr(pkg.module, name, None)
    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
    if cls and spack.repo.is_package_module(cls.__module__):
        return cls
    return None
@@ -121,6 +121,7 @@ def __init__(self, wrapped_pkg_object, root_builder):
            new_cls_name,
            bases,
            {
                "__module__": package_cls.__module__,
                "run_tests": property(lambda x: x.wrapped_package_object.run_tests),
                "test_requires_compiler": property(
                    lambda x: x.wrapped_package_object.test_requires_compiler

@@ -129,7 +130,6 @@ def __init__(self, wrapped_pkg_object, root_builder):
                "tester": property(lambda x: x.wrapped_package_object.tester),
            },
        )
        new_cls.__module__ = package_cls.__module__
        self.__class__ = new_cls
        self.__dict__.update(wrapped_pkg_object.__dict__)
@@ -185,10 +185,16 @@ def __init__(self, pkg):
        # These two methods don't follow the (self, spec, prefix) signature of phases nor
        # the (self) signature of methods, so they are added explicitly to avoid using a
        # catch-all (*args, **kwargs)
        def setup_build_environment(self, env):
        def setup_build_environment(
            self, env: spack.util.environment.EnvironmentModifications
        ) -> None:
            return self.pkg_with_dispatcher.setup_build_environment(env)

        def setup_dependent_build_environment(self, env, dependent_spec):
        def setup_dependent_build_environment(
            self,
            env: spack.util.environment.EnvironmentModifications,
            dependent_spec: spack.spec.Spec,
        ) -> None:
            return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)

    return Adapter(pkg)
@@ -402,7 +408,7 @@ def fixup_install(self):
        # do something after the package is installed
        pass

    def setup_build_environment(self, env):
    def setup_build_environment(self, env: EnvironmentModifications) -> None:
        env.set("MY_ENV_VAR", "my_value")

    class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
@@ -24,6 +24,7 @@

import spack
import spack.binary_distribution as bindist
import spack.builder
import spack.config as cfg
import spack.environment as ev
import spack.error

@@ -32,6 +33,7 @@
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.store
import spack.util.git
import spack.util.gpg as gpg_util
@@ -149,10 +151,10 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
    return False


def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
def compute_affected_packages(rev1: str = "HEAD^", rev2: str = "HEAD") -> Set[str]:
    """Determine which packages were added, removed or changed
    between rev1 and rev2, and return the names as a set"""
    return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
    return spack.repo.get_all_package_diffs("ARC", spack.repo.builtin_repo(), rev1=rev1, rev2=rev2)


def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):

@@ -244,7 +246,9 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
    if not spec_locations:
        return RebuildDecision(True, "not found anywhere")

    urls = ",".join([loc["mirror_url"] for loc in spec_locations])
    urls = ",".join(
        [f"{loc.url_and_version.url}@v{loc.url_and_version.version}" for loc in spec_locations]
    )
    message = f"up-to-date [{urls}]"
    return RebuildDecision(False, message)
@@ -613,32 +617,40 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
    job_spec, and attempts to copy the files into the directory given
    by job_log_dir.

    Args:
    Parameters:
        job_spec: spec associated with spack install log
        job_log_dir: path into which build log should be copied
    """
    tty.debug(f"job spec: {job_spec}")

    try:
        package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
    except spack.error.SpackError as e:
        tty.error(f"Cannot copy logs: {str(e)}")
    if not job_spec.concrete:
        tty.warn("Cannot copy artifacts for non-concrete specs")
        return

    # Get the package's archived files
    archive_files = []
    archive_root = package_metadata_root / "archived-files"
    if archive_root.is_dir():
        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
    else:
        msg = "Cannot copy package archived files: archived-files must be a directory"
        tty.warn(msg)
    package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
    if not os.path.isdir(package_metadata_root):
        # Fallback to using the stage directory
        job_pkg = job_spec.package

        package_metadata_root = pathlib.Path(job_pkg.stage.path)
        archive_files = spack.builder.create(job_pkg).archive_files
        tty.warn("Package not installed, falling back to use stage dir")
        tty.debug(f"stage dir: {package_metadata_root}")
    else:
        # Get the package's archived files
        archive_files = []
        archive_root = package_metadata_root / "archived-files"
        if os.path.isdir(archive_root):
            archive_files = [str(f) for f in archive_root.rglob("*") if os.path.isfile(f)]
        else:
            tty.debug(f"No archived files detected at {archive_root}")

    # Try zipped and unzipped versions of the build log
    build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
    build_log = package_metadata_root / "spack-build-out.txt"
    build_env_mods = package_metadata_root / "spack-build-env.txt"

    for f in [build_log_zipped, build_env_mods, *archive_files]:
        copy_files_to_artifacts(str(f), job_log_dir)
    for f in [build_log_zipped, build_log, build_env_mods, *archive_files]:
        copy_files_to_artifacts(str(f), job_log_dir, compress_artifacts=True)
def copy_test_logs_to_artifacts(test_stage, job_test_dir):

@@ -651,11 +663,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
    """
    tty.debug(f"test stage: {test_stage}")
    if not os.path.exists(test_stage):
        msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
        tty.error(msg)
        tty.error(f"Cannot copy test logs: job test stage ({test_stage}) does not exist")
        return

    copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
    copy_files_to_artifacts(
        os.path.join(test_stage, "*", "*.txt"), job_test_dir, compress_artifacts=True
    )


def download_and_extract_artifacts(url, work_dir) -> str:
@@ -1232,33 +1245,31 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
    """Given a url to write to and the details of the failed job, write an entry
    in the broken specs list.
    """
    tmpdir = tempfile.mkdtemp()
    file_path = os.path.join(tmpdir, "broken.txt")
    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        file_path = os.path.join(tmpdir, "broken.txt")

    broken_spec_details = {
        "broken-spec": {
            "job-name": pkg_name,
            "job-stack": stack_name,
            "job-url": job_url,
            "pipeline-url": pipeline_url,
            "concrete-spec-dict": spec_dict,
        broken_spec_details = {
            "broken-spec": {
                "job-name": pkg_name,
                "job-stack": stack_name,
                "job-url": job_url,
                "pipeline-url": pipeline_url,
                "concrete-spec-dict": spec_dict,
            }
        }
    }

    try:
        with open(file_path, "w", encoding="utf-8") as fd:
            syaml.dump(broken_spec_details, fd)
        web_util.push_to_url(
            file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
        )
    except Exception as err:
        # If there is an S3 error (e.g., access denied or connection
        # error), the first non boto-specific class in the exception
        # hierarchy is Exception. Just print a warning and return
        msg = f"Error writing to broken specs list {url}: {err}"
        tty.warn(msg)
    finally:
        shutil.rmtree(tmpdir)
        try:
            with open(file_path, "w", encoding="utf-8") as fd:
                syaml.dump(broken_spec_details, fd)
            web_util.push_to_url(
                file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
            )
        except Exception as err:
            # If there is an S3 error (e.g., access denied or connection
            # error), the first non boto-specific class in the exception
            # hierarchy is Exception. Just print a warning and return
            msg = f"Error writing to broken specs list {url}: {err}"
            tty.warn(msg)


def read_broken_spec(broken_spec_url):
@@ -2,9 +2,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import errno
import glob
import gzip
import json
import os
import re
import shutil
import sys
import time
from collections import deque

@@ -25,13 +29,14 @@
import spack.mirrors.mirror
import spack.schema
import spack.spec
import spack.util.compression as compression
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp
from spack.url_buildcache import get_url_buildcache_class

IS_WINDOWS = sys.platform == "win32"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
@@ -40,22 +45,67 @@
_urlopen = web_util.urlopen


def copy_files_to_artifacts(src, artifacts_dir):
def copy_gzipped(glob_or_path: str, dest: str) -> None:
    """Copy all of the files in the source glob/path to the destination.

    Args:
        glob_or_path: path to file to test
        dest: destination path to copy to
    """

    files = glob.glob(glob_or_path)
    if not files:
        raise OSError("No such file or directory: '{0}'".format(glob_or_path), errno.ENOENT)
    if len(files) > 1 and not os.path.isdir(dest):
        raise ValueError(
            "'{0}' matches multiple files but '{1}' is not a directory".format(glob_or_path, dest)
        )

    def is_gzipped(path):
        with open(path, "rb") as fd:
            return compression.GZipFileType().matches_magic(fd)

    for src in files:
        if is_gzipped(src):
            fs.copy(src, dest)
        else:
            # Compress and copy in one step
            src_name = os.path.basename(src)
            if os.path.isdir(dest):
                zipped = os.path.join(dest, f"{src_name}.gz")
            elif not dest.endswith(".gz"):
                zipped = f"{dest}.gz"
            else:
                zipped = dest

            with open(src, "rb") as fin, gzip.open(zipped, "wb") as fout:
                shutil.copyfileobj(fin, fout)


def copy_files_to_artifacts(
    src: str, artifacts_dir: str, *, compress_artifacts: bool = False
) -> None:
    """
    Copy file(s) to the given artifacts directory

    Parameters:
    Args:
        src (str): the glob-friendly path expression for the file(s) to copy
        artifacts_dir (str): the destination directory
        compress_artifacts (bool): option to compress copied artifacts using Gzip
    """
    try:
        fs.copy(src, artifacts_dir)

        if compress_artifacts:
            copy_gzipped(src, artifacts_dir)
        else:
            fs.copy(src, artifacts_dir)
    except Exception as err:
        msg = (
            f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
            f"exception: {str(err)}"
        tty.warn(
            (
                f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
                f"exception: {str(err)}"
            )
        )
        tty.warn(msg)
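A short usage sketch of the new helper (paths hypothetical):

copy_gzipped("/tmp/stage/spack-build-out.txt", "artifacts/")
# plain-text input: written as artifacts/spack-build-out.txt.gz
copy_gzipped("/tmp/stage/spack-build-out.txt.gz", "artifacts/")
# already gzipped (magic bytes detected): copied through unchanged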
def win_quote(quote_str: str) -> str:

@@ -129,33 +179,13 @@ def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):

    for release_spec in specs:
        release_spec_dag_hash = release_spec.dag_hash()
        # TODO: This assumes signed version of the spec
        buildcache_copies[release_spec_dag_hash] = [
            {
                "src": url_util.join(
                    src_prefix,
                    bindist.build_cache_relative_path(),
                    bindist.tarball_name(release_spec, ".spec.json.sig"),
                ),
                "dest": url_util.join(
                    dest_prefix,
                    bindist.build_cache_relative_path(),
                    bindist.tarball_name(release_spec, ".spec.json.sig"),
                ),
            },
            {
                "src": url_util.join(
                    src_prefix,
                    bindist.build_cache_relative_path(),
                    bindist.tarball_path_name(release_spec, ".spack"),
                ),
                "dest": url_util.join(
                    dest_prefix,
                    bindist.build_cache_relative_path(),
                    bindist.tarball_path_name(release_spec, ".spack"),
                ),
            },
        ]
        cache_class = get_url_buildcache_class(
            layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        )
        buildcache_copies[release_spec_dag_hash] = {
            "src": cache_class.get_manifest_url(release_spec, src_prefix),
            "dest": cache_class.get_manifest_url(release_spec, dest_prefix),
        }

    target_dir = os.path.dirname(output_file)
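Each manifest entry therefore collapses from a list of per-file copies to a single pair of manifest URLs; schematically (placeholders, not real paths):

# {
#     "<dag_hash>": {
#         "src": "<src_prefix>/<v3 manifest path for the spec>",
#         "dest": "<dest_prefix>/<v3 manifest path for the spec>"
#     }
# }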
@@ -292,6 +292,9 @@ def main_script_replacements(cmd):
    )
    maybe_generate_manifest(pipeline, options, manifest_path)

    relative_specs_url = bindist.buildcache_relative_specs_url()
    relative_keys_url = bindist.buildcache_relative_keys_url()

    if options.pipeline_type == PipelineType.COPY_ONLY:
        stage_names.append("copy")
        sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])

@@ -301,9 +304,12 @@ def main_script_replacements(cmd):
        if "variables" not in sync_job:
            sync_job["variables"] = {}

        sync_job["variables"][
            "SPACK_COPY_ONLY_DESTINATION"
        ] = options.buildcache_destination.fetch_url
        sync_job["variables"].update(
            {
                "SPACK_COPY_ONLY_DESTINATION": options.buildcache_destination.fetch_url,
                "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url,
            }
        )

        pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
        if "buildcache-source" not in pipeline_mirrors:

@@ -333,9 +339,13 @@ def main_script_replacements(cmd):
        signing_job["interruptible"] = True
        if "variables" not in signing_job:
            signing_job["variables"] = {}
        signing_job["variables"][
            "SPACK_BUILDCACHE_DESTINATION"
        ] = options.buildcache_destination.push_url
        signing_job["variables"].update(
            {
                "SPACK_BUILDCACHE_DESTINATION": options.buildcache_destination.push_url,
                "SPACK_BUILDCACHE_RELATIVE_SPECS_URL": relative_specs_url,
                "SPACK_BUILDCACHE_RELATIVE_KEYS_URL": relative_keys_url,
            }
        )
        signing_job["dependencies"] = []

        output_object["sign-pkgs"] = signing_job
@@ -436,7 +436,7 @@ def display_specs(specs, args=None, **kwargs):
        all_headers (bool): show headers even when arch/compiler aren't defined
        status_fn (typing.Callable): if provided, prepend install-status info
        output (typing.IO): A file object to write to. Default is ``sys.stdout``
        specfile_format (bool): specfile format of the current spec
    """

    def get_arg(name, default=None):

@@ -458,6 +458,7 @@ def get_arg(name, default=None):
    all_headers = get_arg("all_headers", False)
    output = get_arg("output", sys.stdout)
    status_fn = get_arg("status_fn", None)
    specfile_format = get_arg("specfile_format", False)

    decorator = get_arg("decorator", None)
    if decorator is None:

@@ -479,6 +480,9 @@ def get_arg(name, default=None):
    vfmt = "{variants}" if variants else ""
    format_string = nfmt + "{@version}" + vfmt + ffmt

    if specfile_format:
        format_string = "[{specfile_version}] " + format_string

    def fmt(s, depth=0):
        """Formatter function for all output specs"""
        string = ""
@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib
import shutil
import sys
import tempfile

@@ -28,7 +29,7 @@

# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz"
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache-v3.tar.gz"

#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = "bootstrap_cache"

@@ -410,8 +411,9 @@ def _mirror(args):
    stage.create()
    stage.fetch()
    stage.expand_archive()
    build_cache_dir = os.path.join(stage.source_path, "build_cache")
    shutil.move(build_cache_dir, mirror_dir)
    stage_dir = pathlib.Path(stage.source_path)
    for entry in stage_dir.iterdir():
        shutil.move(str(entry), mirror_dir)
    llnl.util.tty.set_msg_enabled(True)

def write_metadata(subdir, metadata):

@@ -436,7 +438,6 @@ def write_metadata(subdir, metadata):
    shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
    shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
    instructions += cmd.format("local-binaries", rel_directory)
    instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
    print(instructions)
@@ -4,11 +4,9 @@
import argparse
import glob
import json
import os
import shutil
import sys
import tempfile
from typing import List, Tuple
from typing import List, Optional, Tuple

import llnl.util.tty as tty
from llnl.string import plural

@@ -27,14 +25,21 @@
import spack.stage
import spack.store
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.spec import Spec, save_dependency_specfiles

from ..buildcache_migrate import migrate
from ..enums import InstallRecordStatus
from ..url_buildcache import (
    BuildcacheComponent,
    BuildcacheEntryError,
    URLBuildcacheEntry,
    check_mirror_for_layout,
    get_url_buildcache_class,
)

description = "create, download and install binary packages"
section = "packaging"
@@ -272,6 +277,27 @@ def setup_parser(subparser: argparse.ArgumentParser):
    )
    update_index.set_defaults(func=update_index_fn)

    # Migrate a buildcache from layout_version 2 to version 3
    migrate = subparsers.add_parser("migrate", help=migrate_fn.__doc__)
    migrate.add_argument("mirror", type=arguments.mirror_name, help="name of a configured mirror")
    migrate.add_argument(
        "-u",
        "--unsigned",
        default=False,
        action="store_true",
        help="Ignore signatures and do not resign, default is False",
    )
    migrate.add_argument(
        "-d",
        "--delete-existing",
        default=False,
        action="store_true",
        help="Delete the previous layout, the default is to keep it.",
    )
    arguments.add_common_arguments(migrate, ["yes_to_all"])
    # TODO: add -y argument to prompt if user really means to delete existing
    migrate.set_defaults(func=migrate_fn)


def _matching_specs(specs: List[Spec]) -> List[Spec]:
    """Disambiguate specs and return a list of matching specs"""
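Example invocations of the new subcommand, based on the flags registered above (mirror name hypothetical):

# verify, re-sign, and migrate, keeping the v2 layout in place:
#   spack buildcache migrate my-mirror
# skip signature handling and remove the old layout afterwards:
#   spack buildcache migrate --unsigned --delete-existing my-mirror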
@@ -397,6 +423,10 @@ def push_fn(args):
            (s, PackageNotInstalledError("package not installed")) for s in not_installed
        )

    # Warn about possible old binary mirror layout
    if not mirror.push_url.startswith("oci://"):
        check_mirror_for_layout(mirror)

    with bindist.make_uploader(
        mirror=mirror,
        force=args.force,

@@ -527,8 +557,7 @@ def download_fn(args):
    if len(specs) != 1:
        tty.die("a single spec argument is required to download from a buildcache")

    if not bindist.download_single_spec(specs[0], args.path):
        sys.exit(1)
    bindist.download_single_spec(specs[0], args.path)


def save_specfile_fn(args):
@@ -553,29 +582,78 @@ def save_specfile_fn(args):
    )


def copy_buildcache_file(src_url, dest_url, local_path=None):
    """Copy from source url to destination url"""
    tmpdir = None
def copy_buildcache_entry(cache_entry: URLBuildcacheEntry, destination_url: str):
    """Download buildcache entry and copy it to the destination_url"""
    try:
        spec_dict = cache_entry.fetch_metadata()
        cache_entry.fetch_archive()
    except bindist.BuildcacheEntryError as e:
        tty.warn(f"Failed to retrieve buildcache for copying due to {e}")
        cache_entry.destroy()
        return

    if not local_path:
        tmpdir = tempfile.mkdtemp()
        local_path = os.path.join(tmpdir, os.path.basename(src_url))
    spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC)
    local_spec_path = cache_entry.get_local_spec_path()
    tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL)
    local_tarball_path = cache_entry.get_local_archive_path()

    target_spec = spack.spec.Spec.from_dict(spec_dict)
    spec_label = f"{target_spec.name}/{target_spec.dag_hash()[:7]}"

    if not tarball_blob_record:
        cache_entry.destroy()
        raise BuildcacheEntryError(f"No source tarball blob record, failed to sync {spec_label}")

    # Try to push the tarball
    tarball_dest_url = cache_entry.get_blob_url(destination_url, tarball_blob_record)

    try:
        temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
        try:
            temp_stage.create()
            temp_stage.fetch()
            web_util.push_to_url(local_path, dest_url, keep_original=True)
        except spack.error.FetchError as e:
            # Expected, since we have to try all the possible extensions
            tty.debug("no such file: {0}".format(src_url))
            tty.debug(e)
        finally:
            temp_stage.destroy()
    finally:
        if tmpdir and os.path.exists(tmpdir):
            shutil.rmtree(tmpdir)
        web_util.push_to_url(local_tarball_path, tarball_dest_url, keep_original=True)
    except Exception as e:
        tty.warn(f"Failed to push {local_tarball_path} to {tarball_dest_url} due to {e}")
        cache_entry.destroy()
        return

    if not spec_blob_record:
        cache_entry.destroy()
        raise BuildcacheEntryError(f"No source spec blob record, failed to sync {spec_label}")

    # Try to push the spec file
    spec_dest_url = cache_entry.get_blob_url(destination_url, spec_blob_record)

    try:
        web_util.push_to_url(local_spec_path, spec_dest_url, keep_original=True)
    except Exception as e:
        tty.warn(f"Failed to push {local_spec_path} to {spec_dest_url} due to {e}")
        cache_entry.destroy()
        return

    # Stage the manifest locally, since if it's signed, we don't want to try
    # to reproduce that here. Instead just push the locally staged manifest to
    # the expected path at the destination url.
    manifest_src_url = cache_entry.remote_manifest_url
    manifest_dest_url = cache_entry.get_manifest_url(target_spec, destination_url)

    manifest_stage = spack.stage.Stage(manifest_src_url)

    try:
        manifest_stage.create()
        manifest_stage.fetch()
    except Exception as e:
        tty.warn(f"Failed to fetch manifest from {manifest_src_url} due to {e}")
        manifest_stage.destroy()
        cache_entry.destroy()
        return

    local_manifest_path = manifest_stage.save_filename

    try:
        web_util.push_to_url(local_manifest_path, manifest_dest_url, keep_original=True)
    except Exception as e:
        tty.warn(f"Failed to push manifest to {manifest_dest_url} due to {e}")

    manifest_stage.destroy()
    cache_entry.destroy()
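A hedged sketch of driving the new helper directly (URLs and spec variable hypothetical; sync_fn below does essentially this per spec):

cache_class = get_url_buildcache_class(
    layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
)
entry = cache_class("s3://source-mirror", some_spec, allow_unsigned=True)  # some_spec: a concrete Spec
entry.read_manifest()
copy_buildcache_entry(entry, "s3://destination-mirror")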
def sync_fn(args):

@@ -615,37 +693,21 @@ def sync_fn(args):
        )
    )

    build_cache_dir = bindist.build_cache_relative_path()
    buildcache_rel_paths = []

    tty.debug("Syncing the following specs:")
    for s in env.all_specs():
    specs_to_sync = [s for s in env.all_specs() if not s.external]
    for s in specs_to_sync:
        tty.debug("  {0}{1}: {2}".format("* " if s in env.roots() else "  ", s.name, s.dag_hash()))

        buildcache_rel_paths.extend(
            [
                os.path.join(build_cache_dir, bindist.tarball_path_name(s, ".spack")),
                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json.sig")),
                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json")),
                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
            ]
        cache_class = get_url_buildcache_class(
            layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        )

    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug("Copying {0} to {1} via {2}".format(src_url, dest_url, local_path))
            copy_buildcache_file(src_url, dest_url, local_path=local_path)
    finally:
        shutil.rmtree(tmpdir)
        src_cache_entry = cache_class(src_mirror_url, s, allow_unsigned=True)
        src_cache_entry.read_manifest()
        copy_buildcache_entry(src_cache_entry, dest_mirror_url)
def manifest_copy(manifest_file_list, dest_mirror=None):
def manifest_copy(
    manifest_file_list: List[str], dest_mirror: Optional[spack.mirrors.mirror.Mirror] = None
):
    """Read manifest files containing information about specific specs to copy
    from source to destination, remove duplicates since any binary package for
    a given hash should be the same as any other, and copy all files specified
@@ -655,21 +717,24 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
    for manifest_path in manifest_file_list:
        with open(manifest_path, encoding="utf-8") as fd:
            manifest = json.loads(fd.read())
            for spec_hash, copy_list in manifest.items():
            for spec_hash, copy_obj in manifest.items():
                # Last duplicate hash wins
                deduped_manifest[spec_hash] = copy_list
                deduped_manifest[spec_hash] = copy_obj

    build_cache_dir = bindist.build_cache_relative_path()
    for spec_hash, copy_list in deduped_manifest.items():
        for copy_file in copy_list:
            dest = copy_file["dest"]
            if dest_mirror:
                src_relative_path = os.path.join(
                    build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
                )
                dest = url_util.join(dest_mirror.push_url, src_relative_path)
            tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
            copy_buildcache_file(copy_file["src"], dest)
    for spec_hash, copy_obj in deduped_manifest.items():
        cache_class = get_url_buildcache_class(
            layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        )
        src_cache_entry = cache_class(
            cache_class.get_base_url(copy_obj["src"]), allow_unsigned=True
        )
        src_cache_entry.read_manifest(manifest_url=copy_obj["src"])
        if dest_mirror:
            destination_url = dest_mirror.push_url
        else:
            destination_url = cache_class.get_base_url(copy_obj["dest"])
        tty.debug("copying {0} to {1}".format(copy_obj["src"], destination_url))
        copy_buildcache_entry(src_cache_entry, destination_url)


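# The docstring above relies on "last duplicate hash wins", which is plain dict
# assignment while reading the manifests in order. A runnable sketch with two
# hypothetical manifests that both mention the same hash:
import json

manifests = [
    '{"abc123": {"src": "s3://a/m1", "dest": "s3://b/m1"}}',
    '{"abc123": {"src": "s3://a/m2", "dest": "s3://b/m2"}}',
]

deduped = {}
for text in manifests:
    for spec_hash, copy_obj in json.loads(text).items():
        deduped[spec_hash] = copy_obj  # last duplicate hash wins

assert deduped["abc123"]["src"] == "s3://a/m2"
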
def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
@@ -693,13 +758,9 @@ def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
        bindist._url_generate_package_index(url, tmpdir)

    if update_keys:
        keys_url = url_util.join(
            url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
        )

        try:
            with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
                bindist.generate_key_index(keys_url, tmpdir)
                bindist.generate_key_index(url, tmpdir)
        except bindist.CannotListKeys as e:
            # Do not error out if listing keys went wrong. This usually means that the _gpg path
            # does not exist. TODO: distinguish between this and other errors.
@@ -711,5 +772,53 @@ def update_index_fn(args):
    return update_index(args.mirror, update_keys=args.keys)


def migrate_fn(args):
    """perform in-place binary mirror migration (2 to 3)

    A mirror can contain both layout version 2 and version 3 simultaneously without
    interference. This command performs in-place migration of a binary mirror laid
    out according to version 2, to a binary mirror laid out according to layout
    version 3. Only indexed specs will be migrated, so consider updating the mirror
    index before running this command. Re-run the command to migrate any missing
    items.

    The default mode of operation is to perform a signed migration, that is, spack
    will attempt to verify the signatures on specs, and then re-sign them before
    migration, using whatever keys are already installed in your key ring. You can
    migrate a mirror of unsigned binaries (or convert a mirror of signed binaries
    to unsigned) by providing the --unsigned argument.

    By default spack will leave the original mirror contents (in the old layout) in
    place after migration. You can have spack remove the old contents by providing
    the --delete-existing argument. Because migrating a mostly-already-migrated
    mirror should be fast, consider a workflow where you perform a default migration
    (i.e. preserve the existing layout rather than deleting it), then evaluate the
    state of the migrated mirror by attempting to install from it, and finally
    run the migration again with --delete-existing."""
    target_mirror = args.mirror
    unsigned = args.unsigned
    assert isinstance(target_mirror, spack.mirrors.mirror.Mirror)
    delete_existing = args.delete_existing

    proceed = True
    if delete_existing and not args.yes_to_all:
        msg = (
            "Using --delete-existing will delete the entire contents \n"
            "    of the old layout within the mirror. Because migrating a mirror \n"
            "    that has already been migrated should be fast, consider a workflow \n"
            "    where you perform a default migration (i.e. preserve the existing \n"
            "    layout rather than deleting it), then evaluate the state of the \n"
            "    migrated mirror by attempting to install from it, and finally, \n"
            "    run the migration again with --delete-existing."
        )
        tty.warn(msg)
        proceed = tty.get_yes_or_no("Do you want to proceed?", default=False)

    if not proceed:
        tty.die("Migration aborted.")

    migrate(target_mirror, unsigned=unsigned, delete_existing=delete_existing)

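# The docstring recommends a two-pass workflow. A hedged sketch of driving it
# programmatically; `migrate` has the signature used above, while `mirror` and
# `verify` are placeholders for a configured mirror and a validation step.
def two_pass_migration(migrate, mirror, verify) -> None:
    # Pass 1: migrate while leaving the old layout in place.
    migrate(mirror, unsigned=False, delete_existing=False)

    # Evaluate the migrated mirror, e.g. by attempting an install from it.
    if not verify(mirror):
        raise RuntimeError("migrated mirror failed verification; old layout is still intact")

    # Pass 2: re-run (fast when mostly migrated) and drop the old layout.
    migrate(mirror, unsigned=False, delete_existing=True)
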
def buildcache(parser, args):
    return args.func(args)

@@ -423,7 +423,7 @@ def ci_rebuild(args):
        # jobs in subsequent stages.
        tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
        for match in matches:
            tty.msg("  {0}".format(match["mirror_url"]))
            tty.msg("  {0}".format(match.url_and_version.url))

        # Now we are done and successful
        return 0
@@ -453,7 +453,7 @@ def ci_rebuild(args):

    # Arguments when installing the root from sources
    deps_install_args = install_args + ["--only=dependencies"]
    root_install_args = install_args + ["--only=package"]
    root_install_args = install_args + ["--keep-stage", "--only=package"]

    if cdash_handler:
        # Add additional arguments to `spack install` for CDash reporting.
@@ -493,6 +493,9 @@ def ci_rebuild(args):
    # Copy logs and archived files from the install metadata (.spack) directory to artifacts now
    spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

    # Clear the stage directory
    spack.stage.purge()

    # If the installation succeeded and we're running stand-alone tests for
    # the package, run them and copy the output. Failures of any kind should
    # *not* terminate the build process or preclude creating the build cache.
@@ -788,7 +791,9 @@ def ci_verify_versions(args):
    """
    # Get a list of all packages that have been changed or added
    # between from_ref and to_ref
    pkgs = spack.repo.get_all_package_diffs("AC", args.from_ref, args.to_ref)
    pkgs = spack.repo.get_all_package_diffs(
        "AC", spack.repo.builtin_repo(), args.from_ref, args.to_ref
    )

    failed_version = False
    for pkg_name in pkgs:

@@ -63,7 +63,7 @@ def setup_parser(subparser):
    )

    # List
    list_parser = sp.add_parser("list", help="list available compilers")
    list_parser = sp.add_parser("list", aliases=["ls"], help="list available compilers")
    list_parser.add_argument(
        "--scope", action=arguments.ConfigScope, help="configuration scope to read from"
    )
@@ -216,5 +216,6 @@ def compiler(parser, args):
        "rm": compiler_remove,
        "info": compiler_info,
        "list": compiler_list,
        "ls": compiler_list,
    }
    action[args.compiler_command](args)

@@ -23,7 +23,7 @@
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
from spack.util.naming import pkg_name_to_class_name, simplify_name

description = "create a new package file"
section = "packaging"
@@ -95,7 +95,7 @@ class BundlePackageTemplate:

    def __init__(self, name: str, versions, languages: List[str]):
        self.name = name
        self.class_name = mod_to_class(name)
        self.class_name = pkg_name_to_class_name(name)
        self.versions = versions
        self.languages = languages

@@ -874,7 +874,7 @@ def get_name(name, url):

    result = simplify_name(result)

    if not valid_fully_qualified_module_name(result):
    if not re.match(r"^[a-z0-9-]+$", result):
        tty.die("Package name can only contain a-z, 0-9, and '-'")

    return result

@@ -62,7 +62,7 @@ def setup_parser(subparser):
        "package Spack knows how to find."
    )

    sp.add_parser("list", help="list detectable packages, by repository and name")
    sp.add_parser("list", aliases=["ls"], help="list detectable packages, by repository and name")

    read_cray_manifest = sp.add_parser(
        "read-cray-manifest",
@@ -259,6 +259,7 @@ def external(parser, args):
    action = {
        "find": external_find,
        "list": external_list,
        "ls": external_list,
        "read-cray-manifest": external_read_cray_manifest,
    }
    action[args.external_command](args)

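# The `ls` alias has to be wired in two places: `aliases=` on the subparser and
# an extra key in the dispatch dict, because argparse records whichever spelling
# the user typed. A self-contained illustration (parser names are hypothetical):
import argparse

parser = argparse.ArgumentParser(prog="demo")
sub = parser.add_subparsers(dest="command")
sub.add_parser("list", aliases=["ls"], help="list things")


def do_list(args):
    print("listing")


# One dispatch entry per spelling, since args.command holds the typed alias.
action = {"list": do_list, "ls": do_list}

args = parser.parse_args(["ls"])
action[args.command](args)  # prints "listing"
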
@@ -51,6 +51,12 @@ def setup_parser(subparser):
        "-I", "--install-status", action="store_true", help="show install status of packages"
    )

    subparser.add_argument(
        "--specfile-format",
        action="store_true",
        help="show the specfile format for installed deps",
    )

    subparser.add_argument(
        "-d", "--deps", action="store_true", help="output dependencies along with found specs"
    )
@@ -280,6 +286,7 @@ def root_decorator(spec, string):
            show_flags=True,
            decorator=root_decorator,
            variants=True,
            specfile_format=args.specfile_format,
        )

        print()
@@ -301,6 +308,7 @@ def root_decorator(spec, string):
            namespace=True,
            show_flags=True,
            variants=True,
            specfile_format=args.specfile_format,
        )
        print()

@@ -390,7 +398,12 @@ def find(parser, args):
    if args.show_concretized:
        display_results += concretized_but_not_installed
    cmd.display_specs(
        display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
        display_results,
        args,
        decorator=decorator,
        all_headers=True,
        status_fn=status_fn,
        specfile_format=args.specfile_format,
    )

    # print number of installed packages last (as the list may be long)

@@ -10,11 +10,13 @@
import re
import sys
from html import escape
from typing import Type

import llnl.util.tty as tty
from llnl.util.tty.colify import colify

import spack.deptypes as dt
import spack.package_base
import spack.repo
from spack.cmd.common import arguments
from spack.version import VersionList
@@ -139,10 +141,10 @@ def name_only(pkgs, out):
    tty.msg("%d packages" % len(pkgs))


def github_url(pkg):
def github_url(pkg: Type[spack.package_base.PackageBase]) -> str:
    """Link to a package file on github."""
    url = "https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py"
    return url.format(pkg.name)
    mod_path = pkg.__module__.replace(".", "/")
    return f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py"
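
# The rewritten github_url derives the blob path from the class's module instead
# of hard-coding the v1 repo layout. A quick illustration with a hypothetical
# v2 package module:
mod = "spack_repo.builtin.packages.zlib_ng.package"
mod_path = mod.replace(".", "/")
print(f"https://github.com/spack/spack/blob/develop/var/spack/{mod_path}.py")
# -> .../var/spack/spack_repo/builtin/packages/zlib_ng/package.py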


def rows_for_ncols(elts, ncols):

@@ -89,17 +89,17 @@ def setup_parser(subparser):

def pkg_add(args):
    """add a package to the git stage with `git add`"""
    spack.repo.add_package_to_git_stage(args.packages)
    spack.repo.add_package_to_git_stage(args.packages, spack.repo.builtin_repo())


def pkg_list(args):
    """list packages associated with a particular spack git revision"""
    colify(spack.repo.list_packages(args.rev))
    colify(spack.repo.list_packages(args.rev, spack.repo.builtin_repo()))


def pkg_diff(args):
    """compare packages available in two different git revisions"""
    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())

    if u1:
        print("%s:" % args.rev1)
@@ -114,21 +114,23 @@ def pkg_diff(args):

def pkg_removed(args):
    """show packages removed since a commit"""
    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
    if u1:
        colify(sorted(u1))


def pkg_added(args):
    """show packages added since a commit"""
    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2)
    u1, u2 = spack.repo.diff_packages(args.rev1, args.rev2, spack.repo.builtin_repo())
    if u2:
        colify(sorted(u2))


def pkg_changed(args):
    """show packages changed since a commit"""
    packages = spack.repo.get_all_package_diffs(args.type, args.rev1, args.rev2)
    packages = spack.repo.get_all_package_diffs(
        args.type, spack.repo.builtin_repo(), args.rev1, args.rev2
    )

    if packages:
        colify(sorted(packages))

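# With the explicit-repo signatures above, a one-off report of packages touched
# by the most recent commit could look like this (a sketch that assumes a
# configured Spack session with the builtin repo available):
import spack.repo

repo = spack.repo.builtin_repo()
removed, added = spack.repo.diff_packages("HEAD^1", "HEAD", repo)
touched = spack.repo.get_all_package_diffs("AC", repo, "HEAD^1", "HEAD")
print(sorted(added), sorted(removed), sorted(touched))
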
@@ -4,6 +4,7 @@

import os
import sys
from typing import List

import llnl.util.tty as tty

@@ -24,9 +25,7 @@ def setup_parser(subparser):
    create_parser = sp.add_parser("create", help=repo_create.__doc__)
    create_parser.add_argument("directory", help="directory to create the repo in")
    create_parser.add_argument(
        "namespace",
        help="namespace to identify packages in the repository (defaults to the directory name)",
        nargs="?",
        "namespace", help="name or namespace to identify packages in the repository"
    )
    create_parser.add_argument(
        "-d",
@@ -138,7 +137,7 @@ def repo_remove(args):
def repo_list(args):
    """show registered repositories and their namespaces"""
    roots = spack.config.get("repos", scope=args.scope)
    repos = []
    repos: List[spack.repo.Repo] = []
    for r in roots:
        try:
            repos.append(spack.repo.from_path(r))
@@ -146,17 +145,14 @@ def repo_list(args):
            continue

    if sys.stdout.isatty():
        msg = "%d package repositor" % len(repos)
        msg += "y." if len(repos) == 1 else "ies."
        tty.msg(msg)
        tty.msg(f"{len(repos)} package repositor" + ("y." if len(repos) == 1 else "ies."))

    if not repos:
        return

    max_ns_len = max(len(r.namespace) for r in repos)
    for repo in repos:
        fmt = "%%-%ds%%s" % (max_ns_len + 4)
        print(fmt % (repo.namespace, repo.root))
        print(f"{repo.namespace:<{max_ns_len + 4}}{repo.package_api_str:<8}{repo.root}")


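# repo_list now computes the column width inside an f-string format spec, where
# nested braces let the width itself be an expression. A standalone rendering
# with made-up rows:
rows = [("builtin", "v2.0", "/repos/spack_repo/builtin"), ("mine", "v1.0", "/repos/mine")]

width = max(len(ns) for ns, _, _ in rows)
for ns, api, root in rows:
    print(f"{ns:<{width + 4}}{api:<8}{root}")
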
def repo(parser, args):

@@ -136,20 +136,7 @@ def solve(parser, args):
    setup_only = set(show) == {"asp"}
    unify = spack.config.get("concretizer:unify")
    allow_deprecated = spack.config.get("config:deprecated", False)
    if unify != "when_possible":
        # set up solver parameters
        # Note: reuse and other concretizer prefs are passed as configuration
        result = solver.solve(
            specs,
            out=output,
            timers=args.timers,
            stats=args.stats,
            setup_only=setup_only,
            allow_deprecated=allow_deprecated,
        )
        if not setup_only:
            _process_result(result, show, required_format, kwargs)
    else:
    if unify == "when_possible":
        for idx, result in enumerate(
            solver.solve_in_rounds(
                specs,
@@ -166,3 +153,29 @@ def solve(parser, args):
            print("% END ROUND {0}\n".format(idx))
            if not setup_only:
                _process_result(result, show, required_format, kwargs)
    elif unify:
        # set up solver parameters
        # Note: reuse and other concretizer prefs are passed as configuration
        result = solver.solve(
            specs,
            out=output,
            timers=args.timers,
            stats=args.stats,
            setup_only=setup_only,
            allow_deprecated=allow_deprecated,
        )
        if not setup_only:
            _process_result(result, show, required_format, kwargs)
    else:
        for spec in specs:
            tty.msg("SOLVING SPEC:", spec)
            result = solver.solve(
                [spec],
                out=output,
                timers=args.timers,
                stats=args.stats,
                setup_only=setup_only,
                allow_deprecated=allow_deprecated,
            )
            if not setup_only:
                _process_result(result, show, required_format, kwargs)

@@ -56,10 +56,10 @@ def is_package(f):
    """Whether flake8 should consider a file as a core file or a package.

    We run flake8 with different exceptions for the core and for
    packages, since we allow `from spack import *` and poking globals
    packages, since we allow `from spack.package import *` and poking globals
    into packages.
    """
    return f.startswith("var/spack/repos/") and f.endswith("package.py")
    return f.startswith("var/spack/") and f.endswith("package.py")


#: decorator for adding tools to the list
@@ -380,7 +380,7 @@ def run_black(black_cmd, file_list, args):
def _module_part(root: str, expr: str):
    parts = expr.split(".")
    # spack.pkg is for repositories, don't try to resolve it here.
    if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
    if expr.startswith(spack.repo.PKG_MODULE_PREFIX_V1) or expr == "spack.pkg":
        return None
    while parts:
        f1 = os.path.join(root, "lib", "spack", *parts) + ".py"

@@ -18,6 +18,10 @@ class Languages(enum.Enum):


class CompilerAdaptor:
    """Provides access to compiler attributes via `Package.compiler`. Useful for
    packages which do not yet access compiler properties via `self.spec[language]`.
    """

    def __init__(
        self, compiled_spec: spack.spec.Spec, compilers: Dict[Languages, spack.spec.Spec]
    ) -> None:
@@ -79,6 +83,14 @@ def implicit_rpaths(self) -> List[str]:
        result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
        return result

    @property
    def opt_flags(self) -> List[str]:
        return next(iter(self.compilers.values())).package.opt_flags

    @property
    def debug_flags(self) -> List[str]:
        return next(iter(self.compilers.values())).package.debug_flags

    @property
    def openmp_flag(self) -> str:
        return next(iter(self.compilers.values())).package.openmp_flag

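# Each new property delegates to the first compiler in the mapping via
# next(iter(...)). The shape of that adaptor pattern, reduced to a runnable
# sketch with stub classes:
class _Pkg:
    openmp_flag = "-fopenmp"


class _Compiler:
    package = _Pkg()


class _Adaptor:
    def __init__(self, compilers: dict):
        self.compilers = compilers

    @property
    def openmp_flag(self) -> str:
        # Delegate to whichever compiler happens to come first.
        return next(iter(self.compilers.values())).package.openmp_flag


print(_Adaptor({"c": _Compiler()}).openmp_flag)  # -> -fopenmp
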
@@ -65,7 +65,7 @@ def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
        # The instance is being initialized: if it is a package we must ensure
        # that the directives are called to set it up.

        if cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
        if spack.repo.is_package_module(cls.__module__):
            # Ensure the presence of the dictionaries associated with the directives.
            # All dictionaries are defaultdicts that create lists for missing keys.
            for d in DirectiveMeta._directive_dict_names:
@@ -144,7 +144,6 @@ class Foo(Package):
    Package class, and it's how Spack gets information from the
    packages to the core.
    """
    global directive_names

    if isinstance(dicts, str):
        dicts = (dicts,)

@@ -1049,7 +1049,11 @@ def add_view(name, values):

    def _process_concrete_includes(self):
        """Extract and load into memory included concrete spec data."""
        self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
        _included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
        # Expand config and environment variables
        self.included_concrete_envs = [
            spack.util.path.canonicalize_path(_env) for _env in _included_concrete_envs
        ]

        if self.included_concrete_envs:
            if os.path.exists(self.lock_path):
@@ -2312,8 +2316,12 @@ def update_environment_repository(self) -> None:

    def _add_to_environment_repository(self, spec_node: Spec) -> None:
        """Add the root node of the spec to the environment repository"""
        repository_dir = os.path.join(self.repos_path, spec_node.namespace)
        repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
        namespace: str = spec_node.namespace
        repository = spack.repo.create_or_construct(
            root=os.path.join(self.repos_path, namespace),
            namespace=namespace,
            package_api=spack.repo.PATH.get_repo(namespace).package_api,
        )
        pkg_dir = repository.dirname_for_package_name(spec_node.name)
        fs.mkdirp(pkg_dir)
        spack.repo.PATH.dump_provenance(spec_node, pkg_dir)

@@ -202,3 +202,16 @@ class MirrorError(SpackError):

    def __init__(self, msg, long_msg=None):
        super().__init__(msg, long_msg)


class NoChecksumException(SpackError):
    """
    Raised if file fails checksum verification.
    """

    def __init__(self, path, size, contents, algorithm, expected, computed):
        super().__init__(
            f"{algorithm} checksum failed for {path}",
            f"Expected {expected} but got {computed}. "
            f"File size = {size} bytes. Contents = {contents!r}",
        )

@@ -65,6 +65,7 @@
import spack.util.executable
import spack.util.path
import spack.util.timer as timer
from spack.url_buildcache import BuildcacheEntryError
from spack.util.environment import EnvironmentModifications, dump_environment
from spack.util.executable import which

@@ -449,17 +450,17 @@ def _process_binary_cache_tarball(
        else ``False``
    """
    with timer.measure("fetch"):
        download_result = binary_distribution.download_tarball(
        tarball_stage = binary_distribution.download_tarball(
            pkg.spec.build_spec, unsigned, mirrors_for_spec
        )

    if download_result is None:
    if tarball_stage is None:
        return False

    tty.msg(f"Extracting {package_id(pkg.spec)} from binary cache")

    with timer.measure("install"), spack.util.path.filter_padding():
        binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
        binary_distribution.extract_tarball(pkg.spec, tarball_stage, force=False, timer=timer)

        if pkg.spec.spliced:  # overwrite old metadata with new
            spack.store.STORE.layout.write_spec(
@@ -566,10 +567,11 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
            tty.warn(f"Warning: Couldn't copy in provenance for {node.name}")

        # Create a destination repository
        dest_repo_root = os.path.join(path, node.namespace)
        if not os.path.exists(dest_repo_root):
            spack.repo.create_repo(dest_repo_root)
        repo = spack.repo.from_path(dest_repo_root)
        pkg_api = spack.repo.PATH.get_repo(node.namespace).package_api
        repo_root = os.path.join(path, node.namespace) if pkg_api < (2, 0) else path
        repo = spack.repo.create_or_construct(
            repo_root, namespace=node.namespace, package_api=pkg_api
        )

        # Get the location of the package in the dest repo.
        dest_pkg_dir = repo.dirname_for_package_name(node.name)
@@ -2176,7 +2178,7 @@ def install(self) -> None:
                )
                raise

            except binary_distribution.NoChecksumException as exc:
            except BuildcacheEntryError as exc:
                if task.cache_only:
                    raise


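# _process_binary_cache_tarball times each phase and returns early when no
# mirror had the tarball. The skeleton of that flow, with a stub timer in place
# of spack.util.timer and callables standing in for download/extract:
import time
from contextlib import contextmanager


@contextmanager
def measure(phase: str):
    start = time.time()
    try:
        yield
    finally:
        print(f"{phase}: {time.time() - start:.3f}s")


def install_from_cache(download, extract) -> bool:
    with measure("fetch"):
        stage = download()  # may be None if no mirror provides the tarball
    if stage is None:
        return False  # caller falls back to building from source
    with measure("install"):
        extract(stage)
    return True
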
@@ -550,7 +550,6 @@ def setup_main_options(args):
        spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
            [(key, [spack.paths.mock_packages_path])]
        )
        spack.repo.PATH = spack.repo.create(spack.config.CONFIG)

    # If the user asked for it, don't check ssl certs.
    if args.insecure:
@@ -561,6 +560,8 @@ def setup_main_options(args):
    for config_var in args.config_vars or []:
        spack.config.add(fullpath=config_var, scope="command_line")

    spack.repo.enable_repo(spack.repo.create(spack.config.CONFIG))

    # On Windows10 console handling for ASCII/VT100 sequences is not
    # on by default. Turn on before we try to write to console
    # with color

@@ -172,3 +172,5 @@ class tty:
spack_cxx: str
spack_f77: str
spack_fc: str
prefix: Prefix
dso_suffix: str

@@ -14,7 +14,6 @@
import functools
import glob
import hashlib
import importlib
import io
import os
import re
@@ -28,7 +27,7 @@

import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
from llnl.util.lang import ClassProperty, classproperty, memoized

import spack.config
import spack.dependency
@@ -48,6 +47,7 @@
import spack.url
import spack.util.environment
import spack.util.executable
import spack.util.naming
import spack.util.path
import spack.util.web
import spack.variant
@@ -701,10 +701,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
    _verbose = None

    #: Package homepage where users can find more information about the package
    homepage: Optional[str] = None
    homepage: ClassProperty[Optional[str]] = None

    #: Default list URL (place to find available versions)
    list_url: Optional[str] = None
    list_url: ClassProperty[Optional[str]] = None

    #: Link depth to which list_url should be searched for new versions
    list_depth = 0
@@ -818,12 +818,12 @@ def package_dir(cls):

    @classproperty
    def module(cls):
        """Module object (not just the name) that this package is defined in.
        """Module instance that this package class is defined in.

        We use this to add variables to package modules. This makes
        install() methods easier to write (e.g., can call configure())
        """
        return importlib.import_module(cls.__module__)
        return sys.modules[cls.__module__]

    @classproperty
    def namespace(cls):
@@ -839,26 +839,36 @@ def fullname(cls):
    def fullnames(cls):
        """Fullnames for this package and any packages from which it inherits."""
        fullnames = []
        for cls in cls.__mro__:
            namespace = getattr(cls, "namespace", None)
            if namespace:
                fullnames.append("%s.%s" % (namespace, cls.name))
            if namespace == "builtin":
                # builtin packages cannot inherit from other repos
        for base in cls.__mro__:
            if not spack.repo.is_package_module(base.__module__):
                break
            fullnames.append(base.fullname)
        return fullnames

    @classproperty
    def name(cls):
        """The name of this package.

        The name of a package is the name of its Python module, without
        the containing module names.
        """
        """The name of this package."""
        if cls._name is None:
            cls._name = cls.module.__name__
            if "." in cls._name:
                cls._name = cls._name[cls._name.rindex(".") + 1 :]
            # We cannot know the exact package API version, but we can distinguish between v1
            # and v2 based on the module. We don't want to figure out the exact package API
            # version since it requires parsing the repo.yaml.
            module = cls.__module__

            if module.startswith(spack.repo.PKG_MODULE_PREFIX_V1):
                version = (1, 0)
            elif module.startswith(spack.repo.PKG_MODULE_PREFIX_V2):
                version = (2, 0)
            else:
                raise ValueError(f"Package {cls.__qualname__} is not a known Spack package")

            if version < (2, 0):
                # spack.pkg.builtin.package_name.
                _, _, pkg_module = module.rpartition(".")
            else:
                # spack_repo.builtin.packages.package_name.package
                pkg_module = module.rsplit(".", 2)[-2]

            cls._name = spack.util.naming.pkg_dir_to_pkg_name(pkg_module, version)
        return cls._name

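# The rewritten `name` property infers the package directory from the module
# path and only then converts directory to package name. A sketch of just the
# string logic, with the dir-to-name rule simplified to underscore->hyphen
# (the real code defers to spack.util.naming):
def pkg_name_from_module(module: str) -> str:
    if module.startswith("spack.pkg."):
        pkg_dir = module.rpartition(".")[2]  # spack.pkg.builtin.zlib_ng -> zlib_ng
    elif module.startswith("spack_repo."):
        pkg_dir = module.rsplit(".", 2)[-2]  # ...packages.zlib_ng.package -> zlib_ng
    else:
        raise ValueError(f"not a Spack package module: {module}")
    return pkg_dir.replace("_", "-")


print(pkg_name_from_module("spack.pkg.builtin.zlib_ng"))  # zlib-ng
print(pkg_name_from_module("spack_repo.builtin.packages.zlib_ng.package"))  # zlib-ng
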
    @classproperty

@@ -56,8 +56,9 @@

# read-only things in $spack/var/spack
repos_path = os.path.join(var_path, "repos")
packages_path = os.path.join(repos_path, "builtin")
mock_packages_path = os.path.join(repos_path, "builtin.mock")
test_repos_path = os.path.join(var_path, "test_repos")
packages_path = os.path.join(repos_path, "spack_repo", "builtin")
mock_packages_path = os.path.join(test_repos_path, "builtin.mock")

#
# Writable things in $spack/var/spack

@@ -47,40 +47,34 @@
import spack.util.path
import spack.util.spack_yaml as syaml

#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
ROOT_PYTHON_NAMESPACE = "spack.pkg"
PKG_MODULE_PREFIX_V1 = "spack.pkg."
PKG_MODULE_PREFIX_V2 = "spack_repo."

_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")


def python_package_for_repo(namespace):
    """Returns the full namespace of a repository, given its relative one

    For instance:

        python_package_for_repo('builtin') == 'spack.pkg.builtin'

    Args:
        namespace (str): repo namespace
    """
    return "{0}.{1}".format(ROOT_PYTHON_NAMESPACE, namespace)
def is_package_module(fullname: str) -> bool:
    """Check if the given module is a package module."""
    return fullname.startswith(PKG_MODULE_PREFIX_V1) or fullname.startswith(PKG_MODULE_PREFIX_V2)


def namespace_from_fullname(fullname):
def namespace_from_fullname(fullname: str) -> str:
    """Return the repository namespace only for the full module name.

    For instance:

        namespace_from_fullname('spack.pkg.builtin.hdf5') == 'builtin'
        namespace_from_fullname("spack.pkg.builtin.hdf5") == "builtin"
        namespace_from_fullname("spack_repo.x.y.z.packages.pkg_name.package") == "x.y.z"

    Args:
        fullname (str): full name for the Python module
        fullname: full name for the Python module
    """
    namespace, dot, module = fullname.rpartition(".")
    prefix_and_dot = "{0}.".format(ROOT_PYTHON_NAMESPACE)
    if namespace.startswith(prefix_and_dot):
        namespace = namespace[len(prefix_and_dot) :]
    return namespace
    if fullname.startswith(PKG_MODULE_PREFIX_V1):
        namespace, _, _ = fullname.rpartition(".")
        return namespace[len(PKG_MODULE_PREFIX_V1) :]
    elif fullname.startswith(PKG_MODULE_PREFIX_V2) and fullname.endswith(".package"):
        return ".".join(fullname.split(".")[1:-3])
    return fullname


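# The v2 branch strips the leading "spack_repo" component and the trailing
# "packages.<pkg_dir>.package" components. Checking that slicing directly:
fullname = "spack_repo.x.y.z.packages.pkg_name.package"
parts = fullname.split(".")
assert ".".join(parts[1:-3]) == "x.y.z"

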
class SpackNamespaceLoader:
@@ -92,34 +86,13 @@ def exec_module(self, module):


class ReposFinder:
    """MetaPathFinder class that loads a Python module corresponding to a Spack package.
    """MetaPathFinder class that loads a Python module corresponding to an API v1 Spack package.

    Returns a loader based on the inspection of the current repository list.
    """

    def __init__(self):
        self._repo_init = _path
        self._repo = None

    @property
    def current_repository(self):
        if self._repo is None:
            self._repo = self._repo_init()
        return self._repo

    @current_repository.setter
    def current_repository(self, value):
        self._repo = value

    @contextlib.contextmanager
    def switch_repo(self, substitute: "RepoType"):
        """Switch the current repository list for the duration of the context manager."""
        old = self._repo
        try:
            self._repo = substitute
            yield
        finally:
            self._repo = old
    #: The current list of repositories.
    repo_path: "RepoPath"

    def find_spec(self, fullname, python_path, target=None):
        # "target" is not None only when calling importlib.reload()
@@ -127,7 +100,7 @@ def find_spec(self, fullname, python_path, target=None):
            raise RuntimeError('cannot reload module "{0}"'.format(fullname))

        # Preferred API from https://peps.python.org/pep-0451/
        if not fullname.startswith(ROOT_PYTHON_NAMESPACE):
        if not fullname.startswith(PKG_MODULE_PREFIX_V1) and fullname != "spack.pkg":
            return None

        loader = self.compute_loader(fullname)
@@ -135,18 +108,16 @@ def find_spec(self, fullname, python_path, target=None):
            return None
        return importlib.util.spec_from_loader(fullname, loader)

    def compute_loader(self, fullname):
    def compute_loader(self, fullname: str):
        # namespaces are added to repo, and package modules are leaves.
        namespace, dot, module_name = fullname.rpartition(".")

        # If it's a module in some repo, or if it is the repo's namespace, let the repo handle it.
        is_repo_path = isinstance(self.current_repository, RepoPath)
        if is_repo_path:
            repos = self.current_repository.repos
        else:
            repos = [self.current_repository]

        for repo in repos:
        if not hasattr(self, "repo_path"):
            return None

        for repo in self.repo_path.repos:
            # We are using the namespace of the repo and the repo contains the package
            if namespace == repo.full_namespace:
                # With 2 nested conditionals we can call "repo.real_name" only once
@@ -161,7 +132,7 @@ def compute_loader(self, fullname):

        # No repo provides the namespace, but it is a valid prefix of
        # something in the RepoPath.
        if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
        if self.repo_path.by_namespace.is_prefix(fullname[len(PKG_MODULE_PREFIX_V1) :]):
            return SpackNamespaceLoader()

        return None
@@ -179,12 +150,12 @@ def compute_loader(self, fullname):
NOT_PROVIDED = object()


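# ReposFinder hooks the import system through the MetaPathFinder protocol:
# decline modules outside your prefix, otherwise return a ModuleSpec built from
# a custom loader. A minimal runnable version of that protocol (the demo_pkgs
# prefix and classes are illustrative, not Spack code):
import importlib.abc
import importlib.util
import sys
import types


class DemoLoader(importlib.abc.Loader):
    def create_module(self, spec):
        return None  # use the default module creation

    def exec_module(self, module: types.ModuleType) -> None:
        module.greeting = "loaded by a MetaPathFinder"


class DemoFinder(importlib.abc.MetaPathFinder):
    def find_spec(self, fullname, path, target=None):
        if not fullname.startswith("demo_pkgs"):
            return None  # decline: let the regular machinery handle it
        return importlib.util.spec_from_loader(fullname, DemoLoader())


sys.meta_path.insert(0, DemoFinder())
import demo_pkgs  # resolved by DemoFinder, not by the filesystem

print(demo_pkgs.greeting)

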
def packages_path():
def builtin_repo() -> "Repo":
    """Get the test repo if it is active, otherwise the builtin repo."""
    try:
        return PATH.get_repo("builtin.mock").packages_path
        return PATH.get_repo("builtin.mock")
    except UnknownNamespaceError:
        return PATH.get_repo("builtin").packages_path
        return PATH.get_repo("builtin")


class GitExe:
@@ -192,24 +163,25 @@ class GitExe:
    # invocations.
    #
    # Not using -C as that is not supported for git < 1.8.5.
    def __init__(self):
    def __init__(self, packages_path: str):
        self._git_cmd = spack.util.git.git(required=True)
        self.packages_dir = packages_path

    def __call__(self, *args, **kwargs):
        with working_dir(packages_path()):
            return self._git_cmd(*args, **kwargs)
    def __call__(self, *args, **kwargs) -> str:
        with working_dir(self.packages_dir):
            return self._git_cmd(*args, **kwargs, output=str)


def list_packages(rev):
def list_packages(rev: str, repo: "Repo") -> List[str]:
    """List all packages associated with the given revision"""
    git = GitExe()
    git = GitExe(repo.packages_path)

    # git ls-tree does not support ... merge-base syntax, so do it manually
    if rev.endswith("..."):
        ref = rev.replace("...", "")
        rev = git("merge-base", ref, "HEAD", output=str).strip()
        rev = git("merge-base", ref, "HEAD").strip()

    output = git("ls-tree", "-r", "--name-only", rev, output=str)
    output = git("ls-tree", "-r", "--name-only", rev)

    # recursively list the packages directory
    package_paths = [
@@ -217,54 +189,56 @@ def list_packages(rev):
    ]

    # take the directory names with one-level-deep package files
    package_names = sorted(set([line[0] for line in package_paths if len(line) == 2]))
    package_names = [
        nm.pkg_dir_to_pkg_name(line[0], repo.package_api)
        for line in package_paths
        if len(line) == 2
    ]

    return package_names
    return sorted(set(package_names))


def diff_packages(rev1, rev2):
def diff_packages(rev1: str, rev2: str, repo: "Repo") -> Tuple[Set[str], Set[str]]:
    """Compute packages lists for the two revisions and return a tuple
    containing all the packages in rev1 but not in rev2 and all the
    packages in rev2 but not in rev1."""
    p1 = set(list_packages(rev1))
    p2 = set(list_packages(rev2))
    p1 = set(list_packages(rev1, repo))
    p2 = set(list_packages(rev2, repo))
    return p1.difference(p2), p2.difference(p1)


def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
    """Show packages changed, added, or removed (or any combination of those)
    since a commit.
def get_all_package_diffs(type: str, repo: "Repo", rev1="HEAD^1", rev2="HEAD") -> Set[str]:
    """Get packages changed, added, or removed (or any combination of those) since a commit.

    Arguments:

        type (str): String containing one or more of 'A', 'R', 'C'
        rev1 (str): Revision to compare against, default is 'HEAD^'
        rev2 (str): Revision to compare to rev1, default is 'HEAD'

    Returns:

        A set contain names of affected packages.
        type: String containing one or more of 'A', 'R', 'C'
        rev1: Revision to compare against, default is 'HEAD^'
        rev2: Revision to compare to rev1, default is 'HEAD'
    """
    lower_type = type.lower()
    if not re.match("^[arc]*$", lower_type):
        tty.die(
            "Invald change type: '%s'." % type,
            "Can contain only A (added), R (removed), or C (changed)",
            f"Invalid change type: '{type}'. "
            "Can contain only A (added), R (removed), or C (changed)"
        )

    removed, added = diff_packages(rev1, rev2)
    removed, added = diff_packages(rev1, rev2, repo)

    git = GitExe()
    out = git("diff", "--relative", "--name-only", rev1, rev2, output=str).strip()
    git = GitExe(repo.packages_path)
    out = git("diff", "--relative", "--name-only", rev1, rev2).strip()

    lines = [] if not out else re.split(r"\s+", out)
    changed = set()
    changed: Set[str] = set()
    for path in lines:
        pkg_name, _, _ = path.partition("/")
        dir_name, _, _ = path.partition("/")
        if not nm.valid_module_name(dir_name, repo.package_api):
            continue
        pkg_name = nm.pkg_dir_to_pkg_name(dir_name, repo.package_api)
        if pkg_name not in added and pkg_name not in removed:
            changed.add(pkg_name)

    packages = set()
    packages: Set[str] = set()
    if "a" in lower_type:
        packages |= added
    if "r" in lower_type:
@@ -275,14 +249,14 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
    return packages


def add_package_to_git_stage(packages):
def add_package_to_git_stage(packages: List[str], repo: "Repo") -> None:
    """add a package to the git stage with `git add`"""
    git = GitExe()
    git = GitExe(repo.packages_path)

    for pkg_name in packages:
        filename = PATH.filename_for_package_name(pkg_name)
        if not os.path.isfile(filename):
            tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
            tty.die(f"No such package: {pkg_name}. Path does not exist:", filename)

        git("add", filename)

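# `git ls-tree` does not accept the triple-dot "ref..." form, so the code above
# resolves the merge base itself. That fallback, as a standalone helper around
# a plain subprocess call:
import subprocess


def resolve_rev(rev: str, cwd: str) -> str:
    if rev.endswith("..."):
        ref = rev.replace("...", "")
        return subprocess.check_output(
            ["git", "merge-base", ref, "HEAD"], cwd=cwd, text=True
        ).strip()
    return rev
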
@@ -352,9 +326,10 @@ class FastPackageChecker(collections.abc.Mapping):
    #: Global cache, reused by every instance
    _paths_cache: Dict[str, Dict[str, os.stat_result]] = {}

    def __init__(self, packages_path):
    def __init__(self, packages_path: str, package_api: Tuple[int, int]):
        # The path of the repository managed by this instance
        self.packages_path = packages_path
        self.package_api = package_api

        # If the cache we need is not there yet, then build it appropriately
        if packages_path not in self._paths_cache:
@@ -379,41 +354,38 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
        # Create a dictionary that will store the mapping between a
        # package name and its stat info
        cache: Dict[str, os.stat_result] = {}
        for pkg_name in os.listdir(self.packages_path):
            # Skip non-directories in the package root.
            pkg_dir = os.path.join(self.packages_path, pkg_name)
        with os.scandir(self.packages_path) as entries:
            for entry in entries:
                # Construct the file name from the directory
                pkg_file = os.path.join(entry.path, package_file_name)

            # Warn about invalid names that look like packages.
            if not nm.valid_module_name(pkg_name):
                if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
                try:
                    sinfo = os.stat(pkg_file)
                except OSError as e:
                    if e.errno in (errno.ENOENT, errno.ENOTDIR):
                        # No package.py file here.
                        continue
                    elif e.errno == errno.EACCES:
                        tty.warn(f"Can't read package file {pkg_file}.")
                        continue
                    raise e

                # If it's not a file, skip it.
                if not stat.S_ISREG(sinfo.st_mode):
                    continue

                # Only consider package.py files in directories that are valid module names under
                # the current package API
                if not nm.valid_module_name(entry.name, self.package_api):
                    x, y = self.package_api
                    tty.warn(
                        'Skipping package at {0}. "{1}" is not '
                        "a valid Spack module name.".format(pkg_dir, pkg_name)
                        f"Package {pkg_file} cannot be used because `{entry.name}` is not a valid "
                        f"Spack package module name for Package API v{x}.{y}."
                    )
                    continue

            # Construct the file name from the directory
            pkg_file = os.path.join(self.packages_path, pkg_name, package_file_name)

            # Use stat here to avoid lots of calls to the filesystem.
            try:
                sinfo = os.stat(pkg_file)
            except OSError as e:
                if e.errno == errno.ENOENT:
                    # No package.py file here.
                    continue
                elif e.errno == errno.EACCES:
                    tty.warn("Can't read package file %s." % pkg_file)
                    continue
                raise e

            # If it's not a file, skip it.
            if stat.S_ISDIR(sinfo.st_mode):
                continue

            # If it is a file, then save the stats under the
            # appropriate key
            cache[pkg_name] = sinfo
                # Store the stat info by package name.
                cache[nm.pkg_dir_to_pkg_name(entry.name, self.package_api)] = sinfo

        return cache

@@ -666,7 +638,6 @@ def __init__(
                if isinstance(repo, str):
                    assert cache is not None, "cache must hold a value, when repo is a string"
                    repo = Repo(repo, cache=cache, overrides=overrides)
                repo.finder(self)
                self.put_last(repo)
            except RepoError as e:
                tty.warn(
@@ -676,6 +647,20 @@ def __init__(
                    f"  spack repo rm {repo}",
                )

    def enable(self) -> None:
        """Set the relevant search paths for package module loading"""
        REPOS_FINDER.repo_path = self
        for p in reversed(self.python_paths()):
            if p not in sys.path:
                sys.path.insert(0, p)

    def disable(self) -> None:
        """Disable the search paths for package module loading"""
        del REPOS_FINDER.repo_path
        for p in self.python_paths():
            if p in sys.path:
                sys.path.remove(p)

    def ensure_unwrapped(self) -> "RepoPath":
        """Ensure we unwrap this object from any dynamic wrapper (like Singleton)"""
        return self
@@ -688,7 +673,7 @@ def put_first(self, repo: "Repo") -> None:
            return

        self.repos.insert(0, repo)
        self.by_namespace[repo.full_namespace] = repo
        self.by_namespace[repo.namespace] = repo

    def put_last(self, repo):
        """Add repo last in the search path."""
@@ -700,8 +685,8 @@ def put_last(self, repo):
        self.repos.append(repo)

        # don't mask any higher-precedence repos with same namespace
        if repo.full_namespace not in self.by_namespace:
            self.by_namespace[repo.full_namespace] = repo
        if repo.namespace not in self.by_namespace:
            self.by_namespace[repo.namespace] = repo

    def remove(self, repo):
        """Remove a repo from the search path."""
@@ -710,10 +695,9 @@ def remove(self, repo):

    def get_repo(self, namespace: str) -> "Repo":
        """Get a repository by namespace."""
        full_namespace = python_package_for_repo(namespace)
        if full_namespace not in self.by_namespace:
        if namespace not in self.by_namespace:
            raise UnknownNamespaceError(namespace)
        return self.by_namespace[full_namespace]
        return self.by_namespace[namespace]

    def first_repo(self) -> Optional["Repo"]:
        """Get the first repo in precedence order."""
@@ -821,10 +805,9 @@ def repo_for_pkg(self, spec: Union[str, "spack.spec.Spec"]) -> "Repo":
        # If the spec already has a namespace, then return the
        # corresponding repo if we know about it.
        if namespace:
            fullspace = python_package_for_repo(namespace)
            if fullspace not in self.by_namespace:
            if namespace not in self.by_namespace:
                raise UnknownNamespaceError(namespace, name=name)
            return self.by_namespace[fullspace]
            return self.by_namespace[namespace]

        # If there's no namespace, search in the RepoPath.
        for repo in self.repos:
@@ -845,6 +828,10 @@ def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
        assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
        return self.repo_for_pkg(spec).get(spec)

    def python_paths(self) -> List[str]:
        """Return a list of all the Python paths in the repos."""
        return [repo.python_path for repo in self.repos if repo.python_path]

    def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
        """Find a class for the spec's package and return the class object."""
        return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name)
@@ -942,6 +929,30 @@ def _parse_package_api_version(
    )


def _validate_and_normalize_subdir(subdir: Any, root: str, package_api: Tuple[int, int]) -> str:
    if not isinstance(subdir, str):
        raise BadRepoError(f"Invalid subdirectory '{subdir}' in '{root}'. Must be a string")

    if package_api < (2, 0):
        return subdir  # In v1.x we did not validate subdir names

    if subdir in (".", ""):
        raise BadRepoError(
            f"Invalid subdirectory '{subdir}' in '{root}'. Use a symlink packages -> . instead"
        )

    # Otherwise we expect a directory name (not path) that can be used as a Python module.
    if os.sep in subdir:
        raise BadRepoError(
            f"Invalid subdirectory '{subdir}' in '{root}'. Expected a directory name, not a path"
        )
    if not nm.valid_module_name(subdir, package_api):
        raise BadRepoError(
            f"Invalid subdirectory '{subdir}' in '{root}'. Must be a valid Python module name"
        )
    return subdir


class Repo:
    """Class representing a package repository in the filesystem.

@@ -962,6 +973,8 @@ class Repo:
    :py:data:`spack.package_api_version`.
    """

    namespace: str

    def __init__(
        self,
        root: str,
@@ -991,38 +1004,82 @@ def check(condition, msg):

        # Read configuration and validate namespace
        config = self._read_config()

        self.package_api = _parse_package_api_version(config)
        self.subdirectory = _validate_and_normalize_subdir(
            config.get("subdirectory", packages_dir_name), root, self.package_api
        )
        self.packages_path = os.path.join(self.root, self.subdirectory)

        check(
            "namespace" in config,
            f"{os.path.join(root, repo_config_name)} must define a namespace.",
            os.path.isdir(self.packages_path),
            f"No directory '{self.subdirectory}' found in '{root}'",
        )

        self.namespace: str = config["namespace"]
        check(
            re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
            f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
            "Namespaces must be valid python identifiers separated by '.'",
        )
        # The parent dir of spack_repo/ which should be added to sys.path for api v2.x
        self.python_path: Optional[str] = None

        if self.package_api < (2, 0):
            check(
                "namespace" in config,
                f"{os.path.join(root, repo_config_name)} must define a namespace.",
            )
            self.namespace = config["namespace"]
            # Note: for Package API v1.x the namespace validation always had bugs, which won't be
            # fixed for compatibility reasons. The regex is missing "$" at the end, and it claims
            # to test for valid identifiers, but fails to split on `.` first.
            check(
                isinstance(self.namespace, str)
                and re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
                f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
                "Namespaces must be valid python identifiers separated by '.'",
            )
        else:
            # From Package API v2.0 the namespace follows from the directory structure.
            check(
                f"{os.sep}spack_repo{os.sep}" in self.root,
                f"Invalid repository path '{self.root}'. "
                f"Path must contain 'spack_repo{os.sep}'",
            )
            derived_namespace = self.root.rpartition(f"spack_repo{os.sep}")[2].replace(os.sep, ".")
            if "namespace" in config:
                self.namespace = config["namespace"]

                check(
                    isinstance(self.namespace, str) and self.namespace == derived_namespace,
                    f"Namespace '{self.namespace}' should be {derived_namespace} or omitted in "
                    f"{os.path.join(root, repo_config_name)}",
                )
            else:
                self.namespace = derived_namespace

            # strip the namespace directories from the root path to get the python path
            # e.g. /my/pythonpath/spack_repo/x/y/z -> /my/pythonpath
            python_path = self.root
            for _ in self.namespace.split("."):
                python_path = os.path.dirname(python_path)
            self.python_path = os.path.dirname(python_path)

        # check that all subdirectories are valid module names
        check(
            all(nm.valid_module_name(x, self.package_api) for x in self.namespace.split(".")),
            f"Invalid namespace '{self.namespace}' in repo '{self.root}'",
        )

        # Set up 'full_namespace' to include the super-namespace
        self.full_namespace = python_package_for_repo(self.namespace)
        if self.package_api < (2, 0):
            self.full_namespace = f"{PKG_MODULE_PREFIX_V1}{self.namespace}"
        elif self.subdirectory == ".":
            self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}"
        else:
            self.full_namespace = f"{PKG_MODULE_PREFIX_V2}{self.namespace}.{self.subdirectory}"

        # Keep name components around for checking prefixes.
        self._names = self.full_namespace.split(".")

        packages_dir: str = config.get("subdirectory", packages_dir_name)
        self.packages_path = os.path.join(self.root, packages_dir)
        check(
            os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
        )

        self.package_api = _parse_package_api_version(config)

        # Class attribute overrides by package name
        self.overrides = overrides or {}

        # Optional reference to a RepoPath to influence module import from spack.pkg
        self._finder: Optional[RepoPath] = None

        # Maps that goes from package name to corresponding file stat
        self._fast_package_checker: Optional[FastPackageChecker] = None

@@ -1030,27 +1087,33 @@ def check(condition, msg):
        self._repo_index: Optional[RepoIndex] = None
        self._cache = cache

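# For a v2 repo rooted at /pythonpath/spack_repo/x/y/z, the namespace is read
# off the path after spack_repo/, and the import root is found by walking up
# one directory per namespace component plus one more for spack_repo itself.
# Checking that arithmetic (POSIX paths, as a sketch):
import posixpath

root = "/pythonpath/spack_repo/x/y/z"
namespace = root.rpartition("spack_repo/")[2].replace("/", ".")
assert namespace == "x.y.z"

python_path = root
for _ in namespace.split("."):  # strip z, then y, then x
    python_path = posixpath.dirname(python_path)
assert posixpath.dirname(python_path) == "/pythonpath"  # one more for spack_repo/
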
    def finder(self, value: RepoPath) -> None:
        self._finder = value
    @property
    def package_api_str(self) -> str:
        return f"v{self.package_api[0]}.{self.package_api[1]}"

    def real_name(self, import_name: str) -> Optional[str]:
        """Allow users to import Spack packages using Python identifiers.

        A python identifier might map to many different Spack package
        names due to hyphen/underscore ambiguity.
        In Package API v1.x, there was no canonical module name for a package, and a package's
        directory was not necessarily a valid Python module name. For that case we have to
        guess the actual package directory. From Package API v2.0 there is a one-to-one mapping
        between Spack package names and Python module names, so there is no guessing.

        Easy example:
            num3proxy -> 3proxy

        Ambiguous:
        For Package API v1.x we support the following one-to-many mappings:
            num3proxy -> 3proxy
            foo_bar -> foo_bar, foo-bar

        More ambiguous:
            foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz
        """
        if self.package_api >= (2, 0):
            if nm.pkg_dir_to_pkg_name(import_name, package_api=self.package_api) in self:
                return import_name
            return None

        if import_name in self:
            return import_name

        # For v1 generate the possible package names from a module name, and return the first
        # package name that exists in this repo.
        options = nm.possible_spack_module_names(import_name)
        try:
            options.remove(import_name)
@@ -1183,7 +1246,9 @@ def extensions_for(
|
||||
def dirname_for_package_name(self, pkg_name: str) -> str:
|
||||
"""Given a package name, get the directory containing its package.py file."""
|
||||
_, unqualified_name = self.partition_package_name(pkg_name)
|
||||
return os.path.join(self.packages_path, unqualified_name)
|
||||
return os.path.join(
|
||||
self.packages_path, nm.pkg_name_to_pkg_dir(unqualified_name, self.package_api)
|
||||
)
|
||||
|
||||
def filename_for_package_name(self, pkg_name: str) -> str:
|
||||
"""Get the filename for the module we should load for a particular
|
||||
@@ -1200,7 +1265,7 @@ def filename_for_package_name(self, pkg_name: str) -> str:
|
||||
@property
|
||||
def _pkg_checker(self) -> FastPackageChecker:
|
||||
if self._fast_package_checker is None:
|
||||
self._fast_package_checker = FastPackageChecker(self.packages_path)
|
||||
self._fast_package_checker = FastPackageChecker(self.packages_path, self.package_api)
|
||||
return self._fast_package_checker
|
||||
|
||||
def all_package_names(self, include_virtuals: bool = False) -> List[str]:
|
||||
@@ -1212,7 +1277,9 @@ def all_package_names(self, include_virtuals: bool = False) -> List[str]:
|
||||
|
||||
def package_path(self, name: str) -> str:
|
||||
"""Get path to package.py file for this repo."""
|
||||
return os.path.join(self.packages_path, name, package_file_name)
|
||||
return os.path.join(
|
||||
self.packages_path, nm.pkg_name_to_pkg_dir(name, self.package_api), package_file_name
|
||||
)
|
||||
|
||||
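Both path helpers above funnel through `nm.pkg_name_to_pkg_dir`; a minimal sketch of the v2 mapping it implies, assuming the usual rules (hyphens become underscores, a leading digit is escaped) — the authoritative version is in `spack.util.naming`:

```python
from typing import Tuple

def pkg_name_to_pkg_dir_sketch(name: str, package_api: Tuple[int, int]) -> str:
    # v1 repos used the package name verbatim as the directory name.
    if package_api < (2, 0):
        return name
    # v2 directories must be valid Python module names.
    directory = name.replace("-", "_")
    return f"_{directory}" if directory[0].isdigit() else directory

assert pkg_name_to_pkg_dir_sketch("py-numpy", (2, 0)) == "py_numpy"
assert pkg_name_to_pkg_dir_sketch("7zip", (2, 0)) == "_7zip"
```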
def all_package_paths(self) -> Generator[str, None, None]:
|
||||
for name in self.all_package_names():
|
||||
@@ -1270,15 +1337,17 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]
|
||||
package. Then extracts the package class from the module
|
||||
according to Spack's naming convention.
|
||||
"""
|
||||
namespace, pkg_name = self.partition_package_name(pkg_name)
|
||||
class_name = nm.mod_to_class(pkg_name)
|
||||
fullname = f"{self.full_namespace}.{pkg_name}"
|
||||
_, pkg_name = self.partition_package_name(pkg_name)
|
||||
fullname = f"{self.full_namespace}.{nm.pkg_name_to_pkg_dir(pkg_name, self.package_api)}"
|
||||
if self.package_api >= (2, 0):
|
||||
fullname += ".package"
|
||||
|
||||
class_name = nm.pkg_name_to_class_name(pkg_name)
|
||||
|
||||
try:
|
||||
with REPOS_FINDER.switch_repo(self._finder or self):
|
||||
module = importlib.import_module(fullname)
|
||||
except ImportError:
|
||||
raise UnknownPackageError(fullname)
|
||||
module = importlib.import_module(fullname)
|
||||
except ImportError as e:
|
||||
raise UnknownPackageError(fullname) from e
|
||||
except Exception as e:
|
||||
msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
|
||||
raise RepoError(msg) from e
|
||||
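Put together, the import path computed in `get_pkg_class` differs by API version. The values below are illustrative (namespace and package name invented), with the module prefixes assumed to be `spack.pkg.` for v1 and `spack_repo.` for v2:

```python
# Hypothetical example: namespace 'builtin', package 'py-numpy'.
# Package API v1: the module is the package directory itself.
v1_fullname = "spack.pkg.builtin.py-numpy"

# Package API v2: a 'package' module inside the package directory.
v2_fullname = "spack_repo.builtin.packages.py_numpy.package"
```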
@@ -1369,46 +1438,71 @@ def partition_package_name(pkg_name: str) -> Tuple[str, str]:
|
||||
return namespace, pkg_name
|
||||
|
||||
|
||||
def create_repo(root, namespace=None, subdir=packages_dir_name):
|
||||
def get_repo_yaml_dir(
|
||||
root: str, namespace: Optional[str], package_api: Tuple[int, int]
|
||||
) -> Tuple[str, str]:
|
||||
"""Returns the directory where repo.yaml is located and the effective namespace."""
|
||||
if package_api < (2, 0):
|
||||
namespace = namespace or os.path.basename(root)
|
||||
# This ad-hoc regex is kept for historical reasons; changing it would be a breaking change.
|
||||
if not re.match(r"\w[\.\w-]*", namespace):
|
||||
raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace.")
|
||||
return root, namespace
|
||||
|
||||
# Package API v2 has <root>/spack_repo/<namespace>/<subdir> structure and requires a namespace
|
||||
if namespace is None:
|
||||
raise InvalidNamespaceError("Namespace must be provided.")
|
||||
|
||||
# if namespace has dots those translate to subdirs of further namespace packages.
|
||||
namespace_components = namespace.split(".")
|
||||
|
||||
if not all(nm.valid_module_name(n, package_api=package_api) for n in namespace_components):
|
||||
raise InvalidNamespaceError(f"'{namespace}' is not a valid namespace." % namespace)
|
||||
|
||||
return os.path.join(root, "spack_repo", *namespace_components), namespace
|
||||
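A quick illustration of the two layouts returned above (paths and namespaces invented for the example):

```python
# Package API v2: repo.yaml lives under <root>/spack_repo/<namespace parts>
yaml_dir, ns = get_repo_yaml_dir("/repos/myrepo", "myorg.gpu", (2, 0))
# yaml_dir == "/repos/myrepo/spack_repo/myorg/gpu", ns == "myorg.gpu"

# Package API v1: repo.yaml lives at the repository root, and the
# namespace defaults to the basename of the root.
yaml_dir, ns = get_repo_yaml_dir("/repos/legacy", None, (1, 0))
# yaml_dir == "/repos/legacy", ns == "legacy"
```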
|
||||
|
||||
def create_repo(
|
||||
root,
|
||||
namespace: Optional[str] = None,
|
||||
subdir: str = packages_dir_name,
|
||||
package_api: Tuple[int, int] = spack.package_api_version,
|
||||
) -> Tuple[str, str]:
|
||||
"""Create a new repository in root with the specified namespace.
|
||||
|
||||
If the namespace is not provided, use basename of root.
|
||||
Return the canonicalized path and namespace of the created repository.
|
||||
"""
|
||||
root = spack.util.path.canonicalize_path(root)
|
||||
if not namespace:
|
||||
namespace = os.path.basename(root)
|
||||
repo_yaml_dir, namespace = get_repo_yaml_dir(os.path.abspath(root), namespace, package_api)
|
||||
|
||||
if not re.match(r"\w[\.\w-]*", namespace):
|
||||
raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
|
||||
existed = True
|
||||
try:
|
||||
dir_entry = next(os.scandir(repo_yaml_dir), None)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
existed = False
|
||||
dir_entry = None
|
||||
else:
|
||||
raise BadRepoError(f"Cannot create new repo in {root}: {e}")
|
||||
|
||||
existed = False
|
||||
if os.path.exists(root):
|
||||
if os.path.isfile(root):
|
||||
raise BadRepoError("File %s already exists and is not a directory" % root)
|
||||
elif os.path.isdir(root):
|
||||
if not os.access(root, os.R_OK | os.W_OK):
|
||||
raise BadRepoError("Cannot create new repo in %s: cannot access directory." % root)
|
||||
if os.listdir(root):
|
||||
raise BadRepoError("Cannot create new repo in %s: directory is not empty." % root)
|
||||
existed = True
|
||||
if dir_entry is not None:
|
||||
raise BadRepoError(f"Cannot create new repo in {root}: directory is not empty.")
|
||||
|
||||
full_path = os.path.realpath(root)
|
||||
parent = os.path.dirname(full_path)
|
||||
if not os.access(parent, os.R_OK | os.W_OK):
|
||||
raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
|
||||
config_path = os.path.join(repo_yaml_dir, repo_config_name)
|
||||
|
||||
subdir = _validate_and_normalize_subdir(subdir, root, package_api)
|
||||
|
||||
packages_path = os.path.join(repo_yaml_dir, subdir)
|
||||
|
||||
try:
|
||||
config_path = os.path.join(root, repo_config_name)
|
||||
packages_path = os.path.join(root, subdir)
|
||||
|
||||
fs.mkdirp(packages_path)
|
||||
with open(config_path, "w", encoding="utf-8") as config:
|
||||
config.write("repo:\n")
|
||||
config.write(f" namespace: '{namespace}'\n")
|
||||
if subdir != packages_dir_name:
|
||||
config.write(f" subdirectory: '{subdir}'\n")
|
||||
x, y = spack.package_api_version
|
||||
x, y = package_api
|
||||
config.write(f" api: v{x}.{y}\n")
|
||||
|
||||
except OSError as e:
|
||||
@@ -1421,28 +1515,27 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
|
||||
|
||||
raise BadRepoError(
|
||||
"Failed to create new repository in %s." % root, "Caused by %s: %s" % (type(e), e)
|
||||
)
|
||||
) from e
|
||||
|
||||
return full_path, namespace
|
||||
return repo_yaml_dir, namespace
|
||||
|
||||
|
||||
def from_path(path: str) -> "Repo":
|
||||
def from_path(path: str) -> Repo:
|
||||
"""Returns a repository from the path passed as input. Injects the global misc cache."""
|
||||
return Repo(path, cache=spack.caches.MISC_CACHE)
|
||||
|
||||
|
||||
def create_or_construct(path, namespace=None):
|
||||
def create_or_construct(
|
||||
root: str,
|
||||
namespace: Optional[str] = None,
|
||||
package_api: Tuple[int, int] = spack.package_api_version,
|
||||
) -> Repo:
|
||||
"""Create a repository, or just return a Repo if it already exists."""
|
||||
if not os.path.exists(path):
|
||||
fs.mkdirp(path)
|
||||
create_repo(path, namespace)
|
||||
return from_path(path)
|
||||
|
||||
|
||||
def _path(configuration=None):
|
||||
"""Get the singleton RepoPath instance for Spack."""
|
||||
configuration = configuration or spack.config.CONFIG
|
||||
return create(configuration=configuration)
|
||||
repo_yaml_dir, _ = get_repo_yaml_dir(root, namespace, package_api)
|
||||
if not os.path.exists(repo_yaml_dir):
|
||||
fs.mkdirp(root)
|
||||
create_repo(root, namespace=namespace, package_api=package_api)
|
||||
return from_path(repo_yaml_dir)
|
||||
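Illustrative usage of the function above (path and namespace invented):

```python
repo = create_or_construct("/tmp/demo-repo", namespace="demo", package_api=(2, 0))
# First call: creates /tmp/demo-repo/spack_repo/demo with a repo.yaml and a
# packages/ subdirectory, then returns a Repo rooted at that directory.
# Later calls: the repo.yaml directory already exists, so a Repo for it is
# simply returned.
```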
|
||||
|
||||
def create(configuration: spack.config.Configuration) -> RepoPath:
|
||||
@@ -1467,8 +1560,10 @@ def create(configuration: spack.config.Configuration) -> RepoPath:
|
||||
return RepoPath(*repo_dirs, cache=spack.caches.MISC_CACHE, overrides=overrides)
|
||||
|
||||
|
||||
#: Singleton repo path instance
|
||||
PATH: RepoPath = llnl.util.lang.Singleton(_path) # type: ignore
|
||||
#: Global package repository instance.
|
||||
PATH: RepoPath = llnl.util.lang.Singleton(
|
||||
lambda: create(configuration=spack.config.CONFIG)
|
||||
) # type: ignore[assignment]
|
||||
|
||||
# Add the finder to sys.meta_path
|
||||
REPOS_FINDER = ReposFinder()
|
||||
@@ -1494,28 +1589,37 @@ def use_repositories(
|
||||
Returns:
|
||||
Corresponding RepoPath object
|
||||
"""
|
||||
global PATH
|
||||
paths = [getattr(x, "root", x) for x in paths_and_repos]
|
||||
scope_name = "use-repo-{}".format(uuid.uuid4())
|
||||
scope_name = f"use-repo-{uuid.uuid4()}"
|
||||
repos_key = "repos:" if override else "repos"
|
||||
spack.config.CONFIG.push_scope(
|
||||
spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
|
||||
)
|
||||
PATH, saved = create(configuration=spack.config.CONFIG), PATH
|
||||
old_repo, new_repo = PATH, create(configuration=spack.config.CONFIG)
|
||||
old_repo.disable()
|
||||
enable_repo(new_repo)
|
||||
try:
|
||||
with REPOS_FINDER.switch_repo(PATH): # type: ignore
|
||||
yield PATH
|
||||
yield new_repo
|
||||
finally:
|
||||
spack.config.CONFIG.remove_scope(scope_name=scope_name)
|
||||
PATH = saved
|
||||
enable_repo(old_repo)
|
||||
|
||||
|
||||
def enable_repo(repo_path: RepoPath) -> None:
|
||||
"""Set the global package repository and make them available in module search paths."""
|
||||
global PATH
|
||||
PATH = repo_path
|
||||
PATH.enable()
|
||||
|
||||
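A sketch of how the context manager and `enable_repo` interact in practice (repository path invented):

```python
with use_repositories("/tmp/demo-repo") as repo_path:
    # The new RepoPath is enabled and the previous global PATH disabled;
    # both the config scope and the old PATH are restored on exit.
    names = repo_path.all_package_names()
```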
|
||||
class MockRepositoryBuilder:
|
||||
"""Build a mock repository in a directory"""
|
||||
|
||||
def __init__(self, root_directory, namespace=None):
|
||||
namespace = namespace or "".join(random.choice(string.ascii_uppercase) for _ in range(10))
|
||||
self.root, self.namespace = create_repo(str(root_directory), namespace)
|
||||
namespace = namespace or "".join(random.choice(string.ascii_lowercase) for _ in range(10))
|
||||
repo_root = os.path.join(root_directory, namespace)
|
||||
os.mkdir(repo_root)
|
||||
self.root, self.namespace = create_repo(repo_root, namespace)
|
||||
|
||||
def add_package(self, name, dependencies=None):
|
||||
"""Create a mock package in the repository, using a Jinja2 template.
|
||||
@@ -1527,7 +1631,7 @@ def add_package(self, name, dependencies=None):
|
||||
``spack.dependency.default_deptype`` and ``spack.spec.Spec()`` are used.
|
||||
"""
|
||||
dependencies = dependencies or []
|
||||
context = {"cls_name": nm.mod_to_class(name), "dependencies": dependencies}
|
||||
context = {"cls_name": nm.pkg_name_to_class_name(name), "dependencies": dependencies}
|
||||
template = spack.tengine.make_environment().get_template("mock-repository/package.pyt")
|
||||
text = template.render(context)
|
||||
package_py = self.recipe_filename(name)
|
||||
@@ -1539,8 +1643,10 @@ def remove(self, name):
|
||||
package_py = self.recipe_filename(name)
|
||||
shutil.rmtree(os.path.dirname(package_py))
|
||||
|
||||
def recipe_filename(self, name):
|
||||
return os.path.join(self.root, "packages", name, "package.py")
|
||||
def recipe_filename(self, name: str):
|
||||
return os.path.join(
|
||||
self.root, "packages", nm.pkg_name_to_pkg_dir(name, package_api=(2, 0)), "package.py"
|
||||
)
|
||||
|
||||
|
||||
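For reference, a hedged usage sketch of the builder above; the root directory and package names are invented, and the dependency tuple format is an assumption based on the `add_package` docstring's defaults:

```python
builder = MockRepositoryBuilder("/tmp/mock-repos")
builder.add_package("pkg-a")
builder.add_package("pkg-b", dependencies=[("pkg-a", None, None)])
# Recipes land in valid v2 module directories, e.g.:
# builder.recipe_filename("pkg-a") ->
#     /tmp/mock-repos/<namespace>/packages/pkg_a/package.py
```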
class RepoError(spack.error.SpackError):
|
||||
@@ -1590,7 +1696,10 @@ def __init__(self, name, repo=None):
|
||||
|
||||
# We need to compare the base package name
|
||||
pkg_name = name.rsplit(".", 1)[-1]
|
||||
similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
|
||||
try:
|
||||
similar = difflib.get_close_matches(pkg_name, repo.all_package_names())
|
||||
except Exception:
|
||||
similar = []
|
||||
|
||||
if 1 <= len(similar) <= 5:
|
||||
long_msg += "\n\nDid you mean one of the following packages?\n "
|
||||
|
||||
@@ -19,10 +19,6 @@
|
||||
"additionalProperties": True,
|
||||
"items": spack.schema.spec.properties,
|
||||
},
|
||||
"binary_cache_checksum": {
|
||||
"type": "object",
|
||||
"properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
|
||||
},
|
||||
"buildcache_layout_version": {"type": "number"},
|
||||
}
|
||||
|
||||
@@ -30,6 +26,6 @@
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "Spack buildcache specfile schema",
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"additionalProperties": True,
|
||||
"properties": properties,
|
||||
}
|
||||
|
||||
lib/spack/spack/schema/url_buildcache_manifest.py (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""Schema for buildcache entry manifest file
|
||||
|
||||
.. literalinclude:: _spack_root/lib/spack/spack/schema/url_buildcache_manifest.py
|
||||
:lines: 11-
|
||||
"""
|
||||
from typing import Any, Dict
|
||||
|
||||
properties: Dict[str, Any] = {
|
||||
"version": {"type": "integer"},
|
||||
"data": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"contentLength",
|
||||
"mediaType",
|
||||
"compression",
|
||||
"checksumAlgorithm",
|
||||
"checksum",
|
||||
],
|
||||
"properties": {
|
||||
"contentLength": {"type": "integer"},
|
||||
"mediaType": {"type": "string"},
|
||||
"compression": {"type": "string"},
|
||||
"checksumAlgorithm": {"type": "string"},
|
||||
"checksum": {"type": "string"},
|
||||
},
|
||||
"additionalProperties": True,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
#: Full schema with metadata
|
||||
schema = {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "Buildcache manifest schema",
|
||||
"type": "object",
|
||||
"required": ["version", "data"],
|
||||
"additionalProperties": True,
|
||||
"properties": properties,
|
||||
}
|
||||
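A hedged example of validating a manifest against the schema above with `jsonschema` (the layout version, media type string, and sizes are invented; the import is assumed to resolve since Spack vendors jsonschema):

```python
import jsonschema

manifest = {
    "version": 3,  # illustrative layout version
    "data": [
        {
            "contentLength": 1024,
            "mediaType": "application/vnd.spack.example+gzip",  # invented
            "compression": "gzip",
            "checksumAlgorithm": "sha256",
            "checksum": "ab" * 32,
        }
    ],
}
jsonschema.validate(manifest, schema)  # raises ValidationError if invalid
```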
@@ -128,8 +128,6 @@ class Provenance(enum.IntEnum):
|
||||
SPEC = enum.auto()
|
||||
# A dev spec literal
|
||||
DEV_SPEC = enum.auto()
|
||||
# An external spec declaration
|
||||
EXTERNAL = enum.auto()
|
||||
# The 'packages' section of the configuration
|
||||
PACKAGES_YAML = enum.auto()
|
||||
# A package requirement
|
||||
@@ -138,6 +136,8 @@ class Provenance(enum.IntEnum):
|
||||
PACKAGE_PY = enum.auto()
|
||||
# An installed spec
|
||||
INSTALLED = enum.auto()
|
||||
# An external spec declaration
|
||||
EXTERNAL = enum.auto()
|
||||
# lower provenance for installed git refs so concretizer prefers StandardVersion installs
|
||||
INSTALLED_GIT_VERSION = enum.auto()
|
||||
# A runtime injected from another package (e.g. a compiler)
|
||||
@@ -2492,7 +2492,7 @@ def _spec_clauses(
|
||||
# TODO: variant="*" means 'variant is defined to something', which used to
|
||||
# be meaningless in concretization, as all variants had to be defined. But
|
||||
# now that variants can be conditional, it should force a variant to exist.
|
||||
if variant.value == ("*",):
|
||||
if not variant.values:
|
||||
continue
|
||||
|
||||
for value in variant.values:
|
||||
@@ -2512,7 +2512,22 @@ def _spec_clauses(
|
||||
if self.pkg_class(spec.name).has_variant(vname):
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
else:
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
variant_clause = f.variant_value(spec.name, vname, value)
|
||||
if (
|
||||
variant.concrete
|
||||
and variant.type == vt.VariantType.MULTI
|
||||
and not spec.concrete
|
||||
):
|
||||
if body is False:
|
||||
variant_clause.args = (
|
||||
f"concrete_{variant_clause.args[0]}",
|
||||
*variant_clause.args[1:],
|
||||
)
|
||||
else:
|
||||
clauses.append(
|
||||
fn.attr("concrete_variant_request", spec.name, vname, value)
|
||||
)
|
||||
clauses.append(variant_clause)
|
||||
|
||||
# compiler flags
|
||||
source = context.source if context else "none"
|
||||
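In rough terms, the branch above distinguishes body from head occurrences of a concrete multi-valued variant; a comment-only trace under invented names:

```python
# Hypothetical trace for a request like 'pkg foo:=a,b' (names invented):
#
#   variant_clause = f.variant_value("pkg", "foo", "a")
#
# body=False: the clause head is renamed with a 'concrete_' prefix,
#   variant_value(...)  ->  concrete_variant_value(...)
# otherwise: an extra request fact is emitted first,
#   fn.attr("concrete_variant_request", "pkg", "foo", "a")
# and the original variant_value clause is appended unchanged.
```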
@@ -3862,6 +3877,17 @@ def external_spec_selected(self, node, idx):
|
||||
)
|
||||
self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})
|
||||
|
||||
# Annotate compiler specs from externals
|
||||
external_spec = spack.spec.Spec(spec_info["spec"])
|
||||
external_spec_deps = external_spec.dependencies()
|
||||
if len(external_spec_deps) > 1:
|
||||
raise InvalidExternalError(
|
||||
f"external spec {spec_info['spec']} cannot have more than one dependency"
|
||||
)
|
||||
elif len(external_spec_deps) == 1:
|
||||
compiler_str = external_spec_deps[0]
|
||||
self._specs[node].annotations.with_compiler(spack.spec.Spec(compiler_str))
|
||||
|
||||
# If this is an extension, update the dependencies to include the extendee
|
||||
package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
|
||||
extendee_spec = package.extendee_spec
|
||||
@@ -4765,3 +4791,7 @@ class InvalidSpliceError(spack.error.SpackError):
|
||||
|
||||
class NoCompilerFoundError(spack.error.SpackError):
|
||||
"""Raised when there is no possible compiler"""
|
||||
|
||||
|
||||
class InvalidExternalError(spack.error.SpackError):
|
||||
"""Raised when there is no possible compiler"""
|
||||
|
||||
@@ -159,10 +159,12 @@ unification_set(SetID, VirtualNode)
|
||||
|
||||
% TODO: literals, at the moment, can only influence the "root" unification set. This needs to be extended later.
|
||||
|
||||
% Node attributes that have multiple node arguments (usually, only the first argument is a node)
|
||||
multiple_nodes_attribute("depends_on").
|
||||
multiple_nodes_attribute("virtual_on_edge").
|
||||
multiple_nodes_attribute("provider_set").
|
||||
% Node attributes that need custom rules in ASP, e.g. because they involve multiple nodes
|
||||
node_attributes_with_custom_rules("depends_on").
|
||||
node_attributes_with_custom_rules("virtual_on_edge").
|
||||
node_attributes_with_custom_rules("provider_set").
|
||||
node_attributes_with_custom_rules("concrete_variant_set").
|
||||
node_attributes_with_custom_rules("concrete_variant_request").
|
||||
|
||||
trigger_condition_holds(TriggerID, node(min_dupe_id, Package)) :-
|
||||
solve_literal(TriggerID),
|
||||
@@ -184,6 +186,7 @@ literal_node(Root, node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root)
|
||||
1 { build_dependency_of_literal_node(LiteralNode, node(0..Y-1, BuildDependency)) : max_dupes(BuildDependency, Y) } 1 :-
|
||||
literal_node(Root, LiteralNode),
|
||||
build(LiteralNode),
|
||||
not external(LiteralNode),
|
||||
attr("build_requirement", LiteralNode, build_requirement("node", BuildDependency)).
|
||||
|
||||
condition_set(node(min_dupe_id, Root), LiteralNode) :- literal_node(Root, LiteralNode).
|
||||
@@ -396,12 +399,26 @@ trigger_condition_holds(ID, RequestorNode) :-
|
||||
trigger_node(ID, PackageNode, RequestorNode);
|
||||
attr(Name, node(X, A1)) : condition_requirement(ID, Name, A1), condition_nodes(ID, PackageNode, node(X, A1));
|
||||
attr(Name, node(X, A1), A2) : condition_requirement(ID, Name, A1, A2), condition_nodes(ID, PackageNode, node(X, A1));
|
||||
attr(Name, node(X, A1), A2, A3) : condition_requirement(ID, Name, A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name);
|
||||
attr(Name, node(X, A1), A2, A3) : condition_requirement(ID, Name, A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), not node_attributes_with_custom_rules(Name);
|
||||
attr(Name, node(X, A1), A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4), condition_nodes(ID, PackageNode, node(X, A1));
|
||||
% Special cases
|
||||
attr("depends_on", node(X, A1), node(Y, A2), A3) : condition_requirement(ID, "depends_on", A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), condition_nodes(ID, PackageNode, node(Y, A2));
|
||||
not cannot_hold(ID, PackageNode).
|
||||
|
||||
condition_with_concrete_variant(ID, Package, Variant) :- condition_requirement(ID, "concrete_variant_request", Package, Variant, _).
|
||||
|
||||
cannot_hold(ID, PackageNode) :-
|
||||
not attr("variant_value", node(X, A1), Variant, Value),
|
||||
condition_with_concrete_variant(ID, A1, Variant),
|
||||
condition_requirement(ID, "concrete_variant_request", A1, Variant, Value),
|
||||
condition_nodes(ID, PackageNode, node(X, A1)).
|
||||
|
||||
cannot_hold(ID, PackageNode) :-
|
||||
attr("variant_value", node(X, A1), Variant, Value),
|
||||
condition_with_concrete_variant(ID, A1, Variant),
|
||||
not condition_requirement(ID, "concrete_variant_request", A1, Variant, Value),
|
||||
condition_nodes(ID, PackageNode, node(X, A1)).
|
||||
|
||||
condition_holds(ConditionID, node(X, Package))
|
||||
:- pkg_fact(Package, condition_trigger(ConditionID, TriggerID)),
|
||||
trigger_condition_holds(TriggerID, node(X, Package)).
|
||||
@@ -448,8 +465,8 @@ imposed_nodes(ConditionID, PackageNode, node(X, A1))
|
||||
|
||||
% Conditions that hold may impose constraints on other specs
|
||||
attr(Name, node(X, A1)) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1), imposed_nodes(ID, PackageNode, node(X, A1)).
|
||||
attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
|
||||
attr(Name, node(X, A1), A2, A3) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
|
||||
attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2), imposed_nodes(ID, PackageNode, node(X, A1)), not node_attributes_with_custom_rules(Name).
|
||||
attr(Name, node(X, A1), A2, A3) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3), imposed_nodes(ID, PackageNode, node(X, A1)), not node_attributes_with_custom_rules(Name).
|
||||
attr(Name, node(X, A1), A2, A3, A4) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3, A4), imposed_nodes(ID, PackageNode, node(X, A1)).
|
||||
|
||||
% Provider set is relevant only for literals, since it's the only place where `^[virtuals=foo] bar`
|
||||
@@ -470,6 +487,15 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
|
||||
imposed_constraint(ID, "depends_on", A1, A2, A3),
|
||||
internal_error("Build deps must land in exactly one duplicate").
|
||||
|
||||
% For := we must keep track of the origin of the fact, since we need to check
|
||||
% each condition separately, i.e. foo:=a,b in one place and foo:=c in another
|
||||
% should not make foo:=a,b,c possible
|
||||
attr("concrete_variant_set", node(X, A1), Variant, Value, ID)
|
||||
:- impose(ID, PackageNode),
|
||||
imposed_nodes(ID, PackageNode, node(X, A1)),
|
||||
imposed_constraint(ID, "concrete_variant_set", A1, Variant, Value).
|
||||
|
||||
|
||||
% The rule below accounts for expressions like:
|
||||
%
|
||||
% root ^dep %compiler
|
||||
@@ -490,6 +516,7 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
|
||||
build(node(X, Parent)),
|
||||
not external(node(X, Parent)).
|
||||
|
||||
% Concrete nodes
|
||||
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
|
||||
concrete(ParentNode),
|
||||
not attr("concrete_build_dependency", ParentNode, BuildDependency, _).
|
||||
@@ -503,6 +530,23 @@ provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, Virtua
|
||||
attr("virtual_on_build_edge", ParentNode, BuildDependency, Virtual),
|
||||
not 1 { pkg_fact(BuildDependency, version_satisfies(Constraint, Version)) : hash_attr(BuildDependencyHash, "version", BuildDependency, Version) } 1.
|
||||
|
||||
% External nodes
|
||||
:- attr("build_requirement", ParentNode, build_requirement("node", BuildDependency)),
|
||||
external(ParentNode),
|
||||
not attr("external_build_requirement", ParentNode, build_requirement("node", BuildDependency)).
|
||||
|
||||
candidate_external_version(Constraint, BuildDependency, Version)
|
||||
:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, Constraint)),
|
||||
external(ParentNode),
|
||||
pkg_fact(BuildDependency, version_satisfies(Constraint, Version)).
|
||||
|
||||
error(100, "External {0} cannot satisfy both {1} and {2}", BuildDependency, LiteralConstraint, ExternalConstraint)
|
||||
:- attr("build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, LiteralConstraint)),
|
||||
external(ParentNode),
|
||||
attr("external_build_requirement", ParentNode, build_requirement("node_version_satisfies", BuildDependency, ExternalConstraint)),
|
||||
not 1 { pkg_fact(BuildDependency, version_satisfies(ExternalConstraint, Version)) : candidate_external_version(LiteralConstraint, BuildDependency, Version) }.
|
||||
|
||||
|
||||
% Asking for gcc@10 %gcc@9 shouldn't give us back an external gcc@10, just because of the hack
|
||||
% we have on externals
|
||||
:- attr("build_requirement", node(X, Parent), build_requirement("node", BuildDependency)),
|
||||
@@ -1130,6 +1174,22 @@ error(100, "No valid value for variant '{1}' of package '{0}'", Package, Variant
|
||||
% if a variant is set to anything, it is considered 'set'.
|
||||
attr("variant_set", PackageNode, Variant) :- attr("variant_set", PackageNode, Variant, _).
|
||||
|
||||
% Setting a concrete variant implies setting a variant
|
||||
concrete_variant_value(PackageNode, Variant, Value, Origin) :- attr("concrete_variant_set", PackageNode, Variant, Value, Origin).
|
||||
|
||||
attr("variant_set", PackageNode, Variant, Value) :- attr("concrete_variant_set", PackageNode, Variant, Value, _).
|
||||
|
||||
% Concrete variant values must be in the answer set
|
||||
:- concrete_variant_value(PackageNode, Variant, Value, _), not attr("variant_value", PackageNode, Variant, Value).
|
||||
|
||||
% Extra variant values are not allowed, if the variant is concrete
|
||||
variant_is_concrete(PackageNode, Variant, Origin) :- concrete_variant_value(PackageNode, Variant, _, Origin).
|
||||
|
||||
error(100, "The variant {0} in package {1} specified as := has the extra value {2}", Variant, PackageNode, Value)
|
||||
:- variant_is_concrete(PackageNode, Variant, Origin),
|
||||
attr("variant_value", PackageNode, Variant, Value),
|
||||
not concrete_variant_value(PackageNode, Variant, Value, Origin).
|
||||
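For context, the `:=` syntax these rules enforce pins a multi-valued variant to exactly the requested set; a minimal illustration (package and variant names invented):

```python
import spack.spec

# 'foo:=a,b' is a concrete multi-valued variant request: the solved spec
# must end up with foo == {a, b} exactly, while plain 'foo=a,b' only
# requires a and b to be among the values.
concrete_request = spack.spec.Spec("mypkg foo:=a,b")
loose_request = spack.spec.Spec("mypkg foo=a,b")
```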
|
||||
% A variant cannot have a value that is not also a possible value
|
||||
% This only applies to packages we need to build -- concrete packages may
|
||||
% have been built w/different variants from older/different package versions.
|
||||
@@ -1619,7 +1679,12 @@ build(PackageNode) :- attr("node", PackageNode), not concrete(PackageNode).
|
||||
% 200 - 299 Shifted priorities for build nodes; correspond to priorities 0 - 99.
|
||||
% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds and minimizing dupes.
|
||||
% 0 - 99 Priorities for non-built nodes.
|
||||
build_priority(PackageNode, 200) :- build(PackageNode), attr("node", PackageNode).
|
||||
|
||||
treat_node_as_concrete(node(X, Package)) :- external(node(X, Package)).
|
||||
treat_node_as_concrete(node(X, Package)) :- attr("node", node(X, Package)), runtime(Package).
|
||||
|
||||
build_priority(PackageNode, 200) :- build(PackageNode), attr("node", PackageNode), not treat_node_as_concrete(PackageNode).
|
||||
build_priority(PackageNode, 0) :- build(PackageNode), attr("node", PackageNode), treat_node_as_concrete(PackageNode).
|
||||
build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", PackageNode).
|
||||
|
||||
% don't assign versions from installed packages unless reuse is enabled
|
||||
@@ -1676,7 +1741,7 @@ opt_criterion(310, "requirement weight").
|
||||
% Try hard to reuse installed packages (i.e., minimize the number built)
|
||||
opt_criterion(110, "number of packages to build (vs. reuse)").
|
||||
#minimize { 0@110: #true }.
|
||||
#minimize { 1@110,PackageNode : build(PackageNode) }.
|
||||
#minimize { 1@110,PackageNode : build(PackageNode), not treat_node_as_concrete(PackageNode) }.
|
||||
|
||||
opt_criterion(100, "number of nodes from the same package").
|
||||
#minimize { 0@100: #true }.
|
||||
@@ -1841,48 +1906,59 @@ opt_criterion(10, "target mismatches").
|
||||
not runtime(Dependency)
|
||||
}.
|
||||
|
||||
opt_criterion(5, "non-preferred targets").
|
||||
#minimize{ 0@205: #true }.
|
||||
#minimize{ 0@5: #true }.
|
||||
opt_criterion(7, "non-preferred targets").
|
||||
#minimize{ 0@207: #true }.
|
||||
#minimize{ 0@7: #true }.
|
||||
#minimize{
|
||||
Weight@5+Priority,node(X, Package)
|
||||
Weight@7+Priority,node(X, Package)
|
||||
: node_target_weight(node(X, Package), Weight),
|
||||
build_priority(node(X, Package), Priority),
|
||||
not runtime(Package)
|
||||
}.
|
||||
|
||||
opt_criterion(4, "preferred providers (language runtimes)").
|
||||
#minimize{ 0@204: #true }.
|
||||
#minimize{ 0@4: #true }.
|
||||
opt_criterion(5, "preferred providers (language runtimes)").
|
||||
#minimize{ 0@205: #true }.
|
||||
#minimize{ 0@5: #true }.
|
||||
#minimize{
|
||||
Weight@4+Priority,ProviderNode,X,Virtual
|
||||
Weight@5+Priority,ProviderNode,X,Virtual
|
||||
: provider_weight(ProviderNode, node(X, Virtual), Weight),
|
||||
language_runtime(Virtual),
|
||||
build_priority(ProviderNode, Priority)
|
||||
}.
|
||||
|
||||
% Choose more recent versions for runtimes
|
||||
opt_criterion(3, "version badness (runtimes)").
|
||||
#minimize{ 0@203: #true }.
|
||||
#minimize{ 0@3: #true }.
|
||||
opt_criterion(4, "version badness (runtimes)").
|
||||
#minimize{ 0@204: #true }.
|
||||
#minimize{ 0@4: #true }.
|
||||
#minimize{
|
||||
Weight@3,node(X, Package)
|
||||
Weight@4,node(X, Package)
|
||||
: version_weight(node(X, Package), Weight),
|
||||
runtime(Package)
|
||||
}.
|
||||
|
||||
% Choose best target for runtimes
|
||||
opt_criterion(2, "non-preferred targets (runtimes)").
|
||||
#minimize{ 0@202: #true }.
|
||||
#minimize{ 0@2: #true }.
|
||||
opt_criterion(3, "non-preferred targets (runtimes)").
|
||||
#minimize{ 0@203: #true }.
|
||||
#minimize{ 0@3: #true }.
|
||||
#minimize{
|
||||
Weight@2,node(X, Package)
|
||||
Weight@3,node(X, Package)
|
||||
: node_target_weight(node(X, Package), Weight),
|
||||
runtime(Package)
|
||||
}.
|
||||
|
||||
% Choose more recent versions for nodes
|
||||
opt_criterion(1, "edge wiring").
|
||||
opt_criterion(2, "providers on edges").
|
||||
#minimize{ 0@202: #true }.
|
||||
#minimize{ 0@2: #true }.
|
||||
#minimize{
|
||||
Weight@2,ParentNode,ProviderNode,Virtual
|
||||
: provider_weight(ProviderNode, Virtual, Weight),
|
||||
not attr("root", ProviderNode),
|
||||
depends_on(ParentNode, ProviderNode)
|
||||
}.
|
||||
|
||||
% Choose more recent versions for nodes
|
||||
opt_criterion(1, "version badness on edges").
|
||||
#minimize{ 0@201: #true }.
|
||||
#minimize{ 0@1: #true }.
|
||||
#minimize{
|
||||
@@ -1893,14 +1969,6 @@ opt_criterion(1, "edge wiring").
|
||||
}.
|
||||
|
||||
|
||||
#minimize{ 0@201: #true }.
|
||||
#minimize{ 0@1: #true }.
|
||||
#minimize{
|
||||
Weight@1,ParentNode,ProviderNode,Virtual
|
||||
: provider_weight(ProviderNode, Virtual, Weight),
|
||||
not attr("root", ProviderNode),
|
||||
depends_on(ParentNode, ProviderNode)
|
||||
}.
|
||||
|
||||
%-----------
|
||||
% Notes
|
||||
|
||||
@@ -85,8 +85,10 @@ def is_virtual(self, name: str) -> bool:
|
||||
def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
|
||||
"""Returns true if a package is allowed on the current host"""
|
||||
pkg_cls = self.repo.get_pkg_class(pkg_name)
|
||||
no_condition = spack.spec.Spec()
|
||||
for when_spec, conditions in pkg_cls.requirements.items():
|
||||
if not when_spec.intersects(self._platform_condition):
|
||||
# Restrict analysis to unconditional requirements
|
||||
if when_spec != no_condition:
|
||||
continue
|
||||
for requirements, _, _ in conditions:
|
||||
if not any(x.intersects(self._platform_condition) for x in requirements):
|
||||
|
||||
@@ -837,7 +837,7 @@ def _shared_subset_pair_iterate(container1, container2):
|
||||
b_idx += 1
|
||||
|
||||
|
||||
class FlagMap(lang.HashableMap):
|
||||
class FlagMap(lang.HashableMap[str, List[CompilerFlag]]):
|
||||
__slots__ = ("spec",)
|
||||
|
||||
def __init__(self, spec):
|
||||
@@ -1429,7 +1429,7 @@ def with_compiler(self, compiler: "Spec") -> "SpecAnnotations":
|
||||
def __repr__(self) -> str:
|
||||
result = f"SpecAnnotations().with_spec_format({self.original_spec_format})"
|
||||
if self.compiler_node_attribute:
|
||||
result += f"with_compiler({str(self.compiler_node_attribute)})"
|
||||
result += f".with_compiler({str(self.compiler_node_attribute)})"
|
||||
return result
|
||||
|
||||
|
||||
@@ -1861,9 +1861,7 @@ def add_dependency_edge(
|
||||
@property
|
||||
def fullname(self):
|
||||
return (
|
||||
("%s.%s" % (self.namespace, self.name))
|
||||
if self.namespace
|
||||
else (self.name if self.name else "")
|
||||
f"{self.namespace}.{self.name}" if self.namespace else (self.name if self.name else "")
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -3394,7 +3392,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
return True
|
||||
|
||||
# If we have no dependencies, we can't satisfy any constraints.
|
||||
if not self._dependencies:
|
||||
if not self._dependencies and self.original_spec_format() >= 5 and not self.external:
|
||||
return False
|
||||
|
||||
# If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
|
||||
@@ -3405,6 +3403,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
# verify the edge properties, cause everything is encoded in the hash of the nodes that
|
||||
# will be verified later.
|
||||
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
|
||||
mock_nodes_from_old_specfiles = set()
|
||||
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
|
||||
# If we are checking for ^mpi we need to verify if there is any edge
|
||||
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
|
||||
@@ -3426,13 +3425,27 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
candidates = current_node.dependencies(
|
||||
name=rhs_edge.spec.name,
|
||||
deptype=rhs_edge.depflag,
|
||||
virtuals=rhs_edge.virtuals or None,
|
||||
)
|
||||
if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
|
||||
return False
|
||||
if current_node.original_spec_format() < 5 or (
|
||||
current_node.original_spec_format() >= 5 and current_node.external
|
||||
):
|
||||
compiler_spec = current_node.annotations.compiler_node_attribute
|
||||
if compiler_spec is None:
|
||||
return False
|
||||
|
||||
mock_nodes_from_old_specfiles.add(compiler_spec)
|
||||
# This checks that the single node compiler spec satisfies the request
|
||||
# of a direct dependency. The check is not perfect, but based on heuristic.
|
||||
if not compiler_spec.satisfies(rhs_edge.spec):
|
||||
return False
|
||||
|
||||
else:
|
||||
candidates = current_node.dependencies(
|
||||
name=rhs_edge.spec.name,
|
||||
deptype=rhs_edge.depflag,
|
||||
virtuals=rhs_edge.virtuals or None,
|
||||
)
|
||||
if not candidates or not any(x.satisfies(rhs_edge.spec) for x in candidates):
|
||||
return False
|
||||
|
||||
continue
|
||||
|
||||
@@ -3472,8 +3485,9 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
|
||||
return False
|
||||
|
||||
# Edges have been checked above already, hence deps=False
|
||||
lhs_nodes = [x for x in self.traverse(root=False)] + sorted(mock_nodes_from_old_specfiles)
|
||||
return all(
|
||||
any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
|
||||
any(lhs.satisfies(rhs, deps=False) for lhs in lhs_nodes)
|
||||
for rhs in other.traverse(root=False)
|
||||
)
|
||||
|
||||
@@ -3947,6 +3961,8 @@ def format_attribute(match_object: Match) -> str:
|
||||
except AttributeError:
|
||||
if part == "compiler":
|
||||
return "none"
|
||||
elif part == "specfile_version":
|
||||
return f"v{current.original_spec_format()}"
|
||||
|
||||
raise SpecFormatStringError(
|
||||
f"Attempted to format attribute {attribute}. "
|
||||
@@ -4472,7 +4488,7 @@ def has_virtual_dependency(self, virtual: str) -> bool:
|
||||
return bool(self.dependencies(virtuals=(virtual,)))
|
||||
|
||||
|
||||
class VariantMap(lang.HashableMap):
|
||||
class VariantMap(lang.HashableMap[str, vt.VariantValue]):
|
||||
"""Map containing variant instances. New values can be added only
|
||||
if the key is not already present."""
|
||||
|
||||
@@ -4654,6 +4670,9 @@ def substitute_abstract_variants(spec: Spec):
|
||||
# in $spack/lib/spack/spack/spec_list.py
|
||||
unknown = []
|
||||
for name, v in spec.variants.items():
|
||||
if v.concrete and v.type == vt.VariantType.MULTI:
|
||||
continue
|
||||
|
||||
if name == "dev_path":
|
||||
v.type = vt.VariantType.SINGLE
|
||||
v.concrete = True
|
||||
|
||||
@@ -106,7 +106,7 @@ def __init__(self):
|
||||
|
||||
def restore(self):
|
||||
spack.config.CONFIG = self.config
|
||||
spack.repo.PATH = spack.repo.create(self.config)
|
||||
spack.repo.enable_repo(spack.repo.create(self.config))
|
||||
spack.platforms.host = self.platform
|
||||
spack.store.STORE = self.store
|
||||
self.test_patches.restore()
|
||||
@@ -129,7 +129,6 @@ def restore(self):
|
||||
|
||||
|
||||
def store_patches():
|
||||
global patches
|
||||
module_patches = list()
|
||||
class_patches = list()
|
||||
if not patches:
|
||||
|
||||
@@ -93,6 +93,7 @@ def test_package_audits(packages, expected_errors, mock_packages):
|
||||
]
|
||||
|
||||
|
||||
# TODO/RepoSplit: Should this not rely on mock packages post split?
|
||||
@pytest.mark.parametrize(
|
||||
"config_section,data,failing_check",
|
||||
[
|
||||
@@ -113,7 +114,7 @@ def test_package_audits(packages, expected_errors, mock_packages):
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_config_audits(config_section, data, failing_check):
|
||||
def test_config_audits(config_section, data, failing_check, mock_packages):
|
||||
with spack.config.override(config_section, data):
|
||||
reports = spack.audit.run_group("configs")
|
||||
assert any((check == failing_check) and errors for check, errors in reports)
|
||||
|
||||
@@ -17,11 +17,10 @@
|
||||
import urllib.request
|
||||
import urllib.response
|
||||
from pathlib import Path, PurePath
|
||||
from typing import Any, Callable, Dict, NamedTuple, Optional
|
||||
|
||||
import pytest
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
from llnl.util.filesystem import copy_tree, join_path
|
||||
from llnl.util.symlink import readlink
|
||||
|
||||
@@ -38,16 +37,27 @@
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.store
|
||||
import spack.util.gpg
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack.binary_distribution import INDEX_HASH_FILE, CannotListKeys, GenerateIndexError
|
||||
from spack.binary_distribution import CannotListKeys, GenerateIndexError
|
||||
from spack.database import INDEX_JSON_FILE
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.paths import test_path
|
||||
from spack.spec import Spec
|
||||
from spack.url_buildcache import (
|
||||
INDEX_MANIFEST_FILE,
|
||||
BuildcacheComponent,
|
||||
BuildcacheEntryError,
|
||||
URLBuildcacheEntry,
|
||||
URLBuildcacheEntryV2,
|
||||
compression_writer,
|
||||
get_url_buildcache_class,
|
||||
get_valid_spec_file,
|
||||
)
|
||||
|
||||
pytestmark = pytest.mark.not_on_windows("does not run on windows")
|
||||
|
||||
@@ -372,7 +382,7 @@ def test_built_spec_cache(temporary_mirror_dir):
|
||||
|
||||
for s in [gspec, cspec]:
|
||||
results = bindist.get_mirrors_for_spec(s)
|
||||
assert any([r["spec"] == s for r in results])
|
||||
assert any([r.spec == s for r in results])
|
||||
|
||||
|
||||
def fake_dag_hash(spec, length=None):
|
||||
@@ -435,7 +445,11 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
|
||||
assert "libelf" in cache_list
|
||||
|
||||
# Remove dependency from cache
|
||||
libelf_files = glob.glob(os.path.join(mirror_dir.join("build_cache").strpath, "*libelf*"))
|
||||
libelf_files = glob.glob(
|
||||
os.path.join(
|
||||
mirror_dir.join(bindist.buildcache_relative_specs_path()).strpath, "libelf", "*libelf*"
|
||||
)
|
||||
)
|
||||
os.remove(*libelf_files)
|
||||
|
||||
# Update index
|
||||
@@ -480,8 +494,7 @@ def mock_list_url(url, recursive=False):
|
||||
|
||||
assert (
|
||||
"Warning: Encountered problem listing packages at "
|
||||
f"{test_url}/{bindist.BUILD_CACHE_RELATIVE_PATH}: Some HTTP error"
|
||||
in capfd.readouterr().err
|
||||
f"{test_url}: Some HTTP error" in capfd.readouterr().err
|
||||
)
|
||||
|
||||
|
||||
@@ -538,29 +551,6 @@ def test_update_sbang(tmp_path, temporary_mirror, mock_fetch, install_mockery):
|
||||
assert f.read() == new_contents
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
str(archspec.cpu.host().family) != "x86_64",
|
||||
reason="test data uses gcc 4.5.0 which does not support aarch64",
|
||||
)
|
||||
def test_install_legacy_buildcache_layout(mutable_config, compiler_factory, install_mockery):
|
||||
"""Legacy buildcache layout involved a nested archive structure
|
||||
where the .spack file contained a repeated spec.json and another
|
||||
compressed archive file containing the install tree. This test
|
||||
makes sure we can still read that layout."""
|
||||
legacy_layout_dir = os.path.join(test_path, "data", "mirrors", "legacy_layout")
|
||||
mirror_url = f"file://{legacy_layout_dir}"
|
||||
filename = (
|
||||
"test-debian6-core2-gcc-4.5.0-archive-files-2.0-"
|
||||
"l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json"
|
||||
)
|
||||
spec_json_path = os.path.join(legacy_layout_dir, "build_cache", filename)
|
||||
mirror_cmd("add", "--scope", "site", "test-legacy-layout", mirror_url)
|
||||
output = install_cmd("--no-check-signature", "--cache-only", "-f", spec_json_path, output=str)
|
||||
mirror_cmd("rm", "--scope=site", "test-legacy-layout")
|
||||
expect_line = "Extracting archive-files-2.0-l3vdiqvbobmspwyb4q2b62fz6nitd4hk from binary cache"
|
||||
assert expect_line in output
|
||||
|
||||
|
||||
def test_FetchCacheError_only_accepts_lists_of_errors():
|
||||
with pytest.raises(TypeError, match="list"):
|
||||
bindist.FetchCacheError("error")
|
||||
@@ -600,7 +590,60 @@ def test_text_relocate_if_needed(install_mockery, temporary_store, mock_fetch, t
|
||||
assert join_path("bin", "secretexe") not in manifest["relocate_textfiles"]
|
||||
|
||||
|
||||
def test_etag_fetching_304():
|
||||
def test_compression_writer(tmp_path):
|
||||
text = "This is some text. We might or might not like to compress it as we write."
|
||||
checksum_algo = "sha256"
|
||||
|
||||
# Write the data using gzip compression
|
||||
compressed_output_path = str(tmp_path / "compressed_text")
|
||||
with compression_writer(compressed_output_path, "gzip", checksum_algo) as (
|
||||
compressor,
|
||||
checker,
|
||||
):
|
||||
compressor.write(text.encode("utf-8"))
|
||||
|
||||
compressed_size = checker.length
|
||||
compressed_checksum = checker.hexdigest()
|
||||
|
||||
with open(compressed_output_path, "rb") as f:
|
||||
binary_content = f.read()
|
||||
|
||||
assert bindist.compute_hash(binary_content) == compressed_checksum
|
||||
assert os.stat(compressed_output_path).st_size == compressed_size
|
||||
assert binary_content[:2] == b"\x1f\x8b"
|
||||
decompressed_content = gzip.decompress(binary_content).decode("utf-8")
|
||||
|
||||
assert decompressed_content == text
|
||||
|
||||
# Write the data without compression
|
||||
uncompressed_output_path = str(tmp_path / "uncompressed_text")
|
||||
with compression_writer(uncompressed_output_path, "none", checksum_algo) as (
|
||||
compressor,
|
||||
checker,
|
||||
):
|
||||
compressor.write(text.encode("utf-8"))
|
||||
|
||||
uncompressed_size = checker.length
|
||||
uncompressed_checksum = checker.hexdigest()
|
||||
|
||||
with open(uncompressed_output_path, "r", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
|
||||
assert bindist.compute_hash(content) == uncompressed_checksum
|
||||
assert os.stat(uncompressed_output_path).st_size == uncompressed_size
|
||||
assert content == text
|
||||
|
||||
# Make sure we raise if requesting unknown compression type
|
||||
nocare_output_path = str(tmp_path / "wontwrite")
|
||||
with pytest.raises(BuildcacheEntryError, match="Unknown compression type"):
|
||||
with compression_writer(nocare_output_path, "gsip", checksum_algo) as (
|
||||
compressor,
|
||||
checker,
|
||||
):
|
||||
compressor.write(text)
|
||||
|
||||
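Condensed from the test above, the apparent shape of the `compression_writer` API (output path invented):

```python
from spack.url_buildcache import compression_writer

with compression_writer("/tmp/out.gz", "gzip", "sha256") as (compressor, checker):
    compressor.write(b"payload")

# After the with-block, the checker reflects the *compressed* stream:
size = checker.length          # bytes written to disk
digest = checker.hexdigest()   # checksum of the written bytes
```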
|
||||
def test_v2_etag_fetching_304():
|
||||
# Test conditional fetch with etags. If the remote hasn't modified the file
|
||||
# it returns 304, which is an HTTPError in urllib-land. That should be
|
||||
# handled as success, since it means the local cache is up-to-date.
|
||||
@@ -613,7 +656,7 @@ def response_304(request: urllib.request.Request):
|
||||
)
|
||||
assert False, "Should not fetch {}".format(url)
|
||||
|
||||
fetcher = bindist.EtagIndexFetcher(
|
||||
fetcher = bindist.EtagIndexFetcherV2(
|
||||
url="https://www.example.com",
|
||||
etag="112a8bbc1b3f7f185621c1ee335f0502",
|
||||
urlopen=response_304,
|
||||
@@ -624,7 +667,7 @@ def response_304(request: urllib.request.Request):
|
||||
assert result.fresh
|
||||
|
||||
|
||||
def test_etag_fetching_200():
|
||||
def test_v2_etag_fetching_200():
|
||||
# Test conditional fetch with etags. The remote has modified the file.
|
||||
def response_200(request: urllib.request.Request):
|
||||
url = request.get_full_url()
|
||||
@@ -638,7 +681,7 @@ def response_200(request: urllib.request.Request):
|
||||
)
|
||||
assert False, "Should not fetch {}".format(url)
|
||||
|
||||
fetcher = bindist.EtagIndexFetcher(
|
||||
fetcher = bindist.EtagIndexFetcherV2(
|
||||
url="https://www.example.com",
|
||||
etag="112a8bbc1b3f7f185621c1ee335f0502",
|
||||
urlopen=response_200,
|
||||
@@ -652,7 +695,7 @@ def response_200(request: urllib.request.Request):
|
||||
assert result.hash == bindist.compute_hash("Result")
|
||||
|
||||
|
||||
def test_etag_fetching_404():
|
||||
def test_v2_etag_fetching_404():
|
||||
# Test conditional fetch with etags. The remote has modified the file.
|
||||
def response_404(request: urllib.request.Request):
|
||||
raise urllib.error.HTTPError(
|
||||
@@ -663,7 +706,7 @@ def response_404(request: urllib.request.Request):
|
||||
fp=None,
|
||||
)
|
||||
|
||||
fetcher = bindist.EtagIndexFetcher(
|
||||
fetcher = bindist.EtagIndexFetcherV2(
|
||||
url="https://www.example.com",
|
||||
etag="112a8bbc1b3f7f185621c1ee335f0502",
|
||||
urlopen=response_404,
|
||||
@@ -673,13 +716,13 @@ def response_404(request: urllib.request.Request):
|
||||
fetcher.conditional_fetch()
|
||||
|
||||
|
||||
def test_default_index_fetch_200():
|
||||
def test_v2_default_index_fetch_200():
|
||||
index_json = '{"Hello": "World"}'
|
||||
index_json_hash = bindist.compute_hash(index_json)
|
||||
|
||||
def urlopen(request: urllib.request.Request):
|
||||
url = request.get_full_url()
|
||||
if url.endswith(INDEX_HASH_FILE):
|
||||
if url.endswith("index.json.hash"):
|
||||
return urllib.response.addinfourl( # type: ignore[arg-type]
|
||||
io.BytesIO(index_json_hash.encode()),
|
||||
headers={}, # type: ignore[arg-type]
|
||||
@@ -697,7 +740,7 @@ def urlopen(request: urllib.request.Request):
|
||||
|
||||
assert False, "Unexpected request {}".format(url)
|
||||
|
||||
fetcher = bindist.DefaultIndexFetcher(
|
||||
fetcher = bindist.DefaultIndexFetcherV2(
|
||||
url="https://www.example.com", local_hash="outdated", urlopen=urlopen
|
||||
)
|
||||
|
||||
@@ -710,7 +753,7 @@ def urlopen(request: urllib.request.Request):
|
||||
assert result.hash == index_json_hash
|
||||
|
||||
|
||||
def test_default_index_dont_fetch_index_json_hash_if_no_local_hash():
|
||||
def test_v2_default_index_dont_fetch_index_json_hash_if_no_local_hash():
|
||||
# When we don't have local hash, we should not be fetching the
|
||||
# remote index.json.hash file, but only index.json.
|
||||
index_json = '{"Hello": "World"}'
|
||||
@@ -728,7 +771,7 @@ def urlopen(request: urllib.request.Request):
|
||||
|
||||
assert False, "Unexpected request {}".format(url)
|
||||
|
||||
fetcher = bindist.DefaultIndexFetcher(
|
||||
fetcher = bindist.DefaultIndexFetcherV2(
|
||||
url="https://www.example.com", local_hash=None, urlopen=urlopen
|
||||
)
|
||||
|
||||
@@ -741,13 +784,13 @@ def urlopen(request: urllib.request.Request):
|
||||
assert not result.fresh
|
||||
|
||||
|
||||
def test_default_index_not_modified():
|
||||
def test_v2_default_index_not_modified():
|
||||
index_json = '{"Hello": "World"}'
|
||||
index_json_hash = bindist.compute_hash(index_json)
|
||||
|
||||
def urlopen(request: urllib.request.Request):
|
||||
url = request.get_full_url()
|
||||
if url.endswith(INDEX_HASH_FILE):
|
||||
if url.endswith("index.json.hash"):
|
||||
return urllib.response.addinfourl(
|
||||
io.BytesIO(index_json_hash.encode()),
|
||||
headers={}, # type: ignore[arg-type]
|
||||
@@ -758,7 +801,7 @@ def urlopen(request: urllib.request.Request):
|
||||
# No request to index.json should be made.
|
||||
assert False, "Unexpected request {}".format(url)
|
||||
|
||||
fetcher = bindist.DefaultIndexFetcher(
|
||||
fetcher = bindist.DefaultIndexFetcherV2(
|
||||
url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen
|
||||
)
|
||||
|
||||
@@ -766,7 +809,7 @@ def urlopen(request: urllib.request.Request):
|
||||
|
||||
|
||||
@pytest.mark.parametrize("index_json", [b"\xa9", b"!#%^"])
|
||||
def test_default_index_invalid_hash_file(index_json):
|
||||
def test_v2_default_index_invalid_hash_file(index_json):
|
||||
# Test invalid unicode / invalid hash type
|
||||
index_json_hash = bindist.compute_hash(index_json)
|
||||
|
||||
@@ -778,21 +821,21 @@ def urlopen(request: urllib.request.Request):
|
||||
code=200,
|
||||
)
|
||||
|
||||
fetcher = bindist.DefaultIndexFetcher(
|
||||
fetcher = bindist.DefaultIndexFetcherV2(
|
||||
url="https://www.example.com", local_hash=index_json_hash, urlopen=urlopen
|
||||
)
|
||||
|
||||
assert fetcher.get_remote_hash() is None
|
||||
|
||||
|
||||
def test_default_index_json_404():
|
||||
def test_v2_default_index_json_404():
|
||||
# Test invalid unicode / invalid hash type
|
||||
index_json = '{"Hello": "World"}'
|
||||
index_json_hash = bindist.compute_hash(index_json)
|
||||
|
||||
def urlopen(request: urllib.request.Request):
|
||||
url = request.get_full_url()
|
||||
if url.endswith(INDEX_HASH_FILE):
|
||||
if url.endswith("index.json.hash"):
|
||||
return urllib.response.addinfourl(
|
||||
io.BytesIO(index_json_hash.encode()),
|
||||
headers={}, # type: ignore[arg-type]
|
||||
@@ -811,7 +854,7 @@ def urlopen(request: urllib.request.Request):
|
||||
|
||||
assert False, "Unexpected fetch {}".format(url)
|
||||
|
||||
fetcher = bindist.DefaultIndexFetcher(
|
||||
fetcher = bindist.DefaultIndexFetcherV2(
|
||||
url="https://www.example.com", local_hash="invalid", urlopen=urlopen
|
||||
)
|
||||
|
||||
@@ -1097,9 +1140,7 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success):
|
||||
json.dump(spec_dict, f)
|
||||
|
||||
try:
|
||||
spec_dict_disk, layout_disk = bindist._get_valid_spec_file(
|
||||
str(path), max_supported_layout=1
|
||||
)
|
||||
spec_dict_disk, layout_disk = get_valid_spec_file(str(path), max_supported_layout=1)
|
||||
assert expect_success
|
||||
assert spec_dict_disk == spec_dict
|
||||
assert layout_disk == effective_layout
|
||||
@@ -1109,51 +1150,66 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success):
|
||||
|
||||
def test_get_valid_spec_file_doesnt_exist(tmp_path):
|
||||
with pytest.raises(bindist.InvalidMetadataFile, match="No such file"):
|
||||
bindist._get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1)
|
||||
|
||||
|
||||
def test_get_valid_spec_file_gzipped(tmp_path):
|
||||
# Create a gzipped file, contents don't matter
|
||||
path = tmp_path / "spec.json.gz"
|
||||
with gzip.open(path, "wb") as f:
|
||||
f.write(b"hello")
|
||||
with pytest.raises(
|
||||
bindist.InvalidMetadataFile, match="Compressed spec files are not supported"
|
||||
):
|
||||
bindist._get_valid_spec_file(str(path), max_supported_layout=1)
|
||||
get_valid_spec_file(str(tmp_path / "no-such-file"), max_supported_layout=1)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("filename", ["spec.json", "spec.json.sig"])
|
||||
def test_get_valid_spec_file_no_json(tmp_path, filename):
|
||||
tmp_path.joinpath(filename).write_text("not json")
|
||||
with pytest.raises(bindist.InvalidMetadataFile):
|
||||
bindist._get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1)
|
||||
get_valid_spec_file(str(tmp_path / filename), max_supported_layout=1)
|
||||
|
||||
|
||||
def test_download_tarball_with_unsupported_layout_fails(
|
||||
tmp_path, mock_packages, mutable_config, capsys
|
||||
):
|
||||
layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1
|
||||
spec = spack.concretize.concretize_one("pkg-c")
|
||||
spec_dict = spec.to_dict()
|
||||
spec_dict["buildcache_layout_version"] = layout_version
|
||||
@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch", "temporary_mirror")
|
||||
def test_url_buildcache_entry_v3(monkeypatch, tmpdir):
|
||||
"""Make sure URLBuildcacheEntry behaves as expected"""
|
||||
|
||||
# Setup a basic local build cache structure
|
||||
path = (
|
||||
tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json")
|
||||
)
|
||||
path.parent.mkdir(parents=True)
|
||||
with open(path, "w", encoding="utf-8") as f:
|
||||
json.dump(spec_dict, f)
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
mirror_dir = tmpdir.join("mirror_dir")
|
||||
mirror_url = url_util.path_to_file_url(mirror_dir.strpath)
|
||||
|
||||
# Configure as a mirror.
|
||||
mirror_cmd("add", "test-mirror", str(tmp_path))
|
||||
s = Spec("libdwarf").concretized()
|
||||
|
||||
# Shouldn't be able "download" this.
|
||||
assert bindist.download_tarball(spec, unsigned=True) is None
|
||||
# Install libdwarf
|
||||
install_cmd("--fake", s.name)
|
||||
|
||||
# And there should be a warning about an unsupported layout version.
|
||||
assert f"Layout version {layout_version} is too new" in capsys.readouterr().err
|
||||
# Push libdwarf to buildcache
|
||||
buildcache_cmd("push", "-u", mirror_dir.strpath, s.name)
|
||||
|
||||
cache_class = get_url_buildcache_class(bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION)
|
||||
build_cache = cache_class(mirror_url, s, allow_unsigned=True)
|
||||
|
||||
manifest = build_cache.read_manifest()
|
||||
spec_dict = build_cache.fetch_metadata()
|
||||
local_tarball_path = build_cache.fetch_archive()
|
||||
|
||||
assert "spec" in spec_dict
|
||||
|
||||
for blob_record in manifest.data:
|
||||
blob_path = build_cache.get_staged_blob_path(blob_record)
|
||||
assert os.path.exists(blob_path)
|
||||
actual_blob_size = os.stat(blob_path).st_size
|
||||
assert blob_record.content_length == actual_blob_size
|
||||
|
||||
build_cache.destroy()
|
||||
|
||||
assert not os.path.exists(local_tarball_path)


def test_relative_path_components():
    blobs_v3 = URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.BLOB)
    assert len(blobs_v3) == 1
    assert "blobs" in blobs_v3

    blobs_v2 = URLBuildcacheEntryV2.get_relative_path_components(BuildcacheComponent.BLOB)
    assert len(blobs_v2) == 1
    assert "build_cache" in blobs_v2

    v2_spec_url = "file:///home/me/mymirror/build_cache/linux-ubuntu22.04-sapphirerapids-gcc-12.3.0-gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.json.sig"
    assert URLBuildcacheEntryV2.get_base_url(v2_spec_url) == "file:///home/me/mymirror"

    v3_manifest_url = "file:///home/me/mymirror/v3/manifests/gmake-4.4.1-5pddli3htvfe6svs7nbrqmwi5735agi3.spec.manifest.json"
    assert URLBuildcacheEntry.get_base_url(v3_manifest_url) == "file:///home/me/mymirror"


@pytest.mark.parametrize(
@@ -1170,3 +1226,244 @@ def test_download_tarball_with_unsupported_layout_fails(
def test_default_tag(spec: str):
    """Make sure that computed image tags are valid."""
    assert re.fullmatch(spack.oci.image.tag, bindist._oci_default_tag(spack.spec.Spec(spec)))
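
For reference, a minimal sketch of the tag grammar published in the OCI distribution spec, which this test exercises; treating spack.oci.image.tag as equivalent to this regex is an assumption, and the sample tags below are invented:

import re

# OCI distribution spec tag grammar: one word character, followed by up to
# 127 word characters, dots, or dashes.
OCI_TAG = re.compile(r"[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}")

assert re.fullmatch(OCI_TAG, "1.2.3-gcc-12.3.0")  # a plausible default tag
assert not re.fullmatch(OCI_TAG, "not:a:valid:tag")  # ':' is not allowed in a tag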


class IndexInformation(NamedTuple):
    manifest_contents: Dict[str, Any]
    index_contents: str
    index_hash: str
    manifest_path: str
    index_path: str
    manifest_etag: str
    fetched_blob: Callable[[], bool]


@pytest.fixture
def mock_index(tmp_path, monkeypatch) -> IndexInformation:
    mirror_root = tmp_path / "mymirror"
    index_json = '{"Hello": "World"}'
    index_json_hash = bindist.compute_hash(index_json)
    fetched = False

    cache_class = get_url_buildcache_class(
        layout_version=bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
    )

    index_blob_path = os.path.join(
        str(mirror_root),
        *cache_class.get_relative_path_components(BuildcacheComponent.BLOB),
        "sha256",
        index_json_hash[:2],
        index_json_hash,
    )

    os.makedirs(os.path.dirname(index_blob_path))
    with open(index_blob_path, "w", encoding="utf-8") as fd:
        fd.write(index_json)

    index_blob_record = bindist.BlobRecord(
        os.stat(index_blob_path).st_size,
        cache_class.BUILDCACHE_INDEX_MEDIATYPE,
        "none",
        "sha256",
        index_json_hash,
    )

    index_manifest = {
        "version": cache_class.get_layout_version(),
        "data": [index_blob_record.to_dict()],
    }

    manifest_json_path = cache_class.get_index_url(str(mirror_root))

    os.makedirs(os.path.dirname(manifest_json_path))

    with open(manifest_json_path, "w", encoding="utf-8") as f:
        json.dump(index_manifest, f)

    def fetch_patch(stage, mirror_only: bool = False, err_msg: Optional[str] = None):
        nonlocal fetched
        fetched = True

    @property  # type: ignore
    def save_filename_patch(stage):
        return str(index_blob_path)

    monkeypatch.setattr(spack.stage.Stage, "fetch", fetch_patch)
    monkeypatch.setattr(spack.stage.Stage, "save_filename", save_filename_patch)

    def get_did_fetch():
        return fetched

    return IndexInformation(
        index_manifest,
        index_json,
        index_json_hash,
        manifest_json_path,
        index_blob_path,
        "59bcc3ad6775562f845953cf01624225",
        get_did_fetch,
    )
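
As a side note on the fixture above, a self-contained sketch of how a content-addressed blob path of the form blobs/sha256/&lt;two-character prefix&gt;/&lt;hash&gt; follows from the blob contents; this helper is illustrative only, not Spack's API:

import hashlib
import os

def blob_relative_path(content: bytes) -> str:
    # Shard blobs into 256 subdirectories using the first two hex digits.
    digest = hashlib.sha256(content).hexdigest()
    return os.path.join("blobs", "sha256", digest[:2], digest)

# The fixture builds the same shape of path for its '{"Hello": "World"}' index.
print(blob_relative_path(b'{"Hello": "World"}'))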


def test_etag_fetching_304():
    # Test conditional fetch with etags. If the remote hasn't modified the file
    # it returns 304, which is an HTTPError in urllib-land. That should be
    # handled as success, since it means the local cache is up-to-date.
    def response_304(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith(INDEX_MANIFEST_FILE):
            assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
            raise urllib.error.HTTPError(
                url, 304, "Not Modified", hdrs={}, fp=None  # type: ignore[arg-type]
            )
        assert False, "Unexpected request {}".format(url)

    fetcher = bindist.EtagIndexFetcher(
        bindist.MirrorURLAndVersion(
            "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        ),
        etag="112a8bbc1b3f7f185621c1ee335f0502",
        urlopen=response_304,
    )

    result = fetcher.conditional_fetch()
    assert isinstance(result, bindist.FetchIndexResult)
    assert result.fresh
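
A standalone sketch of the conditional-GET convention this test relies on; the helper below is illustrative rather than Spack code: send If-None-Match with the cached ETag, and treat a 304 response as "the local copy is still fresh".

import urllib.error
import urllib.request
from typing import Optional

def conditional_get(url: str, etag: str) -> Optional[bytes]:
    request = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
    try:
        response = urllib.request.urlopen(request)
    except urllib.error.HTTPError as e:
        if e.code == 304:
            return None  # not modified; the cached copy is up-to-date
        raise  # any other HTTP error is a genuine fetch failure
    return response.read()  # 200: the server sent a newer version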


def test_etag_fetching_200(mock_index):
    # Test conditional fetch with etags. The remote has modified the file.
    def response_200(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith(INDEX_MANIFEST_FILE):
            assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
            return urllib.response.addinfourl(
                io.BytesIO(json.dumps(mock_index.manifest_contents).encode()),
                headers={"Etag": f'"{mock_index.manifest_etag}"'},  # type: ignore[arg-type]
                url=url,
                code=200,
            )
        assert False, "Unexpected request {}".format(url)

    fetcher = bindist.EtagIndexFetcher(
        bindist.MirrorURLAndVersion(
            "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        ),
        etag="112a8bbc1b3f7f185621c1ee335f0502",
        urlopen=response_200,
    )

    result = fetcher.conditional_fetch()
    assert isinstance(result, bindist.FetchIndexResult)
    assert not result.fresh
    assert mock_index.fetched_blob()
    assert result.etag == mock_index.manifest_etag
    assert result.data == mock_index.index_contents
    assert result.hash == mock_index.index_hash


def test_etag_fetching_404():
    # Test conditional fetch with etags. Here the remote file no longer
    # exists (404), which should be reported as a FetchIndexError.
    def response_404(request: urllib.request.Request):
        raise urllib.error.HTTPError(
            request.get_full_url(),
            404,
            "Not found",
            hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
            fp=None,
        )

    fetcher = bindist.EtagIndexFetcher(
        bindist.MirrorURLAndVersion(
            "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        ),
        etag="112a8bbc1b3f7f185621c1ee335f0502",
        urlopen=response_404,
    )

    with pytest.raises(bindist.FetchIndexError):
        fetcher.conditional_fetch()


def test_default_index_fetch_200(mock_index):
    # We fetch the manifest and then the index blob if the hash is outdated
    def urlopen(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith(INDEX_MANIFEST_FILE):
            return urllib.response.addinfourl(  # type: ignore[arg-type]
                io.BytesIO(json.dumps(mock_index.manifest_contents).encode()),
                headers={"Etag": f'"{mock_index.manifest_etag}"'},  # type: ignore[arg-type]
                url=url,
                code=200,
            )

        assert False, "Unexpected request {}".format(url)

    fetcher = bindist.DefaultIndexFetcher(
        bindist.MirrorURLAndVersion(
            "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        ),
        local_hash="outdated",
        urlopen=urlopen,
    )

    result = fetcher.conditional_fetch()

    assert isinstance(result, bindist.FetchIndexResult)
    assert not result.fresh
    assert mock_index.fetched_blob()
    assert result.etag == mock_index.manifest_etag
    assert result.data == mock_index.index_contents
    assert result.hash == mock_index.index_hash


def test_default_index_404():
    # We get a fetch error if the index can't be fetched
    def urlopen(request: urllib.request.Request):
        raise urllib.error.HTTPError(
            request.get_full_url(),
            404,
            "Not found",
            hdrs={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
            fp=None,
        )

    fetcher = bindist.DefaultIndexFetcher(
        bindist.MirrorURLAndVersion(
            "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        ),
        local_hash=None,
        urlopen=urlopen,
    )

    with pytest.raises(bindist.FetchIndexError):
        fetcher.conditional_fetch()


def test_default_index_not_modified(mock_index):
    # We don't fetch the index blob if the hash didn't change
    def urlopen(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith(INDEX_MANIFEST_FILE):
            return urllib.response.addinfourl(
                io.BytesIO(json.dumps(mock_index.manifest_contents).encode()),
                headers={},  # type: ignore[arg-type]
                url=url,
                code=200,
            )

        # No other request should be made.
        assert False, "Unexpected request {}".format(url)

    fetcher = bindist.DefaultIndexFetcher(
        bindist.MirrorURLAndVersion(
            "https://www.example.com", bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
        ),
        local_hash=mock_index.index_hash,
        urlopen=urlopen,
    )

    assert fetcher.conditional_fetch().fresh
    assert not mock_index.fetched_blob()
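
The decision these default-index tests pin down can be reduced to a hash comparison; the following is an illustrative reduction under that reading, not the fetcher's actual implementation:

from typing import Optional

def needs_index_blob(local_hash: Optional[str], manifest_hash: str) -> bool:
    # The small manifest is always fetched; the larger index blob is only
    # downloaded when the advertised hash differs from the local one.
    return local_hash != manifest_hash

assert needs_index_blob(None, "deadbeef")  # nothing cached: fetch the blob
assert needs_index_blob("outdated", "deadbeef")  # stale cache: fetch the blob
assert not needs_index_blob("deadbeef", "deadbeef")  # up-to-date: skip it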

@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import shutil

import pytest

@@ -37,12 +37,7 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
    assert not skipped

    # Remove the tarball, which should cause push to push.
    os.remove(
        tmp_path
        / bd.BUILD_CACHE_RELATIVE_PATH
        / bd.tarball_directory_name(spec)
        / bd.tarball_name(spec, ".spack")
    )
    shutil.rmtree(tmp_path / bd.buildcache_relative_blobs_path())

    with bd.make_uploader(mirror) as uploader:
        skipped = uploader.push_or_raise(specs)

@@ -15,7 +15,7 @@

@pytest.fixture()
def builder_test_repository(config):
    builder_test_path = os.path.join(spack.paths.repos_path, "builder.test")
    builder_test_path = os.path.join(spack.paths.test_repos_path, "builder.test")
    with spack.repo.use_repositories(builder_test_path) as mock_repo:
        yield mock_repo


@@ -414,7 +414,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):


@pytest.mark.regression("29947")
def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_packages):
def test_affected_specs_on_first_concretization(mutable_mock_env_path):
    e = ev.create("first_concretization")
    e.add("mpileaks~shared")
    e.add("mpileaks+shared")
@@ -444,7 +444,7 @@ def _fail(self, args):
    ci.process_command("help", [], str(repro_dir))


def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
def test_ci_create_buildcache(tmpdir, working_env, config, monkeypatch):
    """Test that create_buildcache returns a list of objects with the correct
    keys and types."""
    monkeypatch.setattr(ci, "push_to_build_cache", lambda a, b, c: True)
@@ -483,7 +483,7 @@ def test_ci_run_standalone_tests_missing_requirements(

@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
def test_ci_run_standalone_tests_not_installed_junit(
    tmp_path, repro_dir, working_env, mock_test_stage, capfd, mock_packages
    tmp_path, repro_dir, working_env, mock_test_stage, capfd
):
    log_file = tmp_path / "junit.xml"
    args = {
@@ -501,7 +501,7 @@ def test_ci_run_standalone_tests_not_installed_junit(

@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
def test_ci_run_standalone_tests_not_installed_cdash(
    tmp_path, repro_dir, working_env, mock_test_stage, capfd, mock_packages
    tmp_path, repro_dir, working_env, mock_test_stage, capfd
):
    """Test run_standalone_tests with cdash and related options."""
    log_file = tmp_path / "junit.xml"
@@ -537,7 +537,7 @@ def test_ci_run_standalone_tests_not_installed_cdash(
    assert "No such file or directory" in err


def test_ci_skipped_report(tmpdir, mock_packages, config):
def test_ci_skipped_report(tmpdir, config):
    """Test explicit skipping of report as well as CI's 'package' arg."""
    pkg = "trivial-smoke-test"
    spec = spack.concretize.concretize_one(pkg)

@@ -5,12 +5,16 @@
import errno
import json
import os
import pathlib
import shutil
from typing import List

import pytest

from llnl.util.filesystem import copy_tree, find

import spack.binary_distribution
import spack.buildcache_migrate as migrate
import spack.cmd.buildcache
import spack.concretize
import spack.environment as ev
@@ -18,8 +22,16 @@
import spack.main
import spack.mirrors.mirror
import spack.spec
import spack.util.url
import spack.util.url as url_util
from spack.installer import PackageInstaller
from spack.paths import test_path
from spack.url_buildcache import (
    BuildcacheComponent,
    URLBuildcacheEntry,
    URLBuildcacheEntryV2,
    check_mirror_for_layout,
    get_url_buildcache_class,
)

buildcache = spack.main.SpackCommand("buildcache")
install = spack.main.SpackCommand("install")
@@ -74,20 +86,6 @@ def test_buildcache_list_allarch(database, mock_get_specs_multiarch, capsys):
    assert output.count("mpileaks") == 2


def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
    """Ensure that buildcache create creates output files"""
    pkg = "trivial-install-test-package"
    install(pkg)

    buildcache("push", "--unsigned", str(tmpdir), pkg)

    spec = spack.concretize.concretize_one(pkg)
    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
    tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))


def tests_buildcache_create_env(
    install_mockery, mock_fetch, monkeypatch, tmpdir, mutable_mock_env_path
):
@@ -102,10 +100,15 @@ def tests_buildcache_create_env(
    buildcache("push", "--unsigned", str(tmpdir))

    spec = spack.concretize.concretize_one(pkg)
    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
    tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))

    mirror_url = f"file://{tmpdir.strpath}"

    cache_class = get_url_buildcache_class(
        layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
    )
    cache_entry = cache_class(mirror_url, spec, allow_unsigned=True)
    assert cache_entry.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])
    cache_entry.destroy()


def test_buildcache_create_fails_on_noargs(tmpdir):
@@ -159,12 +162,14 @@ def test_update_key_index(
    # it causes the index to get updated.
    buildcache("update-index", "--keys", mirror_dir.strpath)

    key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))
    key_dir_list = os.listdir(
        os.path.join(mirror_dir.strpath, spack.binary_distribution.buildcache_relative_keys_path())
    )

    uninstall("-y", s.name)
    mirror("rm", "test-mirror")

    assert "index.json" in key_dir_list
    assert "keys.manifest.json" in key_dir_list


def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
@@ -180,10 +185,14 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
    # Install and generate build cache index
    PackageInstaller([s.package], fake=True, explicit=True).install()

    metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")
    assert s.name is not None
    manifest_file = URLBuildcacheEntry.get_manifest_filename(s)
    specs_dirs = os.path.join(
        *URLBuildcacheEntry.get_relative_path_components(BuildcacheComponent.SPEC), s.name
    )

    assert not (mirror_dir / "build_cache" / metadata_file).exists()
    assert (mirror_autopush_dir / "build_cache" / metadata_file).exists()
    assert not (mirror_dir / specs_dirs / manifest_file).exists()
    assert (mirror_autopush_dir / specs_dirs / manifest_file).exists()


def test_buildcache_sync(
@@ -205,7 +214,11 @@ def test_buildcache_sync(
    out_env_pkg = "libdwarf"

    def verify_mirror_contents():
        dest_list = os.listdir(os.path.join(dest_mirror_dir, "build_cache"))
        dest_list = os.listdir(
            os.path.join(
                dest_mirror_dir, spack.binary_distribution.buildcache_relative_specs_path()
            )
        )

        found_pkg = False

@@ -252,33 +265,15 @@ def verify_mirror_contents():
    verify_mirror_contents()
    shutil.rmtree(dest_mirror_dir)

    cache_class = get_url_buildcache_class(
        layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
    )

    def manifest_insert(manifest, spec, dest_url):
        manifest[spec.dag_hash()] = [
            {
                "src": spack.util.url.join(
                    src_mirror_url,
                    spack.binary_distribution.build_cache_relative_path(),
                    spack.binary_distribution.tarball_name(spec, ".spec.json"),
                ),
                "dest": spack.util.url.join(
                    dest_url,
                    spack.binary_distribution.build_cache_relative_path(),
                    spack.binary_distribution.tarball_name(spec, ".spec.json"),
                ),
            },
            {
                "src": spack.util.url.join(
                    src_mirror_url,
                    spack.binary_distribution.build_cache_relative_path(),
                    spack.binary_distribution.tarball_path_name(spec, ".spack"),
                ),
                "dest": spack.util.url.join(
                    dest_url,
                    spack.binary_distribution.build_cache_relative_path(),
                    spack.binary_distribution.tarball_path_name(spec, ".spack"),
                ),
            },
        ]
        manifest[spec.dag_hash()] = {
            "src": cache_class.get_manifest_url(spec, src_mirror_url),
            "dest": cache_class.get_manifest_url(spec, dest_url),
        }

    manifest_file = os.path.join(tmpdir.strpath, "manifest_dest.json")
    with open(manifest_file, "w", encoding="utf-8") as fd:
@@ -298,9 +293,7 @@ def manifest_insert(manifest, spec, dest_url):
    with open(manifest_file, "w", encoding="utf-8") as fd:
        manifest = {}
        for spec in test_env.specs_by_hash.values():
            manifest_insert(
                manifest, spec, spack.util.url.join(dest_mirror_url, "invalid_path")
            )
            manifest_insert(manifest, spec, url_util.join(dest_mirror_url, "invalid_path"))
        json.dump(manifest, fd)

    # Trigger the warning
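
For context on the manifest_insert rewrite above, a hypothetical example (hashes and URLs invented) of the sync manifest it now emits: a single src/dest pair of manifest URLs per DAG hash, replacing the old per-file entries for the spec file and tarball:

import json

manifest = {
    "abcdef1234567890": {  # a spec's DAG hash (invented)
        "src": "file:///src-mirror/v3/manifests/libdwarf-abcdef12.spec.manifest.json",
        "dest": "file:///dest-mirror/v3/manifests/libdwarf-abcdef12.spec.manifest.json",
    }
}
print(json.dumps(manifest, indent=2))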
@@ -327,11 +320,37 @@ def test_buildcache_create_install(

    buildcache("push", "--unsigned", str(tmpdir), pkg)

    mirror_url = f"file://{tmpdir.strpath}"

    spec = spack.concretize.concretize_one(pkg)
    tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
    tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
    assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
    cache_class = get_url_buildcache_class(
        layout_version=spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
    )
    cache_entry = cache_class(mirror_url, spec, allow_unsigned=True)
    assert spec.name is not None
    manifest_path = os.path.join(
        str(tmpdir),
        *cache_class.get_relative_path_components(BuildcacheComponent.SPEC),
        spec.name,
        cache_class.get_manifest_filename(spec),
    )

    assert os.path.exists(manifest_path)
    cache_entry.read_manifest()
    spec_blob_record = cache_entry.get_blob_record(BuildcacheComponent.SPEC)
    tarball_blob_record = cache_entry.get_blob_record(BuildcacheComponent.TARBALL)

    spec_blob_path = os.path.join(
        tmpdir.strpath, *cache_class.get_blob_path_components(spec_blob_record)
    )
    assert os.path.exists(spec_blob_path)

    tarball_blob_path = os.path.join(
        tmpdir.strpath, *cache_class.get_blob_path_components(tarball_blob_record)
    )
    assert os.path.exists(tarball_blob_path)

    cache_entry.destroy()


@pytest.mark.parametrize(
@@ -503,3 +522,230 @@ def test_push_without_build_deps(tmp_path, temporary_store, mock_packages, mutab
        "push", "--update-index", "--without-build-dependencies", "my-mirror", f"/{s.dag_hash()}"
    )
    assert spack.binary_distribution.update_cache_and_get_specs() == [s]


@pytest.fixture(scope="function")
def v2_buildcache_layout(tmp_path):
    def _layout(signedness: str = "signed"):
        source_path = str(pathlib.Path(test_path) / "data" / "mirrors" / "v2_layout" / signedness)
        test_mirror_path = tmp_path / "mirror"
        copy_tree(source_path, test_mirror_path)
        return test_mirror_path

    return _layout


def test_check_mirror_for_layout(v2_buildcache_layout, mutable_config, capsys):
    """Check printed warning in the presence of v2 layout binary mirrors"""
    test_mirror_path = v2_buildcache_layout("unsigned")

    check_mirror_for_layout(spack.mirrors.mirror.Mirror.from_local_path(str(test_mirror_path)))
    err = str(capsys.readouterr()[1])
    assert all([word in err for word in ["Warning", "missing", "layout"]])


def test_url_buildcache_entry_v2_exists(
    capsys, v2_buildcache_layout, mock_packages, mutable_config
):
    """Test existence check for v2 buildcache entries"""
    test_mirror_path = v2_buildcache_layout("unsigned")
    mirror_url = f"file://{test_mirror_path}"
    mirror("add", "v2mirror", mirror_url)

    with capsys.disabled():
        output = buildcache("list", "-a", "-l")

    assert "Fetching an index from a v2 binary mirror layout" in output
    assert "is deprecated" in output

    v2_cache_class = URLBuildcacheEntryV2

    # If you don't give it a spec, exists() returns False
    build_cache = v2_cache_class(mirror_url)
    assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])

    spec = spack.concretize.concretize_one("libdwarf")

    # In v2 we have to check for both, since we need the spec file in order
    # to locate the tarball
    build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
    assert not build_cache.exists([BuildcacheComponent.TARBALL])
    assert not build_cache.exists([BuildcacheComponent.SPEC])
    # But if we do ask for both, they should be there in this case
    assert build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])

    spec_path = build_cache._get_spec_url(spec, mirror_url, ext=".spec.json")[7:]
    tarball_path = build_cache._get_tarball_url(spec, mirror_url)[7:]

    os.remove(tarball_path)
    build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
    assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])

    os.remove(spec_path)
    build_cache = v2_cache_class(mirror_url, spec, allow_unsigned=True)
    assert not build_cache.exists([BuildcacheComponent.SPEC, BuildcacheComponent.TARBALL])


@pytest.mark.parametrize("signing", ["unsigned", "signed"])
def test_install_v2_layout(
    signing,
    capsys,
    v2_buildcache_layout,
    mock_packages,
    mutable_config,
    mutable_mock_env_path,
    install_mockery,
    mock_gnupghome,
    monkeypatch,
):
    """Ensure we can still install from signed and unsigned v2 buildcache"""
    test_mirror_path = v2_buildcache_layout(signing)
    mirror("add", "my-mirror", str(test_mirror_path))

    # Trust original signing key (no-op if this is the unsigned pass)
    buildcache("keys", "--install", "--trust")

    with capsys.disabled():
        output = install("--fake", "--no-check-signature", "libdwarf")

    assert "Extracting libelf" in output
    assert "libelf: Successfully installed" in output
    assert "Extracting libdwarf" in output
    assert "libdwarf: Successfully installed" in output
    assert "Installing a spec from a v2 binary mirror layout" in output
    assert "is deprecated" in output


def test_basic_migrate_unsigned(capsys, v2_buildcache_layout, mutable_config):
    """Make sure first unsigned migration results in usable buildcache,
    leaving the previous layout in place. Also test that a subsequent one
    doesn't need to migrate anything, and that using --delete-existing
    removes the previous layout"""

    test_mirror_path = v2_buildcache_layout("unsigned")
    mirror("add", "my-mirror", str(test_mirror_path))

    with capsys.disabled():
        output = buildcache("migrate", "--unsigned", "my-mirror")

    # The output indicates all the specs in the mirror were migrated
    assert output.count("Successfully migrated") == 6

    build_cache_path = str(test_mirror_path / "build_cache")

    # Without "--delete-existing" and "--yes-to-all", migration leaves the
    # previous layout in place
    assert os.path.exists(build_cache_path)
    assert os.path.isdir(build_cache_path)

    # Now list the specs available under the new layout
    with capsys.disabled():
        output = buildcache("list", "--allarch")

    assert "libdwarf" in output and "libelf" in output

    with capsys.disabled():
        output = buildcache(
            "migrate", "--unsigned", "--delete-existing", "--yes-to-all", "my-mirror"
        )

    # A second migration of the same mirror indicates none of the specs
    # need to be migrated
    assert output.count("No need to migrate") == 6

    # When we provide "--delete-existing" and "--yes-to-all", migration
    # removes the old layout
    assert not os.path.exists(build_cache_path)


def test_basic_migrate_signed(
    capsys, v2_buildcache_layout, monkeypatch, mock_gnupghome, mutable_config
):
    """Test a signed migration requires a signing key, requires the public
    key originally used to sign the pkgs, fails and prints reasonable messages
    if those requirements are unmet, and eventually succeeds when they are met."""
    test_mirror_path = v2_buildcache_layout("signed")
    mirror("add", "my-mirror", str(test_mirror_path))

    with pytest.raises(migrate.MigrationException) as error:
        buildcache("migrate", "my-mirror")

    # Without a signing key spack fails and explains why
    assert error.value.message == "Signed migration requires exactly one secret key in keychain"

    # Create a signing key and trust the key used to sign the pkgs originally
    gpg("create", "New Test Signing Key", "noone@nowhere.org")

    with capsys.disabled():
        output = buildcache("migrate", "my-mirror")

    # Without trusting the original signing key, spack fails with an explanation
    assert "Failed to verify signature of libelf" in output
    assert "Failed to verify signature of libdwarf" in output
    assert "did you mean to perform an unsigned migration" in output

    # Trust original signing key (since it's in the original layout location,
    # this is where the monkeypatched attribute is used)
    with capsys.disabled():
        output = buildcache("keys", "--install", "--trust")

    with capsys.disabled():
        output = buildcache("migrate", "my-mirror")

    # Once we have the proper keys, migration should succeed
    assert "Successfully migrated libelf" in output
    assert "Successfully migrated libdwarf" in output

    # Now list the specs available under the new layout
    with capsys.disabled():
        output = buildcache("list", "--allarch")

    assert "libdwarf" in output and "libelf" in output


def test_unsigned_migrate_of_signed_mirror(capsys, v2_buildcache_layout, mutable_config):
    """Test spack can do an unsigned migration of a signed buildcache by
    ignoring signatures and skipping re-signing."""

    test_mirror_path = v2_buildcache_layout("signed")
    mirror("add", "my-mirror", str(test_mirror_path))

    with capsys.disabled():
        output = buildcache(
            "migrate", "--unsigned", "--delete-existing", "--yes-to-all", "my-mirror"
        )

    # Now list the specs available under the new layout
    with capsys.disabled():
        output = buildcache("list", "--allarch")

    assert "libdwarf" in output and "libelf" in output

    # We should find a spec manifest file for every migrated spec
    file_list = find(test_mirror_path, "*.spec.manifest.json")
    assert len(file_list) == 6
    assert any(["libdwarf" in file for file in file_list])
    assert any(["libelf" in file for file in file_list])

    # The spec manifest files should be unsigned, i.e. plain JSON rather
    # than clearsigned text
    for file_path in file_list:
        with open(file_path, "r", encoding="utf-8") as fd:
            assert json.load(fd)


def test_migrate_requires_index(capsys, v2_buildcache_layout, mutable_config):
    """Test spack fails with a reasonable error message when mirror does
    not have an index"""

    test_mirror_path = v2_buildcache_layout("unsigned")
    v2_index_path = test_mirror_path / "build_cache" / "index.json"
    v2_index_hash_path = test_mirror_path / "build_cache" / "index.json.hash"
    os.remove(str(v2_index_path))
    os.remove(str(v2_index_hash_path))

    mirror("add", "my-mirror", str(test_mirror_path))

    with pytest.raises(migrate.MigrationException) as error:
        buildcache("migrate", "--unsigned", "my-mirror")

    # If the buildcache has no index, spack fails and explains why
    assert error.value.message == "Buildcache migration requires a buildcache index"

@@ -31,11 +31,8 @@
from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig
from spack.ci.generator_registry import generator
from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
from spack.database import INDEX_JSON_FILE
from spack.error import SpackError
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.spec import Spec
from spack.test.conftest import MockHTTPResponse

config_cmd = spack.main.SpackCommand("config")
@@ -718,7 +715,7 @@ def test_ci_nothing_to_rebuild(
    )

    install_cmd("archive-files")
    buildcache_cmd("push", "-f", "-u", mirror_url, "archive-files")
    buildcache_cmd("push", "-f", "-u", "--update-index", mirror_url, "archive-files")

    with working_dir(tmp_path):
        env_cmd("create", "test", "./spack.yaml")
@@ -855,18 +852,18 @@ def test_push_to_build_cache(

    # Test generating buildcache index while we have bin mirror
    buildcache_cmd("update-index", mirror_url)
    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as idx_fd:
        index_object = json.load(idx_fd)
        jsonschema.validate(index_object, db_idx_schema)

    # Validate resulting buildcache (database) index
    layout_version = spack.binary_distribution.CURRENT_BUILD_CACHE_LAYOUT_VERSION
    url_and_version = spack.binary_distribution.MirrorURLAndVersion(
        mirror_url, layout_version
    )
    index_fetcher = spack.binary_distribution.DefaultIndexFetcher(url_and_version, None)
    result = index_fetcher.conditional_fetch()
    jsonschema.validate(json.loads(result.data), db_idx_schema)

    # Now that index is regenerated, validate "buildcache list" output
    assert "patchelf" in buildcache_cmd("list", output=str)
    # Also test buildcache_spec schema
    for file_name in os.listdir(mirror_dir / "build_cache"):
        if file_name.endswith(".spec.json.sig"):
            with open(mirror_dir / "build_cache" / file_name, encoding="utf-8") as f:
                spec_dict = Spec.extract_json_from_clearsig(f.read())
                jsonschema.validate(spec_dict, specfile_schema)

    logs_dir = scratch / "logs_dir"
    logs_dir.mkdir()
@@ -1032,7 +1029,7 @@ def test_ci_generate_override_runner_attrs(


def test_ci_rebuild_index(
    tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_fetch
    tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_fetch, capsys
):
    scratch = tmp_path / "working_dir"
    mirror_dir = scratch / "mirror"
@@ -1069,8 +1066,9 @@ def test_ci_rebuild_index(
    buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")
    ci_cmd("rebuild-index")

    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as f:
        jsonschema.validate(json.load(f), db_idx_schema)
    with capsys.disabled():
        output = buildcache_cmd("list", "--allarch")
    assert "callpath" in output


def test_ci_get_stack_changed(mock_git_repo, monkeypatch):
@@ -2030,13 +2028,12 @@ def test_ci_verify_versions_valid(
    tmpdir,
):
    repo, _, commits = mock_git_package_changes
    spack.repo.PATH.put_first(repo)
    with spack.repo.use_repositories(repo):
        monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))

    out = ci_cmd("verify-versions", commits[-1], commits[-3])
    assert "Validated diff-test@2.1.5" in out
    assert "Validated diff-test@2.1.6" in out
        out = ci_cmd("verify-versions", commits[-1], commits[-3])
        assert "Validated diff-test@2.1.5" in out
        assert "Validated diff-test@2.1.6" in out


def test_ci_verify_versions_standard_invalid(
@@ -2047,23 +2044,21 @@ def test_ci_verify_versions_standard_invalid(
    verify_git_versions_invalid,
):
    repo, _, commits = mock_git_package_changes
    spack.repo.PATH.put_first(repo)
    with spack.repo.use_repositories(repo):
        monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))

    out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
    assert "Invalid checksum found diff-test@2.1.5" in out
    assert "Invalid commit for diff-test@2.1.6" in out
        out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
        assert "Invalid checksum found diff-test@2.1.5" in out
        assert "Invalid commit for diff-test@2.1.6" in out


def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_package_changes):
    repo, _, commits = mock_git_package_changes
    spack.repo.PATH.put_first(repo)
    with spack.repo.use_repositories(repo):
        monkeypatch.setattr(spack.repo, "builtin_repo", lambda: repo)

    monkeypatch.setattr(spack.repo, "packages_path", mock_packages_path(repo.packages_path))
    pkg_class = spack.spec.Spec("diff-test").package_class
    monkeypatch.setattr(pkg_class, "manual_download", True)

        pkg_class = spack.spec.Spec("diff-test").package_class
        monkeypatch.setattr(pkg_class, "manual_download", True)

    out = ci_cmd("verify-versions", commits[-1], commits[-2])
    assert "Skipping manual download package: diff-test" in out
        out = ci_cmd("verify-versions", commits[-1], commits[-2])
        assert "Skipping manual download package: diff-test" in out

@@ -20,6 +20,8 @@
config = spack.main.SpackCommand("config")
env = spack.main.SpackCommand("env")

pytestmark = pytest.mark.usefixtures("mock_packages")


def _create_config(scope=None, data={}, section="packages"):
    scope = scope or spack.config.default_modify_scope()

@@ -1829,7 +1829,7 @@ def test_indirect_build_dep(tmp_path):
    build-only dep. Make sure this concrete DAG is preserved when writing the
    environment out and reading it back.
    """
    builder = spack.repo.MockRepositoryBuilder(tmp_path / "repo")
    builder = spack.repo.MockRepositoryBuilder(tmp_path)
    builder.add_package("z")
    builder.add_package("y", dependencies=[("z", "build", None)])
    builder.add_package("x", dependencies=[("y", None, None)])
@@ -1862,7 +1862,7 @@ def test_store_different_build_deps(tmp_path):
    z1

    """
    builder = spack.repo.MockRepositoryBuilder(tmp_path / "mirror")
    builder = spack.repo.MockRepositoryBuilder(tmp_path)
    builder.add_package("z")
    builder.add_package("y", dependencies=[("z", "build", None)])
    builder.add_package("x", dependencies=[("y", None, None), ("z", "build", None)])

@@ -448,7 +448,7 @@ def test_find_loaded(database, working_env):


@pytest.mark.regression("37712")
def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path, mock_packages):
    """Tests that having an active environment with a root spec containing a compiler constrained
    by a version range (i.e. @X.Y rather than the single version @=X.Y) doesn't result in an error
    when invoking "spack find".

@@ -8,6 +8,7 @@

import llnl.util.filesystem as fs

import spack.binary_distribution as bindist
import spack.util.executable
import spack.util.gpg
from spack.main import SpackCommand
@@ -172,23 +173,25 @@ def test_gpg(tmpdir, mutable_config, mock_gnupghome):
    # Verification should now succeed again.
    gpg("verify", str(test_path))

    relative_keys_path = bindist.buildcache_relative_keys_path()

    # Publish the keys using a directory path
    test_path = tmpdir.join("dir_cache")
    os.makedirs("%s" % test_path)
    os.makedirs(f"{test_path}")
    gpg("publish", "--rebuild-index", "-d", str(test_path))
    assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
    assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json")

    # Publish the keys using a mirror url
    test_path = tmpdir.join("url_cache")
    os.makedirs("%s" % test_path)
    test_url = "file://%s" % test_path
    os.makedirs(f"{test_path}")
    test_url = f"file://{test_path}"
    gpg("publish", "--rebuild-index", "--mirror-url", test_url)
    assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
    assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json")

    # Publish the keys using a mirror name
    test_path = tmpdir.join("named_cache")
    os.makedirs("%s" % test_path)
    mirror_url = "file://%s" % test_path
    os.makedirs(f"{test_path}")
    mirror_url = f"file://{test_path}"
    mirror("add", "gpg", mirror_url)
    gpg("publish", "--rebuild-index", "-m", "gpg")
    assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
    assert os.path.exists(f"{test_path}/{relative_keys_path}/keys.manifest.json")

@@ -9,6 +9,8 @@
import spack.cmd.info
from spack.main import SpackCommand

pytestmark = [pytest.mark.usefixtures("mock_packages")]

info = SpackCommand("info")


@@ -31,15 +33,12 @@ def _print(*args, **kwargs):
    return buffer


@pytest.mark.parametrize(
    "pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk", "gasnet", "warpx"]
)
@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
def test_it_just_runs(pkg, extra_args):
    info(pkg, *extra_args)
def test_it_just_runs(extra_args):
    info("vtk-m", *extra_args)


def test_info_noversion(mock_packages, print_buffer):
def test_info_noversion(print_buffer):
    """Check that a mock package with no versions outputs None."""
    info("noversion")

@@ -58,7 +57,7 @@ def test_info_noversion(mock_packages, print_buffer):
@pytest.mark.parametrize(
    "pkg_query,expected", [("zlib", "False"), ("find-externals1", "True (version)")]
)
def test_is_externally_detectable(mock_packages, pkg_query, expected, parser, print_buffer):
def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
    args = parser.parse_args(["--detectable", pkg_query])
    spack.cmd.info.info(parser, args)

@@ -70,13 +69,7 @@ def test_is_externally_detectable(mock_packages, pkg_query, expected, parser, pr


@pytest.mark.parametrize(
    "pkg_query",
    [
        "hdf5",
        "cloverleaf3d",
        "trilinos",
        "gcc",  # This should ensure --test's c_names processing loop covered
    ],
    "pkg_query", ["vtk-m", "gcc"]  # This should ensure --test's c_names processing loop covered
)
@pytest.mark.parametrize("extra_args", [[], ["--variants-by-name"]])
def test_info_fields(pkg_query, extra_args, parser, print_buffer):

@@ -6,16 +6,20 @@
import sys
from textwrap import dedent

import pytest

import spack.paths
import spack.repo
from spack.main import SpackCommand

pytestmark = [pytest.mark.usefixtures("mock_packages")]

list = SpackCommand("list")


def test_list():
    output = list()
    assert "cloverleaf3d" in output
    assert "bzip2" in output
    assert "hdf5" in output


@@ -41,7 +45,7 @@ def test_list_cli_output_format(mock_tty_stdout):
    assert out == out_str


def test_list_filter(mock_packages):
def test_list_filter():
    output = list("py-*")
    assert "py-extension1" in output
    assert "py-extension2" in output
@@ -57,18 +61,18 @@ def test_list_filter(mock_packages):
    assert "mpich" not in output


def test_list_search_description(mock_packages):
def test_list_search_description():
    output = list("--search-description", "one build dependency")
    assert "depb" in output


def test_list_format_name_only(mock_packages):
def test_list_format_name_only():
    output = list("--format", "name_only")
    assert "zmpi" in output
    assert "hdf5" in output


def test_list_format_version_json(mock_packages):
def test_list_format_version_json():
    output = list("--format", "version_json")
    assert '{"name": "zmpi",' in output
    assert '{"name": "dyninst",' in output
@@ -77,7 +81,7 @@ def test_list_format_version_json(mock_packages):
    json.loads(output)


def test_list_format_html(mock_packages):
def test_list_format_html():
    output = list("--format", "html")
    assert '<div class="section" id="zmpi">' in output
    assert "<h1>zmpi" in output
@@ -86,7 +90,7 @@ def test_list_format_html(mock_packages):
    assert "<h1>hdf5" in output


def test_list_update(tmpdir, mock_packages):
def test_list_update(tmpdir):
    update_file = tmpdir.join("output")

    # not yet created when list is run
@@ -113,7 +117,7 @@ def test_list_update(tmpdir, mock_packages):
    assert f.read() == "empty\n"


def test_list_tags(mock_packages):
def test_list_tags():
    output = list("--tag", "tag1")
    assert "mpich" in output
    assert "mpich2" in output
@@ -127,7 +131,7 @@ def test_list_tags(mock_packages):
    assert "mpich2" in output


def test_list_count(mock_packages):
def test_list_count():
    output = list("--count")
    assert int(output.strip()) == len(spack.repo.all_package_names())

@@ -137,11 +141,10 @@
)


# def test_list_repos(mock_packages, builder_test_repository):
def test_list_repos():
    with spack.repo.use_repositories(
        os.path.join(spack.paths.repos_path, "builtin.mock"),
        os.path.join(spack.paths.repos_path, "builder.test"),
        os.path.join(spack.paths.test_repos_path, "builtin.mock"),
        os.path.join(spack.paths.test_repos_path, "builder.test"),
    ):
        total_pkgs = len(list().strip().split())
        mock_pkgs = len(list("-r", "builtin.mock").strip().split())

Some files were not shown because too many files have changed in this diff.