Compare commits

270 commits
develop-20 ... packages/m
[Commit list: 270 commits, c034e1ba9f through 53be280681; the author, date, and message columns of the table were empty.]
.devcontainer/devcontainer.json (new file, +4)

@@ -0,0 +1,4 @@
+{
+  "image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
+  "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
+}
.devcontainer/postCreateCommand.sh (new executable file, +20)

@@ -0,0 +1,20 @@
+#!/bin/bash
+
+# Load spack environment at terminal startup
+cat <<EOF >> /root/.bashrc
+. /workspaces/spack/share/spack/setup-env.sh
+EOF
+
+# Load spack environment in this script
+. /workspaces/spack/share/spack/setup-env.sh
+
+# Ensure generic targets for maximum matching with buildcaches
+spack config --scope site add "packages:all:require:[target=x86_64_v3]"
+spack config --scope site add "concretizer:targets:granularity:generic"
+
+# Find compiler and install gcc-runtime
+spack compiler find --scope site
+
+# Setup buildcaches
+spack mirror add --scope site develop https://binaries.spack.io/develop
+spack buildcache keys --install --trust
.github/workflows/audit.yaml (vendored, 8 lines changed)

@@ -22,8 +22,8 @@ jobs:
      matrix:
        operating_system: ["ubuntu-latest", "macos-latest"]
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: ${{inputs.python_version}}
    - name: Install Python packages
@@ -43,7 +43,9 @@ jobs:
        . share/spack/setup-env.sh
        $(which spack) audit packages
+       $(which spack) audit externals
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab # @v2.1.0
+   - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
      if: ${{ inputs.with_coverage == 'true' }}
      with:
        flags: unittests,audits
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
.github/workflows/bootstrap.yml (vendored, 2 lines changed)

@@ -159,7 +159,7 @@ jobs:
        brew install cmake bison@2.7 tree
    - name: Checkout
      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: "3.12"
    - name: Bootstrap clingo
.github/workflows/build-containers.yml (vendored, 6 lines changed)

@@ -55,7 +55,7 @@ jobs:
    if: github.repository == 'spack/spack'
    steps:
      - name: Checkout
-       uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+       uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633

      - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
        id: docker_meta
@@ -96,7 +96,7 @@ jobs:
        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

      - name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c
+       uses: docker/setup-buildx-action@2b51285047da1547ffb1b2203d8be4c0af6b1f20

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
@@ -113,7 +113,7 @@ jobs:
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-       uses: docker/build-push-action@af5a7ed5ba88268d5278f7203fb52cd833f66d6e
+       uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
.github/workflows/ci.yaml (vendored, 6 lines changed)

@@ -18,6 +18,7 @@ jobs:
  prechecks:
    needs: [ changes ]
    uses: ./.github/workflows/valid-style.yml
+   secrets: inherit
    with:
      with_coverage: ${{ needs.changes.outputs.core }}
  all-prechecks:
@@ -35,7 +36,7 @@ jobs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
@@ -70,14 +71,17 @@ jobs:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
    needs: [ prechecks, changes ]
    uses: ./.github/workflows/bootstrap.yml
+   secrets: inherit
  unit-tests:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
    needs: [ prechecks, changes ]
    uses: ./.github/workflows/unit_tests.yaml
+   secrets: inherit
  windows:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
    needs: [ prechecks ]
    uses: ./.github/workflows/windows_python.yml
+   secrets: inherit
  all:
    needs: [ windows, unit-tests, bootstrap ]
    runs-on: ubuntu-latest
.github/workflows/nightly-win-builds.yml (vendored, 2 lines changed)

@@ -17,7 +17,7 @@ jobs:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
.github/workflows/unit_tests.yaml (vendored, 37 lines changed)

@@ -51,10 +51,10 @@ jobs:
            on_develop: false

    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -91,17 +91,19 @@ jobs:
        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
      run: |
        share/spack/qa/run-unit-tests
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  # Test shell integration
  shell:
    runs-on: ubuntu-latest
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -122,9 +124,11 @@ jobs:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
      with:
        flags: shelltests,linux
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true

  # Test RHEL8 UBI with platform Python. This job is run
  # only on PRs modifying core Spack
@@ -137,7 +141,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -156,10 +160,10 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -181,20 +185,23 @@ jobs:
        SPACK_TEST_SOLVER: clingo
      run: |
        share/spack/qa/run-unit-tests
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab # @v2.1.0
+   - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
      with:
        flags: unittests,linux,clingo
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  # Run unit tests on MacOS
  macos:
-   runs-on: macos-latest
+   runs-on: ${{ matrix.os }}
    strategy:
      matrix:
+       os: [macos-latest, macos-14]
        python-version: ["3.11"]
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
@@ -216,6 +223,8 @@ jobs:
        $(which spack) solve zlib
        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
        $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
      with:
        flags: unittests,macos
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
.github/workflows/valid-style.yml (vendored, 7 lines changed)

@@ -19,7 +19,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: '3.11'
        cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: '3.11'
        cache: 'pip'
@@ -56,6 +56,7 @@ jobs:
        share/spack/qa/run-style-tests
  audit:
    uses: ./.github/workflows/audit.yaml
+   secrets: inherit
    with:
      with_coverage: ${{ inputs.with_coverage }}
      python_version: '3.11'
@@ -69,7 +70,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
    - name: Setup repo and non-root user
      run: |
        git --version
.github/workflows/windows_python.yml (vendored, 14 lines changed)

@@ -18,7 +18,7 @@ jobs:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -33,16 +33,18 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
      with:
        flags: unittests,windows
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -57,16 +59,18 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
      with:
        flags: unittests,windows
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  build-abseil:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -42,3 +42,8 @@ concretizer:
    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
    strategy: minimal
+  # Option to specify compatibility between operating systems for reuse of compilers and packages
+  # Specified as a key: [list] where the key is the OS that is being targeted, and the list contains
+  # the OSes it can reuse. Note this is a directional compatibility, so mutual compatibility between
+  # two OSes requires two entries, i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
+  os_compatible: {}
@@ -101,6 +101,12 @@ config:
   verify_ssl: true


+  # This is where custom certs for proxy/firewall are stored.
+  # It can be a path or environment variable. To match ssl env configuration
+  # the default is the environment variable SSL_CERT_FILE
+  ssl_certs: $SSL_CERT_FILE
+
+
   # Suppress gpg warnings from binary package verification
   # Only suppresses warnings, gpg failure will still fail the install
   # Potential rationale to set True: users have already explicitly trusted the
@@ -24,6 +24,7 @@ packages:
    elf: [elfutils]
    fftw-api: [fftw, amdfftw]
    flame: [libflame, amdlibflame]
+   fortran-rt: [gcc-runtime, intel-oneapi-runtime]
    fuse: [libfuse]
    gl: [glx, osmesa]
    glu: [mesa-glu, openglu]
@@ -34,7 +35,9 @@ packages:
    java: [openjdk, jdk, ibm-java]
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
+   libgfortran: [ gcc-runtime ]
    libglx: [mesa+glx, mesa18+glx]
+   libifcore: [ intel-oneapi-runtime ]
    libllvm: [llvm]
    libosmesa: [mesa+osmesa, mesa18+osmesa]
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
@@ -1119,6 +1119,9 @@ and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
includes any version with major version ``3``.

+Versions are ordered lexicographically by their components. For more details
+on the order, see :ref:`the packaging guide <version-comparison>`.
+
Notice that you can distinguish between the specific version ``@=3.2`` and
the range ``@3.2``. This is useful for packages that follow a versioning
scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,
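As a quick check of these range semantics, they can be exercised directly with ``Spec.satisfies``. This is a sketch: ``zlib`` is just an example package name, and it assumes a Spack checkout is importable.

.. code-block:: python

   from spack.spec import Spec

   # "@3" behaves as the range "@3:3": any version with major component 3
   assert Spec("zlib@=3.2.1").satisfies("@3")
   # "@4.2:" is open-ended above (and including) 4.2
   assert Spec("zlib@=4.3").satisfies("@4.2:")
   assert not Spec("zlib@=4.1").satisfies("@4.2:")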
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:

.. code-block:: python

-   generator = "Ninja"
+   generator("ninja")


``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
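For context, the ``generator()`` directive sits at class level in a recipe. A schematic, hypothetical package (the name, URL, and checksum are placeholders):

.. code-block:: python

   from spack.package import *


   class Example(CMakePackage):
       """Hypothetical package built with the Ninja generator."""

       homepage = "https://example.com"
       url = "https://example.com/example-1.0.tar.gz"

       version("1.0", sha256="...")  # placeholder checksum

       generator("ninja")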
@@ -145,6 +145,22 @@ hosts when making ``ssl`` connections. Set to ``false`` to disable, and
tools like ``curl`` will use their ``--insecure`` options. Disabling
this can expose you to attacks. Use at your own risk.

+--------------------
+``ssl_certs``
+--------------------
+
+Path to custom certificates for SSL verification. The value can be a
+filesystem path, or an environment variable that expands to a file path.
+The default value is set to the environment variable ``SSL_CERT_FILE``
+to use the same syntax used by many other applications that automatically
+detect custom certificates.
+When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
+a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
+in the subprocess calling ``curl``.
+If ``url_fetch_method:urllib`` then files and directories are supported i.e.
+``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
+will work.
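The curl rule above can be illustrated with a minimal sketch. This is not Spack's actual implementation; the URL is a placeholder, and it only shows how a resolved single cert file would reach ``curl`` through its environment.

.. code-block:: python

   import os
   import subprocess

   # Expand a configured value such as "$SSL_CERT_FILE" to a concrete path.
   cert = os.path.expandvars("$SSL_CERT_FILE")
   if os.path.isfile(cert):
       # With url_fetch_method:curl, the file is handed to the curl
       # subprocess via CURL_CA_BUNDLE.
       subprocess.run(
           ["curl", "-O", "https://example.com/archive.tar.gz"],
           env={**os.environ, "CURL_CA_BUNDLE": cert},
       )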

--------------------
``checksum``
--------------------
@@ -893,26 +893,50 @@ as an option to the ``version()`` directive. Example situations would be a
"snapshot"-like Version Control System (VCS) tag, a VCS branch such as
``v6-16-00-patches``, or a URL specifying a regularly updated snapshot tarball.


+.. _version-comparison:
+
^^^^^^^^^^^^^^^^^^
Version comparison
^^^^^^^^^^^^^^^^^^

+Spack imposes a generic total ordering on the set of versions,
+independently from the package they are associated with.
+
Most Spack versions are numeric, a tuple of integers; for example,
-``0.1``, ``6.96`` or ``1.2.3.1``. Spack knows how to compare and sort
-numeric versions.
+``0.1``, ``6.96`` or ``1.2.3.1``. In this very basic case, version
+comparison is lexicographical on the numeric components:
+``1.2 < 1.2.1 < 1.2.2 < 1.10``.

-Some Spack versions involve slight extensions of numeric syntax; for
-example, ``py-sphinx-rtd-theme@=0.1.10a0``. In this case, numbers are
-always considered to be "newer" than letters. This is for consistency
-with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.
+Spack also supports string components such as ``1.1.1a`` and
+``1.y.0``. String components are considered less than numeric
+components, so ``1.y.0 < 1.0``. This is for consistency with
+`RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_. String
+components do not have to be separated by dots or any other delimiter.
+So, the contrived version ``1y0`` is identical to ``1.y.0``.

-Spack versions may also be arbitrary non-numeric strings, for example
-``develop``, ``master``, ``local``.
+Pre-release suffixes also contain string parts, but they are handled
+in a special way. For example ``1.2.3alpha1`` is parsed as a pre-release
+of the version ``1.2.3``. This allows Spack to order it before the
+actual release: ``1.2.3alpha1 < 1.2.3``. Spack supports alpha, beta and
+release candidate suffixes: ``1.2alpha1 < 1.2beta1 < 1.2rc1 < 1.2``. Any
+suffix not recognized as a pre-release is treated as an ordinary
+string component, so ``1.2 < 1.2-mysuffix``.

-The order on versions is defined as follows. A version string is split
-into a list of components based on delimiters such as ``.``, ``-`` etc.
-Lists are then ordered lexicographically, where components are ordered
-as follows:
+Finally, there are a few special string components that are considered
+"infinity versions". They include ``develop``, ``main``, ``master``,
+``head``, ``trunk``, and ``stable``. For example: ``1.2 < develop``.
+These are useful for specifying the most recent development version of
+a package (often a moving target like a git branch), without assigning
+a specific version number. Infinity versions are not automatically used
+when determining the latest version of a package unless explicitly
+required by another package or user.
+
+More formally, the order on versions is defined as follows. A version
+string is split into a list of components based on delimiters such as
+``.`` and ``-`` and string boundaries. The components are split into
+the **release** and a possible **pre-release** (if the last component
+is numeric and the second to last is a string ``alpha``, ``beta`` or ``rc``).
+The release components are ordered lexicographically, with comparison
+between different types of components as follows:

#. The following special strings are considered larger than any other
   numeric or non-numeric version component, and satisfy the following

@@ -925,6 +949,9 @@ as follows:
#. All other non-numeric components are less than numeric components,
   and are ordered alphabetically.

+Finally, if the release components are equal, the pre-release components
+are used to break the tie, in the obvious way.
+
The logic behind this sort order is two-fold:

#. Non-numeric versions are usually used for special cases while
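A quick sanity check of these rules, restating the document's own examples with Spack's ``Version`` class (a sketch; assumes a Spack checkout is importable):

.. code-block:: python

   from spack.version import Version

   assert Version("1.2") < Version("1.2.1") < Version("1.10")        # numeric, lexicographic
   assert Version("1.y.0") < Version("1.0")                          # strings sort below numbers
   assert Version("1y0") == Version("1.y.0")                         # delimiters are optional
   assert Version("1.2alpha1") < Version("1.2rc1") < Version("1.2")  # pre-releases
   assert Version("1.2") < Version("1.2-mysuffix")                   # unrecognized suffix
   assert Version("1.2") < Version("develop")                        # infinity versions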
lib/spack/env/cc (vendored, 5 lines changed)

@@ -248,7 +248,7 @@ case "$command" in
        lang_flags=C
        debug_flags="-g"
        ;;
-   c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
+   c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
        command="$SPACK_CXX"
        language="C++"
        comp="CXX"
@@ -526,7 +526,7 @@ categorize_arguments() {
            continue
        fi

-       replaced="$after$stripped"
+       replaced="$after$stripped"

        # it matched, remove it
        shift
@@ -913,4 +913,3 @@ fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
-
@@ -12,7 +12,7 @@
 # Archive extensions allowed in Spack
 PREFIX_EXTENSIONS = ("tar", "TAR")
 EXTENSIONS = ("gz", "bz2", "xz", "Z")
-NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
+NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz", "whl")

 # Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
 ALLOWED_ARCHIVE_TYPES = (
@@ -357,10 +357,8 @@ def strip_version_suffixes(path_or_url: str) -> str:
         r"i[36]86",
         r"ppc64(le)?",
         r"armv?(7l|6l|64)?",
-        # PyPI
-        r"[._-]py[23].*\.whl",
-        r"[._-]cp[23].*\.whl",
         r"[._-]win.*\.exe",
+        # PyPI wheels
+        r"-(?:py|cp)[23].*",
     ]

     for regex in suffix_regexes:
@@ -403,7 +401,7 @@ def expand_contracted_extension_in_path(
 def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
     """Returns compression extension for a compressed archive"""
     extension = expand_contracted_extension(extension)
-    for ext in [*EXTENSIONS]:
+    for ext in EXTENSIONS:
         if ext in extension:
             return ext
     return None
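To see what the consolidated wheel-suffix pattern does, here is a minimal standalone sketch; the wheel filename is hypothetical:

.. code-block:: python

   import re

   # The pattern from the hunk above: strip a "-py2…"/"-py3…"/"-cp2…"/"-cp3…"
   # wheel tag (and everything after it) from a version-bearing name.
   suffix = re.compile(r"-(?:py|cp)[23].*")
   print(suffix.sub("", "example-1.0-cp39-abi3-win_amd64.whl"))  # -> example-1.0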
@@ -17,7 +17,6 @@
 import tarfile
 import tempfile
 import time
-import traceback
 import urllib.error
 import urllib.parse
 import urllib.request
@@ -111,10 +110,6 @@ def __init__(self, errors):
         super().__init__(self.message)


-class ListMirrorSpecsError(spack.error.SpackError):
-    """Raised when unable to retrieve list of specs from the mirror"""
-
-
 class BinaryCacheIndex:
     """
     The BinaryCacheIndex tracks what specs are available on (usually remote)
@@ -541,83 +536,6 @@ def binary_index_location():
 BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex)  # type: ignore


-class NoOverwriteException(spack.error.SpackError):
-    """Raised when a file would be overwritten"""
-
-    def __init__(self, file_path):
-        super().__init__(f"Refusing to overwrite the following file: {file_path}")
-
-
-class NoGpgException(spack.error.SpackError):
-    """
-    Raised when gpg2 is not in PATH
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class NoKeyException(spack.error.SpackError):
-    """
-    Raised when gpg has no default key added.
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class PickKeyException(spack.error.SpackError):
-    """
-    Raised when multiple keys can be used to sign.
-    """
-
-    def __init__(self, keys):
-        err_msg = "Multiple keys available for signing\n%s\n" % keys
-        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
-        super().__init__(err_msg)
-
-
-class NoVerifyException(spack.error.SpackError):
-    """
-    Raised if file fails signature verification.
-    """
-
-    pass
-
-
-class NoChecksumException(spack.error.SpackError):
-    """
-    Raised if file fails checksum verification.
-    """
-
-    def __init__(self, path, size, contents, algorithm, expected, computed):
-        super().__init__(
-            f"{algorithm} checksum failed for {path}",
-            f"Expected {expected} but got {computed}. "
-            f"File size = {size} bytes. Contents = {contents!r}",
-        )
-
-
-class NewLayoutException(spack.error.SpackError):
-    """
-    Raised if directory layout is different from buildcache.
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class InvalidMetadataFile(spack.error.SpackError):
-    pass
-
-
-class UnsignedPackageException(spack.error.SpackError):
-    """
-    Raised if installation of unsigned package is attempted without
-    the use of ``--no-check-signature``.
-    """
-
-
 def compute_hash(data):
     if isinstance(data, str):
         data = data.encode("utf-8")
@@ -992,15 +910,10 @@ def url_read_method(url):
             if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
         ]
         read_fn = url_read_method
-    except KeyError as inst:
-        msg = "No packages at {0}: {1}".format(cache_prefix, inst)
-        tty.warn(msg)
     except Exception as err:
-        # If we got some kind of S3 (access denied or other connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
-        tty.warn(msg)
+        # If we got some kind of S3 (access denied or other connection error), the first non
+        # boto-specific class in the exception is Exception. Just print a warning and return
+        tty.warn(f"Encountered problem listing packages at {cache_prefix}: {err}")

     return file_list, read_fn

@@ -1047,11 +960,10 @@ def generate_package_index(cache_prefix, concurrency=32):
     """
     try:
         file_list, read_fn = _spec_files_from_cache(cache_prefix)
-    except ListMirrorSpecsError as err:
-        tty.error("Unable to generate package index, {0}".format(err))
-        return
+    except ListMirrorSpecsError as e:
+        raise GenerateIndexError(f"Unable to generate package index: {e}") from e

-    tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
+    tty.debug(f"Retrieving spec descriptor files from {cache_prefix} to build index")

     tmpdir = tempfile.mkdtemp()

@@ -1061,27 +973,22 @@ def generate_package_index(cache_prefix, concurrency=32):

     try:
         _read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
-    except Exception as err:
-        msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
-        tty.warn(msg)
-        tty.debug("\n" + traceback.format_exc())
+    except Exception as e:
+        raise GenerateIndexError(
+            f"Encountered problem pushing package index to {cache_prefix}: {e}"
+        ) from e
     finally:
-        shutil.rmtree(tmpdir)
+        shutil.rmtree(tmpdir, ignore_errors=True)


 def generate_key_index(key_prefix, tmpdir=None):
     """Create the key index page.

-    Creates (or replaces) the "index.json" page at the location given in
-    key_prefix. This page contains an entry for each key (.pub) under
-    key_prefix.
+    Creates (or replaces) the "index.json" page at the location given in key_prefix. This page
+    contains an entry for each key (.pub) under key_prefix.
     """

-    tty.debug(
-        " ".join(
-            ("Retrieving key.pub files from", url_util.format(key_prefix), "to build key index")
-        )
-    )
+    tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index")

     try:
         fingerprints = (
@@ -1089,17 +996,8 @@ def generate_key_index(key_prefix, tmpdir=None):
             for entry in web_util.list_url(key_prefix, recursive=False)
             if entry.endswith(".pub")
         )
-    except KeyError as inst:
-        msg = "No keys at {0}: {1}".format(key_prefix, inst)
-        tty.warn(msg)
-        return
-    except Exception as err:
-        # If we got some kind of S3 (access denied or other connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = "Encountered problem listing keys at {0}: {1}".format(key_prefix, err)
-        tty.warn(msg)
-        return
+    except Exception as e:
+        raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e

     remove_tmpdir = False

@@ -1124,12 +1022,13 @@ def generate_key_index(key_prefix, tmpdir=None):
                 keep_original=False,
                 extra_args={"ContentType": "application/json"},
             )
-        except Exception as err:
-            msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
-            tty.warn(msg)
+        except Exception as e:
+            raise GenerateIndexError(
+                f"Encountered problem pushing key index to {key_prefix}: {e}"
+            ) from e
         finally:
             if remove_tmpdir:
-                shutil.rmtree(tmpdir)
+                shutil.rmtree(tmpdir, ignore_errors=True)


 def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
@@ -1200,7 +1099,8 @@ def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
     used at the mirror (following <tarball_directory_name>).

     This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
-    spec.json file already exist in the buildcache.
+    spec.json file already exist in the buildcache. It raises :py:class:`PushToBuildCacheError`
+    when the tarball or spec.json file cannot be pushed to the buildcache.
     """
     if not spec.concrete:
         raise ValueError("spec must be concrete to build tarball")
@@ -1278,13 +1178,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
         key = select_signing_key(options.key)
         sign_specfile(key, options.force, specfile_path)

-    # push tarball and signed spec json to remote mirror
-    web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
-    web_util.push_to_url(
-        signed_specfile_path if not options.unsigned else specfile_path,
-        remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
-        keep_original=False,
-    )
+    try:
+        # push tarball and signed spec json to remote mirror
+        web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
+        web_util.push_to_url(
+            signed_specfile_path if not options.unsigned else specfile_path,
+            remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
+            keep_original=False,
+        )
+    except Exception as e:
+        raise PushToBuildCacheError(
+            f"Encountered problem pushing binary {remote_spackfile_path}: {e}"
+        ) from e

     # push the key to the build cache's _pgp directory so it can be
     # imported
@@ -1296,8 +1201,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     if options.regenerate_index:
         generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))

-    return None
-

 class NotInstalledError(spack.error.SpackError):
     """Raised when a spec is not installed but picked to be packaged."""
@@ -1352,28 +1255,6 @@ def specs_to_be_packaged(
     return [s for s in itertools.chain(roots, deps) if not s.external]


-def push(spec: Spec, mirror_url: str, options: PushOptions):
-    """Create and push binary package for a single spec to the specified
-    mirror url.
-
-    Args:
-        spec: Spec to package and push
-        mirror_url: Desired destination url for binary package
-        options:
-
-    Returns:
-        True if package was pushed, False otherwise.
-
-    """
-    try:
-        push_or_raise(spec, mirror_url, options)
-    except NoOverwriteException as e:
-        warnings.warn(str(e))
-        return False
-
-    return True
-
-
 def try_verify(specfile_path):
     """Utility function to attempt to verify a local file. Assumes the
     file is a clearsigned signature file.
@@ -2706,3 +2587,96 @@ def conditional_fetch(self) -> FetchIndexResult:
         raise FetchIndexError(f"Remote index {url_manifest} is invalid")

     return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)
+
+
+class NoOverwriteException(spack.error.SpackError):
+    """Raised when a file would be overwritten"""
+
+    def __init__(self, file_path):
+        super().__init__(f"Refusing to overwrite the following file: {file_path}")
+
+
+class NoGpgException(spack.error.SpackError):
+    """
+    Raised when gpg2 is not in PATH
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class NoKeyException(spack.error.SpackError):
+    """
+    Raised when gpg has no default key added.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class PickKeyException(spack.error.SpackError):
+    """
+    Raised when multiple keys can be used to sign.
+    """
+
+    def __init__(self, keys):
+        err_msg = "Multiple keys available for signing\n%s\n" % keys
+        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
+        super().__init__(err_msg)
+
+
+class NoVerifyException(spack.error.SpackError):
+    """
+    Raised if file fails signature verification.
+    """
+
+    pass
+
+
+class NoChecksumException(spack.error.SpackError):
+    """
+    Raised if file fails checksum verification.
+    """
+
+    def __init__(self, path, size, contents, algorithm, expected, computed):
+        super().__init__(
+            f"{algorithm} checksum failed for {path}",
+            f"Expected {expected} but got {computed}. "
+            f"File size = {size} bytes. Contents = {contents!r}",
+        )
+
+
+class NewLayoutException(spack.error.SpackError):
+    """
+    Raised if directory layout is different from buildcache.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class InvalidMetadataFile(spack.error.SpackError):
+    pass
+
+
+class UnsignedPackageException(spack.error.SpackError):
+    """
+    Raised if installation of unsigned package is attempted without
+    the use of ``--no-check-signature``.
+    """
+
+
+class ListMirrorSpecsError(spack.error.SpackError):
+    """Raised when unable to retrieve list of specs from the mirror"""
+
+
+class GenerateIndexError(spack.error.SpackError):
+    """Raised when unable to generate key or package index for mirror"""
+
+
+class CannotListKeys(GenerateIndexError):
+    """Raised when unable to list keys when generating key index"""
+
+
+class PushToBuildCacheError(spack.error.SpackError):
+    """Raised when unable to push objects to binary mirror"""
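Consumer-side, the removed ``push()`` wrapper suggests how callers migrate to the new error-based API. A hedged sketch of a hypothetical caller (``spec``, ``url``, and ``options`` assumed to be in scope):

.. code-block:: python

   import warnings

   from spack.binary_distribution import (
       NoOverwriteException,
       PushToBuildCacheError,
       push_or_raise,
   )

   try:
       push_or_raise(spec, url, options)
   except NoOverwriteException as e:
       warnings.warn(str(e))  # previously push() swallowed this and returned False
   except PushToBuildCacheError as e:
       raise  # upload failed; callers now decide how to react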
@@ -789,7 +789,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in ["cray-mpich", "cray-libsci"]:
             module("unload", mod)

-    if target.module_name:
+    if target and target.module_name:
         load_module(target.module_name)

     load_external_modules(pkg)
@@ -434,11 +434,6 @@ def _do_patch_libtool(self):
                 r"crtendS\.o",
             ]:
                 x.filter(regex=(rehead + o), repl="")
-        elif self.pkg.compiler.name == "dpcpp":
-            # Hack to filter out spurious predep_objects when building with Intel dpcpp
-            # (see https://github.com/spack/spack/issues/32863):
-            x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
-            x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
         elif self.pkg.compiler.name == "nag":
             for tag in ["fc", "f77"]:
                 marker = markers[tag]
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections.abc
 import os
+import re
 from typing import Tuple

 import llnl.util.filesystem as fs
@@ -15,6 +16,12 @@
 from .cmake import CMakeBuilder, CMakePackage


+def spec_uses_toolchain(spec):
+    gcc_toolchain_regex = re.compile(".*gcc-toolchain.*")
+    using_toolchain = list(filter(gcc_toolchain_regex.match, spec.compiler_flags["cxxflags"]))
+    return using_toolchain
+
+
 def cmake_cache_path(name, value, comment="", force=False):
     """Generate a string for a cmake cache variable"""
     force_str = " FORCE" if force else ""
@@ -132,6 +139,11 @@ def initconfig_compiler_entries(self):
             "endif()\n",
         ]

+        # We defined hipcc as top-level compiler for packages when +rocm.
+        # This avoids problems coming from rocm flags being applied to another compiler.
+        if "+rocm" in spec:
+            entries.insert(0, cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["hip"].hipcc))
+
         flags = spec.compiler_flags

         # use global spack compiler flags
@@ -213,7 +225,7 @@ def initconfig_mpi_entries(self):
         else:
             # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
             # vs the older versions which expect MPIEXEC
-            if self.pkg.spec["cmake"].satisfies("@3.10:"):
+            if spec["cmake"].satisfies("@3.10:"):
                 entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
             else:
                 entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -248,12 +260,17 @@ def initconfig_hardware_entries(self):
             # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
             entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))

-            archs = spec.variants["cuda_arch"].value
-            if archs[0] != "none":
-                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
-                )
+            # CUDA_FLAGS
+            cuda_flags = []
+
+            if not spec.satisfies("cuda_arch=none"):
+                cuda_archs = ";".join(spec.variants["cuda_arch"].value)
+                entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", cuda_archs))
+
+            if spec_uses_toolchain(spec):
+                cuda_flags.append("-Xcompiler {}".format(spec_uses_toolchain(spec)[0]))
+
+            entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))

         if "+rocm" in spec:
             entries.append("#------------------{0}".format("-" * 30))
@@ -262,9 +279,6 @@ def initconfig_hardware_entries(self):

             # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
             entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-            entries.append(
-                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
-            )
             llvm_bin = spec["llvm-amdgpu"].prefix.bin
             llvm_prefix = spec["llvm-amdgpu"].prefix
             # Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
@@ -277,11 +291,9 @@ def initconfig_hardware_entries(self):
             archs = self.spec.variants["amdgpu_target"].value
             if archs[0] != "none":
                 arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
-                )
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
-                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
+                entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

         return entries
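The new ``spec_uses_toolchain`` helper just scans a spec's ``cxxflags`` for a ``gcc-toolchain`` entry, whose first match is then forwarded to nvcc via ``-Xcompiler``. A standalone re-implementation for illustration (the flag value is hypothetical, and this is not the Spack code itself):

.. code-block:: python

   import re

   def uses_toolchain(cxxflags):
       # Mirrors spec_uses_toolchain: keep any cxxflags entry mentioning gcc-toolchain
       return [f for f in cxxflags if re.match(".*gcc-toolchain.*", f)]

   flags = uses_toolchain(["--gcc-toolchain=/opt/gcc-11", "-O2"])
   # -> ["--gcc-toolchain=/opt/gcc-11"]; emitted as "-Xcompiler <flag>" in CMAKE_CUDA_FLAGS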
@@ -14,7 +14,7 @@
 from llnl.util.link_tree import LinkTree

 from spack.build_environment import dso_suffix
-from spack.directives import conflicts, variant
+from spack.directives import conflicts, license, variant
 from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -26,6 +26,7 @@ class IntelOneApiPackage(Package):
     """Base class for Intel oneAPI packages."""

     homepage = "https://software.intel.com/oneapi"
+    license("https://intel.ly/393CijO")

     # oneAPI license does not allow mirroring outside of the
     # organization (e.g. University/Company).
@@ -75,6 +75,8 @@
 # does not like its directory structure.
 #

+import os
+
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.package_base import PackageBase
@@ -154,6 +156,32 @@ def hip_flags(amdgpu_target):
         archs = ",".join(amdgpu_target)
         return "--amdgpu-target={0}".format(archs)

+    # ASAN
+    @staticmethod
+    def asan_on(env, llvm_path):
+        env.set("CC", llvm_path + "/bin/clang")
+        env.set("CXX", llvm_path + "/bin/clang++")
+        env.set("ASAN_OPTIONS", "detect_leaks=0")
+
+        for root, dirs, files in os.walk(llvm_path):
+            if "libclang_rt.asan-x86_64.so" in files:
+                asan_lib_path = root
+        env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
+        SET_DWARF_VERSION_4 = ""
+        try:
+            # This will throw an error if imported on a non-Linux platform.
+            import distro
+
+            distname = distro.id()
+        except ImportError:
+            distname = "unknown"
+        if "rhel" in distname or "sles" in distname:
+            SET_DWARF_VERSION_4 = "-gdwarf-5"
+
+        env.set("CFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
+        env.set("CXXFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
+        env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")
+
     # HIP version vs Architecture

     # TODO: add a bunch of lines like:
@@ -70,7 +70,7 @@
 JOB_NAME_FORMAT = (
     "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
 )

 IS_WINDOWS = sys.platform == "win32"
 spack_gpg = spack.main.SpackCommand("gpg")
 spack_compiler = spack.main.SpackCommand("compiler")
@@ -103,7 +103,7 @@ def get_job_name(spec: spack.spec.Spec, build_group: str = ""):
     job_name = spec.format(JOB_NAME_FORMAT)

     if build_group:
-        job_name = "{0} {1}".format(job_name, build_group)
+        job_name = f"{job_name} {build_group}"

     return job_name[:255]
@@ -114,7 +114,7 @@ def _remove_reserved_tags(tags):


 def _spec_deps_key(s):
-    return "{0}/{1}".format(s.name, s.dag_hash(7))
+    return f"{s.name}/{s.dag_hash(7)}"


 def _add_dependency(spec_label, dep_label, deps):
@@ -213,7 +213,7 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi
     mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
     tty.msg("Checked the following mirrors for binaries:")
     for m in mirrors.values():
-        tty.msg("  {0}".format(m.fetch_url))
+        tty.msg(f"  {m.fetch_url}")

     tty.msg("Staging summary ([x] means a job needs rebuilding):")
     for stage_index, stage in enumerate(stages):
@@ -296,7 +296,7 @@ def append_dep(s, d):
     for spec in spec_list:
         for s in spec.traverse(deptype="all"):
             if s.external:
-                tty.msg("Will not stage external pkg: {0}".format(s))
+                tty.msg(f"Will not stage external pkg: {s}")
                 continue

             skey = _spec_deps_key(s)
@@ -305,7 +305,7 @@ def append_dep(s, d):
             for d in s.dependencies(deptype="all"):
                 dkey = _spec_deps_key(d)
                 if d.external:
-                    tty.msg("Will not stage external dep: {0}".format(d))
+                    tty.msg(f"Will not stage external dep: {d}")
                     continue

                 append_dep(skey, dkey)
@@ -374,8 +374,8 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):

     for path in lines:
         if ".gitlab-ci.yml" in path or path in env_path:
-            tty.debug("env represented by {0} changed".format(env_path))
-            tty.debug("touched file: {0}".format(path))
+            tty.debug(f"env represented by {env_path} changed")
+            tty.debug(f"touched file: {path}")
             return True
     return False
@@ -419,7 +419,7 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
     all_concrete_specs = env.all_specs()
     tty.debug("All concrete environment specs:")
     for s in all_concrete_specs:
-        tty.debug("  {0}/{1}".format(s.name, s.dag_hash()[:7]))
+        tty.debug(f"  {s.name}/{s.dag_hash()[:7]}")
     affected_pkgs = frozenset(affected_pkgs)
     env_matches = [s for s in all_concrete_specs if s.name in affected_pkgs]
     visited = set()
@@ -510,7 +510,7 @@ def __is_named(self, section):
        and if so return the name otherwise return none.
        """
        for _name in self.named_jobs:
-           keys = ["{0}-job".format(_name), "{0}-job-remove".format(_name)]
+           keys = [f"{_name}-job", f"{_name}-job-remove"]
            if any([key for key in keys if key in section]):
                return _name
@@ -525,9 +525,9 @@ def __job_name(name, suffix=""):

        jname = name
        if suffix:
-           jname = "{0}-job{1}".format(name, suffix)
+           jname = f"{name}-job{suffix}"
        else:
-           jname = "{0}-job".format(name)
+           jname = f"{name}-job"

        return jname
@@ -739,7 +739,7 @@ def generate_gitlab_ci_yaml(
     # Requested to prune untouched packages, but assume we won't do that
     # unless we're actually in a git repo.
     rev1, rev2 = get_change_revisions()
-    tty.debug("Got following revisions: rev1={0}, rev2={1}".format(rev1, rev2))
+    tty.debug(f"Got following revisions: rev1={rev1}, rev2={rev2}")
     if rev1 and rev2:
         # If the stack file itself did not change, proceed with pruning
         if not get_stack_changed(env.manifest_path, rev1, rev2):
@@ -747,13 +747,13 @@ def generate_gitlab_ci_yaml(
             affected_pkgs = compute_affected_packages(rev1, rev2)
             tty.debug("affected pkgs:")
             for p in affected_pkgs:
-                tty.debug("  {0}".format(p))
+                tty.debug(f"  {p}")
             affected_specs = get_spec_filter_list(
                 env, affected_pkgs, dependent_traverse_depth=dependent_depth
             )
             tty.debug("all affected specs:")
             for s in affected_specs:
-                tty.debug("  {0}/{1}".format(s.name, s.dag_hash()[:7]))
+                tty.debug(f"  {s.name}/{s.dag_hash()[:7]}")

     # Allow overriding --prune-dag cli opt with environment variable
     prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None)
@@ -978,7 +978,7 @@ def generate_gitlab_ci_yaml(
     rebuild_decisions = {}

     for stage_jobs in stages:
-        stage_name = "stage-{0}".format(stage_id)
+        stage_name = f"stage-{stage_id}"
         stage_names.append(stage_name)
         stage_id += 1
@@ -1009,7 +1009,7 @@ def generate_gitlab_ci_yaml(
     job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

     if not job_object:
-        tty.warn("No match found for {0}, skipping it".format(release_spec))
+        tty.warn(f"No match found for {release_spec}, skipping it")
         continue

     if spack_pipeline_type is not None:
@@ -1119,7 +1119,7 @@ def main_script_replacements(cmd):

     if artifacts_root:
         job_object["needs"].append(
-            {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
+            {"job": generate_job_name, "pipeline": f"{parent_pipeline_id}"}
         )

     # Let downstream jobs know whether the spec needed rebuilding, regardless
@@ -1185,19 +1185,17 @@ def main_script_replacements(cmd):
     if spack_pipeline_type == "spack_pull_request":
         spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())

-    tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))
+    tty.debug(f"{job_id} build jobs generated in {stage_id} stages")

     if job_id > 0:
-        tty.debug(
-            "The max_needs_job is {0}, with {1} needs".format(max_needs_job, max_length_needs)
-        )
+        tty.debug(f"The max_needs_job is {max_needs_job}, with {max_length_needs} needs")

     # Use "all_job_names" to populate the build group for this set
     if cdash_handler and cdash_handler.auth_token:
         try:
             cdash_handler.populate_buildgroup(all_job_names)
         except (SpackError, HTTPError, URLError) as err:
-            tty.warn("Problem populating buildgroup: {0}".format(err))
+            tty.warn(f"Problem populating buildgroup: {err}")
     else:
         tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1211,9 +1209,7 @@ def main_script_replacements(cmd):
     sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
     sync_job["stage"] = "copy"
     if artifacts_root:
-        sync_job["needs"] = [
-            {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
-        ]
+        sync_job["needs"] = [{"job": generate_job_name, "pipeline": f"{parent_pipeline_id}"}]

     if "variables" not in sync_job:
         sync_job["variables"] = {}
@@ -1230,6 +1226,7 @@ def main_script_replacements(cmd):
     # TODO: Remove this condition in Spack 0.23
     buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
     sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
+    sync_job["dependencies"] = []

     output_object["copy"] = sync_job
     job_id += 1
@@ -1348,7 +1345,7 @@ def main_script_replacements(cmd):

     copy_specs_file = os.path.join(
         copy_specs_dir,
-        "copy_{}_specs.json".format(spack_stack_name if spack_stack_name else "rebuilt"),
+        f"copy_{spack_stack_name if spack_stack_name else 'rebuilt'}_specs.json",
     )

     with open(copy_specs_file, "w") as fd:
@@ -1440,7 +1437,7 @@ def import_signing_key(base64_signing_key):
         fd.write(decoded_key)

     key_import_output = spack_gpg("trust", sign_key_path, output=str)
-    tty.debug("spack gpg trust {0}".format(sign_key_path))
+    tty.debug(f"spack gpg trust {sign_key_path}")
     tty.debug(key_import_output)

     # Now print the keys we have for verifying and signing
@@ -1466,45 +1463,39 @@ def can_verify_binaries():
     return len(gpg_util.public_keys()) >= 1


 def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
|
||||
def _push_to_build_cache(spec: spack.spec.Spec, sign_binaries: bool, mirror_url: str) -> None:
|
||||
"""Unchecked version of the public API, for easier mocking"""
|
||||
unsigned = not sign_binaries
|
||||
tty.debug("Creating buildcache ({0})".format("unsigned" if unsigned else "signed"))
|
||||
push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
|
||||
return bindist.push(input_spec, push_url, bindist.PushOptions(force=True, unsigned=unsigned))
|
||||
bindist.push_or_raise(
|
||||
spec,
|
||||
spack.mirror.Mirror.from_url(mirror_url).push_url,
|
||||
bindist.PushOptions(force=True, unsigned=not sign_binaries),
|
||||
)
|
||||
|
||||
|
||||
def push_mirror_contents(input_spec: spack.spec.Spec, mirror_url, sign_binaries):
|
||||
def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
|
||||
"""Push one or more binary packages to the mirror.
|
||||
|
||||
Arguments:
|
||||
|
||||
input_spec(spack.spec.Spec): Installed spec to push
|
||||
mirror_url (str): Base url of target mirror
|
||||
sign_binaries (bool): If True, spack will attempt to sign binary
|
||||
package before pushing.
|
||||
spec: Installed spec to push
|
||||
mirror_url: URL of target mirror
|
||||
sign_binaries: If True, spack will attempt to sign binary package before pushing.
|
||||
"""
|
||||
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
|
||||
try:
|
||||
return _push_mirror_contents(input_spec, sign_binaries, mirror_url)
|
||||
except Exception as inst:
|
||||
# If the mirror we're pushing to is on S3 and there's some
|
||||
# permissions problem, for example, we can't just target
|
||||
# that exception type here, since users of the
|
||||
# `spack ci rebuild' may not need or want any dependency
|
||||
# on boto3. So we use the first non-boto exception type
|
||||
# in the heirarchy:
|
||||
# boto3.exceptions.S3UploadFailedError
|
||||
# boto3.exceptions.Boto3Error
|
||||
# Exception
|
||||
# BaseException
|
||||
# object
|
||||
err_msg = "Error msg: {0}".format(inst)
|
||||
if any(x in err_msg for x in ["Access Denied", "InvalidAccessKeyId"]):
|
||||
tty.msg("Permission problem writing to {0}".format(mirror_url))
|
||||
tty.msg(err_msg)
|
||||
_push_to_build_cache(spec, sign_binaries, mirror_url)
|
||||
return True
|
||||
except bindist.PushToBuildCacheError as e:
|
||||
tty.error(str(e))
|
||||
return False
|
||||
except Exception as e:
|
||||
# TODO (zackgalbreath): write an adapter for boto3 exceptions so we can catch a specific
|
||||
# exception instead of parsing str(e)...
|
||||
msg = str(e)
|
||||
if any(x in msg for x in ["Access Denied", "InvalidAccessKeyId"]):
|
||||
tty.error(f"Permission problem writing to {mirror_url}: {msg}")
|
||||
return False
|
||||
else:
|
||||
raise inst
|
||||
raise
|
||||
|
||||
|
||||
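Note: the TODO in the new exception handler above asks for an adapter around boto3
exceptions so the string matching on str(e) can eventually go away. A minimal
standalone sketch of one way to do that, assuming boto3 may or may not be
installed at runtime (the helper name is hypothetical, not part of this change):

    def is_s3_permission_error(err: BaseException) -> bool:
        # Prefer a real type check when boto3 happens to be importable;
        # fall back to string matching otherwise, so there is still no
        # hard dependency on boto3.
        try:
            import boto3.exceptions

            if isinstance(err, boto3.exceptions.Boto3Error):
                return True
        except ImportError:
            pass
        return any(s in str(err) for s in ("Access Denied", "InvalidAccessKeyId"))
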
def remove_other_mirrors(mirrors_to_keep, scope=None):
@@ -1531,8 +1522,9 @@ def copy_files_to_artifacts(src, artifacts_dir):
try:
fs.copy(src, artifacts_dir)
except Exception as err:
msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
src, artifacts_dir, str(err)
msg = (
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
f"exception: {str(err)}"
)
tty.warn(msg)

@@ -1548,23 +1540,23 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
job_spec: spec associated with spack install log
job_log_dir: path into which build log should be copied
"""
tty.debug("job spec: {0}".format(job_spec))
tty.debug(f"job spec: {job_spec}")
if not job_spec:
msg = "Cannot copy stage logs: job spec ({0}) is required"
tty.error(msg.format(job_spec))
msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
tty.error(msg)
return

try:
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
job_pkg = pkg_cls(job_spec)
tty.debug("job package: {0}".format(job_pkg))
tty.debug(f"job package: {job_pkg}")
except AssertionError:
msg = "Cannot copy stage logs: job spec ({0}) must be concrete"
tty.error(msg.format(job_spec))
msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
tty.error(msg)
return

stage_dir = job_pkg.stage.path
tty.debug("stage dir: {0}".format(stage_dir))
tty.debug(f"stage dir: {stage_dir}")
for file in [job_pkg.log_path, job_pkg.env_mods_path, *job_pkg.builder.archive_files]:
copy_files_to_artifacts(file, job_log_dir)

@@ -1577,10 +1569,10 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
test_stage (str): test stage path
job_test_dir (str): the destination artifacts test directory
"""
tty.debug("test stage: {0}".format(test_stage))
tty.debug(f"test stage: {test_stage}")
if not os.path.exists(test_stage):
msg = "Cannot copy test logs: job test stage ({0}) does not exist"
tty.error(msg.format(test_stage))
msg = f"Cannot copy test logs: job test stage ({test_stage}) does not exist"
tty.error(msg)
return

copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
@@ -1595,7 +1587,7 @@ def download_and_extract_artifacts(url, work_dir):
url (str): Complete url to artifacts.zip file
work_dir (str): Path to destination where artifacts should be extracted
"""
tty.msg("Fetching artifacts from: {0}\n".format(url))
tty.msg(f"Fetching artifacts from: {url}\n")

headers = {"Content-Type": "application/zip"}

@@ -1612,7 +1604,7 @@ def download_and_extract_artifacts(url, work_dir):
response_code = response.getcode()

if response_code != 200:
msg = "Error response code ({0}) in reproduce_ci_job".format(response_code)
msg = f"Error response code ({response_code}) in reproduce_ci_job"
raise SpackError(msg)

artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
@@ -1642,7 +1634,7 @@ def get_spack_info():

return git_log

return "no git repo, use spack {0}".format(spack.spack_version)
return f"no git repo, use spack {spack.spack_version}"

def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
@@ -1665,8 +1657,8 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
"""
# figure out the path to the spack git version being used for the
# reproduction
print("checkout_commit: {0}".format(checkout_commit))
print("merge_commit: {0}".format(merge_commit))
print(f"checkout_commit: {checkout_commit}")
print(f"merge_commit: {merge_commit}")

dot_git_path = os.path.join(spack.paths.prefix, ".git")
if not os.path.exists(dot_git_path):
@@ -1685,14 +1677,14 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
git("log", "-1", checkout_commit, output=str, error=os.devnull, fail_on_error=False)

if git.returncode != 0:
tty.error("Missing commit: {0}".format(checkout_commit))
tty.error(f"Missing commit: {checkout_commit}")
return False

if merge_commit:
git("log", "-1", merge_commit, output=str, error=os.devnull, fail_on_error=False)

if git.returncode != 0:
tty.error("Missing commit: {0}".format(merge_commit))
tty.error(f"Missing commit: {merge_commit}")
return False

# Next attempt to clone your local spack repo into the repro dir
@@ -1715,7 +1707,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
)

if git.returncode != 0:
tty.error("Unable to checkout {0}".format(checkout_commit))
tty.error(f"Unable to checkout {checkout_commit}")
tty.msg(co_out)
return False

@@ -1734,7 +1726,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
)

if git.returncode != 0:
tty.error("Unable to merge {0}".format(merge_commit))
tty.error(f"Unable to merge {merge_commit}")
tty.msg(merge_out)
return False

@@ -1755,6 +1747,7 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
commands to run to reproduce the build once inside the container.
"""
work_dir = os.path.realpath(work_dir)
platform_script_ext = "ps1" if IS_WINDOWS else "sh"
download_and_extract_artifacts(url, work_dir)

gpg_path = None
@@ -1765,13 +1758,13 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
lock_file = fs.find(work_dir, "spack.lock")[0]
repro_lock_dir = os.path.dirname(lock_file)

tty.debug("Found lock file in: {0}".format(repro_lock_dir))
tty.debug(f"Found lock file in: {repro_lock_dir}")

yaml_files = fs.find(work_dir, ["*.yaml", "*.yml"])

tty.debug("yaml files:")
for yaml_file in yaml_files:
tty.debug(" {0}".format(yaml_file))
tty.debug(f" {yaml_file}")

pipeline_yaml = None

@@ -1786,10 +1779,10 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
pipeline_yaml = yaml_obj

if pipeline_yaml:
tty.debug("\n{0} is likely your pipeline file".format(yf))
tty.debug(f"\n{yf} is likely your pipeline file")

relative_concrete_env_dir = pipeline_yaml["variables"]["SPACK_CONCRETE_ENV_DIR"]
tty.debug("Relative environment path used by cloud job: {0}".format(relative_concrete_env_dir))
tty.debug(f"Relative environment path used by cloud job: {relative_concrete_env_dir}")

# Using the relative concrete environment path found in the generated
# pipeline variable above, copy the spack environment files so they'll
@@ -1803,10 +1796,11 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
shutil.copyfile(orig_yaml_path, copy_yaml_path)

# Find the install script in the unzipped artifacts and make it executable
install_script = fs.find(work_dir, "install.sh")[0]
st = os.stat(install_script)
os.chmod(install_script, st.st_mode | stat.S_IEXEC)

install_script = fs.find(work_dir, f"install.{platform_script_ext}")[0]
if not IS_WINDOWS:
# pointless on Windows
st = os.stat(install_script)
os.chmod(install_script, st.st_mode | stat.S_IEXEC)
# Find the repro details file. This just includes some values we wrote
# during `spack ci rebuild` to make reproduction easier. E.g. the job
# name is written here so we can easily find the configuration of the
@@ -1844,7 +1838,7 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
job_image = job_image_elt["name"]
else:
job_image = job_image_elt
tty.msg("Job ran with the following image: {0}".format(job_image))
tty.msg(f"Job ran with the following image: {job_image}")

# Because we found this job was run with a docker image, so we will try
# to print a "docker run" command that bind-mounts the directory where
@@ -1919,65 +1913,75 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
job_tags = None
if "tags" in job_yaml:
job_tags = job_yaml["tags"]
tty.msg("Job ran with the following tags: {0}".format(job_tags))
tty.msg(f"Job ran with the following tags: {job_tags}")

entrypoint_script = [
["git", "config", "--global", "--add", "safe.directory", mount_as_dir],
[".", os.path.join(mount_as_dir if job_image else work_dir, "share/spack/setup-env.sh")],
[
".",
os.path.join(
mount_as_dir if job_image else work_dir,
f"share/spack/setup-env.{platform_script_ext}",
),
],
["spack", "gpg", "trust", mounted_gpg_path if job_image else gpg_path] if gpg_path else [],
["spack", "env", "activate", mounted_env_dir if job_image else repro_dir],
[os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script],
[
(
os.path.join(mounted_repro_dir, f"install.{platform_script_ext}")
if job_image
else install_script
)
],
]

entry_script = os.path.join(mounted_workdir, f"entrypoint.{platform_script_ext}")
inst_list = []
# Finally, print out some instructions to reproduce the build
if job_image:
# Allow interactive
entrypoint_script.extend(
[
[
"echo",
"Re-run install script using:\n\t{0}".format(
os.path.join(mounted_repro_dir, "install.sh")
if job_image
else install_script
),
],
# Allow interactive
["exec", "$@"],
]
install_mechanism = (
os.path.join(mounted_repro_dir, f"install.{platform_script_ext}")
if job_image
else install_script
)
entrypoint_script.append(["echo", f"Re-run install script using:\n\t{install_mechanism}"])
# Allow interactive
if IS_WINDOWS:
entrypoint_script.append(["&", "($args -Join ' ')", "-NoExit"])
else:
entrypoint_script.append(["exec", "$@"])

process_command(
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
)

docker_command = [
[
runtime,
"run",
"-i",
"-t",
"--rm",
"--name",
"spack_reproducer",
"-v",
":".join([work_dir, mounted_workdir, "Z"]),
"-v",
":".join(
[
os.path.join(work_dir, "jobs_scratch_dir"),
os.path.join(mount_as_dir, "jobs_scratch_dir"),
"Z",
]
),
"-v",
":".join([os.path.join(work_dir, "spack"), mount_as_dir, "Z"]),
"--entrypoint",
os.path.join(mounted_workdir, "entrypoint.sh"),
job_image,
"bash",
]
runtime,
"run",
"-i",
"-t",
"--rm",
"--name",
"spack_reproducer",
"-v",
":".join([work_dir, mounted_workdir, "Z"]),
"-v",
":".join(
[
os.path.join(work_dir, "jobs_scratch_dir"),
os.path.join(mount_as_dir, "jobs_scratch_dir"),
"Z",
]
),
"-v",
":".join([os.path.join(work_dir, "spack"), mount_as_dir, "Z"]),
"--entrypoint",
]
if IS_WINDOWS:
docker_command.extend(["powershell.exe", job_image, entry_script, "powershell.exe"])
else:
docker_command.extend([entry_script, job_image, "bash"])
docker_command = [docker_command]
autostart = autostart and setup_result
process_command("start", docker_command, work_dir, run=autostart)

@@ -1986,22 +1990,26 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
inst_list.extend(
[
" - Start the docker container install",
" $ {0}/start.sh".format(work_dir),
f" $ {work_dir}/start.{platform_script_ext}",
]
)
else:
process_command("reproducer", entrypoint_script, work_dir, run=False)
autostart = autostart and setup_result
process_command("reproducer", entrypoint_script, work_dir, run=autostart)

inst_list.append("\nOnce on the tagged runner:\n\n")
inst_list.extent(
[" - Run the reproducer script", " $ {0}/reproducer.sh".format(work_dir)]
[
" - Run the reproducer script",
f" $ {work_dir}/reproducer.{platform_script_ext}",
]
)

if not setup_result:
inst_list.append("\n - Clone spack and acquire tested commit")
inst_list.append("\n {0}\n".format(spack_info))
inst_list.append(f"\n {spack_info}\n")
inst_list.append("\n")
inst_list.append("\n Path to clone spack: {0}/spack\n\n".format(work_dir))
inst_list.append(f"\n Path to clone spack: {work_dir}/spack\n\n")

tty.msg("".join(inst_list))

@@ -2020,50 +2028,78 @@ def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):

Returns: the exit code from processing the command
"""
tty.debug("spack {0} arguments: {1}".format(name, commands))

tty.debug(f"spack {name} arguments: {commands}")
if len(commands) == 0 or isinstance(commands[0], str):
commands = [commands]

# Create a string [command 1] && [command 2] && ... && [command n] with commands
# quoted using double quotes.
args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
full_command = " \n ".join(map(args_to_string, commands))
def compose_command_err_handling(args):
if not IS_WINDOWS:
args = [f'"{arg}"' for arg in args]
arg_str = " ".join(args)
result = arg_str + "\n"
# ErrorActionPreference will handle PWSH commandlets (Spack calls),
# but we need to handle EXEs (git, etc) ourselves
catch_exe_failure = (
"""
if ($LASTEXITCODE -ne 0){
throw "Command {} has failed"
}
"""
if IS_WINDOWS
else ""
)
if exit_on_failure and catch_exe_failure:
result += catch_exe_failure.format(arg_str)
return result

# Write the command to a shell script
script = "{0}.sh".format(name)
with open(script, "w") as fd:
fd.write("#!/bin/sh\n\n")
fd.write("\n# spack {0} command\n".format(name))
# Create a string [command 1] \n [command 2] \n ... \n [command n] with
# commands composed into a platform dependent shell script, pwsh on Windows,
full_command = "\n".join(map(compose_command_err_handling, commands))
# Write the command to a python script
if IS_WINDOWS:
script = f"{name}.ps1"
script_content = [f"\n# spack {name} command\n"]
if exit_on_failure:
fd.write("set -e\n")
script_content.append('$ErrorActionPreference = "Stop"\n')
if os.environ.get("SPACK_VERBOSE_SCRIPT"):
fd.write("set -x\n")
fd.write(full_command)
fd.write("\n")
script_content.append("Set-PSDebug -Trace 2\n")
else:
script = f"{name}.sh"
script_content = ["#!/bin/sh\n\n", f"\n# spack {name} command\n"]
if exit_on_failure:
script_content.append("set -e\n")
if os.environ.get("SPACK_VERBOSE_SCRIPT"):
script_content.append("set -x\n")
script_content.append(full_command)
script_content.append("\n")

st = os.stat(script)
os.chmod(script, st.st_mode | stat.S_IEXEC)
with open(script, "w") as fd:
for line in script_content:
fd.write(line)

copy_path = os.path.join(repro_dir, script)
shutil.copyfile(script, copy_path)
st = os.stat(copy_path)
os.chmod(copy_path, st.st_mode | stat.S_IEXEC)
if not IS_WINDOWS:
st = os.stat(copy_path)
os.chmod(copy_path, st.st_mode | stat.S_IEXEC)

# Run the generated install.sh shell script as if it were being run in
# Run the generated shell script as if it were being run in
# a login shell.
exit_code = None
if run:
try:
cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
# We use sh as executor on Linux like platforms, pwsh on Windows
interpreter = "powershell.exe" if IS_WINDOWS else "/bin/sh"
cmd_process = subprocess.Popen([interpreter, f"./{script}"])
cmd_process.wait()
exit_code = cmd_process.returncode
except (ValueError, subprocess.CalledProcessError, OSError) as err:
tty.error("Encountered error running {0} script".format(name))
tty.error(f"Encountered error running {name} script")
tty.error(err)
exit_code = 1

tty.debug("spack {0} exited {1}".format(name, exit_code))
tty.debug(f"spack {name} exited {exit_code}")
else:
# Delete the script, it is copied to the destination dir
os.remove(script)
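
Note: a standalone sketch of the PowerShell error-handling composition that
compose_command_err_handling performs above, runnable outside Spack (it assumes
each command is a list of argument strings, as in the diff; the function name
is invented):

    def wrap_for_pwsh(args):
        # Join the arguments, then fail the script if the external command
        # set a non-zero $LASTEXITCODE (pwsh's ErrorActionPreference only
        # covers cmdlets, not EXEs like git).
        arg_str = " ".join(args)
        return (
            arg_str
            + "\nif ($LASTEXITCODE -ne 0){\n"
            + f'    throw "Command {arg_str} has failed"\n'
            + "}\n"
        )

    print(wrap_for_pwsh(["git", "clone", "https://github.com/spack/spack"]))
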
@@ -2088,7 +2124,7 @@ def create_buildcache(
for mirror_url in destination_mirror_urls:
results.append(
PushResult(
success=push_mirror_contents(input_spec, mirror_url, sign_binaries), url=mirror_url
success=push_to_build_cache(input_spec, mirror_url, sign_binaries), url=mirror_url
)
)

@@ -2122,7 +2158,7 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
# If there is an S3 error (e.g., access denied or connection
# error), the first non boto-specific class in the exception
# hierarchy is Exception. Just print a warning and return
msg = "Error writing to broken specs list {0}: {1}".format(url, err)
msg = f"Error writing to broken specs list {url}: {err}"
tty.warn(msg)
finally:
shutil.rmtree(tmpdir)
@@ -2135,7 +2171,7 @@ def read_broken_spec(broken_spec_url):
try:
_, _, fs = web_util.read_from_url(broken_spec_url)
except (URLError, web_util.SpackWebError, HTTPError):
tty.warn("Unable to read broken spec from {0}".format(broken_spec_url))
tty.warn(f"Unable to read broken spec from {broken_spec_url}")
return None

broken_spec_contents = codecs.getreader("utf-8")(fs).read()
@@ -2150,14 +2186,14 @@ def display_broken_spec_messages(base_url, hashes):
for spec_hash, broken_spec in [tup for tup in broken_specs if tup[1]]:
details = broken_spec["broken-spec"]
if "job-name" in details:
item_name = "{0}/{1}".format(details["job-name"], spec_hash[:7])
item_name = f"{details['job-name']}/{spec_hash[:7]}"
else:
item_name = spec_hash

if "job-stack" in details:
item_name = "{0} (in stack {1})".format(item_name, details["job-stack"])
item_name = f"{item_name} (in stack {details['job-stack']})"

msg = " {0} was reported broken here: {1}".format(item_name, details["job-url"])
msg = f" {item_name} was reported broken here: {details['job-url']}"
tty.msg(msg)

@@ -2180,7 +2216,7 @@ def run_standalone_tests(**kwargs):
log_file = kwargs.get("log_file")

if cdash and log_file:
tty.msg("The test log file {0} option is ignored with CDash reporting".format(log_file))
tty.msg(f"The test log file {log_file} option is ignored with CDash reporting")
log_file = None

# Error out but do NOT terminate if there are missing required arguments.
@@ -2206,10 +2242,10 @@ def run_standalone_tests(**kwargs):
test_args.extend(["--log-file", log_file])
test_args.append(job_spec.name)

tty.debug("Running {0} stand-alone tests".format(job_spec.name))
tty.debug(f"Running {job_spec.name} stand-alone tests")
exit_code = process_command("test", test_args, repro_dir)

tty.debug("spack test exited {0}".format(exit_code))
tty.debug(f"spack test exited {exit_code}")

class CDashHandler:
@@ -2232,7 +2268,7 @@ def __init__(self, ci_cdash):
# append runner description to the site if available
runner = os.environ.get("CI_RUNNER_DESCRIPTION")
if runner:
self.site += " ({0})".format(runner)
self.site += f" ({runner})"

# track current spec, if any
self.current_spec = None
@@ -2260,21 +2296,13 @@ def build_name(self):
Returns: (str) current spec's CDash build name."""
spec = self.current_spec
if spec:
build_name = "{0}@{1}%{2} hash={3} arch={4} ({5})".format(
spec.name,
spec.version,
spec.compiler,
spec.dag_hash(),
spec.architecture,
self.build_group,
)
tty.debug(
"Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
)
build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
return build_name

build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
tty.debug("Using CDash build name ({0}) from the environment".format(build_name))
tty.debug(f"Using CDash build name ({build_name}) from the environment")
return build_name

@property # type: ignore
@@ -2288,25 +2316,25 @@ def build_stamp(self):
Returns: (str) current CDash build stamp"""
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
if build_stamp:
tty.debug("Using build stamp ({0}) from the environment".format(build_stamp))
tty.debug(f"Using build stamp ({build_stamp}) from the environment")
return build_stamp

build_stamp = cdash_build_stamp(self.build_group, time.time())
tty.debug("Generated new build stamp ({0})".format(build_stamp))
tty.debug(f"Generated new build stamp ({build_stamp})")
return build_stamp

@property # type: ignore
@memoized
def project_enc(self):
tty.debug("Encoding project ({0}): {1})".format(type(self.project), self.project))
tty.debug(f"Encoding project ({type(self.project)}): {self.project})")
encode = urlencode({"project": self.project})
index = encode.find("=") + 1
return encode[index:]

@property
def upload_url(self):
url_format = "{0}/submit.php?project={1}"
return url_format.format(self.url, self.project_enc)
url_format = f"{self.url}/submit.php?project={self.project_enc}"
return url_format

def copy_test_results(self, source, dest):
"""Copy test results to artifacts directory."""
@@ -2324,7 +2352,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
response_code = response.getcode()

if response_code not in [200, 201]:
msg = "Creating buildgroup failed (response code = {0})".format(response_code)
msg = f"Creating buildgroup failed (response code = {response_code})"
tty.warn(msg)
return None

@@ -2335,10 +2363,10 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
return build_group_id

def populate_buildgroup(self, job_names):
url = "{0}/api/v1/buildgroup.php".format(self.url)
url = f"{self.url}/api/v1/buildgroup.php"

headers = {
"Authorization": "Bearer {0}".format(self.auth_token),
"Authorization": f"Bearer {self.auth_token}",
"Content-Type": "application/json",
}

@@ -2346,11 +2374,11 @@ def populate_buildgroup(self, job_names):

parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
group_id = self.create_buildgroup(
opener, headers, url, "Latest {0}".format(self.build_group), "Latest"
opener, headers, url, f"Latest {self.build_group}", "Latest"
)

if not parent_group_id or not group_id:
msg = "Failed to create or retrieve buildgroups for {0}".format(self.build_group)
msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
tty.warn(msg)
return

@@ -2370,7 +2398,7 @@ def populate_buildgroup(self, job_names):
response_code = response.getcode()

if response_code != 200:
msg = "Error response code ({0}) in populate_buildgroup".format(response_code)
msg = f"Error response code ({response_code}) in populate_buildgroup"
tty.warn(msg)

def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):

@@ -275,23 +275,37 @@ def setup_parser(subparser: argparse.ArgumentParser):

# Sync buildcache entries from one mirror to another
sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
sync.add_argument(
"--manifest-glob", help="a quoted glob pattern identifying copy manifest files"

sync_manifest_source = sync.add_argument_group(
"Manifest Source",
"Specify a list of build cache objects to sync using manifest file(s)."
'This option takes the place of the "source mirror" for synchronization'
'and optionally takes a "destination mirror" ',
)
sync.add_argument(
sync_manifest_source.add_argument(
"--manifest-glob", help="a quoted glob pattern identifying CI rebuild manifest files"
)
sync_source_mirror = sync.add_argument_group(
"Named Source",
"Specify a single registered source mirror to synchronize from. This option requires"
"the specification of a destination mirror.",
)
sync_source_mirror.add_argument(
"src_mirror",
metavar="source mirror",
type=arguments.mirror_name_or_url,
nargs="?",
type=arguments.mirror_name_or_url,
help="source mirror name, path, or URL",
)

sync.add_argument(
"dest_mirror",
metavar="destination mirror",
type=arguments.mirror_name_or_url,
nargs="?",
type=arguments.mirror_name_or_url,
help="destination mirror name, path, or URL",
)

sync.set_defaults(func=sync_fn)

# Update buildcache index without copying any additional packages
@@ -1070,7 +1084,17 @@ def sync_fn(args):
requires an active environment in order to know which specs to sync
"""
if args.manifest_glob:
manifest_copy(glob.glob(args.manifest_glob))
# Passing the args.src_mirror here because it is not possible to
# have the destination be required when specifying a named source
# mirror and optional for the --manifest-glob argument. In the case
# of manifest glob sync, the source mirror positional argument is the
# destination mirror if it is specified. If there are two mirrors
# specified, the second is ignored and the first is the override
# destination.
if args.dest_mirror:
tty.warn(f"Ignoring unused arguemnt: {args.dest_mirror.name}")

manifest_copy(glob.glob(args.manifest_glob), args.src_mirror)
return 0

if args.src_mirror is None or args.dest_mirror is None:
@@ -1121,7 +1145,7 @@ def sync_fn(args):
shutil.rmtree(tmpdir)

def manifest_copy(manifest_file_list):
def manifest_copy(manifest_file_list, dest_mirror=None):
"""Read manifest files containing information about specific specs to copy
from source to destination, remove duplicates since any binary packge for
a given hash should be the same as any other, and copy all files specified
@@ -1135,10 +1159,17 @@ def manifest_copy(manifest_file_list):
# Last duplicate hash wins
deduped_manifest[spec_hash] = copy_list

build_cache_dir = bindist.build_cache_relative_path()
for spec_hash, copy_list in deduped_manifest.items():
for copy_file in copy_list:
tty.debug("copying {0} to {1}".format(copy_file["src"], copy_file["dest"]))
copy_buildcache_file(copy_file["src"], copy_file["dest"])
dest = copy_file["dest"]
if dest_mirror:
src_relative_path = os.path.join(
build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
)
dest = url_util.join(dest_mirror.push_url, src_relative_path)
tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
copy_buildcache_file(copy_file["src"], dest)
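
Note: a standalone sketch of the destination-override path rewrite that
manifest_copy performs above; the mirror URL and source path are invented
for illustration:

    import os

    build_cache_dir = "build_cache"  # bindist.build_cache_relative_path() in Spack
    src = "/old/mirror/build_cache/linux-x86_64/gcc-12/zlib-1.3.spack"
    # Keep everything after the build cache directory, then re-root it
    # under the override mirror's push URL.
    src_relative_path = os.path.join(
        build_cache_dir, src.rsplit(build_cache_dir, 1)[1].lstrip("/")
    )
    dest = "/".join(["s3://new-mirror", src_relative_path])  # url_util.join in Spack
    print(dest)  # s3://new-mirror/build_cache/linux-x86_64/gcc-12/zlib-1.3.spack
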
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
@@ -1165,14 +1196,18 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
)

bindist.generate_key_index(keys_url)
try:
bindist.generate_key_index(keys_url)
except bindist.CannotListKeys as e:
# Do not error out if listing keys went wrong. This usually means that the _gpg path
# does not exist. TODO: distinguish between this and other errors.
tty.warn(f"did not update the key index: {e}")

def update_index_fn(args):
"""update a buildcache index"""
update_index(args.mirror, update_keys=args.keys)
return update_index(args.mirror, update_keys=args.keys)

def buildcache(parser, args):
if args.func:
args.func(args)
return args.func(args)

@@ -183,7 +183,7 @@ def checksum(parser, args):
print()

if args.add_to_package:
add_versions_to_package(pkg, version_lines)
add_versions_to_package(pkg, version_lines, args.batch)

def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -229,7 +229,7 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
tty.die("Invalid checksums found.")

def add_versions_to_package(pkg: PackageBase, version_lines: str):
def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
"""
Add checksumed versions to a package's instructions and open a user's
editor so they may double check the work of the function.
@@ -282,5 +282,5 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")

if sys.stdout.isatty():
if sys.stdout.isatty() and not is_batch:
editor(filename)

@@ -14,6 +14,7 @@

import spack.binary_distribution as bindist
import spack.ci as spack_ci
import spack.cmd
import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev
@@ -32,6 +33,7 @@
SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100

def deindent(desc):
@@ -705,11 +707,9 @@ def ci_rebuild(args):
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
cdash_handler.copy_test_results(reports_dir, job_test_dir)

# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure
# outside of the pipeline environment.
if install_exit_code == 0:
# If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
# will result in a non-zero exit code. Pushing is best-effort.
mirror_urls = [buildcache_mirror_url]

# TODO: Remove this block in Spack 0.23
@@ -721,13 +721,12 @@ def ci_rebuild(args):
destination_mirror_urls=mirror_urls,
sign_binaries=spack_ci.can_sign_binaries(),
):
msg = tty.msg if result.success else tty.warn
msg(
"{} {} to {}".format(
"Pushed" if result.success else "Failed to push",
job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
result.url,
)
if not result.success:
install_exit_code = FAILED_CREATE_BUILDCACHE_CODE
(tty.msg if result.success else tty.error)(
f'{"Pushed" if result.success else "Failed to push"} '
f'{job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when())} '
f"to {result.url}"
)

# If this is a develop pipeline, check if the spec that we just built is
@@ -748,22 +747,22 @@ def ci_rebuild(args):
tty.warn(msg.format(broken_spec_path, err))

else:
# If the install did not succeed, print out some instructions on how to reproduce this
# build failure outside of the pipeline environment.
tty.debug("spack install exited non-zero, will not create buildcache")

api_root_url = os.environ.get("CI_API_V4_URL")
ci_project_id = os.environ.get("CI_PROJECT_ID")
ci_job_id = os.environ.get("CI_JOB_ID")

repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
api_root_url, ci_project_id, ci_job_id
)

repro_job_url = f"{api_root_url}/projects/{ci_project_id}/jobs/{ci_job_id}/artifacts"
# Control characters cause this to be printed in blue so it stands out
reproduce_msg = """
print(
f"""

\033[34mTo reproduce this build locally, run:

spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
spack ci reproduce-build {repro_job_url} [--working-dir <dir>] [--autostart]

If this project does not have public pipelines, you will need to first:

@@ -771,12 +770,9 @@ def ci_rebuild(args):

... then follow the printed instructions.\033[0;0m

""".format(
repro_job_url
"""
)

print(reproduce_msg)

rebuild_timer.stop()
try:
with open("install_timers.json", "w") as timelog:

@@ -9,6 +9,7 @@
import shutil
import sys
import tempfile
from pathlib import Path
from typing import Optional

import llnl.string as string
@@ -44,6 +45,7 @@
"deactivate",
"create",
["remove", "rm"],
["rename", "mv"],
["list", "ls"],
["status", "st"],
"loads",
@@ -472,11 +474,82 @@ def env_remove(args):
tty.msg(f"Successfully removed environment '{bad_env_name}'")

#
# env rename
#
def env_rename_setup_parser(subparser):
"""rename an existing environment"""
subparser.add_argument(
"mv_from", metavar="from", help="name (or path) of existing environment"
)
subparser.add_argument(
"mv_to", metavar="to", help="new name (or path) for existing environment"
)
subparser.add_argument(
"-d",
"--dir",
action="store_true",
help="the specified arguments correspond to directory paths",
)
subparser.add_argument(
"-f", "--force", action="store_true", help="allow overwriting of an existing environment"
)

def env_rename(args):
"""Rename an environment.

This renames a managed environment or moves an anonymous environment.
"""

# Directory option has been specified
if args.dir:
if not ev.is_env_dir(args.mv_from):
tty.die("The specified path does not correspond to a valid spack environment")
from_path = Path(args.mv_from)
if not args.force:
if ev.is_env_dir(args.mv_to):
tty.die(
"The new path corresponds to an existing environment;"
" specify the --force flag to overwrite it."
)
if Path(args.mv_to).exists():
tty.die("The new path already exists; specify the --force flag to overwrite it.")
to_path = Path(args.mv_to)

# Name option being used
elif ev.exists(args.mv_from):
from_path = ev.environment.environment_dir_from_name(args.mv_from)
if not args.force and ev.exists(args.mv_to):
tty.die(
"The new name corresponds to an existing environment;"
" specify the --force flag to overwrite it."
)
to_path = ev.environment.root(args.mv_to)

# Neither
else:
tty.die("The specified name does not correspond to a managed spack environment")

# Guard against renaming from or to an active environment
active_env = ev.active_environment()
if active_env:
from_env = ev.Environment(from_path)
if from_env.path == active_env.path:
tty.die("Cannot rename active environment")
if to_path == active_env.path:
tty.die(f"{args.mv_to} is an active environment")

shutil.rmtree(to_path, ignore_errors=True)
fs.rename(from_path, to_path)
tty.msg(f"Successfully renamed environment {args.mv_from} to {args.mv_to}")
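
Note: typical invocations of the new `spack env rename` command added above,
based on the parser defined in this hunk (environment names are invented for
illustration):

    spack env rename myenv mynewenv
    spack env rename -f myenv otherenv
    spack env rename -d ./envs/old ./envs/new
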
#
# env list
#
def env_list_setup_parser(subparser):
"""list available environments"""
"""list managed environments"""

def env_list(args):

@@ -140,6 +140,12 @@ def setup_parser(subparser):
subparser.add_argument(
"--only-deprecated", action="store_true", help="show only deprecated packages"
)
subparser.add_argument(
"--install-tree",
action="store",
default="all",
help="Install trees to query: 'all' (default), 'local', 'upstream', upstream name or path",
)

subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
@@ -168,6 +174,12 @@ def query_arguments(args):

q_args = {"installed": installed, "known": known, "explicit": explicit}

install_tree = args.install_tree
upstreams = spack.config.get("upstreams", {})
if install_tree in upstreams.keys():
install_tree = upstreams[install_tree]["install_tree"]
q_args["install_tree"] = install_tree

# Time window of installation
for attribute in ("start_date", "end_date"):
date = getattr(args, attribute)

@@ -420,10 +420,9 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
with reporter_factory(specs_to_install):
env.install_specs(specs_to_install, **install_kwargs)
finally:
# TODO: this is doing way too much to trigger
# views and modules to be generated.
with env.write_transaction():
env.write(regenerate=True)
if env.views:
with env.write_transaction():
env.write(regenerate=True)

def concrete_specs_from_cli(args, install_kwargs):

@@ -5,8 +5,6 @@

import sys

import llnl.util.tty as tty

import spack.cmd
import spack.cmd.find
import spack.environment as ev
@@ -70,16 +68,6 @@ def setup_parser(subparser):
help="load the first match if multiple packages match the spec",
)

subparser.add_argument(
"--only",
default="package,dependencies",
dest="things_to_load",
choices=["package", "dependencies"],
help="select whether to load the package and its dependencies\n\n"
"the default is to load the package and all dependencies. alternatively, "
"one can decide to load only the package or only the dependencies",
)

subparser.add_argument(
"--list",
action="store_true",
@@ -110,11 +98,6 @@ def load(parser, args):
)
return 1

if args.things_to_load != "package,dependencies":
tty.warn(
"The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
)

with spack.store.STORE.db.read_transaction():
env_mod = uenv.environment_modifications_for_specs(*specs)
for spec in specs:

@@ -116,39 +116,38 @@ def ipython_interpreter(args):

def python_interpreter(args):
"""A python interpreter is the default interpreter"""
# Fake a main python shell by setting __name__ to __main__.
console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file) as startup:
console.runsource(startup.read(), startup_file, "exec")

if args.python_command:
propagate_exceptions_from(console)
console.runsource(args.python_command)
elif args.python_args:
propagate_exceptions_from(console)
if args.python_args and not args.python_command:
sys.argv = args.python_args
with open(args.python_args[0]) as file:
console.runsource(file.read(), args.python_args[0], "exec")
runpy.run_path(args.python_args[0], run_name="__main__")
else:
# Provides readline support, allowing user to use arrow keys
console.push("import readline")
# Provide tabcompletion
console.push("from rlcompleter import Completer")
console.push("readline.set_completer(Completer(locals()).complete)")
console.push('readline.parse_and_bind("tab: complete")')
# Fake a main python shell by setting __name__ to __main__.
console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file) as startup:
console.runsource(startup.read(), startup_file, "exec")
if args.python_command:
propagate_exceptions_from(console)
console.runsource(args.python_command)
else:
# Provides readline support, allowing user to use arrow keys
console.push("import readline")
# Provide tabcompletion
console.push("from rlcompleter import Completer")
console.push("readline.set_completer(Completer(locals()).complete)")
console.push('readline.parse_and_bind("tab: complete")')

console.interact(
"Spack version %s\nPython %s, %s %s"
% (
spack.spack_version,
platform.python_version(),
platform.system(),
platform.machine(),
console.interact(
"Spack version %s\nPython %s, %s %s"
% (
spack.spack_version,
platform.python_version(),
platform.system(),
platform.machine(),
)
)
)

def propagate_exceptions_from(console):

@@ -91,7 +91,6 @@ def setup_parser(subparser):

def _process_result(result, show, required_format, kwargs):
result.raise_if_unsat()
opt, _, _ = min(result.answers)
if ("opt" in show) and (not required_format):
tty.msg("Best of %d considered solutions." % result.nmodels)

@@ -34,6 +34,13 @@ def setup_parser(subparser):
default=False,
help="show full pytest help, with advanced options",
)
subparser.add_argument(
"-n",
"--numprocesses",
type=int,
default=1,
help="run tests in parallel up to this wide, default 1 for sequential",
)

# extra spack arguments to list tests
list_group = subparser.add_argument_group("listing tests")
@@ -229,6 +236,16 @@ def unit_test(parser, args, unknown_args):
if args.extension:
pytest_root = spack.extensions.load_extension(args.extension)

if args.numprocesses is not None and args.numprocesses > 1:
pytest_args.extend(
[
"--dist",
"loadfile",
"--tx",
f"{args.numprocesses}*popen//python=spack-tmpconfig spack python",
]
)

# pytest.ini lives in the root of the spack repository.
with llnl.util.filesystem.working_dir(pytest_root):
if args.list:
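
Note: with the new flag, a run like `spack unit-test -n 4` effectively hands
pytest-xdist these extra arguments (spellings taken from the hunk above; the
`4` is an invented example value):

    --dist loadfile --tx "4*popen//python=spack-tmpconfig spack python"
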
@@ -112,18 +112,18 @@ def _to_dict(compiler):
|
||||
def get_compiler_config(scope=None, init_config=False):
|
||||
"""Return the compiler configuration for the specified architecture."""
|
||||
|
||||
config = spack.config.get("compilers", scope=scope) or []
|
||||
config = spack.config.CONFIG.get("compilers", scope=scope) or []
|
||||
if config or not init_config:
|
||||
return config
|
||||
|
||||
merged_config = spack.config.get("compilers")
|
||||
merged_config = spack.config.CONFIG.get("compilers")
|
||||
if merged_config:
|
||||
# Config is empty for this scope
|
||||
# Do not init config because there is a non-empty scope
|
||||
return config
|
||||
|
||||
_init_compiler_config(scope=scope)
|
||||
config = spack.config.get("compilers", scope=scope)
|
||||
config = spack.config.CONFIG.get("compilers", scope=scope)
|
||||
return config
|
||||
|
||||
|
||||
|
@@ -1,34 +0,0 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
|
||||
import spack.compilers.oneapi
|
||||
|
||||
|
||||
class Dpcpp(spack.compilers.oneapi.Oneapi):
|
||||
"""This is the same as the oneAPI compiler but uses dpcpp instead of
|
||||
icpx (for DPC++ source files). It explicitly refers to dpcpp, so that
|
||||
CMake test files which check the compiler name (e.g. CMAKE_CXX_COMPILER)
|
||||
detect it as dpcpp.
|
||||
|
||||
Ideally we could switch out icpx for dpcpp where needed in the oneAPI
|
||||
compiler definition, but two things are needed for that: (a) a way to
|
||||
tell the compiler that it should be using dpcpp and (b) a way to
|
||||
customize the link_paths
|
||||
|
||||
See also: https://www.intel.com/content/www/us/en/develop/documentation/oneapi-dpcpp-cpp-compiler-dev-guide-and-reference/top/compiler-setup/using-the-command-line/invoking-the-compiler.html
|
||||
"""
|
||||
|
||||
# Subclasses use possible names of C++ compiler
|
||||
cxx_names = ["dpcpp"]
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {
|
||||
"cc": os.path.join("oneapi", "icx"),
|
||||
"cxx": os.path.join("oneapi", "dpcpp"),
|
||||
"f77": os.path.join("oneapi", "ifx"),
|
||||
"fc": os.path.join("oneapi", "ifx"),
|
||||
}
|
@@ -199,7 +199,7 @@ def __init__(self, *args, **kwargs):
|
||||
# for a fortran compiler
|
||||
if paths[2]:
|
||||
# If this found, it sets all the vars
|
||||
oneapi_root = os.getenv("ONEAPI_ROOT")
|
||||
oneapi_root = os.path.join(self.cc, "../../..")
|
||||
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
|
||||
oneapi_version_setvars = os.path.join(
|
||||
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
|
||||
|
@@ -749,7 +749,6 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
|
||||
result = solver.solve(
|
||||
abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
|
||||
)
|
||||
result.raise_if_unsat()
|
||||
return [s.copy() for s in result.specs]
|
||||
|
||||
|
||||
|
@@ -107,7 +107,7 @@
|
||||
|
||||
#: metavar to use for commands that accept scopes
|
||||
#: this is shorter and more readable than listing all choices
|
||||
SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
|
||||
SCOPES_METAVAR = "{defaults,system,site,user,command_line}[/PLATFORM] or env:ENVIRONMENT"
|
||||
|
||||
#: Base name for the (internal) overrides scope.
|
||||
_OVERRIDES_BASE_NAME = "overrides-"
|
||||
@@ -1621,15 +1621,32 @@ def query_local(self, *args, **kwargs):
     query_local.__doc__ += _QUERY_DOCSTRING

     def query(self, *args, **kwargs):
-        """Query the Spack database including all upstream databases."""
+        """Query the Spack database including all upstream databases.
+
+        Additional Arguments:
+            install_tree (str): query 'all' (default), 'local', 'upstream', or upstream path
+        """
+        install_tree = kwargs.pop("install_tree", "all")
+        valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
+        if install_tree not in valid_trees:
+            msg = "Invalid install_tree argument to Database.query()\n"
+            msg += f"Try one of {', '.join(valid_trees)}"
+            tty.error(msg)
+            return []

         upstream_results = []
-        for upstream_db in self.upstream_dbs:
+        upstreams = self.upstream_dbs
+        if install_tree not in ("all", "upstream"):
+            upstreams = [u for u in self.upstream_dbs if u.root == install_tree]
+        for upstream_db in upstreams:
             # queries for upstream DBs need to *not* lock - we may not
             # have permissions to do this and the upstream DBs won't know about
             # us anyway (so e.g. they should never uninstall specs)
             upstream_results.extend(upstream_db._query(*args, **kwargs) or [])

-        local_results = set(self.query_local(*args, **kwargs))
+        local_results = []
+        if install_tree in ("all", "local") or self.root == install_tree:
+            local_results = set(self.query_local(*args, **kwargs))

         results = list(local_results) + list(x for x in upstream_results if x not in local_results)
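
For orientation, a minimal sketch of how the extended query API might be used from Python (the database handle and the upstream root path below are illustrative assumptions, not taken from the diff):

    import spack.store

    db = spack.store.STORE.db

    # Same behavior as before: install_tree defaults to "all".
    everything = db.query("zlib")

    # Restrict the query to this Spack's own install tree...
    local_only = db.query("zlib", install_tree="local")

    # ...or to a single upstream database, selected by its root path (hypothetical).
    upstream_only = db.query("zlib", install_tree="/opt/upstream-spack/opt/spack")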
@@ -9,8 +9,6 @@
 import tempfile
 from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple

-import jinja2
-
 from llnl.util import filesystem

 import spack.repo
@@ -85,6 +83,8 @@ def _mock_layout(self) -> Generator[List[str], None, None]:
         self.tmpdir.cleanup()

     def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
+        import jinja2
+
         relative_paths = mock_executables.executables
         script = mock_executables.script
         script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
@@ -94,6 +94,9 @@ class OpenMpi(Package):
 PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


+SUPPORTED_LANGUAGES = ("fortran", "cxx")
+
+
 def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
     """Create a ``Spec`` that indicates when a directive should be applied.

@@ -585,6 +588,9 @@ def depends_on(
     @see The section "Dependency specs" in the Spack Packaging Guide.

     """
+    if spack.spec.Spec(spec).name in SUPPORTED_LANGUAGES:
+        assert type == "build", "languages must be of 'build' type"
+        return _language(lang_spec_str=spec, when=when)

     def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
         _depends_on(pkg, spec, when=when, type=type, patches=patches)
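
To illustrate the routing above, a hypothetical package could now request toolchain languages through plain depends_on calls; "cxx" and "fortran" are intercepted and recorded as language requirements rather than as real dependency nodes (a sketch, not code from the diff):

    from spack.package import *

    class MySolver(Package):
        """Hypothetical package using the language directives."""

        # Routed to _language(): recorded under pkg.languages, not as a node.
        depends_on("cxx", type="build")
        depends_on("fortran", type="build")

        # An ordinary build dependency is unaffected by the new code path.
        depends_on("cmake", type="build")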
@@ -921,9 +927,9 @@ def maintainers(*names: str):
     """

     def _execute_maintainer(pkg):
-        maintainers_from_base = getattr(pkg, "maintainers", [])
-        # Here it is essential to copy, otherwise we might add to an empty list in the parent
-        pkg.maintainers = list(sorted(set(maintainers_from_base + list(names))))
+        maintainers = set(getattr(pkg, "maintainers", []))
+        maintainers.update(names)
+        pkg.maintainers = sorted(maintainers)

     return _execute_maintainer
@@ -967,7 +973,6 @@ def license(
         checked_by: string or list of strings indicating which github user checked the
             license (if any).
         when: A spec specifying when the license applies.
-        when: A spec specifying when the license applies.
     """

     return lambda pkg: _execute_license(pkg, license_identifier, when)
@@ -1014,6 +1019,21 @@ def _execute_requires(pkg: "spack.package_base.PackageBase"):
     return _execute_requires


+@directive("languages")
+def _language(lang_spec_str: str, *, when: Optional[Union[str, bool]] = None):
+    """Temporary implementation of language virtuals, until compilers are proper dependencies."""
+
+    def _execute_languages(pkg: "spack.package_base.PackageBase"):
+        when_spec = _make_when_spec(when)
+        if not when_spec:
+            return
+
+        languages = pkg.languages.setdefault(when_spec, set())
+        languages.add(lang_spec_str)
+
+    return _execute_languages
+
+
 class DirectiveError(spack.error.SpackError):
     """This is raised when something is wrong with a package directive."""
@@ -119,7 +119,7 @@ def __init__(self, pkg_count: int):
         self.pkg_ids: Set[str] = set()

     def next_pkg(self, pkg: "spack.package_base.PackageBase"):
-        pkg_id = package_id(pkg)
+        pkg_id = package_id(pkg.spec)

         if pkg_id not in self.pkg_ids:
             self.pkg_num += 1
@@ -221,12 +221,12 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
     # consists in module file generation and registration in the DB.
     if pkg.spec.external:
         _process_external_package(pkg, explicit)
-        _print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg)})")
+        _print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg.spec)})")
         return True

     if pkg.spec.installed_upstream:
         tty.verbose(
-            f"{package_id(pkg)} is installed in an upstream Spack instance at "
+            f"{package_id(pkg.spec)} is installed in an upstream Spack instance at "
             f"{pkg.spec.prefix}"
         )
         _print_installed_pkg(pkg.prefix)
@@ -403,7 +403,7 @@ def _install_from_cache(
         return False
     t.stop()

-    pkg_id = package_id(pkg)
+    pkg_id = package_id(pkg.spec)
     tty.debug(f"Successfully extracted {pkg_id} from binary cache")

     _write_timer_json(pkg, t, True)
@@ -484,7 +484,7 @@ def _process_binary_cache_tarball(
     if download_result is None:
         return False

-    tty.msg(f"Extracting {package_id(pkg)} from binary cache")
+    tty.msg(f"Extracting {package_id(pkg.spec)} from binary cache")

     with timer.measure("install"), spack.util.path.filter_padding():
         binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
@@ -513,7 +513,7 @@ def _try_install_from_binary_cache(
     if not spack.mirror.MirrorCollection(binary=True):
         return False

-    tty.debug(f"Searching for binary cache of {package_id(pkg)}")
+    tty.debug(f"Searching for binary cache of {package_id(pkg.spec)}")

     with timer.measure("search"):
         matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
@@ -610,7 +610,7 @@ def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:

     Returns: list of package ids
     """
-    return [package_id(d.package) for d in spec.dependents()]
+    return [package_id(d) for d in spec.dependents()]


 def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:
@@ -720,7 +720,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
     dump_packages(pkg.spec, packages_dir)


-def package_id(pkg: "spack.package_base.PackageBase") -> str:
+def package_id(spec: "spack.spec.Spec") -> str:
     """A "unique" package identifier for installation purposes

     The identifier is used to track build tasks, locks, install, and
@@ -732,10 +732,10 @@ def package_id(pkg: "spack.package_base.PackageBase") -> str:
     Args:
         pkg: the package from which the identifier is derived
     """
-    if not pkg.spec.concrete:
+    if not spec.concrete:
         raise ValueError("Cannot provide a unique, readable id when the spec is not concretized.")

-    return f"{pkg.name}-{pkg.version}-{pkg.spec.dag_hash()}"
+    return f"{spec.name}-{spec.version}-{spec.dag_hash()}"


 class BuildRequest:
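
Seen from a call site, this signature change removes the need for a Package object altogether. A short sketch (the import path and spec are illustrative assumptions):

    import spack.spec
    from spack.installer import package_id  # assumed location of the helper

    spec = spack.spec.Spec("zlib@1.3").concretized()

    # Before: package_id(spec.package); after: the spec alone is enough.
    ident = package_id(spec)  # e.g. "zlib-1.3-<dag-hash>"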
@@ -765,7 +765,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         self.pkg.last_phase = install_args.pop("stop_at", None)  # type: ignore[attr-defined]

         # Cache the package id for convenience
-        self.pkg_id = package_id(pkg)
+        self.pkg_id = package_id(pkg.spec)

         # Save off the original install arguments plus standard defaults
         # since they apply to the requested package *and* dependencies.
@@ -780,9 +780,9 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         # are not able to return full dependents for all packages across
         # environment specs.
         self.dependencies = set(
-            package_id(d.package)
+            package_id(d)
             for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
-            if package_id(d.package) != self.pkg_id
+            if package_id(d) != self.pkg_id
         )

     def __repr__(self) -> str:
@@ -832,7 +832,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
         depflag = dt.LINK | dt.RUN
         include_build_deps = self.install_args.get("include_build_deps")

-        if self.pkg_id == package_id(pkg):
+        if self.pkg_id == package_id(pkg.spec):
             cache_only = self.install_args.get("package_cache_only")
         else:
             cache_only = self.install_args.get("dependencies_cache_only")
@@ -927,7 +927,7 @@ def __init__(
             raise ValueError(f"{self.pkg.name} must have a concrete spec")

         # The "unique" identifier for the task's package
-        self.pkg_id = package_id(self.pkg)
+        self.pkg_id = package_id(self.pkg.spec)

         # The explicit build request associated with the package
         if not isinstance(request, BuildRequest):
@@ -965,9 +965,9 @@ def __init__(
         # if we use traverse for transitive dependencies, then we must remove
         # transitive dependents on failure.
         self.dependencies = set(
-            package_id(d.package)
+            package_id(d)
             for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
-            if package_id(d.package) != self.pkg_id
+            if package_id(d) != self.pkg_id
         )

         # Handle bootstrapped compiler
@@ -983,7 +983,7 @@ def __init__(
             dep.constrain(f"os={str(arch_spec.os)}")
             dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
             dep.concretize()
-            dep_id = package_id(dep.package)
+            dep_id = package_id(dep)
             self.dependencies.add(dep_id)

         # List of uninstalled dependencies, which is used to establish
@@ -1194,7 +1194,7 @@ def _add_bootstrap_compilers(
         """
         packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
         for comp_pkg, is_compiler in packages:
-            pkgid = package_id(comp_pkg)
+            pkgid = package_id(comp_pkg.spec)
             if pkgid not in self.build_tasks:
                 self._add_init_task(comp_pkg, request, is_compiler, all_deps)
             elif is_compiler:
@@ -1241,7 +1241,7 @@ def _add_init_task(
         """
         task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
         for dep_id in task.dependencies:
-            all_deps[dep_id].add(package_id(pkg))
+            all_deps[dep_id].add(package_id(pkg.spec))

         self._push_task(task)
@@ -1276,7 +1276,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
         err = "Cannot proceed with {0}: {1}"
         for dep in request.traverse_dependencies():
             dep_pkg = dep.package
-            dep_id = package_id(dep_pkg)
+            dep_id = package_id(dep)

             # Check for failure since a prefix lock is not required
             if spack.store.STORE.failure_tracker.has_failed(dep):
@@ -1409,7 +1409,7 @@ def _cleanup_task(self, pkg: "spack.package_base.PackageBase") -> None:
         Args:
             pkg: the package being installed
         """
-        self._remove_task(package_id(pkg))
+        self._remove_task(package_id(pkg.spec))

         # Ensure we have a read lock to prevent others from uninstalling the
         # spec during our installation.
@@ -1423,7 +1423,7 @@ def _ensure_install_ready(self, pkg: "spack.package_base.PackageBase") -> None:
         Args:
             pkg: the package being locally installed
         """
-        pkg_id = package_id(pkg)
+        pkg_id = package_id(pkg.spec)
         pre = f"{pkg_id} cannot be installed locally:"

         # External packages cannot be installed locally.
@@ -1465,7 +1465,7 @@ def _ensure_locked(
             "write",
         ], f'"{lock_type}" is not a supported package management lock type'

-        pkg_id = package_id(pkg)
+        pkg_id = package_id(pkg.spec)
         ltype, lock = self.locks.get(pkg_id, (lock_type, None))
         if lock and ltype == lock_type:
             return ltype, lock
@@ -1601,7 +1601,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):
         for dep in request.traverse_dependencies():
             dep_pkg = dep.package

-            dep_id = package_id(dep_pkg)
+            dep_id = package_id(dep)
             if dep_id not in self.build_tasks:
                 self._add_init_task(dep_pkg, request, False, all_deps)
@@ -1913,7 +1913,7 @@ def _flag_installed(
             dependent_ids: set of the package's dependent ids, or None if the dependent ids are
                 limited to those maintained in the package (dependency DAG)
         """
-        pkg_id = package_id(pkg)
+        pkg_id = package_id(pkg.spec)

         if pkg_id in self.installed:
             # Already determined the package has been installed
@@ -2274,11 +2274,15 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         # whether to install source code with the package
         self.install_source = install_args.get("install_source", False)

+        is_develop = pkg.spec.is_develop
         # whether to keep the build stage after installation
-        self.keep_stage = install_args.get("keep_stage", False)
+        # Note: user commands do not have an explicit choice to disable
+        # keeping stages (i.e., we have a --keep-stage option, but not
+        # a --destroy-stage option), so we can override a default choice
+        # to destroy
+        self.keep_stage = is_develop or install_args.get("keep_stage", False)
         # whether to restage
-        self.restage = install_args.get("restage", False)
+        self.restage = (not is_develop) and install_args.get("restage", False)

         # whether to skip the patch phase
         self.skip_patch = install_args.get("skip_patch", False)
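
The net effect for a `spack develop` spec is that its stage is always kept and never restaged, whatever the requested install arguments. A small sketch with illustrative values:

    install_args = {"keep_stage": False, "restage": True}  # hypothetical request
    is_develop = True  # pkg.spec.is_develop for a developed spec

    keep_stage = is_develop or install_args.get("keep_stage", False)   # True
    restage = (not is_develop) and install_args.get("restage", False)  # False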
@@ -2305,7 +2309,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):

         # info/debug information
         self.pre = _log_prefix(pkg.name)
-        self.pkg_id = package_id(pkg)
+        self.pkg_id = package_id(pkg.spec)

     def run(self) -> bool:
         """Main entry point from ``build_process`` to kick off install in child."""
@@ -567,6 +567,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     provided_together: Dict["spack.spec.Spec", List[Set[str]]]
     patches: Dict["spack.spec.Spec", List["spack.patch.Patch"]]
     variants: Dict[str, Tuple["spack.variant.Variant", "spack.spec.Spec"]]
+    languages: Dict["spack.spec.Spec", Set[str]]

     #: By default, packages are not virtual
     #: Virtual packages override this attribute
@@ -160,10 +160,15 @@ def detect(cls):
         system, as the Cray compiler wrappers and other components of the Cray
         programming environment are irrelevant without module support.
         """
-        craype_type, craype_version = cls.craype_type_and_version()
-        if craype_type == "EX" and craype_version >= spack.version.Version("21.10"):
+        if "opt/cray" not in os.environ.get("MODULEPATH", ""):
             return False
-        return "opt/cray" in os.environ.get("MODULEPATH", "")
+
+        craype_type, craype_version = cls.craype_type_and_version()
+        if craype_type == "XC":
+            return True
+        if craype_type == "EX" and craype_version < spack.version.Version("21.10"):
+            return True
+        return False

     def _default_target_from_env(self):
         """Set and return the default CrayPE target loaded in a clean login
@@ -14,7 +14,7 @@
 import xml.sax.saxutils
 from typing import Dict, Optional
 from urllib.parse import urlencode
-from urllib.request import HTTPHandler, Request, build_opener
+from urllib.request import HTTPSHandler, Request, build_opener

 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir
@@ -27,6 +27,7 @@
 from spack.error import SpackError
 from spack.util.crypto import checksum
 from spack.util.log_parse import parse_log_events
+from spack.util.web import urllib_ssl_cert_handler

 from .base import Reporter
 from .extract import extract_test_parts
@@ -427,7 +428,7 @@ def upload(self, filename):
         # Compute md5 checksum for the contents of this file.
         md5sum = checksum(hashlib.md5, filename, block_size=8192)

-        opener = build_opener(HTTPHandler)
+        opener = build_opener(HTTPSHandler(context=urllib_ssl_cert_handler()))
         with open(filename, "rb") as f:
             params_dict = {
                 "build": self.buildname,
@@ -34,6 +34,7 @@
             "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
         },
     },
+    "os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},
 },
 }
}
|
@@ -73,6 +73,7 @@
|
||||
"environments_root": {"type": "string"},
|
||||
"connect_timeout": {"type": "integer", "minimum": 0},
|
||||
"verify_ssl": {"type": "boolean"},
|
||||
"ssl_certs": {"type": "string"},
|
||||
"suppress_gpg_warnings": {"type": "boolean"},
|
||||
"install_missing_compilers": {"type": "boolean"},
|
||||
"debug": {"type": "boolean"},
|
||||
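
The new `ssl_certs` key complements `verify_ssl`. A sketch of how such a value could be read back (the config key path and the certificate path are assumptions based on the schema above, not from the diff):

    import spack.config

    # Would correspond to a user setting like:
    #   config:
    #     ssl_certs: /etc/ssl/certs/ca-bundle.crt   # hypothetical path
    cert_path = spack.config.get("config:ssl_certs")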
@@ -541,6 +541,7 @@ def _concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict
         info.get("preferred", False),
         not info.get("deprecated", False),
         not version.isdevelop(),
+        not version.is_prerelease(),
         version,
     )
@@ -762,7 +763,6 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
         timer.stop("ground")

         # With a grounded program, we can run the solve.
-        result = Result(specs)
         models = []  # stable models if things go well
         cores = []  # unsatisfiable cores if they do not
@@ -783,6 +783,7 @@ def on_model(model):
         timer.stop("solve")

         # once done, construct the solve result
+        result = Result(specs)
         result.satisfiable = solve_result.satisfiable

         if result.satisfiable:
@@ -823,6 +824,8 @@ def on_model(model):
             print("Statistics:")
             pprint.pprint(self.control.statistics)

+        result.raise_if_unsat()
+
         if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
             unsolved_str = Result.format_unsolved(result.unsolved_specs)
             raise InternalConcretizerError(
@@ -1039,6 +1042,25 @@ def conflict_rules(self, pkg):
         )
         self.gen.newline()

+    def package_languages(self, pkg):
+        for when_spec, languages in pkg.languages.items():
+            condition_msg = f"{pkg.name} needs the {', '.join(sorted(languages))} language"
+            if when_spec != spack.spec.Spec():
+                condition_msg += f" when {when_spec}"
+            condition_id = self.condition(when_spec, name=pkg.name, msg=condition_msg)
+            for language in sorted(languages):
+                self.gen.fact(fn.pkg_fact(pkg.name, fn.language(condition_id, language)))
+        self.gen.newline()
+
+    def config_compatible_os(self):
+        """Facts about compatible os's specified in configs"""
+        self.gen.h2("Compatible OS from concretizer config file")
+        os_data = spack.config.get("concretizer:os_compatible", {})
+        for recent, reusable in os_data.items():
+            for old in reusable:
+                self.gen.fact(fn.os_compatible(recent, old))
+        self.gen.newline()
+
     def compiler_facts(self):
         """Facts about available compilers."""
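
For context, a sketch of the shape the `concretizer:os_compatible` data takes once loaded, and of the facts the loop above would emit (the OS names are illustrative):

    # Each "recent" OS maps to older OSs whose binaries it may reuse.
    os_data = {"rhel9": ["rhel8", "centos8"], "ubuntu22.04": ["ubuntu20.04"]}

    # Mirrors the fact generation above: one os_compatible(recent, old) per pair.
    for recent, reusable in os_data.items():
        for old in reusable:
            print(f'os_compatible("{recent}", "{old}")')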
@@ -1088,6 +1110,9 @@ def pkg_rules(self, pkg, tests):
         self.pkg_version_rules(pkg)
         self.gen.newline()

+        # languages
+        self.package_languages(pkg)
+
         # variants
         self.variant_rules(pkg)
@@ -2158,7 +2183,7 @@ def versions_for(v):
             if isinstance(v, vn.StandardVersion):
                 return [v]
             elif isinstance(v, vn.ClosedOpenRange):
-                return [v.lo, vn.prev_version(v.hi)]
+                return [v.lo, vn._prev_version(v.hi)]
             elif isinstance(v, vn.VersionList):
                 return sum((versions_for(e) for e in v), [])
             else:
@@ -2293,8 +2318,6 @@ def setup(
         self.possible_virtuals = node_counter.possible_virtuals()
         self.pkgs = node_counter.possible_dependencies()

-        self.pkgs.update(spack.repo.PATH.packages_with_tags("runtime"))
-
         # Fail if we already know an unreachable node is requested
         for spec in specs:
             missing_deps = [
@@ -2308,7 +2331,6 @@ def setup(
             self.explicitly_required_namespaces[node.name] = node.namespace

         self.gen = ProblemInstanceBuilder()
-
         if not allow_deprecated:
             self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2347,6 +2369,7 @@ def setup(
         self.gen.newline()

         self.gen.h1("General Constraints")
+        self.config_compatible_os()
         self.compiler_facts()

         # architecture defaults
@@ -2438,14 +2461,14 @@ def define_runtime_constraints(self):
         """Define the constraints to be imposed on the runtimes"""
         recorder = RuntimePropertyRecorder(self)
         for compiler in self.possible_compilers:
-            if compiler.name != "gcc":
-                continue
+            compiler_with_different_cls_names = {"oneapi": "intel-oneapi-compilers"}
+            compiler_cls_name = compiler_with_different_cls_names.get(compiler.name, compiler.name)
             try:
-                compiler_cls = spack.repo.PATH.get_pkg_class(compiler.name)
+                compiler_cls = spack.repo.PATH.get_pkg_class(compiler_cls_name)
             except spack.repo.UnknownPackageError:
                 continue
             if hasattr(compiler_cls, "runtime_constraints"):
-                compiler_cls.runtime_constraints(compiler=compiler, pkg=recorder)
+                compiler_cls.runtime_constraints(spec=compiler.spec, pkg=recorder)

         recorder.consume_facts()
@@ -2857,13 +2880,24 @@ def reset(self):
         """Resets the current state."""
         self.current_package = None

-    def depends_on(self, dependency_str: str, *, when: str, type: str, description: str) -> None:
+    def depends_on(
+        self,
+        dependency_str: str,
+        *,
+        when: str,
+        type: str,
+        description: str,
+        languages: Optional[List[str]] = None,
+    ) -> None:
         """Injects conditional dependencies on packages.

+        Conditional dependencies can be either "real" packages or virtual dependencies.
+
         Args:
             dependency_str: the dependency spec to inject
             when: anonymous condition to be met on a package to have the dependency
             type: dependency type
+            languages: languages needed by the package for the dependency to be considered
             description: human-readable description of the rule for adding the dependency
         """
         # TODO: The API for this function is not final, and is still subject to change. At
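
A sketch of how a compiler package's `runtime_constraints` hook might drive the extended recorder (the second runtime package name and the `languages` usage here are illustrative assumptions, not code from the diff):

    @classmethod
    def runtime_constraints(cls, *, spec, pkg):
        """Hypothetical hook; `pkg` is the RuntimePropertyRecorder above."""
        pkg("*").depends_on(
            "gcc-runtime",
            when="%gcc",
            type="link",
            description="If any package uses %gcc, it depends on gcc-runtime",
        )
        # The new keyword narrows a rule to packages using a given language.
        pkg("*").depends_on(
            "fortran-rt",  # hypothetical runtime package
            when="%gcc",
            type="link",
            languages=["fortran"],
            description="Fortran-using packages also link the Fortran runtime",
        )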
@@ -2889,26 +2923,45 @@ def depends_on(self, dependency_str: str, *, when: str, type: str, description:
             f"  not external({node_variable}),\n"
             f"  not runtime(Package)"
         ).replace(f'"{placeholder}"', f"{node_variable}")
+        if languages:
+            body_str += ",\n"
+            for language in languages:
+                body_str += f'  attr("language", {node_variable}, "{language}")'

         head_clauses = self._setup.spec_clauses(dependency_spec, body=False)

         runtime_pkg = dependency_spec.name
+
+        is_virtual = head_clauses[0].args[0] == "virtual_node"
         main_rule = (
             f"% {description}\n"
             f'1 {{ attr("depends_on", {node_variable}, node(0..X-1, "{runtime_pkg}"), "{type}") :'
-            f' max_dupes("gcc-runtime", X)}} 1:-\n'
+            f' max_dupes("{runtime_pkg}", X)}} 1:-\n'
             f"{body_str}.\n\n"
         )
+        if is_virtual:
+            main_rule = (
+                f"% {description}\n"
+                f'attr("dependency_holds", {node_variable}, "{runtime_pkg}", "{type}") :-\n'
+                f"{body_str}.\n\n"
+            )
+
         self.rules.append(main_rule)
         for clause in head_clauses:
             if clause.args[0] == "node":
                 continue
             runtime_node = f'node(RuntimeID, "{runtime_pkg}")'
             head_str = str(clause).replace(f'"{runtime_pkg}"', runtime_node)
-            rule = (
-                f"{head_str} :-\n"
+            depends_on_constraint = (
                 f'  attr("depends_on", {node_variable}, {runtime_node}, "{type}"),\n'
-                f"{body_str}.\n\n"
-            )
+            )
+            if is_virtual:
+                depends_on_constraint = (
+                    f'  attr("depends_on", {node_variable}, ProviderNode, "{type}"),\n'
+                    f"  provider(ProviderNode, {runtime_node}),\n"
+                )
+
+            rule = f"{head_str} :-\n" f"{depends_on_constraint}" f"{body_str}.\n\n"
             self.rules.append(rule)

         self.reset()
@@ -3318,7 +3371,7 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
         return False

     if not spec.external:
-        return True
+        return _has_runtime_dependencies(spec)

     # Cray external manifest externals are always reusable
     if local:
@@ -3343,6 +3396,19 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
     return False


+def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool:
+    if not WITH_RUNTIME:
+        return True
+
+    if spec.compiler.name == "gcc" and not spec.dependencies("gcc-runtime"):
+        return False
+
+    if spec.compiler.name == "oneapi" and not spec.dependencies("intel-oneapi-runtime"):
+        return False
+
+    return True
+
+
 class Solver:
     """This is the main external interface class for solving.
@@ -3484,9 +3550,14 @@ def solve_in_rounds(
             if not result.unsolved_specs:
                 break

-            # This means we cannot progress with solving the input
-            if not result.satisfiable or not result.specs:
-                break
+            if not result.specs:
+                # This is also a problem: no specs were solved for, which
+                # means we would be in a loop if we tried again
+                unsolved_str = Result.format_unsolved(result.unsolved_specs)
+                raise InternalConcretizerError(
+                    "Internal Spack error: a subset of input specs could not"
+                    f" be solved for.\n\t{unsolved_str}"
+                )

             input_specs = list(x for (x, y) in result.unsolved_specs)
             for spec in result.specs:
|
@@ -80,6 +80,7 @@ unification_set(SetID, VirtualNode)
|
||||
|
||||
|
||||
#defined multiple_unification_sets/1.
|
||||
#defined runtime/1.
|
||||
|
||||
%----
|
||||
% Rules to break symmetry and speed-up searches
|
@@ -158,6 +159,14 @@ error(100, multiple_values_error, Attribute, Package)
     attr_single_value(Attribute),
     2 { attr(Attribute, node(ID, Package), Value) }.

+%-----------------------------------------------------------------------------
+% Languages used
+%-----------------------------------------------------------------------------
+
+attr("language", node(X, Package), Language) :-
+  condition_holds(ConditionID, node(X, Package)),
+  pkg_fact(Package, language(ConditionID, Language)).
+
 %-----------------------------------------------------------------------------
 % Version semantics
 %-----------------------------------------------------------------------------
@@ -1011,16 +1020,6 @@ node_os_weight(PackageNode, Weight)
     attr("node_os", PackageNode, OS),
     os(OS, Weight).

-% match semantics for OS's
-node_os_match(PackageNode, DependencyNode) :-
-  depends_on(PackageNode, DependencyNode),
-  attr("node_os", PackageNode, OS),
-  attr("node_os", DependencyNode, OS).
-
-node_os_mismatch(PackageNode, DependencyNode) :-
-  depends_on(PackageNode, DependencyNode),
-  not node_os_match(PackageNode, DependencyNode).
-
 % every OS is compatible with itself. We can use `os_compatible` to declare
 os_compatible(OS, OS) :- os(OS).
@@ -1188,7 +1187,8 @@ error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compil
     node_compiler(node(X, Package), CompilerID),
     compiler_name(CompilerID, Compiler),
     compiler_version(CompilerID, Version),
-    not compiler_os(CompilerID, OS),
+    compiler_os(CompilerID, CompilerOS),
+    not os_compatible(CompilerOS, OS),
     not allow_compiler(Compiler, Version),
     build(node(X, Package)).
@@ -1495,28 +1495,20 @@ opt_criterion(40, "compiler mismatches that are not from CLI").
 #minimize{ 0@240: #true }.
 #minimize{ 0@40: #true }.
 #minimize{
-    1@40+Priority,PackageNode,DependencyNode
-    : compiler_mismatch(PackageNode, DependencyNode),
-    build_priority(PackageNode, Priority)
+    1@40+Priority,PackageNode,node(ID, Dependency)
+    : compiler_mismatch(PackageNode, node(ID, Dependency)),
+    build_priority(node(ID, Dependency), Priority),
+    not runtime(Dependency)
 }.

 opt_criterion(39, "compiler mismatches that are not from CLI").
 #minimize{ 0@239: #true }.
 #minimize{ 0@39: #true }.
 #minimize{
-    1@39+Priority,PackageNode,DependencyNode
-    : compiler_mismatch_required(PackageNode, DependencyNode),
-    build_priority(PackageNode, Priority)
-}.
-
-% Try to minimize the number of compiler mismatches in the DAG.
-opt_criterion(35, "OS mismatches").
-#minimize{ 0@235: #true }.
-#minimize{ 0@35: #true }.
-#minimize{
-    1@35+Priority,PackageNode,DependencyNode
-    : node_os_mismatch(PackageNode, DependencyNode),
-    build_priority(PackageNode, Priority)
+    1@39+Priority,PackageNode,node(ID, Dependency)
+    : compiler_mismatch_required(PackageNode, node(ID, Dependency)),
+    build_priority(node(ID, Dependency), Priority),
+    not runtime(Dependency)
 }.

 opt_criterion(30, "non-preferred OS's").
@@ -1533,9 +1525,10 @@ opt_criterion(25, "version badness").
 #minimize{ 0@225: #true }.
 #minimize{ 0@25: #true }.
 #minimize{
-    Weight@25+Priority,PackageNode
-    : version_weight(PackageNode, Weight),
-    build_priority(PackageNode, Priority)
+    Weight@25+Priority,node(X, Package)
+    : version_weight(node(X, Package), Weight),
+    build_priority(node(X, Package), Priority),
+    not runtime(Package)
 }.

 % Try to use all the default values of variants
@@ -1554,9 +1547,10 @@ opt_criterion(15, "non-preferred compilers").
 #minimize{ 0@215: #true }.
 #minimize{ 0@15: #true }.
 #minimize{
-    Weight@15+Priority,PackageNode
-    : node_compiler_weight(PackageNode, Weight),
-    build_priority(PackageNode, Priority)
+    Weight@15+Priority,node(X, Package)
+    : node_compiler_weight(node(X, Package), Weight),
+    build_priority(node(X, Package), Priority),
+    not runtime(Package)
 }.

 % Minimize the number of mismatches for targets in the DAG, try
@@ -1565,18 +1559,55 @@ opt_criterion(10, "target mismatches").
 #minimize{ 0@210: #true }.
 #minimize{ 0@10: #true }.
 #minimize{
-    1@10+Priority,PackageNode,Dependency
-    : node_target_mismatch(PackageNode, Dependency),
-    build_priority(PackageNode, Priority)
+    1@10+Priority,PackageNode,node(ID, Dependency)
+    : node_target_mismatch(PackageNode, node(ID, Dependency)),
+    build_priority(node(ID, Dependency), Priority),
+    not runtime(Dependency)
 }.

 opt_criterion(5, "non-preferred targets").
 #minimize{ 0@205: #true }.
 #minimize{ 0@5: #true }.
 #minimize{
-    Weight@5+Priority,PackageNode
-    : node_target_weight(PackageNode, Weight),
-    build_priority(PackageNode, Priority)
+    Weight@5+Priority,node(X, Package)
+    : node_target_weight(node(X, Package), Weight),
+    build_priority(node(X, Package), Priority),
+    not runtime(Package)
 }.

+
+% Minimize the number of compiler mismatches for runtimes
+opt_criterion(4, "compiler mismatches (runtimes)").
+#minimize{ 0@204: #true }.
+#minimize{ 0@4: #true }.
+#minimize{
+    1@4,PackageNode,node(ID, Dependency)
+    : compiler_mismatch(PackageNode, node(ID, Dependency)), runtime(Dependency)
+}.
+#minimize{
+    1@4,PackageNode,node(ID, Dependency)
+    : compiler_mismatch_required(PackageNode, node(ID, Dependency)), runtime(Dependency)
+}.
+
+% Choose more recent versions for runtimes
+opt_criterion(3, "version badness (runtimes)").
+#minimize{ 0@203: #true }.
+#minimize{ 0@3: #true }.
+#minimize{
+    Weight@3,node(X, Package)
+    : version_weight(node(X, Package), Weight),
+    runtime(Package)
+}.
+
+% Choose best target for runtimes
+opt_criterion(2, "non-preferred targets (runtimes)").
+#minimize{ 0@202: #true }.
+#minimize{ 0@2: #true }.
+#minimize{
+    Weight@2,node(X, Package)
+    : node_target_weight(node(X, Package), Weight),
+    runtime(Package)
+}.
+
 % Choose more recent versions for nodes
@@ -10,6 +10,7 @@
 import spack.deptypes as dt
 import spack.package_base
+import spack.repo
 import spack.spec

 PossibleDependencies = Set[str]
@@ -24,7 +25,13 @@ class Counter:
     """

     def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
-        self.specs = specs
+        runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
+        runtime_virtuals = set()
+        for x in runtime_pkgs:
+            pkg_class = spack.repo.PATH.get_pkg_class(x)
+            runtime_virtuals.update(pkg_class.provided_virtual_names())
+
+        self.specs = specs + [spack.spec.Spec(x) for x in runtime_pkgs]

         self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
         self.all_types: dt.DepFlag = dt.ALL
@@ -33,7 +40,9 @@ def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
             self.all_types = dt.LINK | dt.RUN | dt.BUILD

         self._possible_dependencies: PossibleDependencies = set()
-        self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)
+        self._possible_virtuals: Set[str] = (
+            set(x.name for x in specs if x.virtual) | runtime_virtuals
+        )

     def possible_dependencies(self) -> PossibleDependencies:
         """Returns the list of possible dependencies"""
@@ -1408,6 +1408,13 @@ def external_path(self, ext_path):
     def external(self):
         return bool(self.external_path) or bool(self.external_modules)

+    @property
+    def is_develop(self):
+        """Return whether the Spec represents a user-developed package
+        in a Spack ``Environment`` (i.e. using `spack develop`).
+        """
+        return bool(self.variants.get("dev_path", False))
+
     def clear_dependencies(self):
         """Trim the dependencies of this spec."""
         self._dependencies.clear()
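
A quick sketch of what the new property reports (the package name and path are illustrative):

    from spack.spec import Spec

    # `spack develop` attaches a dev_path variant to the developed spec.
    dev = Spec("mypkg@main dev_path=/home/user/src/mypkg")
    assert dev.is_develop  # True: dev_path variant present

    assert not Spec("mypkg@main").is_develop  # no dev_path variant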
@@ -2961,7 +2968,6 @@ def _new_concretize(self, tests=False):
         allow_deprecated = spack.config.get("config:deprecated", False)
         solver = spack.solver.asp.Solver()
         result = solver.solve([self], tests=tests, allow_deprecated=allow_deprecated)
-        result.raise_if_unsat()

         # take the best answer
         opt, i, answer = min(result.answers)
|
@@ -927,6 +927,10 @@ def destroy(self):
|
||||
shutil.rmtree(self.path)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
try:
|
||||
os.remove(self.reference_link)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
self.created = False
|
||||
|
||||
def restage(self):
|
||||
@@ -142,7 +142,7 @@ def optimization_flags(self, compiler):
         # custom spec.
         compiler_version = compiler.version
         version_number, suffix = archspec.cpu.version_components(compiler.version)
-        if not version_number or suffix not in ("", "apple"):
+        if not version_number or suffix:
             # Try to deduce the underlying version of the compiler, regardless
             # of its name in compilers.yaml. Depending on where this function
             # is called we might get either a CompilerSpec or a fully fledged
@@ -155,4 +155,6 @@ def optimization_flags(self, compiler):
             # log this and just return compiler.version instead
             tty.debug(str(e))

-        return self.microarchitecture.optimization_flags(compiler.name, str(compiler_version))
+        return self.microarchitecture.optimization_flags(
+            compiler.name, compiler_version.dotted_numeric_string
+        )
@@ -8,13 +8,16 @@

 import pytest

+import archspec.cpu
+
 import llnl.util.filesystem as fs

 import spack.compilers
+import spack.concretize
 import spack.operating_systems
 import spack.platforms
 import spack.target
-from spack.spec import ArchSpec, CompilerSpec, Spec
+from spack.spec import ArchSpec, Spec


 @pytest.fixture(scope="module")
@@ -123,52 +126,60 @@ def test_arch_spec_container_semantic(item, architecture_str):
 @pytest.mark.parametrize(
     "compiler_spec,target_name,expected_flags",
     [
-        # Check compilers with version numbers from a single toolchain
+        # Homogeneous compilers
         ("gcc@4.7.2", "ivybridge", "-march=core-avx-i -mtune=core-avx-i"),
-        # Check mixed toolchains
-        ("clang@8.0.0", "broadwell", ""),
         ("clang@3.5", "x86_64", "-march=x86-64 -mtune=generic"),
-        # Check Apple's Clang compilers
         ("apple-clang@9.1.0", "x86_64", "-march=x86-64"),
+        # Mixed toolchain
+        ("clang@8.0.0", "broadwell", ""),
     ],
 )
 @pytest.mark.filterwarnings("ignore:microarchitecture specific")
-def test_optimization_flags(compiler_spec, target_name, expected_flags, config):
+def test_optimization_flags(compiler_spec, target_name, expected_flags, compiler_factory):
     target = spack.target.Target(target_name)
-    compiler = spack.compilers.compilers_for_spec(compiler_spec).pop()
+    compiler_dict = compiler_factory(spec=compiler_spec, operating_system="")["compiler"]
+    if compiler_spec == "clang@8.0.0":
+        compiler_dict["paths"] = {
+            "cc": "/path/to/clang-8",
+            "cxx": "/path/to/clang++-8",
+            "f77": "/path/to/gfortran-9",
+            "fc": "/path/to/gfortran-9",
+        }
+    compiler = spack.compilers.compiler_from_dict(compiler_dict)

     opt_flags = target.optimization_flags(compiler)
     assert opt_flags == expected_flags


 @pytest.mark.parametrize(
-    "compiler,real_version,target_str,expected_flags",
+    "compiler_str,real_version,target_str,expected_flags",
     [
-        (CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"),
+        ("gcc@=9.2.0", None, "haswell", "-march=haswell -mtune=haswell"),
         # Check that custom string versions are accepted
-        (
-            CompilerSpec("gcc@=10foo"),
-            "9.2.0",
-            "icelake",
-            "-march=icelake-client -mtune=icelake-client",
-        ),
+        ("gcc@=10foo", "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client"),
         # Check that we run version detection (4.4.0 doesn't support icelake)
-        (
-            CompilerSpec("gcc@=4.4.0-special"),
-            "9.2.0",
-            "icelake",
-            "-march=icelake-client -mtune=icelake-client",
-        ),
+        ("gcc@=4.4.0-special", "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client"),
         # Check that the special case for Apple's clang is treated correctly
        # i.e. it won't try to detect the version again
-        (CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"),
+        ("apple-clang@=9.1.0", None, "x86_64", "-march=x86-64"),
     ],
 )
 def test_optimization_flags_with_custom_versions(
-    compiler, real_version, target_str, expected_flags, monkeypatch, config
+    compiler_str,
+    real_version,
+    target_str,
+    expected_flags,
+    monkeypatch,
+    mutable_config,
+    compiler_factory,
 ):
     target = spack.target.Target(target_str)
+    compiler_dict = compiler_factory(spec=compiler_str, operating_system="redhat6")
+    mutable_config.set("compilers", [compiler_dict])
     if real_version:
         monkeypatch.setattr(spack.compiler.Compiler, "get_real_version", lambda x: real_version)
+    compiler = spack.compilers.compiler_from_dict(compiler_dict["compiler"])

     opt_flags = target.optimization_flags(compiler)
     assert opt_flags == expected_flags
@@ -203,9 +214,10 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
 )
 @pytest.mark.usefixtures("mock_packages", "config")
 @pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
+@pytest.mark.skipif(
+    str(archspec.cpu.host().family) != "x86_64", reason="tests are for x86_64 uarch ranges"
+)
 def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
     # Monkeypatch so that all concretization is done as if the machine is core2
     monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")
     spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}")
     with spack.concretize.disable_compiler_existence_check():
         spec.concretize()
@@ -19,6 +19,8 @@
 import py
 import pytest

+import archspec.cpu
+
 from llnl.util.filesystem import join_path, visit_directory_tree

 import spack.binary_distribution as bindist
@@ -34,7 +36,7 @@
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
 import spack.util.web as web_util
-from spack.binary_distribution import get_buildfile_manifest
+from spack.binary_distribution import CannotListKeys, GenerateIndexError, get_buildfile_manifest
 from spack.directory_layout import DirectoryLayout
 from spack.paths import test_path
 from spack.spec import Spec
@@ -463,50 +465,57 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
     assert "libelf" not in cache_list


-def test_generate_indices_key_error(monkeypatch, capfd):
+def test_generate_key_index_failure(monkeypatch):
+    def list_url(url, recursive=False):
+        if "fails-listing" in url:
+            raise Exception("Couldn't list the directory")
+        return ["first.pub", "second.pub"]
+
+    def push_to_url(*args, **kwargs):
+        raise Exception("Couldn't upload the file")
+
+    monkeypatch.setattr(web_util, "list_url", list_url)
+    monkeypatch.setattr(web_util, "push_to_url", push_to_url)
+
+    with pytest.raises(CannotListKeys, match="Encountered problem listing keys"):
+        bindist.generate_key_index("s3://non-existent/fails-listing")
+
+    with pytest.raises(GenerateIndexError, match="problem pushing .* Couldn't upload"):
+        bindist.generate_key_index("s3://non-existent/fails-uploading")
+
+
+def test_generate_package_index_failure(monkeypatch, capfd):
     def mock_list_url(url, recursive=False):
-        print("mocked list_url({0}, {1})".format(url, recursive))
-        raise KeyError("Test KeyError handling")
+        raise Exception("Some HTTP error")

     monkeypatch.setattr(web_util, "list_url", mock_list_url)

     test_url = "file:///fake/keys/dir"

-    # Make sure generate_key_index handles the KeyError
-    bindist.generate_key_index(test_url)
+    with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
+        bindist.generate_package_index(test_url)

-    err = capfd.readouterr()[1]
-    assert "Warning: No keys at {0}".format(test_url) in err
-
-    # Make sure generate_package_index handles the KeyError
-    bindist.generate_package_index(test_url)
-
-    err = capfd.readouterr()[1]
-    assert "Warning: No packages at {0}".format(test_url) in err
+    assert (
+        f"Warning: Encountered problem listing packages at {test_url}: Some HTTP error"
+        in capfd.readouterr().err
+    )


 def test_generate_indices_exception(monkeypatch, capfd):
     def mock_list_url(url, recursive=False):
-        print("mocked list_url({0}, {1})".format(url, recursive))
         raise Exception("Test Exception handling")

     monkeypatch.setattr(web_util, "list_url", mock_list_url)

-    test_url = "file:///fake/keys/dir"
+    url = "file:///fake/keys/dir"

-    # Make sure generate_key_index handles the Exception
-    bindist.generate_key_index(test_url)
+    with pytest.raises(GenerateIndexError, match=f"Encountered problem listing keys at {url}"):
+        bindist.generate_key_index(url)

-    err = capfd.readouterr()[1]
-    expect = "Encountered problem listing keys at {0}".format(test_url)
-    assert expect in err
+    with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
+        bindist.generate_package_index(url)

-    # Make sure generate_package_index handles the Exception
-    bindist.generate_package_index(test_url)
-
-    err = capfd.readouterr()[1]
-    expect = "Encountered problem listing packages at {0}".format(test_url)
-    assert expect in err
+    assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err
@@ -573,11 +582,20 @@ def test_update_sbang(tmpdir, test_mirror):
     uninstall_cmd("-y", "/%s" % new_spec.dag_hash())


-def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
+@pytest.mark.skipif(
+    str(archspec.cpu.host().family) != "x86_64",
+    reason="test data uses gcc 4.5.0 which does not support aarch64",
+)
+def test_install_legacy_buildcache_layout(
+    mutable_config, compiler_factory, install_mockery_mutable_config
+):
     """Legacy buildcache layout involved a nested archive structure
     where the .spack file contained a repeated spec.json and another
     compressed archive file containing the install tree. This test
     makes sure we can still read that layout."""
+    mutable_config.set(
+        "compilers", [compiler_factory(spec="gcc@4.5.0", operating_system="debian6")]
+    )
     legacy_layout_dir = os.path.join(test_path, "data", "mirrors", "legacy_layout")
     mirror_url = "file://{0}".format(legacy_layout_dir)
     filename = (
@@ -9,6 +9,8 @@
 import py.path
 import pytest

+import archspec.cpu
+
 import llnl.util.filesystem as fs

 import spack.build_systems.autotools
@@ -209,6 +211,9 @@ def test_autotools_gnuconfig_replacement_disabled(
             assert "gnuconfig version of config.guess" not in f.read()

     @pytest.mark.disable_clean_stage_check
+    @pytest.mark.skipif(
+        str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
+    )
     def test_autotools_gnuconfig_replacement_no_gnuconfig(self, mutable_database, monkeypatch):
         """
         Tests whether a useful error message is shown when patch_config_files is
@@ -448,7 +448,7 @@ def _fail(self, args):
 def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
     """Test that create_buildcache returns a list of objects with the correct
     keys and types."""
-    monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c: True)
+    monkeypatch.setattr(spack.ci, "_push_to_build_cache", lambda a, b, c: True)

     results = ci.create_buildcache(
         None, destination_mirror_urls=["file:///fake-url-one", "file:///fake-url-two"]
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import errno
+import json
 import os
 import shutil
@@ -234,10 +235,71 @@ def verify_mirror_contents():
     # Use mirror names to specify mirrors
     mirror("add", "src", src_mirror_url)
     mirror("add", "dest", dest_mirror_url)
+    mirror("add", "ignored", "file:///dummy/io")

     buildcache("sync", "src", "dest")

     verify_mirror_contents()
     shutil.rmtree(dest_mirror_dir)

+    def manifest_insert(manifest, spec, dest_url):
+        manifest[spec.dag_hash()] = [
+            {
+                "src": spack.util.url.join(
+                    src_mirror_url,
+                    spack.binary_distribution.build_cache_relative_path(),
+                    spack.binary_distribution.tarball_name(spec, ".spec.json"),
+                ),
+                "dest": spack.util.url.join(
+                    dest_url,
+                    spack.binary_distribution.build_cache_relative_path(),
+                    spack.binary_distribution.tarball_name(spec, ".spec.json"),
+                ),
+            },
+            {
+                "src": spack.util.url.join(
+                    src_mirror_url,
+                    spack.binary_distribution.build_cache_relative_path(),
+                    spack.binary_distribution.tarball_path_name(spec, ".spack"),
+                ),
+                "dest": spack.util.url.join(
+                    dest_url,
+                    spack.binary_distribution.build_cache_relative_path(),
+                    spack.binary_distribution.tarball_path_name(spec, ".spack"),
+                ),
+            },
+        ]
+
+    manifest_file = os.path.join(tmpdir.strpath, "manifest_dest.json")
+    with open(manifest_file, "w") as fd:
+        test_env = ev.active_environment()
+
+        manifest = {}
+        for spec in test_env.specs_by_hash.values():
+            manifest_insert(manifest, spec, dest_mirror_url)
+        json.dump(manifest, fd)
+
+    buildcache("sync", "--manifest-glob", manifest_file)
+
+    verify_mirror_contents()
+    shutil.rmtree(dest_mirror_dir)
+
+    manifest_file = os.path.join(tmpdir.strpath, "manifest_bad_dest.json")
+    with open(manifest_file, "w") as fd:
+        manifest = {}
+        for spec in test_env.specs_by_hash.values():
+            manifest_insert(
+                manifest, spec, spack.util.url.join(dest_mirror_url, "invalid_path")
+            )
+        json.dump(manifest, fd)
+
+    # Trigger the warning
+    output = buildcache("sync", "--manifest-glob", manifest_file, "dest", "ignored")
+
+    assert "Ignoring unused arguemnt: ignored" in output
+
+    verify_mirror_contents()
+    shutil.rmtree(dest_mirror_dir)


 def test_buildcache_create_install(
@@ -26,6 +26,7 @@
 import spack.util.gpg
 import spack.util.spack_yaml as syaml
 import spack.util.url as url_util
+from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
 from spack.schema.buildcache_spec import schema as specfile_schema
 from spack.schema.ci import schema as ci_schema
 from spack.schema.database_index import schema as db_idx_schema
@@ -47,6 +48,8 @@
 @pytest.fixture()
 def ci_base_environment(working_env, tmpdir):
     os.environ["CI_PROJECT_DIR"] = tmpdir.strpath
+    os.environ["CI_PIPELINE_ID"] = "7192"
+    os.environ["CI_JOB_NAME"] = "mock"


 @pytest.fixture(scope="function")
@@ -776,6 +779,43 @@ def test_ci_rebuild_mock_success(
     assert "Cannot copy test logs" in out


+def test_ci_rebuild_mock_failure_to_push(
+    tmpdir,
+    working_env,
+    mutable_mock_env_path,
+    install_mockery_mutable_config,
+    mock_gnupghome,
+    mock_stage,
+    mock_fetch,
+    mock_binary_index,
+    ci_base_environment,
+    monkeypatch,
+):
+    pkg_name = "trivial-install-test-package"
+    rebuild_env = create_rebuild_env(tmpdir, pkg_name)
+
+    # Mock the install script success
+    def mock_success(*args, **kwargs):
+        return 0
+
+    monkeypatch.setattr(spack.ci, "process_command", mock_success)
+
+    # Mock failure to push to the build cache
+    def mock_push_or_raise(*args, **kwargs):
+        raise spack.binary_distribution.PushToBuildCacheError(
+            "Encountered problem pushing binary <url>: <exception>"
+        )
+
+    monkeypatch.setattr(spack.binary_distribution, "push_or_raise", mock_push_or_raise)
+
+    with rebuild_env.env_dir.as_cwd():
+        activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
+
+        expect = f"Command exited with code {FAILED_CREATE_BUILDCACHE_CODE}"
+        with pytest.raises(spack.main.SpackCommandError, match=expect):
+            ci_cmd("rebuild", fail_on_error=True)
+
+
 @pytest.mark.skip(reason="fails intermittently and covered by gitlab ci")
 def test_ci_rebuild(
     tmpdir,
@@ -1063,7 +1103,7 @@ def test_ci_generate_mirror_override(


 @pytest.mark.disable_clean_stage_check
-def test_push_mirror_contents(
+def test_push_to_build_cache(
     tmpdir,
     mutable_mock_env_path,
     install_mockery_mutable_config,
@@ -1124,7 +1164,7 @@ def test_push_to_build_cache(
         install_cmd("--add", "--keep-stage", json_path)

         for s in concrete_spec.traverse():
-            ci.push_mirror_contents(s, mirror_url, True)
+            ci.push_to_build_cache(s, mirror_url, True)

         buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
@@ -1217,21 +1257,16 @@ def test_push_mirror_contents(
     assert len(dl_dir_list) == 2


-def test_push_mirror_contents_exceptions(monkeypatch, capsys):
-    def failing_access(*args, **kwargs):
+def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
+    def _push_to_build_cache(spec, sign_binaries, mirror_url):
         raise Exception("Error: Access Denied")

-    monkeypatch.setattr(spack.ci, "_push_mirror_contents", failing_access)
+    monkeypatch.setattr(spack.ci, "_push_to_build_cache", _push_to_build_cache)

-    # Input doesn't matter, as wwe are faking exceptional output
-    url = "fakejunk"
-    ci.push_mirror_contents(None, url, None)
-
-    captured = capsys.readouterr()
-    std_out = captured[0]
-    expect_msg = "Permission problem writing to {0}".format(url)
-
-    assert expect_msg in std_out
+    # Input doesn't matter, as we are faking exceptional output
+    url = tmp_path.as_uri()
+    ci.push_to_build_cache(None, url, None)
+    assert f"Permission problem writing to {url}" in capsys.readouterr().err


 @pytest.mark.parametrize("match_behavior", ["first", "merge"])
@@ -1461,26 +1496,24 @@ def test_ci_rebuild_index(
     working_dir = tmpdir.join("working_dir")

     mirror_dir = working_dir.join("mirror")
-    mirror_url = "file://{0}".format(mirror_dir.strpath)
+    mirror_url = url_util.path_to_file_url(str(mirror_dir))

-    spack_yaml_contents = """
+    spack_yaml_contents = f"""
 spack:
-  specs:
-    - callpath
-  mirrors:
-    test-mirror: {0}
-  ci:
-    pipeline-gen:
-    - submapping:
-      - match:
-          - patchelf
-        build-job:
-          tags:
-            - donotcare
-          image: donotcare
-""".format(
-        mirror_url
-    )
+  specs:
+  - callpath
+  mirrors:
+    test-mirror: {mirror_url}
+  ci:
+    pipeline-gen:
+    - submapping:
+      - match:
+        - patchelf
+        build-job:
+          tags:
+          - donotcare
+          image: donotcare
+"""

     filename = str(tmpdir.join("spack.yaml"))
     with open(filename, "w") as f:
@@ -112,10 +112,10 @@ def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executa
 @pytest.mark.regression("37996")
 def test_compiler_remove(mutable_config, mock_packages):
     """Tests that we can remove a compiler from configuration."""
-    assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
-    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
+    assert spack.spec.CompilerSpec("gcc@=9.4.0") in spack.compilers.all_compiler_specs()
+    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
     spack.cmd.compiler.compiler_remove(args)
-    assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
+    assert spack.spec.CompilerSpec("gcc@=9.4.0") not in spack.compilers.all_compiler_specs()
@@ -124,10 +124,10 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
     site_config = spack.config.get("compilers", scope="site")
     spack.config.set("compilers", site_config, scope="user")

-    assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
-    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
+    assert spack.spec.CompilerSpec("gcc@=9.4.0") in spack.compilers.all_compiler_specs()
+    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
     spack.cmd.compiler.compiler_remove(args)
-    assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
+    assert spack.spec.CompilerSpec("gcc@=9.4.0") not in spack.compilers.all_compiler_specs()


 @pytest.mark.not_on_windows("Cannot execute bash script on Windows")
|
@@ -20,7 +20,10 @@
install = SpackCommand("install")
env = SpackCommand("env")

pytestmark = pytest.mark.not_on_windows("does not run on windows")
pytestmark = [
    pytest.mark.not_on_windows("does not run on windows"),
    pytest.mark.disable_clean_stage_check,
]


def test_dev_build_basics(tmpdir, install_mockery):
@@ -41,7 +44,6 @@ def test_dev_build_basics(tmpdir, install_mockery):
    assert os.path.exists(str(tmpdir))


@pytest.mark.disable_clean_stage_check
def test_dev_build_before(tmpdir, install_mockery):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

@@ -58,7 +60,6 @@ def test_dev_build_before(tmpdir, install_mockery):
    assert not os.path.exists(spec.prefix)


@pytest.mark.disable_clean_stage_check
def test_dev_build_until(tmpdir, install_mockery):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

@@ -76,7 +77,6 @@ def test_dev_build_until(tmpdir, install_mockery):
    assert not spack.store.STORE.db.query(spec, installed=True)


@pytest.mark.disable_clean_stage_check
def test_dev_build_until_last_phase(tmpdir, install_mockery):
    # Test that we ignore the last_phase argument if it is already last
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
@@ -96,7 +96,6 @@ def test_dev_build_until_last_phase(tmpdir, install_mockery):
    assert os.path.exists(str(tmpdir))


@pytest.mark.disable_clean_stage_check
def test_dev_build_before_until(tmpdir, install_mockery, capsys):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

@@ -134,7 +133,6 @@ def mock_module_noop(*args):
    pass


@pytest.mark.disable_clean_stage_check
def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
    monkeypatch.setattr(os, "execvp", print_spack_cc)
    monkeypatch.setattr(spack.build_environment, "module", mock_module_noop)

@@ -188,6 +188,127 @@ def test_env_remove(capfd):
    assert "bar" not in out


def test_env_rename_managed(capfd):
    # Need real environment
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "foo", "bar")
    assert (
        "The specified name does not correspond to a managed spack environment"
        in capfd.readouterr()[0]
    )

    env("create", "foo")

    out = env("list")
    assert "foo" in out

    out = env("rename", "foo", "bar")
    assert "Successfully renamed environment foo to bar" in out

    out = env("list")
    assert "foo" not in out
    assert "bar" in out

    bar = ev.read("bar")
    with bar:
        # Cannot rename active environment
        with pytest.raises(spack.main.SpackCommandError):
            env("rename", "bar", "baz")
        assert "Cannot rename active environment" in capfd.readouterr()[0]

        env("create", "qux")

        # Cannot rename to an active environment (even with force flag)
        with pytest.raises(spack.main.SpackCommandError):
            env("rename", "-f", "qux", "bar")
        assert "bar is an active environment" in capfd.readouterr()[0]

        # Can rename inactive environment when another's active
        out = env("rename", "qux", "quux")
        assert "Successfully renamed environment qux to quux" in out

    out = env("list")
    assert "bar" in out
    assert "baz" not in out

    env("create", "baz")

    # Cannot rename to existing environment without --force
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "bar", "baz")
    errmsg = (
        "The new name corresponds to an existing environment;"
        " specify the --force flag to overwrite it."
    )
    assert errmsg in capfd.readouterr()[0]

    env("rename", "-f", "bar", "baz")
    out = env("list")
    assert "bar" not in out
    assert "baz" in out


def test_env_rename_anonymous(capfd, tmpdir):
    # Need real environment
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "-d", "./non-existing", "./also-non-existing")
    assert (
        "The specified path does not correspond to a valid spack environment"
        in capfd.readouterr()[0]
    )

    anon_foo = str(tmpdir / "foo")
    env("create", "-d", anon_foo)

    anon_bar = str(tmpdir / "bar")
    out = env("rename", "-d", anon_foo, anon_bar)
    assert f"Successfully renamed environment {anon_foo} to {anon_bar}" in out
    assert not ev.is_env_dir(anon_foo)
    assert ev.is_env_dir(anon_bar)

    # Cannot rename active environment
    anon_baz = str(tmpdir / "baz")
    env("activate", "--sh", "-d", anon_bar)
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "-d", anon_bar, anon_baz)
    assert "Cannot rename active environment" in capfd.readouterr()[0]
    env("deactivate", "--sh")

    assert ev.is_env_dir(anon_bar)
    assert not ev.is_env_dir(anon_baz)

    # Cannot rename to existing environment without --force
    env("create", "-d", anon_baz)
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "-d", anon_bar, anon_baz)
    errmsg = (
        "The new path corresponds to an existing environment;"
        " specify the --force flag to overwrite it."
    )
    assert errmsg in capfd.readouterr()[0]
    assert ev.is_env_dir(anon_bar)
    assert ev.is_env_dir(anon_baz)

    env("rename", "-f", "-d", anon_bar, anon_baz)
    assert not ev.is_env_dir(anon_bar)
    assert ev.is_env_dir(anon_baz)

    # Cannot rename to existing (non-environment) path without --force
    qux = tmpdir / "qux"
    qux.mkdir()
    anon_qux = str(qux)
    assert not ev.is_env_dir(anon_qux)

    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "-d", anon_baz, anon_qux)
    errmsg = "The new path already exists; specify the --force flag to overwrite it."
    assert errmsg in capfd.readouterr()[0]

    env("rename", "-f", "-d", anon_baz, anon_qux)
    assert not ev.is_env_dir(anon_baz)
    assert ev.is_env_dir(anon_qux)


def test_concretize():
    e = ev.create("test")
    e.add("mpileaks")
@@ -856,6 +977,7 @@ def test_env_with_included_config_file(mutable_mock_env_path, packages_file):
    assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())


@pytest.mark.only_clingo("original concretizer does not support requirements")
def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
    """Test ``config change`` with config in the ``spack.yaml`` as well as an
    included file scope.
@@ -931,6 +1053,7 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
    spack.spec.Spec("bowtie@1.2.2").concretized()


@pytest.mark.only_clingo("original concretizer does not support requirements")
def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
    spack_yaml = tmp_path / ev.manifest_name
    spack_yaml.write_text(
@@ -3038,6 +3161,41 @@ def test_modules_exist_after_env_install(
    assert spec.prefix in contents


@pytest.mark.disable_clean_stage_check
def test_install_develop_keep_stage(
    environment_from_manifest, install_mockery, mock_fetch, monkeypatch, tmpdir
):
    """Develop a dependency of a package and make sure that the associated
    stage for the package is retained after a successful install.
    """
    environment_from_manifest(
        """
spack:
  specs:
  - mpileaks
"""
    )

    monkeypatch.setattr(spack.stage.DevelopStage, "destroy", _always_fail)

    with ev.read("test") as e:
        libelf_dev_path = tmpdir.ensure("libelf-test-dev-path", dir=True)
        develop(f"--path={libelf_dev_path}", "libelf@0.8.13")
        concretize()
    (libelf_spec,) = e.all_matching_specs("libelf")
    (mpileaks_spec,) = e.all_matching_specs("mpileaks")
    assert not os.path.exists(libelf_spec.package.stage.path)
    assert not os.path.exists(mpileaks_spec.package.stage.path)
    install()
    assert os.path.exists(libelf_spec.package.stage.path)
    assert not os.path.exists(mpileaks_spec.package.stage.path)


# Helper method for test_install_develop_keep_stage
def _always_fail(cls, *args, **kwargs):
    raise Exception("Restage or destruction of dev stage detected during install")


@pytest.mark.regression("24148")
def test_virtual_spec_concretize_together(tmpdir):
    # An environment should permit to concretize "mpi"
@@ -3131,7 +3289,7 @@ def test_create_and_activate_managed(tmp_path):
    env("deactivate")


def test_create_and_activate_unmanaged(tmp_path):
def test_create_and_activate_anonymous(tmp_path):
    with fs.working_dir(str(tmp_path)):
        env_dir = os.path.join(str(tmp_path), "foo")
        shell = env("activate", "--without-view", "--create", "--sh", "-d", env_dir)

@@ -64,6 +64,7 @@ def test_query_arguments():
        implicit=False,
        start_date="2018-02-23",
        end_date=None,
        install_tree="all",
    )

    q_args = query_arguments(args)
@@ -75,6 +76,7 @@ def test_query_arguments():
    assert q_args["explicit"] is any
    assert "start_date" in q_args
    assert "end_date" not in q_args
    assert q_args["install_tree"] == "all"

    # Check that explicit works correctly
    args.explicit = True

@@ -31,7 +31,7 @@ def test_spec():


@pytest.mark.only_clingo("Known failure of the original concretizer")
def test_spec_concretizer_args(mutable_config, mutable_database):
def test_spec_concretizer_args(mutable_config, mutable_database, do_not_check_runtimes_on_reuse):
    """End-to-end test of CLI concretizer prefs.

    It's here to make sure that everything works from CLI

@@ -62,10 +62,16 @@ def test_multiple_conflicting_compiler_definitions(mutable_config):
    assert cmp.f77 == "f77"


def test_get_compiler_duplicates(config):
def test_get_compiler_duplicates(mutable_config, compiler_factory):
    # In this case there is only one instance of the specified compiler in
    # the test configuration (so it is not actually a duplicate), but the
    # method behaves the same.
    cnl_compiler = compiler_factory(spec="gcc@4.5.0", operating_system="CNL")
    # CNL compiler has no target attribute, and this is essential to make detection pass
    del cnl_compiler["compiler"]["target"]
    mutable_config.set(
        "compilers", [compiler_factory(spec="gcc@4.5.0", operating_system="SuSE11"), cnl_compiler]
    )
    cfg_file_to_duplicates = spack.compilers.get_compiler_duplicates(
        "gcc@4.5.0", spack.spec.ArchSpec("cray-CNL-xeon")
    )
@@ -75,13 +81,6 @@ def test_get_compiler_duplicates(config):
    assert len(duplicates) == 1


def test_all_compilers(config):
    all_compilers = spack.compilers.all_compilers()
    filtered = [x for x in all_compilers if str(x.spec) == "clang@=3.3"]
    filtered = [x for x in filtered if x.operating_system == "SuSE11"]
    assert len(filtered) == 1


@pytest.mark.parametrize(
    "input_version,expected_version,expected_error",
    [(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)],
@@ -654,7 +653,25 @@ def test_xl_r_flags():
    "compiler_spec,expected_result",
    [("gcc@4.7.2", False), ("clang@3.3", False), ("clang@8.0.0", True)],
)
def test_detecting_mixed_toolchains(compiler_spec, expected_result, config):
def test_detecting_mixed_toolchains(
    compiler_spec, expected_result, mutable_config, compiler_factory
):
    mixed_c = compiler_factory(spec="clang@8.0.0", operating_system="debian6")
    mixed_c["compiler"]["paths"] = {
        "cc": "/path/to/clang-8",
        "cxx": "/path/to/clang++-8",
        "f77": "/path/to/gfortran-9",
        "fc": "/path/to/gfortran-9",
    }
    mutable_config.set(
        "compilers",
        [
            compiler_factory(spec="gcc@4.7.2", operating_system="debian6"),
            compiler_factory(spec="clang@3.3", operating_system="debian6"),
            mixed_c,
        ],
    )

    compiler = spack.compilers.compilers_for_spec(compiler_spec).pop()
    assert spack.compilers.is_mixed_toolchain(compiler) is expected_result

@@ -237,10 +237,24 @@ def change(self, changes=None):
    yield _changing_pkg


@pytest.fixture()
def clang12_with_flags(compiler_factory):
    c = compiler_factory(spec="clang@12.2.0", operating_system="redhat6")
    c["compiler"]["flags"] = {"cflags": "-O3", "cxxflags": "-O3"}
    return c


@pytest.fixture()
def gcc11_with_flags(compiler_factory):
    c = compiler_factory(spec="gcc@11.1.0", operating_system="redhat6")
    c["compiler"]["flags"] = {"cflags": "-O0 -g", "cxxflags": "-O0 -g", "fflags": "-O0 -g"}
    return c


# This must use the mutable_config fixture because the test
# adjusting_default_target_based_on_compiler uses the current_host fixture,
# which changes the config.
@pytest.mark.usefixtures("mutable_config", "mock_packages")
@pytest.mark.usefixtures("mutable_config", "mock_packages", "do_not_check_runtimes_on_reuse")
class TestConcretize:
    def test_concretize(self, spec):
        check_concretize(spec)
@@ -329,18 +343,34 @@ def test_provides_handles_multiple_providers_of_same_version(self):
        assert Spec("builtin.mock.multi-provider-mpi@1.10.0") in providers
        assert Spec("builtin.mock.multi-provider-mpi@1.8.8") in providers

    def test_different_compilers_get_different_flags(self):
    def test_different_compilers_get_different_flags(
        self, mutable_config, clang12_with_flags, gcc11_with_flags
    ):
        """Tests that nodes get the flags of the associated compiler."""
        mutable_config.set("compilers", [clang12_with_flags, gcc11_with_flags])
        client = Spec(
            "cmake-client %gcc@11.1.0 platform=test os=fe target=fe"
            + " ^cmake %clang@12.2.0 platform=test os=fe target=fe"
        )
        client.concretize()
            " ^cmake %clang@12.2.0 platform=test os=fe target=fe"
        ).concretized()
        cmake = client["cmake"]
        assert set(client.compiler_flags["cflags"]) == set(["-O0", "-g"])
        assert set(cmake.compiler_flags["cflags"]) == set(["-O3"])
        assert set(client.compiler_flags["fflags"]) == set(["-O0", "-g"])
        assert set(client.compiler_flags["cflags"]) == {"-O0", "-g"}
        assert set(cmake.compiler_flags["cflags"]) == {"-O3"}
        assert set(client.compiler_flags["fflags"]) == {"-O0", "-g"}
        assert not set(cmake.compiler_flags["fflags"])

    @pytest.mark.regression("9908")
    def test_spec_flags_maintain_order(self, mutable_config, gcc11_with_flags):
        """Tests that Spack assembles flags in a consistent way (i.e. with the same ordering),
        for successive concretizations.
        """
        mutable_config.set("compilers", [gcc11_with_flags])
        spec_str = "libelf %gcc@11.1.0 os=redhat6"
        for _ in range(3):
            s = Spec(spec_str).concretized()
            assert all(
                s.compiler_flags[x] == ["-O0", "-g"] for x in ("cflags", "cxxflags", "fflags")
            )

    @pytest.mark.xfail(reason="Broken, needs to be fixed")
    def test_compiler_flags_from_compiler_and_dependent(self):
        client = Spec("cmake-client %clang@12.2.0 platform=test os=fe target=fe cflags==-g")
@@ -349,7 +379,8 @@ def test_compiler_flags_from_compiler_and_dependent(self):
        for spec in [client, cmake]:
            assert spec.compiler_flags["cflags"] == ["-O3", "-g"]

    def test_compiler_flags_differ_identical_compilers(self):
    def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12_with_flags):
        mutable_config.set("compilers", [clang12_with_flags])
        # Correct arch to use test compiler that has flags
        spec = Spec("a %clang@12.2.0 platform=test os=fe target=fe")

@@ -404,25 +435,20 @@ def test_compiler_inherited_upwards(self):
        for dep in spec.traverse():
            assert "%clang" in dep

    def test_architecture_inheritance(self):
        """test_architecture_inheritance is likely to fail with an
        UnavailableCompilerVersionError if the architecture is concretized
        incorrectly.
        """
        spec = Spec("cmake-client %gcc@11.1.0 os=fe ^ cmake")
        spec.concretize()
        assert spec["cmake"].architecture == spec.architecture

    @pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer")
    def test_architecture_deep_inheritance(self, mock_targets):
    def test_architecture_deep_inheritance(self, mock_targets, compiler_factory):
        """Make sure that indirect dependencies receive architecture
        information from the root even when partial architecture information
        is provided by an intermediate dependency.
        """
        spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona" " ^dyninst os=CNL ^callpath os=CNL"
        spec = Spec(spec_str).concretized()
        for s in spec.traverse(root=False):
            assert s.architecture.target == spec.architecture.target
        cnl_compiler = compiler_factory(spec="gcc@4.5.0", operating_system="CNL")
        # CNL compiler has no target attribute, and this is essential to make detection pass
        del cnl_compiler["compiler"]["target"]
        with spack.config.override("compilers", [cnl_compiler]):
            spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona ^dyninst os=CNL ^callpath os=CNL"
            spec = Spec(spec_str).concretized()
            for s in spec.traverse(root=False):
                assert s.architecture.target == spec.architecture.target

    def test_compiler_flags_from_user_are_grouped(self):
        spec = Spec('a%gcc cflags="-O -foo-flag foo-val" platform=test')
@@ -588,7 +614,7 @@ def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
        spec.normalize()
        spec.concretize()

    @pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:12.0.0"])
    @pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:15.0.0"])
    def test_compiler_inheritance(self, compiler_str):
        spec_str = "mpileaks %{0}".format(compiler_str)
        spec = Spec(spec_str).concretized()
@@ -851,15 +877,18 @@ def test_concretize_anonymous_dep(self, spec_str):
            # Unconstrained versions select default compiler (gcc@4.5.0)
            ("bowtie@1.4.0", "%gcc@10.2.1"),
            # Version with conflicts and no valid gcc select another compiler
            ("bowtie@1.3.0", "%clang@12.0.0"),
            ("bowtie@1.3.0", "%clang@15.0.0"),
            # If a higher gcc is available still prefer that
            ("bowtie@1.2.2 os=redhat6", "%gcc@11.1.0"),
        ],
    )
    @pytest.mark.only_clingo("Original concretizer cannot work around conflicts")
    def test_compiler_conflicts_in_package_py(self, spec_str, expected_str):
        s = Spec(spec_str).concretized()
        assert s.satisfies(expected_str)
    def test_compiler_conflicts_in_package_py(
        self, spec_str, expected_str, clang12_with_flags, gcc11_with_flags
    ):
        with spack.config.override("compilers", [clang12_with_flags, gcc11_with_flags]):
            s = Spec(spec_str).concretized()
            assert s.satisfies(expected_str)

    @pytest.mark.parametrize(
        "spec_str,expected,unexpected",
@@ -1157,16 +1186,18 @@ def test_activating_test_dependencies(self, spec_str, tests_arg, with_dep, witho

    @pytest.mark.regression("20019")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_compiler_match_is_preferred_to_newer_version(self):
    def test_compiler_match_is_preferred_to_newer_version(self, compiler_factory):
        # This spec depends on openblas. Openblas has a conflict
        # that doesn't allow newer versions with gcc@4.4.0. Check
        # that an old version of openblas is selected, rather than
        # a different compiler for just that node.
        spec_str = "simple-inheritance+openblas %gcc@10.1.0 os=redhat6"
        s = Spec(spec_str).concretized()

        assert "openblas@0.2.15" in s
        assert s["openblas"].satisfies("%gcc@10.1.0")
        with spack.config.override(
            "compilers", [compiler_factory(spec="gcc@10.1.0", operating_system="redhat6")]
        ):
            spec_str = "simple-inheritance+openblas %gcc@10.1.0 os=redhat6"
            s = Spec(spec_str).concretized()
            assert "openblas@0.2.15" in s
            assert s["openblas"].satisfies("%gcc@10.1.0")

    @pytest.mark.regression("19981")
    def test_target_ranges_in_conflicts(self):
@@ -1191,7 +1222,10 @@ def test_variant_not_default(self):

    @pytest.mark.regression("20055")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_custom_compiler_version(self):
    def test_custom_compiler_version(self, mutable_config, compiler_factory):
        mutable_config.set(
            "compilers", [compiler_factory(spec="gcc@10foo", operating_system="redhat6")]
        )
        s = Spec("a %gcc@10foo os=redhat6").concretized()
        assert "%gcc@10foo" in s

@@ -1391,16 +1425,21 @@ def test_external_with_non_default_variant_as_dependency(self):
            ("mpileaks%gcc@10.2.1 platform=test os=redhat6", "os=redhat6"),
        ],
    )
    def test_os_selection_when_multiple_choices_are_possible(self, spec_str, expected_os):
        s = Spec(spec_str).concretized()

        for node in s.traverse():
            assert node.satisfies(expected_os)
    def test_os_selection_when_multiple_choices_are_possible(
        self, spec_str, expected_os, compiler_factory
    ):
        # GCC 10.2.1 is defined both for debian and for redhat
        with spack.config.override(
            "compilers", [compiler_factory(spec="gcc@10.2.1", operating_system="redhat6")]
        ):
            s = Spec(spec_str).concretized()
            for node in s.traverse():
                assert node.satisfies(expected_os)

    @pytest.mark.regression("22718")
    @pytest.mark.parametrize(
        "spec_str,expected_compiler",
        [("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@12.0.0", "%clang@12.0.0")],
        [("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@15.0.0", "%clang@15.0.0")],
    )
    def test_compiler_is_unique(self, spec_str, expected_compiler):
        s = Spec(spec_str).concretized()
@@ -1688,7 +1727,7 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, mo
        [
            (["libelf", "libelf@0.8.10"], 1),
            (["libdwarf%gcc", "libelf%clang"], 2),
            (["libdwarf%gcc", "libdwarf%clang"], 4),
            (["libdwarf%gcc", "libdwarf%clang"], 3),
            (["libdwarf^libelf@0.8.12", "libdwarf^libelf@0.8.13"], 4),
            (["hdf5", "zmpi"], 3),
            (["hdf5", "mpich"], 2),
@@ -1755,6 +1794,22 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occura
                counter += 1
        assert counter == occurances, concrete_specs

    @pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
    def test_solve_in_rounds_all_unsolved(self, monkeypatch, mock_packages, config):
        specs = [Spec(x) for x in ["libdwarf%gcc", "libdwarf%clang"]]
        solver = spack.solver.asp.Solver()
        solver.reuse = False

        simulate_unsolved_property = list((x, None) for x in specs)
        monkeypatch.setattr(spack.solver.asp.Result, "unsolved_specs", simulate_unsolved_property)
        monkeypatch.setattr(spack.solver.asp.Result, "specs", list())

        with pytest.raises(
            spack.solver.asp.InternalConcretizerError,
            match="a subset of input specs could not be solved for",
        ):
            list(solver.solve_in_rounds(specs))

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_coconcretize_reuse_and_virtuals(self):
        reusable_specs = []
@@ -1837,6 +1892,25 @@ def test_not_reusing_incompatible_os_or_compiler(self):
        assert concrete_spec.satisfies("%{}".format(s.compiler))
        assert concrete_spec.satisfies("os={}".format(s.architecture.os))

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_reuse_succeeds_with_config_compatible_os(self):
        root_spec = Spec("b")
        s = root_spec.concretized()
        other_os = s.copy()
        mock_os = "ubuntu2204"
        other_os.architecture = spack.spec.ArchSpec(
            "test-{os}-{target}".format(os=mock_os, target=str(s.architecture.target))
        )
        reusable_specs = [other_os]
        overrides = {"concretizer": {"reuse": True, "os_compatible": {s.os: [mock_os]}}}
        custom_scope = spack.config.InternalConfigScope("concretize_override", overrides)
        with spack.config.override(custom_scope):
            solver = spack.solver.asp.Solver()
            setup = spack.solver.asp.SpackSolverSetup()
            result, _, _ = solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
        concrete_spec = result.specs[0]
        assert concrete_spec.satisfies("os={}".format(other_os.architecture.os))

    def test_git_hash_assigned_version_is_preferred(self):
        hash = "a" * 40
        s = Spec("develop-branch-version@%s=develop" % hash)
@@ -2613,3 +2687,28 @@ def test_reusable_externals_different_spec(mock_packages, tmpdir):
        {"mpich": {"externals": [{"spec": "mpich@4.1 +debug", "prefix": tmpdir.strpath}]}},
        local=False,
    )


def test_concretization_version_order():
    versions = [
        (Version("develop"), {}),
        (Version("1.0"), {}),
        (Version("2.0"), {"deprecated": True}),
        (Version("1.1"), {}),
        (Version("1.1alpha1"), {}),
        (Version("0.9"), {"preferred": True}),
    ]
    result = [
        v
        for v, _ in sorted(
            versions, key=spack.solver.asp._concretization_version_order, reverse=True
        )
    ]
    assert result == [
        Version("0.9"),  # preferred
        Version("1.1"),  # latest non-deprecated final version
        Version("1.0"),  # latest non-deprecated final version
        Version("1.1alpha1"),  # prereleases
        Version("develop"),  # likely development version
        Version("2.0"),  # deprecated
    ]

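The ordering asserted by test_concretization_version_order above can be reproduced with a tuple-valued sort key. The sketch below is hypothetical (the real key is spack.solver.asp._concretization_version_order, whose internals are not shown in this diff) and assumes is_develop/is_prerelease predicates exist for the version type:

def _version_order_key(version_info, is_develop, is_prerelease):
    # Hypothetical sketch: with sorted(..., reverse=True) this yields the
    # order asserted above: preferred first, then non-deprecated final
    # releases (newest first), then prereleases, then development versions,
    # with deprecated versions last.
    version, info = version_info
    return (
        not info.get("deprecated", False),  # deprecated sorts last
        info.get("preferred", False),       # preferred sorts first
        not is_develop(version),            # development versions near the end
        not is_prerelease(version),         # prereleases after final releases
        version,                            # newest first among the rest
    )
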
@@ -7,6 +7,8 @@

import pytest

import archspec.cpu

import spack.paths
import spack.repo
import spack.solver.asp
@@ -24,9 +26,7 @@ def _concretize_with_reuse(*, root_str, reused_str):
    reused_spec = spack.spec.Spec(reused_str).concretized()
    setup = spack.solver.asp.SpackSolverSetup(tests=False)
    driver = spack.solver.asp.PyclingoDriver()
    result, _, _ = driver.solve(
        setup, [spack.spec.Spec(f"{root_str} ^{reused_str}")], reuse=[reused_spec]
    )
    result, _, _ = driver.solve(setup, [spack.spec.Spec(f"{root_str}")], reuse=[reused_spec])
    root = result.specs[0]
    return root, reused_spec

@@ -47,7 +47,7 @@ def enable_runtimes():


def test_correct_gcc_runtime_is_injected_as_dependency(runtime_repo):
    s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@4.5.0").concretized()
    s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@9.4.0").concretized()
    a, b = s["a"], s["b"]

    # Both a and b should depend on the same gcc-runtime directly
@@ -78,9 +78,28 @@ def test_external_nodes_do_not_have_runtimes(runtime_repo, mutable_config, tmp_p
    "root_str,reused_str,expected,nruntime",
    [
        # The reused runtime is older than we need, thus we'll add a more recent one for a
        ("a%gcc@10.2.1", "b%gcc@4.5.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@4.5.0"}, 2),
        ("a%gcc@10.2.1", "b%gcc@9.4.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@9.4.0"}, 2),
        # The root is compiled with an older compiler, thus we'll reuse the runtime from b
        ("a%gcc@4.5.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
        ("a%gcc@9.4.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
        # Same as before, but tests that we can reuse from a more generic target
        pytest.param(
            "a%gcc@9.4.0",
            "b%gcc@10.2.1 target=x86_64",
            {"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@10.2.1 target=x86_64"},
            1,
            marks=pytest.mark.skipif(
                str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"
            ),
        ),
        pytest.param(
            "a%gcc@10.2.1",
            "b%gcc@9.4.0 target=x86_64",
            {"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@9.4.0 target=x86_64"},
            2,
            marks=pytest.mark.skipif(
                str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"
            ),
        ),
    ],
)
def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime, runtime_repo):
@@ -104,8 +123,8 @@ def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime
    [
        # Ensure that, whether we have multiple runtimes in the DAG or not,
        # we always link only the latest version
        ("a%gcc@10.2.1", "b%gcc@4.5.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@4.5.0"]),
        ("a%gcc@4.5.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@4.5.0"]),
        ("a%gcc@10.2.1", "b%gcc@9.4.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
        ("a%gcc@9.4.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
    ],
)
def test_views_can_handle_duplicate_runtime_nodes(

@@ -105,7 +105,7 @@ def test_preferred_variants_from_wildcard(self):

    @pytest.mark.parametrize(
        "compiler_str,spec_str",
        [("gcc@=4.5.0", "mpileaks"), ("clang@=12.0.0", "mpileaks"), ("gcc@=4.5.0", "openmpi")],
        [("gcc@=9.4.0", "mpileaks"), ("clang@=15.0.0", "mpileaks"), ("gcc@=9.4.0", "openmpi")],
    )
    def test_preferred_compilers(self, compiler_str, spec_str):
        """Test preferred compilers are applied correctly"""

@@ -22,6 +22,7 @@
import py
import pytest

import archspec.cpu
import archspec.cpu.microarchitecture
import archspec.cpu.schema

@@ -710,7 +711,9 @@ def configuration_dir(tmpdir_factory, linux_os):
    t.write(content)

    compilers_yaml = test_config.join("compilers.yaml")
    content = "".join(compilers_yaml.read()).format(linux_os)
    content = "".join(compilers_yaml.read()).format(
        linux_os=linux_os, target=str(archspec.cpu.host().family)
    )
    t = tmpdir.join("site", "compilers.yaml")
    t.write(content)
    yield tmpdir
@@ -1986,3 +1989,36 @@ def create_test_repo(tmpdir, pkg_name_content_tuples):
            f.write(pkg_str)

    return spack.repo.Repo(repo_path)


@pytest.fixture()
def compiler_factory():
    """Factory for a compiler dict, taking a spec and an OS as arguments."""

    def _factory(*, spec, operating_system):
        return {
            "compiler": {
                "spec": spec,
                "operating_system": operating_system,
                "paths": {"cc": "/path/to/cc", "cxx": "/path/to/cxx", "f77": None, "fc": None},
                "modules": [],
                "target": str(archspec.cpu.host().family),
            }
        }

    return _factory


@pytest.fixture()
def host_architecture_str():
    """Returns the broad architecture family (x86_64, aarch64, etc.)"""
    return str(archspec.cpu.host().family)


def _true(x):
    return True


@pytest.fixture()
def do_not_check_runtimes_on_reuse(monkeypatch):
    monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", _true)

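The compiler_factory fixture above is the workhorse of this diff: tests synthesize a compilers.yaml entry and scope it to a single block instead of relying on the (now much smaller) static test data. A typical use, sketched from the patterns in the hunks above (the spec and package names are illustrative):

def test_with_a_synthesized_compiler(mutable_config, compiler_factory):
    # Make a single fake gcc the only configured compiler for this block.
    entry = compiler_factory(spec="gcc@10.2.1", operating_system="redhat6")
    with spack.config.override("compilers", [entry]):
        s = Spec("mpileaks %gcc@10.2.1 os=redhat6").concretized()
        assert s.satisfies("%gcc@10.2.1")
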
@@ -1,353 +1,41 @@
compilers:
- compiler:
    spec: clang@3.3
    operating_system: {0.name}{0.version}
    spec: gcc@=9.4.0
    operating_system: {linux_os.name}{linux_os.version}
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: []
    target: {target}
- compiler:
    spec: gcc@=9.4.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: []
    target: {target}
- compiler:
    spec: clang@=15.0.0
    operating_system: {linux_os.name}{linux_os.version}
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
    modules: []
    target: {target}
- compiler:
    spec: gcc@4.5.0
    operating_system: {0.name}{0.version}
    spec: gcc@=10.2.1
    operating_system: {linux_os.name}{linux_os.version}
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.5.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: CNL
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
- compiler:
    spec: clang@3.3
    operating_system: SuSE11
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: yosemite
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    operating_system: CNL
    spec: gcc@4.5.0
    modules: 'None'
- compiler:
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    operating_system: SuSE11
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    operating_system: yosemite
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    operating_system: elcapitan
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: elcapitan
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.7.2
    operating_system: redhat6
    paths:
      cc: /path/to/gcc472
      cxx: /path/to/g++472
      f77: /path/to/gfortran472
      fc: /path/to/gfortran472
    flags:
      cflags: -O0 -g
      cxxflags: -O0 -g
      fflags: -O0 -g
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.4.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc440
      cxx: /path/to/g++440
      f77: /path/to/gfortran440
      fc: /path/to/gfortran440
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.5
    operating_system: redhat6
    paths:
      cc: /path/to/clang35
      cxx: /path/to/clang++35
      f77: None
      fc: None
    flags:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@8.0.0
    operating_system: redhat7
    paths:
      cc: /path/to/clang-8
      cxx: /path/to/clang++-8
      f77: /path/to/gfortran-9
      fc: /path/to/gfortran-9
    flags:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: x86_64
- compiler:
    spec: apple-clang@9.1.0
    operating_system: elcapitan
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@10foo
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.4.0-special
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@12.0.0
    operating_system: {0.name}{0.version}
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@10.2.1
    operating_system: {0.name}{0.version}
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: clang@12.0.0
    operating_system: redhat6
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@10.2.1
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@10.1.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@11.1.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    flags:
      cflags: -O0 -g
      cxxflags: -O0 -g
      fflags: -O0 -g
    modules: 'None'
    target: aarch64
- compiler:
    spec: clang@12.2.0
    operating_system: redhat6
    paths:
      cc: /path/to/clang35
      cxx: /path/to/clang++35
      f77: None
      fc: None
    flags:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@10foo
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    modules: 'None'
    target: aarch64
- compiler:
    spec: clang@12.0.0
    operating_system: {0.name}{0.version}
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@10.2.1
    operating_system: {0.name}{0.version}
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@12.0.0
    operating_system: redhat6
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@10.2.1
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@10.1.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@11.1.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    flags:
      cflags: -O0 -g
      cxxflags: -O0 -g
      fflags: -O0 -g
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@12.2.0
    operating_system: redhat6
    paths:
      cc: /path/to/clang35
      cxx: /path/to/clang++35
      f77: None
      fc: None
    flags:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: x86_64
    modules: []
    target: {target}

@@ -10,6 +10,7 @@ config:
  source_cache: $user_cache_path/source
  misc_cache: $user_cache_path/cache
  verify_ssl: true
  ssl_certs: $SSL_CERT_FILE
  checksum: true
  dirty: false
  concretizer: {0}

@@ -16,7 +16,7 @@ packages:
  externalvirtual:
    buildable: False
    externals:
    - spec: externalvirtual@2.0%clang@12.0.0
    - spec: externalvirtual@2.0%clang@15.0.0
      prefix: /path/to/external_virtual_clang
    - spec: externalvirtual@1.0%gcc@10.2.1
      prefix: /path/to/external_virtual_gcc

@@ -4,7 +4,7 @@ lmod:
  hash_length: 0

  core_compilers:
  - 'clang@12.0.0'
  - 'clang@15.0.0'

  core_specs:
  - 'mpich@3.0.1'

@@ -2,4 +2,4 @@ enable:
  - lmod
lmod:
  core_compilers:
  - 'clang@12.0.0'
  - 'clang@15.0.0'

@@ -2,4 +2,4 @@ enable:
  - lmod
lmod:
  core_compilers:
  - 'clang@=12.0.0'
  - 'clang@=15.0.0'

@@ -56,11 +56,31 @@ def upstream_and_downstream_db(tmpdir, gen_mock_layout):
    yield upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout


@pytest.mark.parametrize(
    "install_tree,result",
    [("all", ["b", "c"]), ("upstream", ["c"]), ("local", ["b"]), ("{u}", ["c"]), ("{d}", ["b"])],
)
def test_query_by_install_tree(
    install_tree, result, upstream_and_downstream_db, mock_packages, monkeypatch, config
):
    up_write_db, up_db, up_layout, down_db, down_layout = upstream_and_downstream_db

    # Set the upstream DB to contain "c" and downstream to contain "b")
    b = spack.spec.Spec("b").concretized()
    c = spack.spec.Spec("c").concretized()
    up_write_db.add(c, up_layout)
    up_db._read()
    down_db.add(b, down_layout)

    specs = down_db.query(install_tree=install_tree.format(u=up_db.root, d=down_db.root))
    assert [s.name for s in specs] == result


def test_spec_installed_upstream(
    upstream_and_downstream_db, mock_custom_repository, config, monkeypatch
):
    """Test whether Spec.installed_upstream() works."""
    (upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
    upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout = (
        upstream_and_downstream_db
    )

@@ -86,7 +106,7 @@ def test_spec_installed_upstream(

@pytest.mark.usefixtures("config")
def test_installed_upstream(upstream_and_downstream_db, tmpdir):
    (upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
    upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout = (
        upstream_and_downstream_db
    )

@@ -124,7 +144,7 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):

@pytest.mark.usefixtures("config")
def test_removed_upstream_dep(upstream_and_downstream_db, tmpdir):
    (upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
    upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout = (
        upstream_and_downstream_db
    )

@@ -156,7 +176,7 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
    DB. When a package is recorded as installed in both, the results should
    refer to the downstream DB.
    """
    (upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
    upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout = (
        upstream_and_downstream_db
    )

@@ -11,6 +11,8 @@
import py
import pytest

import archspec.cpu

import llnl.util.filesystem as fs
import llnl.util.lock as ulk
import llnl.util.tty as tty
@@ -134,7 +136,7 @@ def test_get_dependent_ids(install_mockery, mock_packages):
    spec.concretize()
    assert spec.concrete

    pkg_id = inst.package_id(spec.package)
    pkg_id = inst.package_id(spec)

    # Grab the sole dependency of 'a', which is 'b'
    dep = spec.dependencies()[0]
@@ -383,7 +385,7 @@ def test_ensure_locked_have(install_mockery, tmpdir, capsys):
    const_arg = installer_args(["trivial-install-test-package"], {})
    installer = create_installer(const_arg)
    spec = installer.build_requests[0].pkg.spec
    pkg_id = inst.package_id(spec.package)
    pkg_id = inst.package_id(spec)

    with tmpdir.as_cwd():
        # Test "downgrade" of a read lock (to a read lock)
@@ -454,17 +456,15 @@ def _pl(db, spec, timeout):

def test_package_id_err(install_mockery):
    s = spack.spec.Spec("trivial-install-test-package")
    pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
    with pytest.raises(ValueError, match="spec is not concretized"):
        inst.package_id(pkg_cls(s))
        inst.package_id(s)


def test_package_id_ok(install_mockery):
    spec = spack.spec.Spec("trivial-install-test-package")
    spec.concretize()
    assert spec.concrete
    pkg = spec.package
    assert pkg.name in inst.package_id(pkg)
    assert spec.name in inst.package_id(spec)

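The package_id change that runs through this file (it now takes a spec rather than a package object) is pinned down by the two tests above. A hypothetical sketch of the new helper, consistent with those assertions; the exact id format is an assumption:

def package_id(spec):
    # Hypothetical sketch: the id is derived from the spec alone.
    if not spec.concrete:
        raise ValueError("Cannot provide a unique, readable id when the spec is not concretized.")
    return f"{spec.name}-{spec.version}-{spec.dag_hash()}"
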
def test_fake_install(install_mockery):
@@ -528,6 +528,10 @@ def fake_package_list(compiler, architecture, pkgs):
    assert installer.build_pq[0][1].compiler


@pytest.mark.skipif(
    str(archspec.cpu.host().family) != "x86_64",
    reason="OneAPI compiler is not supported on other architectures",
)
def test_bootstrapping_compilers_with_different_names_from_spec(
    install_mockery, mutable_config, mock_fetch, archspec_host_is_spack_test_host
):
@@ -720,7 +724,7 @@ def test_check_deps_status_external(install_mockery, monkeypatch):
    installer._check_deps_status(request)

    for dep in request.spec.traverse(root=False):
        assert inst.package_id(dep.package) in installer.installed
        assert inst.package_id(dep) in installer.installed


def test_check_deps_status_upstream(install_mockery, monkeypatch):
@@ -733,7 +737,7 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch):
    installer._check_deps_status(request)

    for dep in request.spec.traverse(root=False):
        assert inst.package_id(dep.package) in installer.installed
        assert inst.package_id(dep) in installer.installed


def test_add_bootstrap_compilers(install_mockery, monkeypatch):
@@ -1105,7 +1109,7 @@ def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
    const_arg = installer_args(["b"], {"fail_fast": False})
    const_arg.extend(installer_args(["c"], {"fail_fast": True}))
    installer = create_installer(const_arg)
    pkg_ids = [inst.package_id(spec.package) for spec, _ in const_arg]
    pkg_ids = [inst.package_id(spec) for spec, _ in const_arg]

    # Make sure all packages are identified as failed
    #
@@ -1180,7 +1184,7 @@ def test_install_lock_installed_requeue(install_mockery, monkeypatch, capfd):
    const_arg = installer_args(["b"], {})
    b, _ = const_arg[0]
    installer = create_installer(const_arg)
    b_pkg_id = inst.package_id(b.package)
    b_pkg_id = inst.package_id(b)

    def _prep(installer, task):
        installer.installed.add(b_pkg_id)
@@ -1190,7 +1194,7 @@ def _prep(installer, task):
    monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)

    def _requeued(installer, task, install_status):
        tty.msg("requeued {0}".format(inst.package_id(task.pkg)))
        tty.msg("requeued {0}".format(inst.package_id(task.pkg.spec)))

    # Flag the package as installed
    monkeypatch.setattr(inst.PackageInstaller, "_prepare_for_install", _prep)
@@ -1256,7 +1260,7 @@ def test_install_skip_patch(install_mockery, mock_fetch):
    installer.install()

    spec, install_args = const_arg[0]
    assert inst.package_id(spec.package) in installer.installed
    assert inst.package_id(spec) in installer.installed


def test_install_implicit(install_mockery, mock_fetch):

@@ -84,14 +84,15 @@ def test_get_bad_extension():
        ("astyle_1.23_macosx", "astyle_1.23"),
        ("haxe-2.08-osx", "haxe-2.08"),
        # PyPI - wheel
        ("entrypoints-0.2.2-py2.py3-none-any.whl", "entrypoints-0.2.2"),
        ("wheel-1.2.3-py3-none-any", "wheel-1.2.3"),
        ("wheel-1.2.3-py2.py3-none-any", "wheel-1.2.3"),
        ("wheel-1.2.3-cp38-abi3-macosx_10_12_x86_64", "wheel-1.2.3"),
        ("entrypoints-0.2.2-py2.py3-none-any", "entrypoints-0.2.2"),
        (
            "numpy-1.12.0-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel."
            "macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl",
            "macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64",
            "numpy-1.12.0",
        ),
        # PyPI - exe
        ("PyYAML-3.12.win-amd64-py3.5.exe", "PyYAML-3.12"),
        # Combinations of multiple patterns - bin, release
        ("rocketmq-all-4.5.2-bin-release", "rocketmq-all-4.5.2"),
        # Combinations of multiple patterns - all
@@ -148,6 +149,8 @@ def test_strip_compression_extension(archive_and_expected):
        assert stripped == "Foo.zip"
        stripped = llnl.url.strip_compression_extension(archive, "zip")
        assert stripped == "Foo"
    elif extension == "whl":
        assert stripped == "Foo.whl"
    elif (
        extension.lower() == "tar"
        or extension in llnl.url.CONTRACTION_MAP

@@ -7,6 +7,8 @@

import pytest

import archspec.cpu

import spack.environment as ev
import spack.main
import spack.modules.lmod
@@ -27,7 +29,7 @@
]


@pytest.fixture(params=["clang@=12.0.0", "gcc@=10.2.1"])
@pytest.fixture(params=["clang@=15.0.0", "gcc@=10.2.1"])
def compiler(request):
    return request.param

@@ -57,7 +59,7 @@ def test_layout_for_specs_compiled_with_core_compilers(
        we can use both ``compiler@version`` and ``compiler@=version`` to specify a core compiler.
        """
        module_configuration(modules_config)
        module, spec = factory("libelf%clang@12.0.0")
        module, spec = factory("libelf%clang@15.0.0")
        assert "Core" in module.layout.available_path_parts

    def test_file_layout(self, compiler, provider, factory, module_configuration):
@@ -76,7 +78,7 @@ def test_file_layout(self, compiler, provider, factory, module_configuration):
        # is transformed to r"Core" if the compiler is listed among core
        # compilers
        # Check that specs listed as core_specs are transformed to "Core"
        if compiler == "clang@=12.0.0" or spec_string == "mpich@3.0.1":
        if compiler == "clang@=15.0.0" or spec_string == "mpich@3.0.1":
            assert "Core" in layout.available_path_parts
        else:
            assert compiler.replace("@=", "/") in layout.available_path_parts
@@ -103,14 +105,19 @@ def test_file_layout(self, compiler, provider, factory, module_configuration):
        else:
            assert repetitions == 1

    def test_compilers_provided_different_name(self, factory, module_configuration):
        module_configuration("complex_hierarchy")
        module, spec = factory("intel-oneapi-compilers%clang@3.3")
    def test_compilers_provided_different_name(
        self, factory, module_configuration, compiler_factory
    ):
        with spack.config.override(
            "compilers", [compiler_factory(spec="clang@3.3", operating_system="debian6")]
        ):
            module_configuration("complex_hierarchy")
            module, spec = factory("intel-oneapi-compilers%clang@3.3")

        provides = module.conf.provides
            provides = module.conf.provides

        assert "compiler" in provides
        assert provides["compiler"] == spack.spec.CompilerSpec("oneapi@=3.0")
            assert "compiler" in provides
            assert provides["compiler"] == spack.spec.CompilerSpec("oneapi@=3.0")

    def test_simple_case(self, modulefile_content, module_configuration):
        """Tests the generation of a simple Lua module file."""
@@ -139,6 +146,9 @@ def test_autoload_all(self, modulefile_content, module_configuration):

        assert len([x for x in content if "depends_on(" in x]) == 5

    @pytest.mark.skipif(
        str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
    )
    def test_alter_environment(self, modulefile_content, module_configuration):
        """Tests modifications to run-time environment."""

@@ -210,6 +220,9 @@ def test_setenv_raw_value(self, modulefile_content, module_configuration):

        assert len([x for x in content if 'setenv("FOO", "{{name}}, {name}, {{}}, {}")' in x]) == 1

    @pytest.mark.skipif(
        str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
    )
    def test_help_message(self, modulefile_content, module_configuration):
        """Tests the generation of module help message."""

@@ -333,14 +346,16 @@ def test_override_template_in_package(self, modulefile_content, module_configura

        assert "Override successful!" in content

    def test_override_template_in_modules_yaml(self, modulefile_content, module_configuration):
    def test_override_template_in_modules_yaml(
        self, modulefile_content, module_configuration, host_architecture_str
    ):
        """Tests overriding a template from `modules.yaml`"""
        module_configuration("override_template")

        content = modulefile_content("override-module-templates")
        assert "Override even better!" in content

        content = modulefile_content("mpileaks target=x86_64")
        content = modulefile_content(f"mpileaks target={host_architecture_str}")
        assert "Override even better!" in content

@pytest.mark.usefixtures("config")

@@ -7,6 +7,8 @@

import pytest

+import archspec.cpu
+
import spack.modules.common
import spack.modules.tcl
import spack.spec
@@ -92,22 +94,29 @@ def test_autoload_all(self, modulefile_content, module_configuration):
        assert len([x for x in content if "depends-on " in x]) == 2
        assert len([x for x in content if "module load " in x]) == 2

-    def test_prerequisites_direct(self, modulefile_content, module_configuration):
+    def test_prerequisites_direct(
+        self, modulefile_content, module_configuration, host_architecture_str
+    ):
        """Tests asking direct dependencies as prerequisites."""

        module_configuration("prerequisites_direct")
-        content = modulefile_content("mpileaks target=x86_64")
+        content = modulefile_content(f"mpileaks target={host_architecture_str}")

        assert len([x for x in content if "prereq" in x]) == 2

-    def test_prerequisites_all(self, modulefile_content, module_configuration):
+    def test_prerequisites_all(
+        self, modulefile_content, module_configuration, host_architecture_str
+    ):
        """Tests asking all dependencies as prerequisites."""

        module_configuration("prerequisites_all")
-        content = modulefile_content("mpileaks target=x86_64")
+        content = modulefile_content(f"mpileaks target={host_architecture_str}")

        assert len([x for x in content if "prereq" in x]) == 5

+    @pytest.mark.skipif(
+        str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
+    )
    def test_alter_environment(self, modulefile_content, module_configuration):
        """Tests modifications to run-time environment."""

@@ -180,6 +189,9 @@ def test_setenv_raw_value(self, modulefile_content, module_configuration):

        assert len([x for x in content if "setenv FOO {{{name}}, {name}, {{}}, {}}" in x]) == 1

+    @pytest.mark.skipif(
+        str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
+    )
    def test_help_message(self, modulefile_content, module_configuration):
        """Tests the generation of module help message."""

@@ -222,7 +234,7 @@ def test_help_message(self, modulefile_content, module_configuration):
        )
        assert help_msg in "".join(content)

-    def test_exclude(self, modulefile_content, module_configuration):
+    def test_exclude(self, modulefile_content, module_configuration, host_architecture_str):
        """Tests excluding the generation of selected modules."""

        module_configuration("exclude")
@@ -234,9 +246,9 @@ def test_exclude(self, modulefile_content, module_configuration):
        # and IOError on Python 2 or common bases like EnvironmentError
        # which are not officially documented
        with pytest.raises(Exception):
-            modulefile_content("callpath target=x86_64")
+            modulefile_content(f"callpath target={host_architecture_str}")

-        content = modulefile_content("zmpi target=x86_64")
+        content = modulefile_content(f"zmpi target={host_architecture_str}")

        assert len([x for x in content if "module load " in x]) == 1

@@ -406,14 +418,16 @@ def test_override_template_in_package(self, modulefile_content, module_configura

        assert "Override successful!" in content

-    def test_override_template_in_modules_yaml(self, modulefile_content, module_configuration):
+    def test_override_template_in_modules_yaml(
+        self, modulefile_content, module_configuration, host_architecture_str
+    ):
        """Tests overriding a template from `modules.yaml`"""
        module_configuration("override_template")

        content = modulefile_content("override-module-templates")
        assert "Override even better!" in content

-        content = modulefile_content("mpileaks target=x86_64")
+        content = modulefile_content(f"mpileaks target={host_architecture_str}")
        assert "Override even better!" in content

    def test_extend_context(self, modulefile_content, module_configuration):

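Note: the rewritten tests above all take a host_architecture_str fixture instead of hard-coding target=x86_64. The fixture itself is defined elsewhere in the test suite and is not part of this diff; the following is a minimal sketch of what it plausibly looks like, given how the tests use it (the name is reused, the body is an assumption):

import archspec.cpu
import pytest

@pytest.fixture
def host_architecture_str():
    # Assumed definition: the tests only need a target string that matches
    # the machine running the suite, e.g. "x86_64" or "aarch64".
    return str(archspec.cpu.host().family)
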
@@ -69,9 +69,15 @@ def test_no_version_match(pkg_name):
        ("", "boolean_false_first", "True"),
    ],
)
-def test_multimethod_calls(pkg_name, constraint_str, method_name, expected_result):
-    s = spack.spec.Spec(pkg_name + constraint_str).concretized()
-    msg = "Method {0} from {1} is giving a wrong result".format(method_name, s)
+def test_multimethod_calls(
+    pkg_name, constraint_str, method_name, expected_result, compiler_factory
+):
+    # Add apple-clang, as it is required by one of the tests
+    with spack.config.override(
+        "compilers", [compiler_factory(spec="apple-clang@9.1.0", operating_system="elcapitan")]
+    ):
+        s = spack.spec.Spec(pkg_name + constraint_str).concretized()
+        msg = f"Method {method_name} from {s} is giving a wrong result"
        assert getattr(s.package, method_name)() == expected_result, msg

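Note: the compiler_factory fixture used here (and in the module tests above) is likewise defined outside this diff. Since its result is passed straight into spack.config.override("compilers", [...]), it has to produce a compilers.yaml-style entry; the sketch below is an assumption about its shape, with placeholder paths and field values:

import pytest

@pytest.fixture
def compiler_factory():
    def _factory(*, spec, operating_system):
        # One compilers.yaml-style entry; every value below is illustrative.
        return {
            "compiler": {
                "spec": spec,
                "operating_system": operating_system,
                "target": "x86_64",
                "paths": {"cc": "/path/to/cc", "cxx": "/path/to/cxx", "f77": None, "fc": None},
                "modules": [],
            }
        }

    return _factory
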
@@ -283,6 +283,14 @@ def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir):

@pytest.mark.requires_executables("install_name_tool", "file", "cc")
def test_fixup_macos_rpaths(make_dylib, make_object_file):
+    compiler_cls = spack.repo.PATH.get_pkg_class("apple-clang")
+    compiler_version = compiler_cls.determine_version("cc")
+    try:
+        # See https://forums.swift.org/t/xcode-ships-llvm-15-but-swift-builds-llvm-16/67377
+        xcode_major_version = int(compiler_version.split(".")[0])
+    except IndexError:
+        pytest.xfail("cannot determine the major version of XCode")
+
    # For each of these tests except for the "correct" case, the first fixup
    # should make changes, and the second fixup should be a null-op.
    fixup_rpath = spack.relocate.fixup_macos_rpath
@@ -293,7 +301,9 @@ def test_fixup_macos_rpaths(make_dylib, make_object_file):

    # Non-relocatable library id and duplicate rpaths
    (root, filename) = make_dylib("abs", duplicate_rpaths)
-    assert fixup_rpath(root, filename)
+    # XCode 15 ships a new linker that takes care of deduplication
+    if xcode_major_version < 15:
+        assert fixup_rpath(root, filename)
    assert not fixup_rpath(root, filename)

    # Hardcoded but relocatable library id (but we do NOT relocate)
@@ -302,7 +312,9 @@ def test_fixup_macos_rpaths(make_dylib, make_object_file):

    # Library id uses rpath but there are extra duplicate rpaths
    (root, filename) = make_dylib("rpath", duplicate_rpaths)
-    assert fixup_rpath(root, filename)
+    # XCode 15 ships a new linker that takes care of deduplication
+    if xcode_major_version < 15:
+        assert fixup_rpath(root, filename)
    assert not fixup_rpath(root, filename)

    # Shared library was constructed with relocatable id from the get-go
@@ -325,7 +337,9 @@ def test_fixup_macos_rpaths(make_dylib, make_object_file):
    # Duplicate nonexistent rpath will need *two* passes
    (root, filename) = make_dylib("rpath", bad_rpath * 2)
    assert fixup_rpath(root, filename)
-    assert fixup_rpath(root, filename)
+    # XCode 15 ships a new linker that takes care of deduplication
+    if xcode_major_version < 15:
+        assert fixup_rpath(root, filename)
    assert not fixup_rpath(root, filename)

    # Test on an object file, which *also* has type 'application/x-mach-binary'

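Note: the same gate repeats three times above, so it is worth stating once in isolation. Xcode 15 ships a linker that deduplicates rpaths at link time, so the "a fixup pass still finds duplicates" assertions only hold on older toolchains; the version probe goes through the apple-clang package class rather than shelling out directly. A condensed restatement of the diff's logic (error handling omitted here; the test itself xfails when the version cannot be parsed):

compiler_cls = spack.repo.PATH.get_pkg_class("apple-clang")
xcode_major_version = int(compiler_cls.determine_version("cc").split(".")[0])
# Only pre-15 toolchains leave duplicate rpaths for fixup_macos_rpath to clean up.
expect_duplicate_cleanup = xcode_major_version < 15
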
@@ -734,18 +734,6 @@ def test_spec_formatting_escapes(self, default_mock_concretization):
        with pytest.raises(SpecFormatStringError):
            spec.format(fmt_str)

-    @pytest.mark.regression("9908")
-    def test_spec_flags_maintain_order(self):
-        # Spack was assembling flags in a manner that could result in
-        # different orderings for repeated concretizations of the same
-        # spec and config
-        spec_str = "libelf %gcc@11.1.0 os=redhat6"
-        for _ in range(3):
-            s = Spec(spec_str).concretized()
-            assert all(
-                s.compiler_flags[x] == ["-O0", "-g"] for x in ("cflags", "cxxflags", "fflags")
-            )
-
    def test_combination_of_wildcard_or_none(self):
        # Test that using 'none' and another value raises
        with pytest.raises(spack.variant.InvalidVariantValueCombinationError):

@@ -909,18 +909,20 @@ def test_develop_stage(self, develop_path, tmp_build_stage_dir):
        """
        devtree, srcdir = develop_path
        stage = DevelopStage("test-stage", srcdir, reference_link="link-to-stage")
        assert not os.path.exists(stage.reference_link)
        stage.create()
        assert os.path.exists(stage.reference_link)
        srctree1 = _create_tree_from_dir_recursive(stage.source_path)
        assert os.path.samefile(srctree1["link-to-stage"], stage.path)
        del srctree1["link-to-stage"]
        assert srctree1 == devtree

        stage.destroy()
        assert not os.path.exists(stage.reference_link)
        # Make sure destroying the stage doesn't change anything
        # about the path
        assert not os.path.exists(stage.path)
        srctree2 = _create_tree_from_dir_recursive(srcdir)
        del srctree2["link-to-stage"]  # Note the symlink persists but is broken
        assert srctree2 == devtree

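Note: the contract this test pins down, restated compactly (names reused from the test; the dangling-link behavior comes from the test's own comment):

stage = DevelopStage("test-stage", srcdir, reference_link="link-to-stage")
stage.create()   # drops a symlink "link-to-stage" in srcdir pointing at stage.path
stage.destroy()  # removes the stage itself, leaves the user's source tree untouched
# ...after destroy() the symlink persists but is broken: it still appears in the
# source tree listing, yet resolves to nothing.
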
@@ -210,16 +210,10 @@ def test_from_list_url(mock_packages, config, spec, url, digest, _fetch_method):
@pytest.mark.parametrize(
    "requested_version,tarball,digest",
    [
-        # This version is in the web data path (test/data/web/4.html), but not in the
+        # These versions are in the web data path (test/data/web/4.html), but not in the
        # url-list-test package. We expect Spack to generate a URL with the new version.
        ("=4.5.0", "foo-4.5.0.tar.gz", None),
-        # This version is in web data path and not in the package file, BUT the 2.0.0b2
-        # version in the package file satisfies 2.0.0, so Spack will use the known version.
-        # TODO: this is *probably* not what the user wants, but it's here as an example
-        # TODO: for that reason. We can't express "exactly 2.0.0" right now, and we don't
-        # TODO: have special cases that would make 2.0.0b2 less than 2.0.0. We should
-        # TODO: probably revisit this in our versioning scheme.
-        ("2.0.0", "foo-2.0.0b2.tar.gz", "000000000000000000000000000200b2"),
+        ("=2.0.0", "foo-2.0.0.tar.gz", None),
    ],
)
@pytest.mark.only_clingo("Original concretizer doesn't resolve concrete versions to known ones")
@@ -228,7 +222,7 @@ def test_new_version_from_list_url(
):
    """Test non-specific URLs from the url-list-test package."""
    with spack.config.override("config:url_fetch_method", _fetch_method):
-        s = Spec("url-list-test @%s" % requested_version).concretized()
+        s = Spec(f"url-list-test @{requested_version}").concretized()
        fetch_strategy = fs.from_list_url(s.package)

        assert isinstance(fetch_strategy, fs.URLFetchStrategy)

@@ -29,7 +29,9 @@
]
# Spack does not use Python native handling for tarballs or zip
# Don't test tarballs or zip in native test
-native_archive_list = [key for key in ext_archive.keys() if "tar" not in key and "zip" not in key]
+native_archive_list = [
+    key for key in ext_archive.keys() if "tar" not in key and "zip" not in key and "whl" not in key
+]


@pytest.fixture
@@ -71,7 +73,9 @@ def test_native_unpacking(tmpdir_factory, archive_file_and_extension):

@pytest.mark.not_on_windows("Only Python unpacking available on Windows")
@pytest.mark.parametrize(
-    "archive_file_and_extension", [(ext, True) for ext in ext_archive.keys()], indirect=True
+    "archive_file_and_extension",
+    [(ext, True) for ext in ext_archive.keys() if "whl" not in ext],
+    indirect=True,
)
def test_system_unpacking(tmpdir_factory, archive_file_and_extension, compr_support_check):
    # actually run test

@@ -213,12 +213,24 @@ def test_nums_and_patch():
    assert_ver_gt("=6.5p1", "=5.6p1")


-def test_rc_versions():
-    assert_ver_gt("=6.0.rc1", "=6.0")
-    assert_ver_lt("=6.0", "=6.0.rc1")
+def test_prereleases():
+    # pre-releases are special: they are less than final releases
+    assert_ver_lt("=6.0alpha", "=6.0alpha0")
+    assert_ver_lt("=6.0alpha0", "=6.0alpha1")
+    assert_ver_lt("=6.0alpha1", "=6.0alpha2")
+    assert_ver_lt("=6.0alpha2", "=6.0beta")
+    assert_ver_lt("=6.0beta", "=6.0beta0")
+    assert_ver_lt("=6.0beta0", "=6.0beta1")
+    assert_ver_lt("=6.0beta1", "=6.0beta2")
+    assert_ver_lt("=6.0beta2", "=6.0rc")
+    assert_ver_lt("=6.0rc", "=6.0rc0")
+    assert_ver_lt("=6.0rc0", "=6.0rc1")
+    assert_ver_lt("=6.0rc1", "=6.0rc2")
+    assert_ver_lt("=6.0rc2", "=6.0")


def test_alpha_beta():
+    # these are not pre-releases, but ordinary string components.
    assert_ver_gt("=10b2", "=10a1")
    assert_ver_lt("=10a2", "=10b2")

@@ -277,6 +289,39 @@ def test_version_ranges():
    assert_ver_gt("1.5:1.6", "1.2:1.4")


+def test_version_range_with_prereleases():
+    # 1.2.1: means from the 1.2.1 release onwards
+    assert_does_not_satisfy("1.2.1alpha1", "1.2.1:")
+    assert_does_not_satisfy("1.2.1beta2", "1.2.1:")
+    assert_does_not_satisfy("1.2.1rc3", "1.2.1:")
+
+    # Pre-releases of 1.2.1 are included in the 1.2.0: range
+    assert_satisfies("1.2.1alpha1", "1.2.0:")
+    assert_satisfies("1.2.1beta1", "1.2.0:")
+    assert_satisfies("1.2.1rc3", "1.2.0:")
+
+    # In Spack 1.2 and 1.2.0 are distinct with 1.2 < 1.2.0. So a lowerbound on 1.2 includes
+    # pre-releases of 1.2.0 as well.
+    assert_satisfies("1.2.0alpha1", "1.2:")
+    assert_satisfies("1.2.0beta2", "1.2:")
+    assert_satisfies("1.2.0rc3", "1.2:")
+
+    # An upperbound :1.1 does not include 1.2.0 pre-releases
+    assert_does_not_satisfy("1.2.0alpha1", ":1.1")
+    assert_does_not_satisfy("1.2.0beta2", ":1.1")
+    assert_does_not_satisfy("1.2.0rc3", ":1.1")
+
+    assert_satisfies("1.2.0alpha1", ":1.2")
+    assert_satisfies("1.2.0beta2", ":1.2")
+    assert_satisfies("1.2.0rc3", ":1.2")
+
+    # You can also construct ranges from prereleases
+    assert_satisfies("1.2.0alpha2:1.2.0beta1", "1.2.0alpha1:1.2.0beta2")
+    assert_satisfies("1.2.0", "1.2.0alpha1:")
+    assert_satisfies("=1.2.0", "1.2.0alpha1:")
+    assert_does_not_satisfy("=1.2.0", ":1.2.0rc345")
+
+
def test_contains():
    assert_in("=1.3", "1.2:1.4")
    assert_in("=1.2.5", "1.2:1.4")
@@ -417,12 +462,12 @@ def test_basic_version_satisfaction():
    assert_satisfies("4.7.3", "4.7.3")

    assert_satisfies("4.7.3", "4.7")
-    assert_satisfies("4.7.3b2", "4.7")
-    assert_satisfies("4.7b6", "4.7")
+    assert_satisfies("4.7.3v2", "4.7")
+    assert_satisfies("4.7v6", "4.7")

    assert_satisfies("4.7.3", "4")
-    assert_satisfies("4.7.3b2", "4")
-    assert_satisfies("4.7b6", "4")
+    assert_satisfies("4.7.3v2", "4")
+    assert_satisfies("4.7v6", "4")

    assert_does_not_satisfy("4.8.0", "4.9")
    assert_does_not_satisfy("4.8", "4.9")
@@ -433,12 +478,12 @@ def test_basic_version_satisfaction_in_lists():
    assert_satisfies(["4.7.3"], ["4.7.3"])

    assert_satisfies(["4.7.3"], ["4.7"])
-    assert_satisfies(["4.7.3b2"], ["4.7"])
-    assert_satisfies(["4.7b6"], ["4.7"])
+    assert_satisfies(["4.7.3v2"], ["4.7"])
+    assert_satisfies(["4.7v6"], ["4.7"])

    assert_satisfies(["4.7.3"], ["4"])
-    assert_satisfies(["4.7.3b2"], ["4"])
-    assert_satisfies(["4.7b6"], ["4"])
+    assert_satisfies(["4.7.3v2"], ["4"])
+    assert_satisfies(["4.7v6"], ["4"])

    assert_does_not_satisfy(["4.8.0"], ["4.9"])
    assert_does_not_satisfy(["4.8"], ["4.9"])
@@ -507,6 +552,11 @@ def test_formatted_strings():
    assert v.dotted.joined.string == "123b"


+def test_dotted_numeric_string():
+    assert Version("1a2b3").dotted_numeric_string == "1.0.2.0.3"
+    assert Version("1a2b3alpha4").dotted_numeric_string == "1.0.2.0.3.0.4"
+
+
def test_up_to():
    v = Version("1.23-4_5b")

@@ -548,9 +598,18 @@ def check_repr_and_str(vrs):
    check_repr_and_str("R2016a.2-3_4")


+@pytest.mark.parametrize(
+    "version_str", ["1.2string3", "1.2-3xyz_4-alpha.5", "1.2beta", "1_x_rc-4"]
+)
+def test_stringify_version(version_str):
+    v = Version(version_str)
+    v.string = None
+    assert str(v) == version_str
+
+
def test_len():
    a = Version("1.2.3.4")
-    assert len(a) == len(a.version)
+    assert len(a) == len(a.version[0])
    assert len(a) == 4
    b = Version("2018.0")
    assert len(b) == 2

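Note: the ordering these tests encode can be reproduced directly with the public Version API; a quick illustration (assumes a checkout containing these changes):

from spack.version import Version

assert Version("6.0alpha1") < Version("6.0beta") < Version("6.0rc1") < Version("6.0")
assert Version("10a1") < Version("10b2")  # ordinary string components, not prereleases
assert Version("1a2b3alpha4").dotted_numeric_string == "1.0.2.0.3.0.4"
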
@@ -6,6 +6,7 @@
import email.message
import os
import pickle
+import ssl
import urllib.request

import pytest
@@ -363,3 +364,81 @@ def test_detailed_http_error_pickle(tmpdir):
    assert deserialized.reason == "Not Found"
    assert str(deserialized.info()) == str(headers)
    assert str(deserialized) == str(error)
+
+
+@pytest.fixture()
+def ssl_scrubbed_env(mutable_config, monkeypatch):
+    """clear out environment variables that could give false positives for SSL cert tests"""
+    monkeypatch.delenv("SSL_CERT_FILE", raising=False)
+    monkeypatch.delenv("SSL_CERT_DIR", raising=False)
+    monkeypatch.delenv("CURL_CA_BUNDLE", raising=False)
+    spack.config.set("config:verify_ssl", True)
+
+
+@pytest.mark.parametrize(
+    "cert_path,cert_creator",
+    [
+        pytest.param(
+            lambda base_path: os.path.join(base_path, "mock_cert.crt"),
+            lambda cert_path: open(cert_path, "w").close(),
+            id="cert_file",
+        ),
+        pytest.param(
+            lambda base_path: os.path.join(base_path, "mock_cert"),
+            lambda cert_path: os.mkdir(cert_path),
+            id="cert_directory",
+        ),
+    ],
+)
+def test_ssl_urllib(
+    cert_path, cert_creator, tmpdir, ssl_scrubbed_env, mutable_config, monkeypatch
+):
+    """
+    create a proposed cert type and then verify that it is picked up by ssl's checks
+    """
+    spack.config.set("config:url_fetch_method", "urllib")
+
+    def mock_verify_locations(self, cafile, capath, cadata):
+        """overwrite ssl's verification to simply check for a valid file/path"""
+        assert cafile or capath
+        if cafile:
+            assert os.path.isfile(cafile)
+        if capath:
+            assert os.path.isdir(capath)
+
+    monkeypatch.setattr(ssl.SSLContext, "load_verify_locations", mock_verify_locations)
+
+    with tmpdir.as_cwd():
+        mock_cert = cert_path(tmpdir.strpath)
+        cert_creator(mock_cert)
+        spack.config.set("config:ssl_certs", mock_cert)
+
+        assert mock_cert == spack.config.get("config:ssl_certs", None)
+
+        ssl_context = spack.util.web.urllib_ssl_cert_handler()
+        assert ssl_context.verify_mode == ssl.CERT_REQUIRED
+
+
+@pytest.mark.parametrize("cert_exists", [True, False], ids=["exists", "missing"])
+def test_ssl_curl_cert_file(cert_exists, tmpdir, ssl_scrubbed_env, mutable_config, monkeypatch):
+    """
+    Ensure that if a valid cert file is specified, curl executes
+    with CURL_CA_BUNDLE in the env
+    """
+    spack.config.set("config:url_fetch_method", "curl")
+    with tmpdir.as_cwd():
+        mock_cert = str(tmpdir.join("mock_cert.crt"))
+        spack.config.set("config:ssl_certs", mock_cert)
+        if cert_exists:
+            open(mock_cert, "w").close()
+            assert os.path.isfile(mock_cert)
+        curl = spack.util.web._curl()
+
+        # arbitrary call to query the run env
+        dump_env = {}
+        curl("--help", output=str, _dump_env=dump_env)
+
+        if cert_exists:
+            assert dump_env["CURL_CA_BUNDLE"] == mock_cert
+        else:
+            assert "CURL_CA_BUNDLE" not in dump_env

@@ -9,7 +9,7 @@
import os
import shutil
import sys
-from typing import BinaryIO, Callable, Dict, List, Optional
+from typing import Any, BinaryIO, Callable, Dict, List, Optional

import llnl.url
from llnl.util import tty
@@ -157,6 +157,10 @@ def _system_gunzip(archive_file: str) -> str:
    return destination_abspath


+def _do_nothing(archive_file: str) -> None:
+    return None
+
+
def _unzip(archive_file: str) -> str:
    """Returns path to extracted zip archive. Extract Zipfile, searching for unzip system
    executable. If unavailable, search for 'tar' executable on system and use instead.
@@ -283,7 +287,7 @@ def decompressor_for(path: str, extension: Optional[str] = None):
        return decompressor_for_nix(extension)


-def decompressor_for_nix(extension: str) -> Callable[[str], str]:
+def decompressor_for_nix(extension: str) -> Callable[[str], Any]:
    """Returns a function pointer to the appropriate decompression algorithm based on extension
    type and Unix-specific considerations, i.e. a reasonable expectation that system utils like
    gzip, bzip2, and xz are available
@@ -291,18 +295,19 @@ def decompressor_for_nix(extension: str) -> Callable[[str], str]:
    Args:
        extension: extension of the archive file requiring decompression
    """
-    extension_to_decompressor: Dict[str, Callable[[str], str]] = {
+    extension_to_decompressor: Dict[str, Callable[[str], Any]] = {
        "zip": _unzip,
        "gz": _gunzip,
        "bz2": _bunzip2,
        "Z": _system_unZ,  # no builtin support for .Z files
        "xz": _lzma_decomp,
+        "whl": _do_nothing,
    }

    return extension_to_decompressor.get(extension, _system_untar)


-def _determine_py_decomp_archive_strategy(extension: str) -> Optional[Callable[[str], str]]:
+def _determine_py_decomp_archive_strategy(extension: str) -> Optional[Callable[[str], Any]]:
    """Returns appropriate python based decompression strategy
    based on extension type"""
    extension_to_decompressor: Dict[str, Callable[[str], str]] = {
@@ -313,7 +318,7 @@ def _determine_py_decomp_archive_strategy(extension: str) -> Optional[Callable[[
    return extension_to_decompressor.get(extension, None)


-def decompressor_for_win(extension: str) -> Callable[[str], str]:
+def decompressor_for_win(extension: str) -> Callable[[str], Any]:
    """Returns a function pointer to appropriate decompression
    algorithm based on extension type and Windows specific considerations

@@ -323,7 +328,7 @@ def decompressor_for_win(extension: str) -> Callable[[str], str]:
    and files as Python does not provide support for the UNIX compress algorithm
    """
    extension = llnl.url.expand_contracted_extension(extension)
-    extension_to_decompressor: Dict[str, Callable[[str], str]] = {
+    extension_to_decompressor: Dict[str, Callable[[str], Any]] = {
        # Windows native tar can handle .zip extensions, use standard unzip method
        "zip": _unzip,
        # if extension is standard tarball, invoke Windows native tar
@@ -333,6 +338,7 @@ def decompressor_for_win(extension: str) -> Callable[[str], str]:
        # detected
        "Z": _system_unZ,
        "xz": _lzma_decomp,
+        "whl": _do_nothing,
    }

    decompressor = extension_to_decompressor.get(extension)

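Note: the net effect of _do_nothing is that wheels, although they are zip archives, are deliberately left unextracted. An illustration, assuming the module path spack.util.compression and an arbitrary example file name:

from spack.util.compression import decompressor_for_nix

decomp = decompressor_for_nix("whl")
assert decomp("example-1.0-py3-none-any.whl") is None  # the wheel stays as-is
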
@@ -27,6 +27,7 @@

import spack.config
import spack.error
+import spack.util.path
import spack.util.url as url_util

from .executable import CommandNotFoundError, which
@@ -59,6 +60,66 @@ def http_error_default(self, req, fp, code, msg, hdrs):
        raise DetailedHTTPError(req, code, msg, hdrs, fp)


+dbg_msg_no_ssl_cert_config = (
+    "config:ssl_certs not in configuration. "
+    "Default cert configuration and environment will be used."
+)
+
+
+def urllib_ssl_cert_handler():
+    """context for configuring ssl during urllib HTTPS operations"""
+    custom_cert_var = spack.config.get("config:ssl_certs")
+    if custom_cert_var:
+        # custom certs will be a location, so expand env variables, paths etc
+        certs = spack.util.path.canonicalize_path(custom_cert_var)
+        tty.debug("URLLIB: Looking for custom SSL certs at {}".format(certs))
+        if os.path.isfile(certs):
+            tty.debug("URLLIB: Custom SSL certs file found at {}".format(certs))
+            return ssl.create_default_context(cafile=certs)
+        elif os.path.isdir(certs):
+            tty.debug("URLLIB: Custom SSL certs directory found at {}".format(certs))
+            return ssl.create_default_context(capath=certs)
+        else:
+            tty.debug("URLLIB: Custom SSL certs not found")
+            return ssl.create_default_context()
+    else:
+        tty.debug(dbg_msg_no_ssl_cert_config)
+        return ssl.create_default_context()
+
+
+# curl requires different strategies for custom certs at runtime depending on if certs
+# are stored as a file or a directory
+def append_curl_env_for_ssl_certs(curl):
+    """
+    configure curl to use custom certs in a file at run time
+    see: https://curl.se/docs/sslcerts.html item 4
+    """
+    custom_cert_var = spack.config.get("config:ssl_certs")
+    if custom_cert_var:
+        # custom certs will be a location, so expand env variables, paths etc
+        certs = spack.util.path.canonicalize_path(custom_cert_var)
+        tty.debug("CURL: Looking for custom SSL certs file at {}".format(certs))
+        if os.path.isfile(certs):
+            tty.debug(
+                "CURL: Configuring curl to use custom"
+                " certs from {} by setting "
+                "CURL_CA_BUNDLE".format(certs)
+            )
+            curl.add_default_env("CURL_CA_BUNDLE", certs)
+        elif os.path.isdir(certs):
+            tty.warn(
+                "CURL config:ssl_certs"
+                " is a directory but cURL only supports files. Default certs will be used instead."
+            )
+        else:
+            tty.debug(
+                "CURL config:ssl_certs "
+                "resolves to {}. This is not a file so default certs will be used.".format(certs)
+            )
+    else:
+        tty.debug(dbg_msg_no_ssl_cert_config)
+
+
def _urlopen():
    s3 = UrllibS3Handler()
    gcs = GCSHandler()
@@ -66,7 +127,7 @@ def _urlopen():

    # One opener with HTTPS ssl enabled
    with_ssl = build_opener(
-        s3, gcs, HTTPSHandler(context=ssl.create_default_context()), error_handler
+        s3, gcs, HTTPSHandler(context=urllib_ssl_cert_handler()), error_handler
    )

    # One opener with HTTPS ssl disabled
@@ -206,9 +267,7 @@ def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=Non
            os.remove(local_file_path)

    else:
-        raise NotImplementedError(
-            "Unrecognized URL scheme: {SCHEME}".format(SCHEME=remote_url.scheme)
-        )
+        raise NotImplementedError(f"Unrecognized URL scheme: {remote_url.scheme}")


def base_curl_fetch_args(url, timeout=0):
@@ -289,6 +348,7 @@ def _curl(curl=None):
        except CommandNotFoundError as exc:
            tty.error(str(exc))
            raise spack.error.FetchError("Missing required curl fetch method")
+    append_curl_env_for_ssl_certs(curl)
    return curl

@@ -535,7 +595,7 @@ def list_url(url, recursive=False):
    if local_path:
        if recursive:
            # convert backslash to forward slash as required for URLs
-            return [str(PurePosixPath(Path(p))) for p in list(_iter_local_prefix(local_path))]
+            return [str(PurePosixPath(Path(p))) for p in _iter_local_prefix(local_path)]
        return [
            subpath
            for subpath in os.listdir(local_path)

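Note: putting the two new entry points together, config:ssl_certs may point at either a cert bundle file or a directory of certs. urllib accepts both forms (cafile vs. capath), while curl only supports the file form, delivered via CURL_CA_BUNDLE. A sketch of the end-to-end flow, with an example path:

import spack.config
import spack.util.web

spack.config.set("config:ssl_certs", "/etc/ssl/certs/ca-bundle.crt")
ctx = spack.util.web.urllib_ssl_cert_handler()  # file -> cafile, directory -> capath
curl = spack.util.web._curl()                   # file -> CURL_CA_BUNDLE in curl's env
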
@@ -30,9 +30,9 @@
    Version,
    VersionList,
    VersionRange,
+    _next_version,
+    _prev_version,
    from_string,
-    next_version,
-    prev_version,
    ver,
)

@@ -46,8 +46,8 @@
    "from_string",
    "is_git_version",
    "infinity_versions",
-    "prev_version",
-    "next_version",
+    "_prev_version",
+    "_next_version",
    "VersionList",
    "ClosedOpenRange",
    "StandardVersion",

@@ -15,6 +15,14 @@

iv_min_len = min(len(s) for s in infinity_versions)

+ALPHA = 0
+BETA = 1
+RC = 2
+FINAL = 3
+
+PRERELEASE_TO_STRING = ["alpha", "beta", "rc"]
+STRING_TO_PRERELEASE = {"alpha": ALPHA, "beta": BETA, "rc": RC, "final": FINAL}
+
+
def is_git_version(string: str) -> bool:
    return (

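Note: how these constants are used by the parser changes further down: every version is carried as a (release, prerelease) pair, and plain tuple comparison then yields alpha < beta < rc < final for equal releases. For instance:

# "1.2.0rc3"  -> release (1, 2, 0), prerelease (RC, 3),   i.e. (2, 3)
# "1.2.0beta" -> release (1, 2, 0), prerelease (BETA,),   i.e. (1,)
# "1.2.0"     -> release (1, 2, 0), prerelease (FINAL,),  i.e. (3,)
# (2, 3) < (3,) under tuple ordering, hence 1.2.0rc3 < 1.2.0.
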
@@ -11,7 +11,11 @@
from spack.util.spack_yaml import syaml_dict

from .common import (
+    ALPHA,
    COMMIT_VERSION,
+    FINAL,
+    PRERELEASE_TO_STRING,
+    STRING_TO_PRERELEASE,
    EmptyRangeError,
    VersionLookupError,
    infinity_versions,
@@ -88,21 +92,50 @@ def parse_string_components(string: str) -> Tuple[tuple, tuple]:
        raise ValueError("Bad characters in version string: %s" % string)

    segments = SEGMENT_REGEX.findall(string)
-    version = tuple(int(m[0]) if m[0] else VersionStrComponent.from_string(m[1]) for m in segments)
    separators = tuple(m[2] for m in segments)
-    return version, separators
+    prerelease: Tuple[int, ...]
+
+    # <version>(alpha|beta|rc)<number>
+    if len(segments) >= 3 and segments[-2][1] in STRING_TO_PRERELEASE and segments[-1][0]:
+        prerelease = (STRING_TO_PRERELEASE[segments[-2][1]], int(segments[-1][0]))
+        segments = segments[:-2]
+
+    # <version>(alpha|beta|rc)
+    elif len(segments) >= 2 and segments[-1][1] in STRING_TO_PRERELEASE:
+        prerelease = (STRING_TO_PRERELEASE[segments[-1][1]],)
+        segments = segments[:-1]
+
+    # <version>
+    else:
+        prerelease = (FINAL,)
+
+    release = tuple(int(m[0]) if m[0] else VersionStrComponent.from_string(m[1]) for m in segments)
+
+    return (release, prerelease), separators


class ConcreteVersion:
    pass


+def _stringify_version(versions: Tuple[tuple, tuple], separators: tuple) -> str:
+    release, prerelease = versions
+    string = ""
+    for i in range(len(release)):
+        string += f"{release[i]}{separators[i]}"
+    if prerelease[0] != FINAL:
+        string += f"{PRERELEASE_TO_STRING[prerelease[0]]}{separators[len(release)]}"
+    if len(prerelease) > 1:
+        string += str(prerelease[1])
+    return string
+
+
class StandardVersion(ConcreteVersion):
    """Class to represent versions"""

    __slots__ = ["version", "string", "separators"]

-    def __init__(self, string: Optional[str], version: tuple, separators: tuple):
+    def __init__(self, string: Optional[str], version: Tuple[tuple, tuple], separators: tuple):
        self.string = string
        self.version = version
        self.separators = separators
@@ -113,11 +146,13 @@ def from_string(string: str):

    @staticmethod
    def typemin():
-        return StandardVersion("", (), ())
+        return StandardVersion("", ((), (ALPHA,)), ("",))

    @staticmethod
    def typemax():
-        return StandardVersion("infinity", (VersionStrComponent(len(infinity_versions)),), ())
+        return StandardVersion(
+            "infinity", ((VersionStrComponent(len(infinity_versions)),), (FINAL,)), ("",)
+        )

    def __bool__(self):
        return True
@@ -164,21 +199,23 @@ def __gt__(self, other):
        return NotImplemented

    def __iter__(self):
-        return iter(self.version)
+        return iter(self.version[0])

    def __len__(self):
-        return len(self.version)
+        return len(self.version[0])

    def __getitem__(self, idx):
        cls = type(self)

+        release = self.version[0]
+
        if isinstance(idx, numbers.Integral):
-            return self.version[idx]
+            return release[idx]

        elif isinstance(idx, slice):
            string_arg = []

-            pairs = zip(self.version[idx], self.separators[idx])
+            pairs = zip(release[idx], self.separators[idx])
            for token, sep in pairs:
                string_arg.append(str(token))
                string_arg.append(str(sep))
@@ -194,18 +231,15 @@ def __getitem__(self, idx):
            raise TypeError(message.format(cls=cls))

    def __str__(self):
-        return (
-            self.string
-            if isinstance(self.string, str)
-            else ".".join((str(c) for c in self.version))
-        )
+        return self.string or _stringify_version(self.version, self.separators)

    def __repr__(self) -> str:
        # Print indirect repr through Version(...)
        return f'Version("{str(self)}")'

    def __hash__(self):
-        return hash(self.version)
+        # If this is a final release, do not hash the prerelease part for backward compat.
+        return hash(self.version if self.is_prerelease() else self.version[0])

    def __contains__(rhs, lhs):
        # We should probably get rid of `x in y` for versions, since
@@ -254,9 +288,23 @@ def intersection(self, other: Union["ClosedOpenRange", "StandardVersion"]):
    def isdevelop(self):
        """Triggers on the special case of the `@develop-like` version."""
        return any(
-            isinstance(p, VersionStrComponent) and isinstance(p.data, int) for p in self.version
+            isinstance(p, VersionStrComponent) and isinstance(p.data, int) for p in self.version[0]
        )

+    def is_prerelease(self) -> bool:
+        return self.version[1][0] != FINAL
+
+    @property
+    def dotted_numeric_string(self) -> str:
+        """Replaces all non-numeric components of the version with 0.
+
+        This can be used to pass Spack versions to libraries that have stricter version schema.
+        """
+        numeric = tuple(0 if isinstance(v, VersionStrComponent) else v for v in self.version[0])
+        if self.is_prerelease():
+            numeric += (0, *self.version[1][1:])
+        return ".".join(str(v) for v in numeric)
+
    @property
    def dotted(self):
        """The dotted representation of the version.
@@ -573,6 +621,9 @@ def __getitem__(self, idx):
    def isdevelop(self):
        return self.ref_version.isdevelop()

+    def is_prerelease(self) -> bool:
+        return self.ref_version.is_prerelease()
+
    @property
    def dotted(self) -> StandardVersion:
        return self.ref_version.dotted
@@ -604,14 +655,14 @@ def __init__(self, lo: StandardVersion, hi: StandardVersion):
    def from_version_range(cls, lo: StandardVersion, hi: StandardVersion):
        """Construct ClosedOpenRange from lo:hi range."""
        try:
-            return ClosedOpenRange(lo, next_version(hi))
+            return ClosedOpenRange(lo, _next_version(hi))
        except EmptyRangeError as e:
            raise EmptyRangeError(f"{lo}:{hi} is an empty range") from e

    def __str__(self):
        # This simplifies 3.1:<3.2 to 3.1:3.1 to 3.1
        # 3:3 -> 3
-        hi_prev = prev_version(self.hi)
+        hi_prev = _prev_version(self.hi)
        if self.lo != StandardVersion.typemin() and self.lo == hi_prev:
            return str(self.lo)
        lhs = "" if self.lo == StandardVersion.typemin() else str(self.lo)
@@ -623,7 +674,7 @@ def __repr__(self):

    def __hash__(self):
        # prev_version for backward compat.
-        return hash((self.lo, prev_version(self.hi)))
+        return hash((self.lo, _prev_version(self.hi)))

    def __eq__(self, other):
        if isinstance(other, StandardVersion):
@@ -805,7 +856,7 @@ def concrete_range_as_version(self) -> Optional[ConcreteVersion]:
        v = self[0]
        if isinstance(v, ConcreteVersion):
            return v
-        if isinstance(v, ClosedOpenRange) and next_version(v.lo) == v.hi:
+        if isinstance(v, ClosedOpenRange) and _next_version(v.lo) == v.hi:
            return v.lo
        return None

@@ -976,7 +1027,7 @@ def __repr__(self):
        return str(self.versions)


-def next_str(s: str) -> str:
+def _next_str(s: str) -> str:
    """Produce the next string of A-Z and a-z characters"""
    return (
        (s + "A")
@@ -985,7 +1036,7 @@ def next_str(s: str) -> str:
    )


-def prev_str(s: str) -> str:
+def _prev_str(s: str) -> str:
    """Produce the previous string of A-Z and a-z characters"""
    return (
        s[:-1]
@@ -994,7 +1045,7 @@ def prev_str(s: str) -> str:
    )


-def next_version_str_component(v: VersionStrComponent) -> VersionStrComponent:
+def _next_version_str_component(v: VersionStrComponent) -> VersionStrComponent:
    """
    Produce the next VersionStrComponent, where
    masteq -> mastes
@@ -1007,14 +1058,14 @@ def next_version_str_component(v: VersionStrComponent) -> VersionStrComponent:

    # Find the next non-infinity string.
    while True:
-        data = next_str(data)
+        data = _next_str(data)
        if data not in infinity_versions:
            break

    return VersionStrComponent(data)


-def prev_version_str_component(v: VersionStrComponent) -> VersionStrComponent:
+def _prev_version_str_component(v: VersionStrComponent) -> VersionStrComponent:
    """
    Produce the previous VersionStrComponent, where
    mastes -> masteq
@@ -1027,47 +1078,56 @@ def prev_version_str_component(v: VersionStrComponent) -> VersionStrComponent:

    # Find the next string.
    while True:
-        data = prev_str(data)
+        data = _prev_str(data)
        if data not in infinity_versions:
            break

    return VersionStrComponent(data)


-def next_version(v: StandardVersion) -> StandardVersion:
-    if len(v.version) == 0:
-        nxt = VersionStrComponent("A")
-    elif isinstance(v.version[-1], VersionStrComponent):
-        nxt = next_version_str_component(v.version[-1])
+def _next_version(v: StandardVersion) -> StandardVersion:
+    release, prerelease = v.version
+    separators = v.separators
+    prerelease_type = prerelease[0]
+    if prerelease_type != FINAL:
+        prerelease = (prerelease_type, prerelease[1] + 1 if len(prerelease) > 1 else 0)
+    elif len(release) == 0:
+        release = (VersionStrComponent("A"),)
+        separators = ("",)
+    elif isinstance(release[-1], VersionStrComponent):
+        release = release[:-1] + (_next_version_str_component(release[-1]),)
    else:
-        nxt = v.version[-1] + 1
-
-    # Construct a string-version for printing
-    string_components = []
-    for part, sep in zip(v.version[:-1], v.separators):
-        string_components.append(str(part))
-        string_components.append(str(sep))
-    string_components.append(str(nxt))
-
-    return StandardVersion("".join(string_components), v.version[:-1] + (nxt,), v.separators)
+        release = release[:-1] + (release[-1] + 1,)
+    components = [""] * (2 * len(release))
+    components[::2] = release
+    components[1::2] = separators[: len(release)]
+    if prerelease_type != FINAL:
+        components.extend((PRERELEASE_TO_STRING[prerelease_type], prerelease[1]))
+    return StandardVersion("".join(str(c) for c in components), (release, prerelease), separators)


-def prev_version(v: StandardVersion) -> StandardVersion:
-    if len(v.version) == 0:
+def _prev_version(v: StandardVersion) -> StandardVersion:
+    # this function does not deal with underflow, because it's always called as
+    # _prev_version(_next_version(v)).
+    release, prerelease = v.version
+    separators = v.separators
+    prerelease_type = prerelease[0]
+    if prerelease_type != FINAL:
+        prerelease = (
+            (prerelease_type,) if prerelease[1] == 0 else (prerelease_type, prerelease[1] - 1)
+        )
+    elif len(release) == 0:
        return v
-    elif isinstance(v.version[-1], VersionStrComponent):
-        prev = prev_version_str_component(v.version[-1])
+    elif isinstance(release[-1], VersionStrComponent):
+        release = release[:-1] + (_prev_version_str_component(release[-1]),)
    else:
-        prev = v.version[-1] - 1
-
-    # Construct a string-version for printing
-    string_components = []
-    for part, sep in zip(v.version[:-1], v.separators):
-        string_components.append(str(part))
-        string_components.append(str(sep))
-    string_components.append(str(prev))
-
-    return StandardVersion("".join(string_components), v.version[:-1] + (prev,), v.separators)
+        release = release[:-1] + (release[-1] - 1,)
+    components = [""] * (2 * len(release))
+    components[::2] = release
+    components[1::2] = separators[: len(release)]
+    if prerelease_type != FINAL:
+        components.extend((PRERELEASE_TO_STRING[prerelease_type], *prerelease[1:]))
+    return StandardVersion("".join(str(c) for c in components), (release, prerelease), separators)


def Version(string: Union[str, int]) -> Union[GitVersion, StandardVersion]:

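Note: a worked round trip through the two renamed helpers, as they are used by from_version_range and __hash__ above (illustrative; the underscored names are re-exported by spack.version per the __init__ change earlier in this diff):

from spack.version import Version, _next_version, _prev_version

v = Version("6.0rc1")
assert str(_next_version(v)) == "6.0rc2"            # prereleases advance their counter
assert _prev_version(_next_version(v)) == v         # the round trip is exact
assert str(_next_version(Version("1.2"))) == "1.3"  # final releases bump the last component
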
@@ -39,20 +39,10 @@ default:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "x86_64_v3"

.linux_skylake:
.linux_x86_64_v4:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "skylake_avx512"

.linux_icelake:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "icelake"

.linux_neoverse_n1:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "neoverse_n1"
    SPACK_TARGET_ARCH: "x86_64_v4"

.linux_neoverse_v1:
  variables:
@@ -528,6 +518,30 @@ developer-tools-build:
    - artifacts: True
      job: developer-tools-generate

###########################################
# Build tests for different developer tools
# manylinux2014
###########################################
.developer-tools-manylinux2014:
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: developer-tools-manylinux2014

developer-tools-manylinux2014-generate:
  extends: [ ".developer-tools-manylinux2014", ".generate-x86_64"]
  image: ecpe4s/manylinux2014:2024.03.28

developer-tools-manylinux2014-build:
  extends: [ ".developer-tools-manylinux2014", ".build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: developer-tools-manylinux2014-generate
    strategy: depend
  needs:
    - artifacts: True
      job: developer-tools-manylinux2014-generate

#########################################
# RADIUSS
#########################################
@@ -815,104 +829,58 @@ deprecated-ci-build:
      job: deprecated-ci-generate

########################################
# AWS PCLUSTER
# AWS ParallelCluster
########################################

.aws-pcluster-generate-image:
  image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2023-05-25", "entrypoint": [""] }

.aws-pcluster-generate:
  image: { "name": "ghcr.io/spack/pcluster-amazonlinux-2:v2024-01-29", "entrypoint": [""] }
  before_script:
    # Use gcc from local container buildcache
    # Use gcc from pre-installed spack store
    - - . "./share/spack/setup-env.sh"
    - . /etc/profile.d/modules.sh
    - spack buildcache rebuild-index /bootstrap/local-cache/
    - spack mirror add local-cache /bootstrap/local-cache
    - spack gpg trust /bootstrap/public-key
    - cd "${CI_PROJECT_DIR}" && curl -sOL https://raw.githubusercontent.com/spack/spack-configs/main/AWS/parallelcluster/postinstall.sh
    - sed -i -e "s/spack arch -t/echo ${SPACK_TARGET_ARCH}/g" postinstall.sh
    - sed -i.bkp s/"spack install gcc"/"spack install --cache-only --reuse gcc"/ postinstall.sh
    - diff postinstall.sh postinstall.sh.bkp || echo Done
    - /bin/bash postinstall.sh -fg
    - spack config --scope site add "packages:all:target:[${SPACK_TARGET_ARCH}]"
  after_script:
    - - mv "${CI_PROJECT_DIR}/postinstall.sh" "${CI_PROJECT_DIR}/jobs_scratch_dir/"
    - . "/etc/profile.d/modules.sh"
    - diff -q "/bootstrap/cloud_pipelines-config.yaml" "share/spack/gitlab/cloud_pipelines/configs/config.yaml" || echo "WARNING Install tree might have changed. You need to rebuild the pcluster-amazonlinux-2 container in spack/gitlab-runners."
    - cp "share/spack/gitlab/cloud_pipelines/configs/config.yaml" "etc/spack/"
    - /bin/bash "${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/scripts/pcluster/setup-pcluster.sh"
    - rm "etc/spack/config.yaml"

# Icelake (one pipeline per target)
.aws-pcluster-icelake:
# X86_64_V4 (one pipeline per target)
.aws-pcluster-x86_64_v4:
  variables:
    SPACK_CI_STACK_NAME: aws-pcluster-icelake
    SPACK_CI_STACK_NAME: aws-pcluster-x86_64_v4

# aws-pcluster-generate-icelake:
#   extends: [ ".linux_icelake", ".aws-pcluster-icelake", ".generate-x86_64", ".tags-x86_64_v4", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]
aws-pcluster-generate-x86_64_v4:
  extends: [ ".linux_x86_64_v4", ".aws-pcluster-x86_64_v4", ".generate-base", ".tags-x86_64_v4", ".aws-pcluster-generate"]

# aws-pcluster-build-icelake:
#   extends: [ ".linux_icelake", ".aws-pcluster-icelake", ".build" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: aws-pcluster-generate-icelake
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: aws-pcluster-generate-icelake

# Skylake_avx512 (one pipeline per target)
.aws-pcluster-skylake:
  variables:
    SPACK_CI_STACK_NAME: aws-pcluster-skylake

# aws-pcluster-generate-skylake:
#   extends: [ ".linux_skylake", ".aws-pcluster-skylake", ".generate-x86_64", ".tags-x86_64_v4", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]

# aws-pcluster-build-skylake:
#   extends: [ ".linux_skylake", ".aws-pcluster-skylake", ".build" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: aws-pcluster-generate-skylake
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: aws-pcluster-generate-skylake

# Neoverse_n1 (one pipeline per target)
.aws-pcluster-neoverse_n1:
  variables:
    SPACK_CI_STACK_NAME: aws-pcluster-neoverse_n1

# aws-pcluster-generate-neoverse_n1:
#   extends: [ ".linux_neoverse_n1", ".aws-pcluster-neoverse_n1", ".generate-aarch64", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]

# aws-pcluster-build-neoverse_n1:
#   extends: [ ".linux_neoverse_n1", ".aws-pcluster-neoverse_n1", ".build" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: aws-pcluster-generate-neoverse_n1
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: aws-pcluster-generate-neoverse_n1
aws-pcluster-build-x86_64_v4:
  extends: [ ".linux_x86_64_v4", ".aws-pcluster-x86_64_v4", ".build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: aws-pcluster-generate-x86_64_v4
    strategy: depend
  needs:
    - artifacts: True
      job: aws-pcluster-generate-x86_64_v4

# Neoverse_v1 (one pipeline per target)
.aws-pcluster-neoverse_v1:
  variables:
    SPACK_CI_STACK_NAME: aws-pcluster-neoverse_v1

# aws-pcluster-generate-neoverse_v1:
#   extends: [ ".linux_neoverse_v1", ".aws-pcluster-neoverse_v1", ".generate-aarch64", ".aws-pcluster-generate", ".aws-pcluster-generate-image" ]
aws-pcluster-generate-neoverse_v1:
  # TODO: Use updated runner tags: https://github.com/spack/spack-infrastructure/pull/694/files
  extends: [ ".linux_neoverse_v1", ".aws-pcluster-neoverse_v1", ".generate-neoverse_v1", ".aws-pcluster-generate"]

# aws-pcluster-build-neoverse_v1:
#   extends: [ ".linux_neoverse_v1", ".aws-pcluster-neoverse_v1", ".build" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: aws-pcluster-generate-neoverse_v1
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: aws-pcluster-generate-neoverse_v1
aws-pcluster-build-neoverse_v1:
  extends: [ ".linux_neoverse_v1", ".aws-pcluster-neoverse_v1", ".build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: aws-pcluster-generate-neoverse_v1
    strategy: depend
  needs:
    - artifacts: True
      job: aws-pcluster-generate-neoverse_v1

# Cray definitions
.generate-cray:

@@ -27,12 +27,12 @@ ci:
      - - spack config blame mirrors
      - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
      after_script:
      - - ./bin/spack python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py
          --prefix /home/software/spack:${CI_PROJECT_DIR}/opt/spack
          --log install_times.json
          ${SPACK_ARTIFACTS_ROOT}/user_data/install_times.json
      - - cat /proc/loadavg || true
      - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
      - - time python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py
          --prefix /home/software/spack:${CI_PROJECT_DIR}/opt/spack
          --log install_times.json
          ${SPACK_ARTIFACTS_ROOT}/user_data/install_times.json || true
    variables:
      CI_JOB_SIZE: "default"
      CI_GPG_KEY_ROOT: /mnt/key

@@ -1,4 +1,4 @@
ci:
  pipeline-gen:
  - build-job-remove:
-      image: macos-run-on-metal
+      image:: macos-run-on-metal

@@ -1,7 +0,0 @@
-ci:
-  pipeline-gen:
-  - any-job:
-      variables:
-        SPACK_TARGET_ARCH: neoverse_n1
-  - build-job:
-      tags: ["aarch64", "graviton2"]

Some files were not shown because too many files have changed in this diff.