Compare commits
develop-20...load-run-d
306 Commits
Commit SHA1s:

d4a607d316, da4e825754, 3969653f1b, db37672abf, 1f75ca96df, 605835fe42, 6c2748c37d, 210d221357, c9ef5c8152,
0274091204, 3f2f0cc146, a236fce31f, f77a38a96b, 6d55066b94, 78132f2d6b, fba47e87d7, bf8e8d9f5f, ebc0e9be19,
b6f08f1d4e, d9724597ed, c90c946d52, bb66d15d08, 1cd2d07f0b, 43cb49d87a, 5165524ca6, 38dc3a6896, 115e448bd3,
5c25437c9f, 0e72ff4a0d, 90edd18d1f, 40dbadb868, c16546bd4c, 143e6a4fbb, 13816f19fd, 34ff3b408b, aec88ef3e6,
9fda22d942, 550613ee3a, 5341834ebe, fc027e34d1, aef58776b4, 4e17a40d09, d97251a4c6, 2a66a67132, 10fd69ec74,
9889a0cac6, 65ffd0dd63, 3d2c779b87, b57c2a10d4, 849a0a5eeb, 7e4a6160b9, 646e7b4b00, eea86c3981, 37c48fc82c,
47daba3dc1, 46062d98fd, 4e37084ed4, e7c229393d, 87fd9c3e93, 676f2a3175, 5fe7f5329b, c6a3dd03ab, ebdd5e28f2,
068abdd105, 9ec372a86c, 5e05f6b7c6, 7988d8c67d, 658a3f2fdb, 582f0289af, 95b737d923, d7e9a13f53, 25e45f9f07,
38cc51ec36, 5c10c29923, daf95227bf, 480a5e0848, 783e253f7d, 716196930a, f42402129a, 419878f035, 32927fd1c1,
092a5a8d75, f082b19058, eb7e26006f, 2d3d9640dc, f95080393a, 0b8203940a, 3bd7859e7f, 599b612edf, 6f9126d738,
59399ab1f8, b67f619448, 2755706115, 43ed8a12b7, 3f5b4a4907, cd16478aba, aa87c747f9, 9210b19398, 4d156b9e6b,
56df8b61a2, 170867e38a, 5ca9fd6c82, ef165c80b3, 6859694e8e, aeaca77630, 2a9d1d444b, abad16c198, 4c0bc39054,
501d322264, 59fc09e93f, 8bd54e2f8f, bd8d121a23, 2bd487988f, 452e56f467, 62e94b0cb7, 5d331d4141, bd8d0324a9,
d79a3ecc28, dac3b45387, 31fe78e378, e16ca49036, a7ee72708a, c4a53cf376, e963d02a07, f89451b4b8, 9c87506c2c,
35223543e9, e1b22325ea, 2389047072, f49c58708b, d916073397, bfa514af98, c57e2140c2, 9e21d490ea, 3b4ca0374e,
7d33c36a30, 4f14db19c4, a0dcf9620b, 9a9c3a984b, a3543e2248, 4cdbb04b15, 2594be9459, 8f07983ab6, 8e34eaaa75,
7e7d373ab3, dbd520f851, 4fd7fa5fc1, 84d2097a8c, 842f19c6e3, 577ea0a0a8, 4d8f9ff3e8, 60ce6c7302, d111bde69e,
3045ed0e43, 2de0e30016, 15085ef6e5, 0c2849da4d, 75eeab1297, b8e32ff6b3, e7924148af, d454cf4711, ba81ef50f5,
d5dd4b8b5d, 0e47548cb6, 74974d85f6, 7925bb575e, 976cb02f78, f1bdc74789, 920347c21a, c00ece6cf2, 7b0157c7e7,
1b3a2ba06a, dc22a80f86, 27d6a75692, 70456ce4a7, 078369ec2b, da8e022f6b, c8a3f1a8ae, be3f7b5da3, 68d7ce3bb6,
7a490e95b6, 088e4c6b64, 76816d722a, cbabdf283c, 060bc01273, 0280ac51ed, bd442fea40, fb9e5fcc4f, bc02453f6d,
74a6c48d96, 854f169ded, 94c2043b28, 579df768ca, e004db8f77, 2d1cca2839, 8dae369a69, 0d76436780, 34402beeb7,
6a249944f5, 6838ee6bb8, d50f296d4f, e5d227e73d, af7b4c5a2f, 75e9742d71, e2f2559a5a, cac7f9774a, 6c4f8e62ae,
cb03db3d69, 372bbb43a8, 7b763faa1c, 4a79857b5e, 61df3b9080, 0b134aa711, ea71477a9d, bc26848cee, 3e50ee70be,
0d8a20b05e, 74d63c2fd3, a0622a2ee0, a25a910ba0, 0e5ce57fd5, e86c07547d, d7b5a27d1d, eee8fdc438, 5a91802807,
7fd56da5b7, eefa5d6cb5, 845973273a, 0696497ffa, babd29da50, c4e2d24ca9, 2c13361b09, e8740b40da, e45fc994aa,
ed3f5fba1f, d0e8a4d26f, 60a5f70b80, fa9acb6a98, 621d42d0c7, cfdaee4725, 306ba86709, b47fd61f18, c10ff27600,
9056f31f11, 513232cdb3, 30893dd99a, b7c2411b50, 002e833993, 02c9296db4, 562065c427, 9d00bcb286, a009a1a62a,
f7692d5699, 12e1768fdb, 28a3be3eca, 33500b5169, 91011a8c5f, 4570c9de5b, e7507dcd08, 4711758593, b55e9e8248,
2e62cfea3e, 286e1147d6, 7e3d228d19, a1ab42c8c0, 57a2f2ddde, b7b7b2fac4, 5ac1167250, fcc4185132, 943c8091c2,
a7a6f2aaef, 5aaa82fb69, bc33d5f421, 0e4e232ad1, 145d44cd97, e797a89fe1, 254a2bc3ea, 873652a33e, 3f686734ab,
e1373d5408, a2054564d8, ea75dbf7bd, 63a67e525b, 95aaaeb5af, a0bd53148b, 3302b176fd, 4672346d9c, 4182e97761,
5598de88ff, d88ecf0af0, 516f0461b8, b8cf7c3835, 707684a7b7, 739aebbd18, 71c053c391, 9a0a4eceaf, 19f8e9147d,
e99750fd3c, 33cde87775, e63597bf79, 2f63342677, 04eae7316f, 67d6c086d8, 69370c9c8f, 9948785220, 4047e025e0
.github/dependabot.yml (5 changes, vendored)

@@ -10,3 +10,8 @@ updates:
directory: "/lib/spack/docs"
schedule:
interval: "daily"
# Requirements to run style checks
- package-ecosystem: "pip"
directory: "/.github/workflows/style"
schedule:
interval: "daily"
.github/workflows/audit.yaml (4 changes, vendored)

@@ -22,7 +22,7 @@ jobs:
matrix:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: ${{inputs.python_version}}
@@ -34,6 +34,7 @@ jobs:
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) audit externals
coverage combine
coverage xml
- name: Package audits (without coverage)
@@ -41,6 +42,7 @@ jobs:
run: |
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
.github/workflows/bootstrap.yml (26 changes, vendored)
@@ -24,7 +24,7 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -99,7 +99,7 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -133,7 +133,7 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
@@ -158,7 +158,7 @@ jobs:
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -179,11 +179,11 @@ jobs:
|
||||
run: |
|
||||
brew install tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
set -ex
|
||||
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
|
||||
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
|
||||
not_found=1
|
||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||
echo "Testing $ver_dir"
|
||||
@@ -204,7 +204,7 @@ jobs:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
@@ -214,7 +214,7 @@ jobs:
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
set -ex
|
||||
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
|
||||
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
|
||||
not_found=1
|
||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||
echo "Testing $ver_dir"
|
||||
@@ -247,7 +247,7 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -283,7 +283,7 @@ jobs:
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
@@ -316,7 +316,7 @@ jobs:
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -333,7 +333,7 @@ jobs:
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
|
||||
.github/workflows/build-containers.yml (12 changes, vendored)
@@ -56,7 +56,7 @@ jobs:
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
|
||||
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
@@ -92,13 +92,13 @@ jobs:
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
|
||||
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # @v1
|
||||
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
|
||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -106,13 +106,13 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
|
||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
|
||||
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
|
||||
.github/workflows/ci.yaml (2 changes, vendored)

@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
.github/workflows/nightly-win-builds.yml (2 changes, vendored)

@@ -14,7 +14,7 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
.github/workflows/style/requirements.txt (7 changes, vendored, new file)

@@ -0,0 +1,7 @@
black==23.9.1
clingo==5.6.2
flake8==6.1.0
isort==5.12.0
mypy==1.5.1
types-six==1.16.21.9
vermin==1.5.2
.github/workflows/unit_tests.yaml (10 changes, vendored)
@@ -47,7 +47,7 @@ jobs:
|
||||
on_develop: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
|
||||
@@ -94,7 +94,7 @@ jobs:
|
||||
shell:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
|
||||
@@ -133,7 +133,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -152,7 +152,7 @@ jobs:
|
||||
clingo-cffi:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
|
||||
@@ -187,7 +187,7 @@ jobs:
|
||||
matrix:
|
||||
python-version: ["3.10"]
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
|
||||
|
||||
.github/workflows/valid-style.yml (17 changes, vendored)
@@ -18,15 +18,15 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
- name: Install Python Packages
|
||||
run: |
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade vermin
|
||||
pip install --upgrade pip setuptools
|
||||
pip install -r .github/workflows/style/requirements.txt
|
||||
- name: vermin (Spack's Core)
|
||||
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
||||
- name: vermin (Repositories)
|
||||
@@ -35,16 +35,17 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: 'pip'
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
|
||||
pip install --upgrade pip setuptools
|
||||
pip install -r .github/workflows/style/requirements.txt
|
||||
- name: Setup git configuration
|
||||
run: |
|
||||
# Need this for the git tests to succeed.
|
||||
@@ -68,7 +69,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
|
||||
.github/workflows/windows_python.yml (6 changes, vendored)
@@ -15,7 +15,7 @@ jobs:
|
||||
unit-tests:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
unit-tests-cmd:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
build-abseil:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
|
||||
|
||||
@@ -214,6 +214,7 @@ def setup(sphinx):
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"),
("py:class", "spack.spec.InstallStatus"),
("py:class", "spack.spec.SpecfileReaderBase"),
("py:class", "spack.install_test.Pb"),
]
@@ -6196,7 +6196,100 @@ follows:
"foo-package@{0}".format(version_str)
)

.. _package-lifecycle:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Add detection tests to packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To ensure that software is detected correctly for multiple configurations
and on different systems users can write a ``detection_test.yaml`` file and
put it in the package directory alongside the ``package.py`` file.
This YAML file contains enough information for Spack to mock an environment
and try to check if the detection logic yields the results that are expected.

As a general rule, attributes at the top-level of ``detection_test.yaml``
represent search mechanisms and they each map to a list of tests that should confirm
the validity of the package's detection logic.

The detection tests can be run with the following command:

.. code-block:: console

   $ spack audit externals

Errors that have been detected are reported to screen.

""""""""""""""""""""""""""
Tests for PATH inspections
""""""""""""""""""""""""""

Detection tests insisting on ``PATH`` inspections are listed under
the ``paths`` attribute:

.. code-block:: yaml

   paths:
   - layout:
     - executables:
       - "bin/clang-3.9"
       - "bin/clang++-3.9"
       script: |
         echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
         echo "Target: x86_64-pc-linux-gnu"
         echo "Thread model: posix"
         echo "InstalledDir: /usr/bin"
     results:
     - spec: 'llvm@3.9.1 +clang~lld~lldb'

Each test is performed by first creating a temporary directory structure as
specified in the corresponding ``layout`` and by then running
package detection and checking that the outcome matches the expected
``results``. The exact details on how to specify both the ``layout`` and the
``results`` are reported in the table below:

.. list-table:: Test based on PATH inspections
   :header-rows: 1

   * - Option Name
     - Description
     - Allowed Values
     - Required Field
   * - ``layout``
     - Specifies the filesystem tree used for the test
     - List of objects
     - Yes
   * - ``layout:[0]:executables``
     - Relative paths for the mock executables to be created
     - List of strings
     - Yes
   * - ``layout:[0]:script``
     - Mock logic for the executable
     - Any valid shell script
     - Yes
   * - ``results``
     - List of expected results
     - List of objects (empty if no result is expected)
     - Yes
   * - ``results:[0]:spec``
     - A spec that is expected from detection
     - Any valid spec
     - Yes
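For orientation, the package-side logic that such a test exercises is the standard
external-detection hook in ``package.py``. A minimal sketch follows (illustrative only;
the class name and regexes are hypothetical and not the real ``llvm`` package):

.. code-block:: python

   import re

   from spack.package import Package
   from spack.util.executable import Executable


   class MyClang(Package):
       """Toy package whose detection the YAML test above would mock."""

       # Regexes matched against file names found on PATH (hypothetical pattern)
       executables = [r"^clang(\+\+)?-[\d.]+$"]

       @classmethod
       def determine_version(cls, exe):
           # Run the (mock) executable and parse the version from its output
           output = Executable(exe)("--version", output=str, error=str)
           match = re.search(r"clang version ([\d.]+)", output)
           return match.group(1) if match else None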
"""""""""""""""""""""""""""""""
|
||||
Reuse tests from other packages
|
||||
"""""""""""""""""""""""""""""""
|
||||
|
||||
When using a custom repository, it is possible to customize a package that already exists in ``builtin``
|
||||
and reuse its external tests. To do so, just write a ``detection_tests.yaml`` alongside the customized
|
||||
``package.py`` with an ``includes`` attribute. For instance the ``detection_tests.yaml`` for
|
||||
``myrepo.llvm`` might look like:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
includes:
|
||||
- "builtin.llvm"
|
||||
|
||||
This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to
|
||||
those locally defined in the file.
|
||||
|
||||
-----------------------------
|
||||
Style guidelines for packages
|
||||
|
||||
@@ -1,13 +1,13 @@
sphinx==7.2.5
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.4
urllib3==2.0.5
pytest==7.4.2
isort==5.12.0
black==23.7.0
black==23.9.1
flake8==6.1.0
mypy==1.5.1
lib/spack/external/__init__.py (2 changes, vendored)

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)
* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)

astunparse
----------------
lib/spack/external/archspec/__init__.py (2 changes, vendored)

@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.0"
__version__ = "0.2.1"
@@ -79,14 +79,18 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0):
        self.features = features
        self.compilers = compilers
        self.generation = generation
        # Cache the ancestor computation
        self._ancestors = None

    @property
    def ancestors(self):
        """All the ancestors of this microarchitecture."""
        value = self.parents[:]
        for parent in self.parents:
            value.extend(a for a in parent.ancestors if a not in value)
        return value
        if self._ancestors is None:
            value = self.parents[:]
            for parent in self.parents:
                value.extend(a for a in parent.ancestors if a not in value)
            self._ancestors = value
        return self._ancestors

    def _to_set(self):
        """Returns a set of the nodes in this microarchitecture DAG."""
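The new ``_ancestors`` attribute memoizes the ancestor walk: the DAG is traversed once
per node, and later accesses return the stored list. A standalone sketch of the same
pattern (illustrative names, not archspec's API):

.. code-block:: python

   class Node:
       def __init__(self, parents):
           self.parents = parents
           self._ancestors = None  # filled in on first access

       @property
       def ancestors(self):
           if self._ancestors is None:
               value = self.parents[:]
               for parent in self.parents:
                   value.extend(a for a in parent.ancestors if a not in value)
               self._ancestors = value
           return self._ancestors


   root = Node([])
   child = Node([root])
   grandchild = Node([root, child])
   assert grandchild.ancestors == [root, child]
   assert grandchild.ancestors is grandchild.ancestors  # cached list, no re-walk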
@@ -145,6 +145,13 @@
|
||||
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
|
||||
}
|
||||
],
|
||||
"intel": [
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune=generic -mpopcnt"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
@@ -217,6 +224,13 @@
|
||||
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
|
||||
}
|
||||
],
|
||||
"intel": [
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name} -fma -mf16c"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
@@ -300,6 +314,13 @@
|
||||
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
|
||||
}
|
||||
],
|
||||
"intel": [
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
@@ -1412,6 +1433,92 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"sapphirerapids": {
|
||||
"from": [
|
||||
"icelake"
|
||||
],
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"mmx",
|
||||
"sse",
|
||||
"sse2",
|
||||
"ssse3",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"popcnt",
|
||||
"aes",
|
||||
"pclmulqdq",
|
||||
"avx",
|
||||
"rdrand",
|
||||
"f16c",
|
||||
"movbe",
|
||||
"fma",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"rdseed",
|
||||
"adx",
|
||||
"clflushopt",
|
||||
"xsavec",
|
||||
"xsaveopt",
|
||||
"avx512f",
|
||||
"avx512vl",
|
||||
"avx512bw",
|
||||
"avx512dq",
|
||||
"avx512cd",
|
||||
"avx512vbmi",
|
||||
"avx512ifma",
|
||||
"sha_ni",
|
||||
"clwb",
|
||||
"rdpid",
|
||||
"gfni",
|
||||
"avx512_vbmi2",
|
||||
"avx512_vpopcntdq",
|
||||
"avx512_bitalg",
|
||||
"avx512_vnni",
|
||||
"vpclmulqdq",
|
||||
"vaes",
|
||||
"avx512_bf16",
|
||||
"cldemote",
|
||||
"movdir64b",
|
||||
"movdiri",
|
||||
"pdcm",
|
||||
"serialize",
|
||||
"waitpkg"
|
||||
],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "11.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "12.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"intel": [
|
||||
{
|
||||
"versions": "2021.2:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": "2021.2:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": "2021.2:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"k10": {
|
||||
"from": ["x86_64"],
|
||||
"vendor": "AuthenticAMD",
|
||||
@@ -2065,8 +2172,6 @@
|
||||
"pku",
|
||||
"gfni",
|
||||
"flush_l1d",
|
||||
"erms",
|
||||
"avic",
|
||||
"avx512f",
|
||||
"avx512dq",
|
||||
"avx512ifma",
|
||||
@@ -2083,12 +2188,12 @@
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "10.3:13.0",
|
||||
"versions": "10.3:12.2",
|
||||
"name": "znver3",
|
||||
"flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
|
||||
},
|
||||
{
|
||||
"versions": "13.1:",
|
||||
"versions": "12.3:",
|
||||
"name": "znver4",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
|
||||
lib/spack/llnl/path.py (105 changes, new file)

@@ -0,0 +1,105 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Path primitives that just require Python standard library."""
import functools
import sys
from typing import List, Optional
from urllib.parse import urlparse


class Path:
    """Enum to identify the path-style."""

    unix: int = 0
    windows: int = 1
    platform_path: int = windows if sys.platform == "win32" else unix


def format_os_path(path: str, mode: int = Path.unix) -> str:
    """Formats the input path to use consistent, platform specific separators.

    Absolute paths are converted between drive letters and a prepended '/' as per platform
    requirement.

    Parameters:
        path: the path to be normalized, must be a string or expose the replace method.
        mode: the path file separator style to normalize the passed path to.
            Default is unix style, i.e. '/'
    """
    if not path:
        return path
    if mode == Path.windows:
        path = path.replace("/", "\\")
    else:
        path = path.replace("\\", "/")
    return path


def convert_to_posix_path(path: str) -> str:
    """Converts the input path to POSIX style."""
    return format_os_path(path, mode=Path.unix)


def convert_to_windows_path(path: str) -> str:
    """Converts the input path to Windows style."""
    return format_os_path(path, mode=Path.windows)


def convert_to_platform_path(path: str) -> str:
    """Converts the input path to the current platform's native style."""
    return format_os_path(path, mode=Path.platform_path)


def path_to_os_path(*parameters: str) -> List[str]:
    """Takes an arbitrary number of positional parameters, converts each argument of type
    string to use a normalized filepath separator, and returns a list of all values.
    """

    def _is_url(path_or_url: str) -> bool:
        if "\\" in path_or_url:
            return False
        url_tuple = urlparse(path_or_url)
        return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1

    result = []
    for item in parameters:
        if isinstance(item, str) and not _is_url(item):
            item = convert_to_platform_path(item)
        result.append(item)
    return result


def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
    """Filters function arguments to account for platform path separators.
    Optional slicing range can be specified to select specific arguments

    This decorator takes all (or a slice) of a method's positional arguments
    and normalizes usage of filepath separators on a per platform basis.

    Note: `**kwargs`, urls, and any type that is not a string are ignored
    so in such cases where path normalization is required, that should be
    handled by calling path_to_os_path directly as needed.

    Parameters:
        arg_slice: a slice object specifying the slice of arguments
            in the decorated method over which filepath separators are
            normalized
    """

    def holder_func(func):
        @functools.wraps(func)
        def path_filter_caller(*args, **kwargs):
            args = list(args)
            if arg_slice:
                args[arg_slice] = path_to_os_path(*args[arg_slice])
            else:
                args = path_to_os_path(*args)
            return func(*args, **kwargs)

        return path_filter_caller

    if _func:
        return holder_func(_func)
    return holder_func
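A quick usage sketch of the helpers above (the decorated function is hypothetical, and
separator behavior is shown for a non-Windows host):

.. code-block:: python

   from llnl.path import Path, convert_to_posix_path, convert_to_windows_path, system_path_filter

   print(convert_to_posix_path(r"C:\spack\var"))     # C:/spack/var
   print(convert_to_windows_path("/opt/spack/bin"))  # \opt\spack\bin
   print(Path.platform_path)                         # 0 (Path.unix) on a non-Windows host


   @system_path_filter(arg_slice=slice(1))
   def stage(prefix, name):
       # only the sliced arguments (here: ``prefix``) are normalized; ``name`` is untouched
       return prefix, name


   print(stage("C:\\tmp\\stage", "pkg"))  # ('C:/tmp/stage', 'pkg') on a non-Windows host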
lib/spack/llnl/string.py (67 changes, new file)

@@ -0,0 +1,67 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""String manipulation functions that do not have other dependencies than Python
standard library
"""
from typing import List, Optional


def comma_list(sequence: List[str], article: str = "") -> str:
    if type(sequence) is not list:
        sequence = list(sequence)

    if not sequence:
        return ""
    if len(sequence) == 1:
        return sequence[0]

    out = ", ".join(str(s) for s in sequence[:-1])
    if len(sequence) != 2:
        out += ","  # oxford comma
    out += " "
    if article:
        out += article + " "
    out += str(sequence[-1])
    return out


def comma_or(sequence: List[str]) -> str:
    """Return a string with all the elements of the input joined by comma, but the last
    one (which is joined by 'or').
    """
    return comma_list(sequence, "or")


def comma_and(sequence: List[str]) -> str:
    """Return a string with all the elements of the input joined by comma, but the last
    one (which is joined by 'and').
    """
    return comma_list(sequence, "and")


def quote(sequence: List[str], q: str = "'") -> List[str]:
    """Quotes each item in the input list with the quote character passed as second argument."""
    return [f"{q}{e}{q}" for e in sequence]


def plural(n: int, singular: str, plural: Optional[str] = None, show_n: bool = True) -> str:
    """Pluralize <singular> word by adding an s if n != 1.

    Arguments:
        n: number of things there are
        singular: singular form of word
        plural: optional plural form, for when it's not just singular + 's'
        show_n: whether to include n in the result string (default True)

    Returns:
        "1 thing" if n == 1 or "n things" if n != 1
    """
    number = f"{n} " if show_n else ""
    if n == 1:
        return f"{number}{singular}"
    elif plural is not None:
        return f"{number}{plural}"
    else:
        return f"{number}{singular}s"
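Example outputs for the helpers above (assuming ``llnl.string`` is importable, as in a
Spack checkout):

.. code-block:: python

   from llnl.string import comma_and, comma_or, plural, quote

   print(comma_and(["gcc", "clang", "oneapi"]))  # gcc, clang, and oneapi
   print(comma_or(["tar.gz", "zip"]))            # tar.gz or zip
   print(quote(["a", "b"]))                      # ["'a'", "'b'"]
   print(plural(1, "attempt"))                   # 1 attempt
   print(plural(3, "attempt"))                   # 3 attempts
   print(plural(2, "index", "indices", show_n=False))  # indices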
lib/spack/llnl/url.py (459 changes, new file)
@@ -0,0 +1,459 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""URL primitives that just require Python standard library."""
|
||||
import itertools
|
||||
import os.path
|
||||
import re
|
||||
from typing import Optional, Set, Tuple
|
||||
from urllib.parse import urlsplit, urlunsplit
|
||||
|
||||
# Archive extensions allowed in Spack
|
||||
PREFIX_EXTENSIONS = ("tar", "TAR")
|
||||
EXTENSIONS = ("gz", "bz2", "xz", "Z")
|
||||
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
|
||||
|
||||
# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
|
||||
ALLOWED_ARCHIVE_TYPES = (
|
||||
tuple(".".join(ext) for ext in itertools.product(PREFIX_EXTENSIONS, EXTENSIONS))
|
||||
+ PREFIX_EXTENSIONS
|
||||
+ EXTENSIONS
|
||||
+ NO_TAR_EXTENSIONS
|
||||
)
|
||||
CONTRACTION_MAP = {"tgz": "tar.gz", "txz": "tar.xz", "tbz": "tar.bz2", "tbz2": "tar.bz2"}
|
||||
|
||||
|
||||
def find_list_urls(url: str) -> Set[str]:
|
||||
r"""Find good list URLs for the supplied URL.
|
||||
|
||||
By default, returns the dirname of the archive path.
|
||||
|
||||
Provides special treatment for the following websites, which have a
|
||||
unique list URL different from the dirname of the download URL:
|
||||
|
||||
========= =======================================================
|
||||
GitHub https://github.com/<repo>/<name>/releases
|
||||
GitLab https://gitlab.\*/<repo>/<name>/tags
|
||||
BitBucket https://bitbucket.org/<repo>/<name>/downloads/?tab=tags
|
||||
CRAN https://\*.r-project.org/src/contrib/Archive/<name>
|
||||
PyPI https://pypi.org/simple/<name>/
|
||||
LuaRocks https://luarocks.org/modules/<repo>/<name>
|
||||
========= =======================================================
|
||||
|
||||
Note: this function is called by `spack versions`, `spack checksum`,
|
||||
and `spack create`, but not by `spack fetch` or `spack install`.
|
||||
|
||||
Parameters:
|
||||
url (str): The download URL for the package
|
||||
|
||||
Returns:
|
||||
set: One or more list URLs for the package
|
||||
"""
|
||||
|
||||
url_types = [
|
||||
# GitHub
|
||||
# e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
|
||||
(r"(.*github\.com/[^/]+/[^/]+)", lambda m: m.group(1) + "/releases"),
|
||||
# GitLab API endpoint
|
||||
# e.g. https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2
|
||||
(
|
||||
r"(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)",
|
||||
lambda m: m.group(1) + "/" + m.group(2) + "/" + m.group(3) + "/tags",
|
||||
),
|
||||
# GitLab non-API endpoint
|
||||
# e.g. https://gitlab.dkrz.de/k202009/libaec/uploads/631e85bcf877c2dcaca9b2e6d6526339/libaec-1.0.0.tar.gz
|
||||
(r"(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)", lambda m: m.group(1) + "/tags"),
|
||||
# BitBucket
|
||||
# e.g. https://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2
|
||||
(r"(.*bitbucket.org/[^/]+/[^/]+)", lambda m: m.group(1) + "/downloads/?tab=tags"),
|
||||
# CRAN
|
||||
# e.g. https://cran.r-project.org/src/contrib/Rcpp_0.12.9.tar.gz
|
||||
# e.g. https://cloud.r-project.org/src/contrib/rgl_0.98.1.tar.gz
|
||||
(
|
||||
r"(.*\.r-project\.org/src/contrib)/([^_]+)",
|
||||
lambda m: m.group(1) + "/Archive/" + m.group(2),
|
||||
),
|
||||
# PyPI
|
||||
# e.g. https://pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://www.pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://pypi.org/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://pypi.python.org/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://files.pythonhosted.org/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://pypi.io/packages/py2.py3/o/opencensus-context/opencensus_context-0.1.1-py2.py3-none-any.whl
|
||||
(
|
||||
r"(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)",
|
||||
lambda m: "https://pypi.org/simple/" + m.group(1) + "/",
|
||||
),
|
||||
# LuaRocks
|
||||
# e.g. https://luarocks.org/manifests/gvvaughan/lpeg-1.0.2-1.src.rock
|
||||
# e.g. https://luarocks.org/manifests/openresty/lua-cjson-2.1.0-1.src.rock
|
||||
(
|
||||
r"luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/"
|
||||
+ r"(?P<name>.+?)-[0-9.-]*\.src\.rock",
|
||||
lambda m: "https://luarocks.org/modules/"
|
||||
+ m.group("org")
|
||||
+ "/"
|
||||
+ m.group("name")
|
||||
+ "/",
|
||||
),
|
||||
]
|
||||
|
||||
list_urls = {os.path.dirname(url)}
|
||||
|
||||
for pattern, fun in url_types:
|
||||
match = re.search(pattern, url)
|
||||
if match:
|
||||
list_urls.add(fun(match))
|
||||
|
||||
return list_urls
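For instance, for the GitHub case mentioned in the comments above, both the dirname and
the releases page are returned (a sketch, assuming ``llnl.url`` is importable):

.. code-block:: python

   from llnl.url import find_list_urls

   print(find_list_urls("https://github.com/llnl/callpath/archive/v1.0.1.tar.gz"))
   # {'https://github.com/llnl/callpath/archive',
   #  'https://github.com/llnl/callpath/releases'}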
|
||||
|
||||
|
||||
def strip_query_and_fragment(url: str) -> Tuple[str, str]:
|
||||
"""Strips query and fragment from a url, then returns the base url and the suffix.
|
||||
|
||||
Args:
|
||||
url: URL to be stripped
|
||||
|
||||
Raises:
|
||||
ValueError: when there is any error parsing the URL
|
||||
"""
|
||||
components = urlsplit(url)
|
||||
stripped = components[:3] + (None, None)
|
||||
|
||||
query, frag = components[3:5]
|
||||
suffix = ""
|
||||
if query:
|
||||
suffix += "?" + query
|
||||
if frag:
|
||||
suffix += "#" + frag
|
||||
|
||||
return urlunsplit(stripped), suffix
|
||||
|
||||
|
||||
SOURCEFORGE_RE = re.compile(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$")
|
||||
|
||||
|
||||
def split_url_on_sourceforge_suffix(url: str) -> Tuple[str, ...]:
|
||||
"""If the input is a sourceforge URL, returns base URL and "/download" suffix. Otherwise,
|
||||
returns the input URL and an empty string.
|
||||
"""
|
||||
match = SOURCEFORGE_RE.search(url)
|
||||
if match is not None:
|
||||
return match.groups()
|
||||
return url, ""
|
||||
|
||||
|
||||
def has_extension(path_or_url: str, ext: str) -> bool:
|
||||
"""Returns true if the extension in input is present in path, false otherwise."""
|
||||
prefix, _ = split_url_on_sourceforge_suffix(path_or_url)
|
||||
if not ext.startswith(r"\."):
|
||||
ext = rf"\.{ext}$"
|
||||
|
||||
if re.search(ext, prefix):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def extension_from_path(path_or_url: Optional[str]) -> Optional[str]:
|
||||
"""Tries to match an allowed archive extension to the input. Returns the first match,
|
||||
or None if no match was found.
|
||||
|
||||
Raises:
|
||||
ValueError: if the input is None
|
||||
"""
|
||||
if path_or_url is None:
|
||||
raise ValueError("Can't call extension() on None")
|
||||
|
||||
for t in ALLOWED_ARCHIVE_TYPES:
|
||||
if has_extension(path_or_url, t):
|
||||
return t
|
||||
return None
|
||||
|
||||
|
||||
def remove_extension(path_or_url: str, *, extension: str) -> str:
|
||||
"""Returns the input with the extension removed"""
|
||||
suffix = rf"\.{extension}$"
|
||||
return re.sub(suffix, "", path_or_url)
|
||||
|
||||
|
||||
def check_and_remove_ext(path: str, *, extension: str) -> str:
|
||||
"""Returns the input path with the extension removed, if the extension is present in path.
|
||||
Otherwise, returns the input unchanged.
|
||||
"""
|
||||
if not has_extension(path, extension):
|
||||
return path
|
||||
path, _ = split_url_on_sourceforge_suffix(path)
|
||||
return remove_extension(path, extension=extension)
|
||||
|
||||
|
||||
def strip_extension(path_or_url: str, *, extension: Optional[str] = None) -> str:
|
||||
"""If a path contains the extension in input, returns the path stripped of the extension.
|
||||
Otherwise, returns the input path.
|
||||
|
||||
If extension is None, attempts to strip any allowed extension from path.
|
||||
"""
|
||||
if extension is None:
|
||||
for t in ALLOWED_ARCHIVE_TYPES:
|
||||
if has_extension(path_or_url, ext=t):
|
||||
extension = t
|
||||
break
|
||||
else:
|
||||
return path_or_url
|
||||
|
||||
return check_and_remove_ext(path_or_url, extension=extension)
|
||||
|
||||
|
||||
def split_url_extension(url: str) -> Tuple[str, ...]:
|
||||
"""Some URLs have a query string, e.g.:
|
||||
|
||||
1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
|
||||
2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
|
||||
3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
|
||||
|
||||
In (1), the query string needs to be stripped to get at the
|
||||
extension, but in (2) & (3), the filename is IN a single final query
|
||||
argument.
|
||||
|
||||
This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
|
||||
The suffix contains anything that was stripped off the URL to
|
||||
get at the file extension. In (1), it will be ``'?raw=true'``, but
|
||||
in (2), it will be empty. In (3) the suffix is a parameter that follows
|
||||
after the file extension, e.g.:
|
||||
|
||||
1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
|
||||
2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
|
||||
3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
|
||||
"""
|
||||
# Strip off sourceforge download suffix.
|
||||
# e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
|
||||
prefix, suffix = split_url_on_sourceforge_suffix(url)
|
||||
|
||||
ext = extension_from_path(prefix)
|
||||
if ext is not None:
|
||||
prefix = strip_extension(prefix)
|
||||
return prefix, ext, suffix
|
||||
|
||||
try:
|
||||
prefix, suf = strip_query_and_fragment(prefix)
|
||||
except ValueError:
|
||||
# FIXME: tty.debug("Got error parsing path %s" % path)
|
||||
# Ignore URL parse errors here
|
||||
return url, ""
|
||||
|
||||
ext = extension_from_path(prefix)
|
||||
prefix = strip_extension(prefix)
|
||||
suffix = suf + suffix
|
||||
if ext is None:
|
||||
ext = ""
|
||||
|
||||
return prefix, ext, suffix
|
||||
|
||||
|
||||
def strip_version_suffixes(path_or_url: str) -> str:
|
||||
"""Some tarballs contain extraneous information after the version:
|
||||
|
||||
* ``bowtie2-2.2.5-source``
|
||||
* ``libevent-2.0.21-stable``
|
||||
* ``cuda_8.0.44_linux.run``
|
||||
|
||||
These strings are not part of the version number and should be ignored.
|
||||
This function strips those suffixes off and returns the remaining string.
|
||||
The goal is that the version is always the last thing in ``path``:
|
||||
|
||||
* ``bowtie2-2.2.5``
|
||||
* ``libevent-2.0.21``
|
||||
* ``cuda_8.0.44``
|
||||
|
||||
Args:
|
||||
path_or_url: The filename or URL for the package
|
||||
|
||||
Returns:
|
||||
The ``path`` with any extraneous suffixes removed
|
||||
"""
|
||||
# NOTE: This could be done with complicated regexes in parse_version_offset
|
||||
# NOTE: The problem is that we would have to add these regexes to the end
|
||||
# NOTE: of every single version regex. Easier to just strip them off
|
||||
# NOTE: permanently
|
||||
|
||||
suffix_regexes = [
|
||||
# Download type
|
||||
r"[Ii]nstall",
|
||||
r"all",
|
||||
r"code",
|
||||
r"[Ss]ources?",
|
||||
r"file",
|
||||
r"full",
|
||||
r"single",
|
||||
r"with[a-zA-Z_-]+",
|
||||
r"rock",
|
||||
r"src(_0)?",
|
||||
r"public",
|
||||
r"bin",
|
||||
r"binary",
|
||||
r"run",
|
||||
r"[Uu]niversal",
|
||||
r"jar",
|
||||
r"complete",
|
||||
r"dynamic",
|
||||
r"oss",
|
||||
r"gem",
|
||||
r"tar",
|
||||
r"sh",
|
||||
# Download version
|
||||
r"release",
|
||||
r"bin",
|
||||
r"stable",
|
||||
r"[Ff]inal",
|
||||
r"rel",
|
||||
r"orig",
|
||||
r"dist",
|
||||
r"\+",
|
||||
# License
|
||||
r"gpl",
|
||||
# Arch
|
||||
# Needs to come before and after OS, appears in both orders
|
||||
r"ia32",
|
||||
r"intel",
|
||||
r"amd64",
|
||||
r"linux64",
|
||||
r"x64",
|
||||
r"64bit",
|
||||
r"x86[_-]64",
|
||||
r"i586_64",
|
||||
r"x86",
|
||||
r"i[36]86",
|
||||
r"ppc64(le)?",
|
||||
r"armv?(7l|6l|64)",
|
||||
# Other
|
||||
r"cpp",
|
||||
r"gtk",
|
||||
r"incubating",
|
||||
# OS
|
||||
r"[Ll]inux(_64)?",
|
||||
r"LINUX",
|
||||
r"[Uu]ni?x",
|
||||
r"[Ss]un[Oo][Ss]",
|
||||
r"[Mm]ac[Oo][Ss][Xx]?",
|
||||
r"[Oo][Ss][Xx]",
|
||||
r"[Dd]arwin(64)?",
|
||||
r"[Aa]pple",
|
||||
r"[Ww]indows",
|
||||
r"[Ww]in(64|32)?",
|
||||
r"[Cc]ygwin(64|32)?",
|
||||
r"[Mm]ingw",
|
||||
r"centos",
|
||||
# Arch
|
||||
# Needs to come before and after OS, appears in both orders
|
||||
r"ia32",
|
||||
r"intel",
|
||||
r"amd64",
|
||||
r"linux64",
|
||||
r"x64",
|
||||
r"64bit",
|
||||
r"x86[_-]64",
|
||||
r"i586_64",
|
||||
r"x86",
|
||||
r"i[36]86",
|
||||
r"ppc64(le)?",
|
||||
r"armv?(7l|6l|64)?",
|
||||
# PyPI
|
||||
r"[._-]py[23].*\.whl",
|
||||
r"[._-]cp[23].*\.whl",
|
||||
r"[._-]win.*\.exe",
|
||||
]
|
||||
|
||||
for regex in suffix_regexes:
|
||||
# Remove the suffix from the end of the path
|
||||
# This may be done multiple times
|
||||
path_or_url = re.sub(r"[._-]?" + regex + "$", "", path_or_url)
|
||||
|
||||
return path_or_url
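The docstring's three examples, run through the function:

.. code-block:: python

   from llnl.url import strip_version_suffixes

   print(strip_version_suffixes("bowtie2-2.2.5-source"))    # bowtie2-2.2.5
   print(strip_version_suffixes("libevent-2.0.21-stable"))  # libevent-2.0.21
   print(strip_version_suffixes("cuda_8.0.44_linux.run"))   # cuda_8.0.44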
|
||||
|
||||
|
||||
def expand_contracted_extension(extension: str) -> str:
|
||||
"""Returns the expanded version of a known contracted extension.
|
||||
|
||||
This function maps extensions like ".tgz" to ".tar.gz". On unknown extensions,
|
||||
return the input unmodified.
|
||||
"""
|
||||
extension = extension.strip(".")
|
||||
return CONTRACTION_MAP.get(extension, extension)
|
||||
|
||||
|
||||
def expand_contracted_extension_in_path(
|
||||
path_or_url: str, *, extension: Optional[str] = None
|
||||
) -> str:
|
||||
"""Returns the input path or URL with any contraction extension expanded.
|
||||
|
||||
Args:
|
||||
path_or_url: path or URL to be expanded
|
||||
extension: if specified, only attempt to expand that extension
|
||||
"""
|
||||
extension = extension or extension_from_path(path_or_url)
|
||||
if extension is None:
|
||||
return path_or_url
|
||||
|
||||
expanded = expand_contracted_extension(extension)
|
||||
if expanded != extension:
|
||||
return re.sub(rf"{extension}", rf"{expanded}", path_or_url)
|
||||
return path_or_url
|
||||
|
||||
|
||||
def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
|
||||
"""Returns compression extension for a compressed archive"""
|
||||
extension = expand_contracted_extension(extension)
|
||||
for ext in [*EXTENSIONS]:
|
||||
if ext in extension:
|
||||
return ext
|
||||
return None
|
||||
|
||||
|
||||
def strip_compression_extension(path_or_url: str, ext: Optional[str] = None) -> str:
|
||||
"""Strips the compression extension from the input, and returns it. For instance,
|
||||
"foo.tgz" becomes "foo.tar".
|
||||
|
||||
If no extension is given, try a default list of extensions.
|
||||
|
||||
Args:
|
||||
path_or_url: input to be stripped
|
||||
ext: if given, extension to be stripped
|
||||
"""
|
||||
if not extension_from_path(path_or_url):
|
||||
return path_or_url
|
||||
|
||||
expanded_path = expand_contracted_extension_in_path(path_or_url)
|
||||
candidates = [ext] if ext is not None else EXTENSIONS
|
||||
for current_extension in candidates:
|
||||
modified_path = check_and_remove_ext(expanded_path, extension=current_extension)
|
||||
if modified_path != expanded_path:
|
||||
return modified_path
|
||||
return expanded_path
|
||||
|
||||
|
||||
def allowed_archive(path_or_url: str) -> bool:
|
||||
"""Returns true if the input is a valid archive, False otherwise."""
|
||||
return (
|
||||
False if not path_or_url else any(path_or_url.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
|
||||
)
|
||||
|
||||
|
||||
def determine_url_file_extension(path: str) -> str:
|
||||
"""This returns the type of archive a URL refers to. This is
|
||||
sometimes confusing because of URLs like:
|
||||
|
||||
(1) https://github.com/petdance/ack/tarball/1.93_02
|
||||
|
||||
Where the URL doesn't actually contain the filename. We need
|
||||
to know what type it is so that we can appropriately name files
|
||||
in mirrors.
|
||||
"""
|
||||
match = re.search(r"github.com/.+/(zip|tar)ball/", path)
|
||||
if match:
|
||||
if match.group(1) == "zip":
|
||||
return "zip"
|
||||
elif match.group(1) == "tar":
|
||||
return "tar.gz"
|
||||
|
||||
prefix, ext, suffix = split_url_extension(path)
|
||||
return ext
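A few calls tying the extension helpers in this file together (a sketch, assuming
``llnl.url`` is importable as in a Spack checkout):

.. code-block:: python

   from llnl.url import expand_contracted_extension, strip_compression_extension, strip_extension

   print(expand_contracted_extension("tgz"))                   # tar.gz
   print(strip_compression_extension("foo.tgz"))               # foo.tar
   print(strip_extension("archive.tar.bz2"))                   # archive
   print(strip_extension("archive.tar.bz2", extension="bz2"))  # archive.tar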
|
||||
@@ -11,6 +11,7 @@
|
||||
import itertools
|
||||
import numbers
|
||||
import os
|
||||
import pathlib
|
||||
import posixpath
|
||||
import re
|
||||
import shutil
|
||||
@@ -27,7 +28,8 @@
|
||||
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
|
||||
|
||||
from spack.util.executable import Executable, which
|
||||
from spack.util.path import path_to_os_path, system_path_filter
|
||||
|
||||
from ..path import path_to_os_path, system_path_filter
|
||||
|
||||
if sys.platform != "win32":
|
||||
import grp
|
||||
@@ -335,8 +337,7 @@ def groupid_to_group(x):
|
||||
|
||||
if string:
|
||||
regex = re.escape(regex)
|
||||
filenames = path_to_os_path(*filenames)
|
||||
for filename in filenames:
|
||||
for filename in path_to_os_path(*filenames):
|
||||
msg = 'FILTER FILE: {0} [replacing "{1}"]'
|
||||
tty.debug(msg.format(filename, regex))
|
||||
|
||||
@@ -2426,7 +2427,7 @@ def library_dependents(self):
|
||||
"""
|
||||
Set of directories where package binaries/libraries are located.
|
||||
"""
|
||||
return set([self.pkg.prefix.bin]) | self._additional_library_dependents
|
||||
return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
|
||||
|
||||
def add_library_dependent(self, *dest):
|
||||
"""
|
||||
@@ -2439,9 +2440,9 @@ def add_library_dependent(self, *dest):
|
||||
"""
|
||||
for pth in dest:
|
||||
if os.path.isfile(pth):
|
||||
self._additional_library_dependents.add(os.path.dirname)
|
||||
self._additional_library_dependents.add(pathlib.Path(pth).parent)
|
||||
else:
|
||||
self._additional_library_dependents.add(pth)
|
||||
self._additional_library_dependents.add(pathlib.Path(pth))
|
||||
|
||||
@property
|
||||
def rpaths(self):
|
||||
@@ -2454,7 +2455,7 @@ def rpaths(self):
|
||||
dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
|
||||
for extra_path in self._addl_rpaths:
|
||||
dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
|
||||
return set(dependent_libs)
|
||||
return set([pathlib.Path(x) for x in dependent_libs])
|
||||
|
||||
def add_rpath(self, *paths):
|
||||
"""
|
||||
@@ -2470,7 +2471,7 @@ def add_rpath(self, *paths):
|
||||
"""
|
||||
self._addl_rpaths = self._addl_rpaths | set(paths)
|
||||
|
||||
def _link(self, path, dest_dir):
def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
"""Perform link step of simulated rpathing, installing
symlinks of files in path to the dest_dir
location. This method deliberately prevents
@@ -2478,27 +2479,35 @@ def _link(self, path, dest_dir):
This is because it is both meaningless from an rpath
perspective, and will cause an error when Developer
mode is not enabled"""
file_name = os.path.basename(path)
dest_file = os.path.join(dest_dir, file_name)
if os.path.exists(dest_dir) and not dest_file == path:

def report_already_linked():
# We have either already symlinked or we are encountering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(str(dest_file))
tty.debug(
"Linking library %s to %s failed, " % (str(path), str(dest_file))
+ "already linked."
if already_linked
else "library with name %s already exists at location %s."
% (str(file_name), str(dest_dir))
)

file_name = path.name
dest_file = dest_dir / file_name
if not dest_file.exists() and dest_dir.exists() and not dest_file == path:
try:
symlink(path, dest_file)
symlink(str(path), str(dest_file))
# For py2 compatibility, we have to catch the specific Windows error code
# associated with trying to create a file that already exists (winerror 183)
# Catch OSErrors missed by the SymlinkError checks
except OSError as e:
if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
# We have either already symlinked or we are encountering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(dest_file)
tty.debug(
"Linking library %s to %s failed, " % (path, dest_file) + "already linked."
if already_linked
else "library with name %s already exists at location %s."
% (file_name, dest_dir)
)
pass
report_already_linked()
else:
raise e
# catch errors we raise ourselves from Spack
except llnl.util.symlink.AlreadyExistsError:
report_already_linked()

def establish_link(self):
"""

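For reference, a standalone sketch of the overwrite-avoidance check that _link performs, using only the standard library (os.symlink stands in for Spack's wrapper); illustrative only, not part of the diff.

import os
import pathlib


def link_into(path: pathlib.Path, dest_dir: pathlib.Path) -> bool:
    """Symlink ``path`` into ``dest_dir`` unless something with that name is already there."""
    dest_file = dest_dir / path.name
    if dest_file.exists() or not dest_dir.exists() or dest_file == path:
        # naming clash, missing destination directory, or self-link: skip quietly
        return False
    os.symlink(str(path), str(dest_file))
    return True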
@@ -14,7 +14,7 @@

from llnl.util import lang, tty

import spack.util.string
from ..string import plural

if sys.platform != "win32":
import fcntl
@@ -169,7 +169,7 @@ def _attempts_str(wait_time, nattempts):
if nattempts <= 1:
return ""

attempts = spack.util.string.plural(nattempts, "attempt")
attempts = plural(nattempts, "attempt")
return " after {} and {}".format(lang.pretty_seconds(wait_time), attempts)

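A quick usage sketch of the relocated plural helper; the exact rendered strings are assumed from how the call sites above use it, not verified against llnl.string.

from llnl.string import plural

print(plural(1, "attempt"))  # assumed to render "1 attempt"
print(plural(3, "attempt"))  # assumed to render "3 attempts"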
@@ -11,8 +11,7 @@

from llnl.util import lang, tty

from spack.error import SpackError
from spack.util.path import system_path_filter
from ..path import system_path_filter

if sys.platform == "win32":
from win32file import CreateHardLink
@@ -66,7 +65,9 @@ def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not
if not allow_broken_symlinks:
# Perform basic checks to make sure symlinking will succeed
if os.path.lexists(link_path):
raise SymlinkError(f"Link path ({link_path}) already exists. Cannot create link.")
raise AlreadyExistsError(
f"Link path ({link_path}) already exists. Cannot create link."
)

if not os.path.exists(source_path):
if os.path.isabs(source_path) and not allow_broken_symlinks:
@@ -78,7 +79,7 @@ def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not
else:
# os.symlink can create a link when the given source path is relative to
# the link path. Emulate this behavior and check to see if the source exists
# relative to the link patg ahead of link creation to prevent broken
# relative to the link path ahead of link creation to prevent broken
# links from being made.
link_parent_dir = os.path.dirname(link_path)
relative_path = os.path.join(link_parent_dir, source_path)
@@ -234,7 +235,7 @@ def _windows_create_junction(source: str, link: str):
elif not os.path.exists(source):
raise SymlinkError("Source path does not exist, cannot create a junction.")
elif os.path.lexists(link):
raise SymlinkError("Link path already exists, cannot create a junction.")
raise AlreadyExistsError("Link path already exists, cannot create a junction.")
elif not os.path.isdir(source):
raise SymlinkError("Source path is not a directory, cannot create a junction.")

@@ -259,7 +260,7 @@ def _windows_create_hard_link(path: str, link: str):
elif not os.path.exists(path):
raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
elif os.path.lexists(link):
raise SymlinkError(f"Link path ({link}) already exists. Cannot create hard link.")
raise AlreadyExistsError(f"Link path ({link}) already exists. Cannot create hard link.")
elif not os.path.isfile(path):
raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
else:
@@ -336,7 +337,11 @@ def resolve_link_target_relative_to_the_link(link):
return os.path.join(link_dir, target)


class SymlinkError(SpackError):
class SymlinkError(RuntimeError):
"""Exception class for errors raised while creating symlinks,
junctions and hard links
"""


class AlreadyExistsError(SymlinkError):
"""Link path already exists."""

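A hedged usage sketch (not part of the diff) of how a caller might react to the new AlreadyExistsError; the paths are placeholders.

from llnl.util.symlink import AlreadyExistsError, SymlinkError, symlink

try:
    symlink("/opt/libs/libfoo.so", "/opt/view/libfoo.so")  # placeholder paths
except AlreadyExistsError:
    pass  # the link (or a file with that name) already exists; keep it
except SymlinkError as err:
    raise RuntimeError(f"could not create link: {err}")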
@@ -38,10 +38,13 @@ def _search_duplicate_compilers(error_cls):
import ast
import collections
import collections.abc
import glob
import inspect
import itertools
import pathlib
import pickle
import re
import warnings
from urllib.request import urlopen

import llnl.util.lang
@@ -798,3 +801,76 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
errors.append(err)

return errors


#: Sanity checks on package directives
external_detection = AuditClass(
group="externals",
tag="PKG-EXTERNALS",
description="Sanity checks for external software detection",
kwargs=("pkgs",),
)


def packages_with_detection_tests():
"""Return the list of packages with a corresponding detection_test.yaml file."""
import spack.config
import spack.util.path

to_be_tested = []
for current_repo in spack.repo.PATH.repos:
namespace = current_repo.namespace
packages_dir = pathlib.PurePath(current_repo.packages_path)
pattern = packages_dir / "**" / "detection_test.yaml"
pkgs_with_tests = [
f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern))
]
to_be_tested.extend(pkgs_with_tests)

return to_be_tested

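A small sketch of driving the helper above from a script; the package names in the comment are hypothetical.

import spack.audit

pkgs = spack.audit.packages_with_detection_tests()
# e.g. ["builtin.cmake", "builtin.gcc", ...] for packages shipping a detection_test.yaml
print("\n".join(sorted(pkgs)))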
@external_detection
|
||||
def _test_detection_by_executable(pkgs, error_cls):
|
||||
"""Test drive external detection for packages"""
|
||||
import spack.detection
|
||||
|
||||
errors = []
|
||||
|
||||
# Filter the packages and retain only the ones with detection tests
|
||||
pkgs_with_tests = packages_with_detection_tests()
|
||||
selected_pkgs = []
|
||||
for current_package in pkgs_with_tests:
|
||||
_, unqualified_name = spack.repo.partition_package_name(current_package)
|
||||
# Check for both unqualified name and qualified name
|
||||
if unqualified_name in pkgs or current_package in pkgs:
|
||||
selected_pkgs.append(current_package)
|
||||
selected_pkgs.sort()
|
||||
|
||||
if not selected_pkgs:
|
||||
summary = "No detection test to run"
|
||||
details = [f' "{p}" has no detection test' for p in pkgs]
|
||||
warnings.warn("\n".join([summary] + details))
|
||||
return errors
|
||||
|
||||
for pkg_name in selected_pkgs:
|
||||
for idx, test_runner in enumerate(
|
||||
spack.detection.detection_tests(pkg_name, spack.repo.PATH)
|
||||
):
|
||||
specs = test_runner.execute()
|
||||
expected_specs = test_runner.expected_specs
|
||||
|
||||
not_detected = set(expected_specs) - set(specs)
|
||||
if not_detected:
|
||||
summary = pkg_name + ": cannot detect some specs"
|
||||
details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)]
|
||||
errors.append(error_cls(summary=summary, details=details))
|
||||
|
||||
not_expected = set(specs) - set(expected_specs)
|
||||
if not_expected:
|
||||
summary = pkg_name + ": detected unexpected specs"
|
||||
msg = '"{0}" was detected, but was not expected [test_id={1}]'
|
||||
details = [msg.format(s, idx) for s in sorted(not_expected)]
|
||||
errors.append(error_cls(summary=summary, details=details))
|
||||
|
||||
return errors
|
||||
|
||||
@@ -34,6 +34,7 @@
|
||||
import spack.cmd
|
||||
import spack.config as config
|
||||
import spack.database as spack_db
|
||||
import spack.error
|
||||
import spack.hooks
|
||||
import spack.hooks.sbang
|
||||
import spack.mirror
|
||||
@@ -646,8 +647,7 @@ class BuildManifestVisitor(BaseDirectoryVisitor):
|
||||
directories."""
|
||||
|
||||
def __init__(self):
|
||||
# Save unique identifiers of files to avoid
|
||||
# relocating hardlink files for each path.
|
||||
# Save unique identifiers of hardlinks to avoid relocating them multiple times
|
||||
self.visited = set()
|
||||
|
||||
# Lists of files we will check
|
||||
@@ -656,6 +656,8 @@ def __init__(self):
|
||||
|
||||
def seen_before(self, root, rel_path):
|
||||
stat_result = os.lstat(os.path.join(root, rel_path))
|
||||
if stat_result.st_nlink == 1:
|
||||
return False
|
||||
identifier = (stat_result.st_dev, stat_result.st_ino)
|
||||
if identifier in self.visited:
|
||||
return True
|
||||
@@ -1417,7 +1419,7 @@ def try_fetch(url_to_fetch):
|
||||
|
||||
try:
|
||||
stage.fetch()
|
||||
except web_util.FetchError:
|
||||
except spack.error.FetchError:
|
||||
stage.destroy()
|
||||
return None
|
||||
|
||||
@@ -1580,9 +1582,10 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
|
||||
for rel_path in buildinfo[key]:
|
||||
stat_result = os.lstat(os.path.join(root, rel_path))
|
||||
identifier = (stat_result.st_dev, stat_result.st_ino)
|
||||
if identifier in visited:
|
||||
continue
|
||||
visited.add(identifier)
|
||||
if stat_result.st_nlink > 1:
|
||||
if identifier in visited:
|
||||
continue
|
||||
visited.add(identifier)
|
||||
new_list.append(rel_path)
|
||||
buildinfo[key] = new_list
|
||||
|
||||
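A standard-library-only sketch of the dedup logic in this hunk: hardlinked files are keyed by (st_dev, st_ino) so each group is relocated only once.

import os


def dedupe_hardlinks(root, rel_paths):
    """Keep the first occurrence of every hardlink group, in input order."""
    visited = set()
    kept = []
    for rel_path in rel_paths:
        stat_result = os.lstat(os.path.join(root, rel_path))
        if stat_result.st_nlink > 1:
            identifier = (stat_result.st_dev, stat_result.st_ino)
            if identifier in visited:
                continue
            visited.add(identifier)
        kept.append(rel_path)
    return kept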
@@ -2144,7 +2147,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
|
||||
if not os.path.exists(stage.save_filename):
|
||||
try:
|
||||
stage.fetch()
|
||||
except web_util.FetchError:
|
||||
except spack.error.FetchError:
|
||||
continue
|
||||
|
||||
tty.debug("Found key {0}".format(fingerprint))
|
||||
@@ -2296,7 +2299,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
|
||||
try:
|
||||
stage.fetch()
|
||||
break
|
||||
except web_util.FetchError as e:
|
||||
except spack.error.FetchError as e:
|
||||
tty.debug(e)
|
||||
else:
|
||||
if fail_if_missing:
|
||||
|
||||
@@ -43,6 +43,7 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.string import plural
|
||||
from llnl.util.filesystem import join_path
|
||||
from llnl.util.lang import dedupe
|
||||
from llnl.util.symlink import symlink
|
||||
@@ -82,7 +83,6 @@
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.log_parse import make_log_context, parse_log_events
|
||||
from spack.util.module_cmd import load_module, module, path_from_modules
|
||||
from spack.util.string import plural
|
||||
|
||||
#
|
||||
# This can be set by the user to globally disable parallel builds.
|
||||
|
||||
@@ -61,6 +61,11 @@ def component_prefix(self):
|
||||
"""Path to component <prefix>/<component>/<version>."""
|
||||
return self.prefix.join(join_path(self.component_dir, self.spec.version))
|
||||
|
||||
@property
|
||||
def env_script_args(self):
|
||||
"""Additional arguments to pass to vars.sh script."""
|
||||
return ()
|
||||
|
||||
def install(self, spec, prefix):
|
||||
self.install_component(basename(self.url_for_version(spec.version)))
|
||||
|
||||
@@ -124,7 +129,7 @@ def setup_run_environment(self, env):
|
||||
if "~envmods" not in self.spec:
|
||||
env.extend(
|
||||
EnvironmentModifications.from_sourcing_file(
|
||||
join_path(self.component_prefix, "env", "vars.sh")
|
||||
join_path(self.component_prefix, "env", "vars.sh"), *self.env_script_args
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.detection
|
||||
import spack.multimethod
|
||||
import spack.package_base
|
||||
@@ -226,7 +227,48 @@ def update_external_dependencies(self, extendee_spec=None):
|
||||
|
||||
python.external_path = self.spec.external_path
|
||||
python._mark_concrete()
|
||||
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
|
||||
self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
|
||||
|
||||
def get_external_python_for_prefix(self):
|
||||
"""
|
||||
For an external package that extends python, find the most likely spec for the python
|
||||
it depends on.
|
||||
|
||||
First search: an "installed" external that shares a prefix with this package
|
||||
Second search: a configured external that shares a prefix with this package
|
||||
Third search: search this prefix for a python package
|
||||
|
||||
Returns:
|
||||
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
|
||||
"""
|
||||
python_externals_installed = [
|
||||
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
|
||||
]
|
||||
if python_externals_installed:
|
||||
return python_externals_installed[0]
|
||||
|
||||
python_external_config = spack.config.get("packages:python:externals", [])
|
||||
python_externals_configured = [
|
||||
spack.spec.parse_with_version_concrete(item["spec"])
|
||||
for item in python_external_config
|
||||
if item["prefix"] == self.spec.external_path
|
||||
]
|
||||
if python_externals_configured:
|
||||
return python_externals_configured[0]
|
||||
|
||||
python_externals_detection = spack.detection.by_path(
|
||||
["python"], path_hints=[self.spec.external_path]
|
||||
)
|
||||
|
||||
python_externals_detected = [
|
||||
d.spec
|
||||
for d in python_externals_detection.get("python", [])
|
||||
if d.prefix == self.spec.external_path
|
||||
]
|
||||
if python_externals_detected:
|
||||
return python_externals_detected[0]
|
||||
|
||||
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
|
||||
|
||||
|
||||
class PythonPackage(PythonExtension):
|
||||
@@ -273,54 +315,16 @@ def list_url(cls):
|
||||
name = cls.pypi.split("/")[0]
|
||||
return "https://pypi.org/simple/" + name + "/"
|
||||
|
||||
def get_external_python_for_prefix(self):
|
||||
"""
|
||||
For an external package that extends python, find the most likely spec for the python
|
||||
it depends on.
|
||||
|
||||
First search: an "installed" external that shares a prefix with this package
|
||||
Second search: a configured external that shares a prefix with this package
|
||||
Third search: search this prefix for a python package
|
||||
|
||||
Returns:
|
||||
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
|
||||
"""
|
||||
python_externals_installed = [
|
||||
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
|
||||
]
|
||||
if python_externals_installed:
|
||||
return python_externals_installed[0]
|
||||
|
||||
python_external_config = spack.config.get("packages:python:externals", [])
|
||||
python_externals_configured = [
|
||||
spack.spec.parse_with_version_concrete(item["spec"])
|
||||
for item in python_external_config
|
||||
if item["prefix"] == self.spec.external_path
|
||||
]
|
||||
if python_externals_configured:
|
||||
return python_externals_configured[0]
|
||||
|
||||
python_externals_detection = spack.detection.by_path(
|
||||
["python"], path_hints=[self.spec.external_path]
|
||||
)
|
||||
|
||||
python_externals_detected = [
|
||||
d.spec
|
||||
for d in python_externals_detection.get("python", [])
|
||||
if d.prefix == self.spec.external_path
|
||||
]
|
||||
if python_externals_detected:
|
||||
return python_externals_detected[0]
|
||||
|
||||
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
|
||||
|
||||
@property
|
||||
def headers(self):
|
||||
"""Discover header files in platlib."""
|
||||
|
||||
# Remove py- prefix in package name
|
||||
name = self.spec.name[3:]
|
||||
|
||||
# Headers may be in either location
|
||||
include = self.prefix.join(self.spec["python"].package.include)
|
||||
platlib = self.prefix.join(self.spec["python"].package.platlib)
|
||||
include = self.prefix.join(self.spec["python"].package.include).join(name)
|
||||
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
|
||||
headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)
|
||||
|
||||
if headers:
|
||||
@@ -334,13 +338,14 @@ def libs(self):
|
||||
"""Discover libraries in platlib."""
|
||||
|
||||
# Remove py- prefix in package name
|
||||
library = "lib" + self.spec.name[3:].replace("-", "?")
|
||||
root = self.prefix.join(self.spec["python"].package.platlib)
|
||||
name = self.spec.name[3:]
|
||||
|
||||
for shared in [True, False]:
|
||||
libs = fs.find_libraries(library, root, shared=shared, recursive=True)
|
||||
if libs:
|
||||
return libs
|
||||
root = self.prefix.join(self.spec["python"].package.platlib).join(name)
|
||||
|
||||
libs = fs.find_all_libraries(root, recursive=True)
|
||||
|
||||
if libs:
|
||||
return libs
|
||||
|
||||
msg = "Unable to recursively locate {} libraries in {}"
|
||||
raise NoLibrariesError(msg.format(self.spec.name, root))
|
||||
|
||||
@@ -308,7 +308,7 @@ def append_dep(s, d):
|
||||
dependencies.append({"spec": s, "depends": d})
|
||||
|
||||
for spec in spec_list:
|
||||
for s in spec.traverse(deptype=all):
|
||||
for s in spec.traverse(deptype="all"):
|
||||
if s.external:
|
||||
tty.msg("Will not stage external pkg: {0}".format(s))
|
||||
continue
|
||||
@@ -316,7 +316,7 @@ def append_dep(s, d):
|
||||
skey = _spec_deps_key(s)
|
||||
spec_labels[skey] = s
|
||||
|
||||
for d in s.dependencies(deptype=all):
|
||||
for d in s.dependencies(deptype="all"):
|
||||
dkey = _spec_deps_key(d)
|
||||
if d.external:
|
||||
tty.msg("Will not stage external dep: {0}".format(d))
|
||||
@@ -1029,13 +1029,18 @@ def main_script_replacements(cmd):
|
||||
job_vars = job_object.setdefault("variables", {})
|
||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
|
||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
||||
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
|
||||
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
|
||||
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
|
||||
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
|
||||
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
|
||||
|
||||
job_object["needs"] = []
|
||||
if spec_label in dependencies:
|
||||
if enable_artifacts_buildcache:
|
||||
# Get dependencies transitively, so they're all
|
||||
# available in the artifacts buildcache.
|
||||
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
|
||||
dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
|
||||
else:
|
||||
# In this case, "needs" is only used for scheduling
|
||||
# purposes, so we only get the direct dependencies.
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
from textwrap import dedent
|
||||
from typing import List, Match, Tuple
|
||||
|
||||
import llnl.string
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import join_path
|
||||
from llnl.util.lang import attr_setdefault, index_by
|
||||
@@ -29,7 +30,6 @@
|
||||
import spack.user_environment as uenv
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.string
|
||||
|
||||
# cmd has a submodule called "list" so preserve the python list module
|
||||
python_list = list
|
||||
@@ -516,7 +516,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
|
||||
category, e.g. if pkg_type is "installed" then the message
|
||||
would be "3 installed packages"
|
||||
"""
|
||||
tty.msg("%s" % spack.util.string.plural(len(specs), pkg_type + " package"))
|
||||
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
|
||||
|
||||
|
||||
def spack_is_git_repo():
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.colify
|
||||
import llnl.util.tty.color as cl
|
||||
|
||||
import spack.audit
|
||||
@@ -20,6 +21,15 @@ def setup_parser(subparser):
|
||||
# Audit configuration files
|
||||
sp.add_parser("configs", help="audit configuration files")
|
||||
|
||||
# Audit package recipes
|
||||
external_parser = sp.add_parser("externals", help="check external detection in packages")
|
||||
external_parser.add_argument(
|
||||
"--list",
|
||||
action="store_true",
|
||||
dest="list_externals",
|
||||
help="if passed, list which packages have detection tests",
|
||||
)
|
||||
|
||||
# Https and other linting
|
||||
https_parser = sp.add_parser("packages-https", help="check https in packages")
|
||||
https_parser.add_argument(
|
||||
@@ -29,7 +39,7 @@ def setup_parser(subparser):
|
||||
# Audit package recipes
|
||||
pkg_parser = sp.add_parser("packages", help="audit package recipes")
|
||||
|
||||
for group in [pkg_parser, https_parser]:
|
||||
for group in [pkg_parser, https_parser, external_parser]:
|
||||
group.add_argument(
|
||||
"name",
|
||||
metavar="PKG",
|
||||
@@ -62,6 +72,18 @@ def packages_https(parser, args):
|
||||
_process_reports(reports)
|
||||
|
||||
|
||||
def externals(parser, args):
|
||||
if args.list_externals:
|
||||
msg = "@*{The following packages have detection tests:}"
|
||||
tty.msg(cl.colorize(msg))
|
||||
llnl.util.tty.colify.colify(spack.audit.packages_with_detection_tests(), indent=2)
|
||||
return
|
||||
|
||||
pkgs = args.name or spack.repo.PATH.all_package_names()
|
||||
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
|
||||
_process_reports(reports)
|
||||
|
||||
|
||||
def list(parser, args):
|
||||
for subcommand, check_tags in spack.audit.GROUPS.items():
|
||||
print(cl.colorize("@*b{" + subcommand + "}:"))
|
||||
@@ -78,6 +100,7 @@ def list(parser, args):
|
||||
def audit(parser, args):
|
||||
subcommands = {
|
||||
"configs": configs,
|
||||
"externals": externals,
|
||||
"packages": packages,
|
||||
"packages-https": packages_https,
|
||||
"list": list,
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as clr
|
||||
from llnl.string import plural
|
||||
from llnl.util.lang import elide_list
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
@@ -32,7 +33,6 @@
|
||||
from spack.cmd import display_specs
|
||||
from spack.spec import Spec, save_dependency_specfiles
|
||||
from spack.stage import Stage
|
||||
from spack.util.string import plural
|
||||
|
||||
description = "create, download and install binary packages"
|
||||
section = "packaging"
|
||||
@@ -527,7 +527,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
|
||||
temp_stage.create()
|
||||
temp_stage.fetch()
|
||||
web_util.push_to_url(local_path, dest_url, keep_original=True)
|
||||
except web_util.FetchError as e:
|
||||
except spack.error.FetchError as e:
|
||||
# Expected, since we have to try all the possible extensions
|
||||
tty.debug("no such file: {0}".format(src_url))
|
||||
tty.debug(e)
|
||||
|
||||
@@ -66,7 +66,7 @@ def setup_parser(subparser):
|
||||
modes_parser.add_argument(
|
||||
"--verify", action="store_true", default=False, help="verify known package checksums"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["package"])
|
||||
arguments.add_common_arguments(subparser, ["package", "jobs"])
|
||||
subparser.add_argument(
|
||||
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
|
||||
)
|
||||
@@ -96,7 +96,7 @@ def checksum(parser, args):
|
||||
|
||||
# Add latest version if requested
|
||||
if args.latest:
|
||||
remote_versions = pkg.fetch_remote_versions()
|
||||
remote_versions = pkg.fetch_remote_versions(args.jobs)
|
||||
if len(remote_versions) > 0:
|
||||
latest_version = sorted(remote_versions.keys(), reverse=True)[0]
|
||||
versions.append(latest_version)
|
||||
@@ -119,13 +119,13 @@ def checksum(parser, args):
|
||||
# if we get here, it's because no valid url was provided by the package
|
||||
# do expensive fallback to try to recover
|
||||
if remote_versions is None:
|
||||
remote_versions = pkg.fetch_remote_versions()
|
||||
remote_versions = pkg.fetch_remote_versions(args.jobs)
|
||||
if version in remote_versions:
|
||||
url_dict[version] = remote_versions[version]
|
||||
|
||||
if len(versions) <= 0:
|
||||
if remote_versions is None:
|
||||
remote_versions = pkg.fetch_remote_versions()
|
||||
remote_versions = pkg.fetch_remote_versions(args.jobs)
|
||||
url_dict = remote_versions
|
||||
|
||||
if not url_dict:
|
||||
@@ -239,7 +239,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
|
||||
parsed_version = Version(contents_version.group(1))
|
||||
|
||||
if parsed_version < new_versions[0][0]:
|
||||
split_contents[i:i] = [new_versions.pop(0)[1], " # FIX ME", "\n"]
|
||||
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
|
||||
num_versions_added += 1
|
||||
|
||||
elif parsed_version == new_versions[0][0]:
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
|
||||
import spack.cmd
|
||||
import spack.config
|
||||
import spack.dependency as dep
|
||||
import spack.deptypes as dt
|
||||
import spack.environment as ev
|
||||
import spack.mirror
|
||||
import spack.modules
|
||||
@@ -114,16 +114,13 @@ def __call__(self, parser, namespace, jobs, option_string):
|
||||
|
||||
|
||||
class DeptypeAction(argparse.Action):
|
||||
"""Creates a tuple of valid dependency types from a deptype argument."""
|
||||
"""Creates a flag of valid dependency types from a deptype argument."""
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
deptype = dep.all_deptypes
|
||||
if values:
|
||||
deptype = tuple(x.strip() for x in values.split(","))
|
||||
if deptype == ("all",):
|
||||
deptype = "all"
|
||||
deptype = dep.canonical_deptype(deptype)
|
||||
|
||||
if not values or values == "all":
|
||||
deptype = dt.ALL
|
||||
else:
|
||||
deptype = dt.canonicalize(values.split(","))
|
||||
setattr(namespace, self.dest, deptype)
|
||||
|
||||
|
||||
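The new DeptypeAction leans on the flag arithmetic from the spack.deptypes module added later in this diff; a short sketch of that arithmetic:

import spack.deptypes as dt

assert dt.canonicalize("all") == dt.ALL
assert dt.canonicalize(["build", "link"]) == dt.BUILD | dt.LINK
assert dt.canonicalize("run,test".split(",")) == dt.RUN | dt.TEST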
@@ -285,9 +282,8 @@ def deptype():
|
||||
return Args(
|
||||
"--deptype",
|
||||
action=DeptypeAction,
|
||||
default=dep.all_deptypes,
|
||||
help="comma-separated list of deptypes to traverse\n\ndefault=%s"
|
||||
% ",".join(dep.all_deptypes),
|
||||
default=dt.ALL,
|
||||
help="comma-separated list of deptypes to traverse (default=%s)" % ",".join(dt.ALL_TYPES),
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
import spack.build_environment as build_environment
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.paths
|
||||
import spack.spec
|
||||
@@ -46,9 +47,9 @@ def __init__(self, context="build"):
|
||||
raise ValueError("context can only be build or test")
|
||||
|
||||
if context == "build":
|
||||
self.direct_deps = ("build", "link", "run")
|
||||
self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
|
||||
else:
|
||||
self.direct_deps = ("build", "test", "link", "run")
|
||||
self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
|
||||
|
||||
self.has_uninstalled_deps = False
|
||||
|
||||
@@ -71,8 +72,8 @@ def accept(self, item):
|
||||
def neighbors(self, item):
|
||||
# Direct deps: follow build & test edges.
|
||||
# Transitive deps: follow link / run.
|
||||
deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
|
||||
return item.edge.spec.edges_to_dependencies(deptype=deptypes)
|
||||
depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
|
||||
return item.edge.spec.edges_to_dependencies(depflag=depflag)
|
||||
|
||||
|
||||
def emulate_env_utility(cmd_name, context, args):
|
||||
|
||||
@@ -822,7 +822,7 @@ def get_versions(args, name):
|
||||
if args.url is not None and args.template != "bundle" and valid_url:
|
||||
# Find available versions
|
||||
try:
|
||||
url_dict = spack.util.web.find_versions_of_archive(args.url)
|
||||
url_dict = spack.url.find_versions_of_archive(args.url)
|
||||
except UndetectableVersionError:
|
||||
# Use fake versions
|
||||
tty.warn("Couldn't detect version in: {0}".format(args.url))
|
||||
|
||||
@@ -74,7 +74,7 @@ def dependencies(parser, args):
|
||||
spec,
|
||||
transitive=args.transitive,
|
||||
expand_virtuals=args.expand_virtuals,
|
||||
deptype=args.deptype,
|
||||
depflag=args.deptype,
|
||||
)
|
||||
|
||||
if spec.name in dependencies:
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import llnl.string as string
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.colify import colify
|
||||
@@ -28,7 +29,6 @@
|
||||
import spack.schema.env
|
||||
import spack.spec
|
||||
import spack.tengine
|
||||
import spack.util.string as string
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
|
||||
description = "manage virtual environments"
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
import argparse
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Optional
|
||||
|
||||
@@ -156,11 +157,20 @@ def packages_to_search_for(
|
||||
):
|
||||
result = []
|
||||
for current_tag in tags:
|
||||
result.extend(spack.repo.PATH.packages_with_tags(current_tag))
|
||||
result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True))
|
||||
|
||||
if names:
|
||||
result = [x for x in result if x in names]
|
||||
# Match both fully qualified and unqualified
|
||||
parts = [rf"(^{x}$|[.]{x}$)" for x in names]
|
||||
select_re = re.compile("|".join(parts))
|
||||
result = [x for x in result if select_re.search(x)]
|
||||
|
||||
if exclude:
|
||||
result = [x for x in result if x not in exclude]
|
||||
# Match both fully qualified and unqualified
|
||||
parts = [rf"(^{x}$|[.]{x}$)" for x in exclude]
|
||||
select_re = re.compile("|".join(parts))
|
||||
result = [x for x in result if not select_re.search(x)]
|
||||
|
||||
return result
|
||||
|
||||
|
||||
|
||||
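A minimal sketch of the name-matching behavior above: the generated regex accepts a package either by its unqualified name or by any namespace-qualified form of it. The candidate names are made up.

import re

names = ["cmake", "builtin.gcc"]
parts = [rf"(^{x}$|[.]{x}$)" for x in names]
select_re = re.compile("|".join(parts))

candidates = ["cmake", "builtin.cmake", "gcc", "builtin.gcc", "cmake-extra"]
assert [c for c in candidates if select_re.search(c)] == ["cmake", "builtin.cmake", "builtin.gcc"]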
@@ -74,19 +74,19 @@ def graph(parser, args):
|
||||
|
||||
if args.static:
|
||||
args.dot = True
|
||||
static_graph_dot(specs, deptype=args.deptype)
|
||||
static_graph_dot(specs, depflag=args.deptype)
|
||||
return
|
||||
|
||||
if args.dot:
|
||||
builder = SimpleDAG()
|
||||
if args.color:
|
||||
builder = DAGWithDependencyTypes()
|
||||
graph_dot(specs, builder=builder, deptype=args.deptype)
|
||||
graph_dot(specs, builder=builder, depflag=args.deptype)
|
||||
return
|
||||
|
||||
# ascii is default: user doesn't need to provide it explicitly
|
||||
debug = spack.config.get("config:debug")
|
||||
graph_ascii(specs[0], debug=debug, deptype=args.deptype)
|
||||
graph_ascii(specs[0], debug=debug, depflag=args.deptype)
|
||||
for spec in specs[1:]:
|
||||
print() # extra line bt/w independent graphs
|
||||
graph_ascii(spec, debug=debug)
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.deptypes as dt
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.install_test
|
||||
import spack.repo
|
||||
@@ -160,7 +161,7 @@ def print_dependencies(pkg):
|
||||
for deptype in ("build", "link", "run"):
|
||||
color.cprint("")
|
||||
color.cprint(section_title("%s Dependencies:" % deptype.capitalize()))
|
||||
deps = sorted(pkg.dependencies_of_type(deptype))
|
||||
deps = sorted(pkg.dependencies_of_type(dt.flag_from_string(deptype)))
|
||||
if deps:
|
||||
colify(deps, indent=4)
|
||||
else:
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.dependency
|
||||
import spack.deptypes as dt
|
||||
import spack.repo
|
||||
from spack.version import VersionList
|
||||
|
||||
@@ -149,8 +149,8 @@ def rows_for_ncols(elts, ncols):
|
||||
|
||||
def get_dependencies(pkg):
|
||||
all_deps = {}
|
||||
for deptype in spack.dependency.all_deptypes:
|
||||
deps = pkg.dependencies_of_type(deptype)
|
||||
for deptype in dt.ALL_TYPES:
|
||||
deps = pkg.dependencies_of_type(dt.flag_from_string(deptype))
|
||||
all_deps[deptype] = [d for d in deps]
|
||||
|
||||
return all_deps
|
||||
@@ -275,8 +275,8 @@ def head(n, span_id, title, anchor=None):
|
||||
out.write("\n")
|
||||
out.write("</dd>\n")
|
||||
|
||||
for deptype in spack.dependency.all_deptypes:
|
||||
deps = pkg_cls.dependencies_of_type(deptype)
|
||||
for deptype in dt.ALL_TYPES:
|
||||
deps = pkg_cls.dependencies_of_type(dt.flag_from_string(deptype))
|
||||
if deps:
|
||||
out.write("<dt>%s Dependencies:</dt>\n" % deptype.capitalize())
|
||||
out.write("<dd>\n")
|
||||
|
||||
@@ -112,7 +112,9 @@ def load(parser, args):
|
||||
if "dependencies" in args.things_to_load:
|
||||
include_roots = "package" in args.things_to_load
|
||||
specs = [
|
||||
dep for spec in specs for dep in spec.traverse(root=include_roots, order="post")
|
||||
dep
|
||||
for spec in specs
|
||||
for dep in spec.traverse(root=include_roots, order="post", deptype=("run"))
|
||||
]
|
||||
|
||||
env_mod = spack.util.environment.EnvironmentModifications()
|
||||
|
||||
@@ -6,10 +6,11 @@
|
||||
import posixpath
|
||||
import sys
|
||||
|
||||
from llnl.path import convert_to_posix_path
|
||||
|
||||
import spack.paths
|
||||
import spack.util.executable
|
||||
from spack.spec import Spec
|
||||
from spack.util.path import convert_to_posix_path
|
||||
|
||||
description = "generate Windows installer"
|
||||
section = "admin"
|
||||
|
||||
@@ -176,17 +176,29 @@ def solve(parser, args):
|
||||
output = sys.stdout if "asp" in show else None
|
||||
setup_only = set(show) == {"asp"}
|
||||
unify = spack.config.get("concretizer:unify")
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
if unify != "when_possible":
|
||||
# set up solver parameters
|
||||
# Note: reuse and other concretizer prefs are passed as configuration
|
||||
result = solver.solve(
|
||||
specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
|
||||
specs,
|
||||
out=output,
|
||||
timers=args.timers,
|
||||
stats=args.stats,
|
||||
setup_only=setup_only,
|
||||
allow_deprecated=allow_deprecated,
|
||||
)
|
||||
if not setup_only:
|
||||
_process_result(result, show, required_format, kwargs)
|
||||
else:
|
||||
for idx, result in enumerate(
|
||||
solver.solve_in_rounds(specs, out=output, timers=args.timers, stats=args.stats)
|
||||
solver.solve_in_rounds(
|
||||
specs,
|
||||
out=output,
|
||||
timers=args.timers,
|
||||
stats=args.stats,
|
||||
allow_deprecated=allow_deprecated,
|
||||
)
|
||||
):
|
||||
if "solutions" in show:
|
||||
tty.msg("ROUND {0}".format(idx))
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
import io
|
||||
import sys
|
||||
|
||||
import llnl.string
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.colify as colify
|
||||
|
||||
@@ -24,7 +25,7 @@ def report_tags(category, tags):
|
||||
if isatty:
|
||||
num = len(tags)
|
||||
fmt = "{0} package tag".format(category)
|
||||
buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))
|
||||
buffer.write("{0}:\n".format(llnl.string.plural(num, fmt)))
|
||||
|
||||
if tags:
|
||||
colify.colify(tags, output=buffer, tty=isatty, indent=4)
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.url
|
||||
import spack.util.crypto as crypto
|
||||
from spack.url import (
|
||||
UndetectableNameError,
|
||||
@@ -26,7 +27,6 @@
|
||||
substitution_offsets,
|
||||
)
|
||||
from spack.util.naming import simplify_name
|
||||
from spack.util.web import find_versions_of_archive
|
||||
|
||||
description = "debugging tool for url parsing"
|
||||
section = "developer"
|
||||
@@ -139,7 +139,7 @@ def url_parse(args):
|
||||
if args.spider:
|
||||
print()
|
||||
tty.msg("Spidering for versions:")
|
||||
versions = find_versions_of_archive(url)
|
||||
versions = spack.url.find_versions_of_archive(url)
|
||||
|
||||
if not versions:
|
||||
print(" Found no versions for {0}".format(name))
|
||||
|
||||
@@ -37,10 +37,7 @@ def setup_parser(subparser):
|
||||
action="store_true",
|
||||
help="only list remote versions newer than the latest checksummed version",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-c", "--concurrency", default=32, type=int, help="number of concurrent requests"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["package"])
|
||||
arguments.add_common_arguments(subparser, ["package", "jobs"])
|
||||
|
||||
|
||||
def versions(parser, args):
|
||||
@@ -68,7 +65,7 @@ def versions(parser, args):
|
||||
if args.safe:
|
||||
return
|
||||
|
||||
fetched_versions = pkg.fetch_remote_versions(args.concurrency)
|
||||
fetched_versions = pkg.fetch_remote_versions(args.jobs)
|
||||
|
||||
if args.new:
|
||||
if sys.stdout.isatty():
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
import tempfile
|
||||
from typing import List, Optional, Sequence
|
||||
|
||||
import llnl.path
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
|
||||
@@ -24,7 +25,6 @@
|
||||
import spack.util.module_cmd
|
||||
import spack.version
|
||||
from spack.util.environment import filter_system_paths
|
||||
from spack.util.path import system_path_filter
|
||||
|
||||
__all__ = ["Compiler"]
|
||||
|
||||
@@ -39,10 +39,17 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
|
||||
version_arg (str): the argument used to extract version information
|
||||
"""
|
||||
compiler = spack.util.executable.Executable(compiler_path)
|
||||
compiler_invocation_args = {
|
||||
"output": str,
|
||||
"error": str,
|
||||
"ignore_errors": ignore_errors,
|
||||
"timeout": 120,
|
||||
"fail_on_error": True,
|
||||
}
|
||||
if version_arg:
|
||||
output = compiler(version_arg, output=str, error=str, ignore_errors=ignore_errors)
|
||||
output = compiler(version_arg, **compiler_invocation_args)
|
||||
else:
|
||||
output = compiler(output=str, error=str, ignore_errors=ignore_errors)
|
||||
output = compiler(**compiler_invocation_args)
|
||||
return output
|
||||
|
||||
|
||||
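A hedged sketch of invoking a compiler the way the updated helper does; the compiler path is a placeholder, and the keyword arguments mirror compiler_invocation_args above.

import spack.util.executable

compiler = spack.util.executable.Executable("/usr/bin/gcc")  # placeholder path
output = compiler(
    "--version", output=str, error=str, ignore_errors=(), timeout=120, fail_on_error=True
)
print(output.splitlines()[0])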
@@ -153,7 +160,7 @@ def _parse_link_paths(string):
|
||||
return implicit_link_dirs
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@llnl.path.system_path_filter
|
||||
def _parse_non_system_link_dirs(string: str) -> List[str]:
|
||||
"""Parses link paths out of compiler debug output.
|
||||
|
||||
@@ -229,6 +236,9 @@ class Compiler:
|
||||
# by any compiler
|
||||
_all_compiler_rpath_libraries = ["libc", "libc++", "libstdc++"]
|
||||
|
||||
#: Platform matcher for Platform objects supported by compiler
|
||||
is_supported_on_platform = lambda x: True
|
||||
|
||||
# Default flags used by a compiler to set an rpath
|
||||
@property
|
||||
def cc_rpath_arg(self):
|
||||
@@ -594,8 +604,6 @@ def search_regexps(cls, language):
|
||||
compiler_names = getattr(cls, "{0}_names".format(language))
|
||||
prefixes = [""] + cls.prefixes
|
||||
suffixes = [""]
|
||||
# Windows compilers generally have an extension of some sort
|
||||
# as do most files on Windows, handle that case here
|
||||
if sys.platform == "win32":
|
||||
ext = r"\.(?:exe|bat)"
|
||||
cls_suf = [suf + ext for suf in cls.suffixes]
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
import itertools
|
||||
import multiprocessing.pool
|
||||
import os
|
||||
from typing import Dict
|
||||
from typing import Dict, List
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -298,7 +298,7 @@ def select_new_compilers(compilers, scope=None):
|
||||
return compilers_not_in_config
|
||||
|
||||
|
||||
def supported_compilers():
|
||||
def supported_compilers() -> List[str]:
|
||||
"""Return a set of names of compilers supported by Spack.
|
||||
|
||||
See available_compilers() to get a list of all the available
|
||||
@@ -306,10 +306,41 @@ def supported_compilers():
|
||||
"""
|
||||
# Hack to be able to call the compiler `apple-clang` while still
|
||||
# using a valid python name for the module
|
||||
return sorted(
|
||||
name if name != "apple_clang" else "apple-clang"
|
||||
for name in llnl.util.lang.list_modules(spack.paths.compilers_path)
|
||||
)
|
||||
return sorted(all_compiler_names())
|
||||
|
||||
|
||||
def supported_compilers_for_host_platform() -> List[str]:
|
||||
"""Return a set of compiler class objects supported by Spack
|
||||
that are also supported by the current host platform
|
||||
"""
|
||||
host_plat = spack.platforms.real_host()
|
||||
return supported_compilers_for_platform(host_plat)
|
||||
|
||||
|
||||
def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
|
||||
"""Return a set of compiler class objects supported by Spack
|
||||
that are also supported by the provided platform
|
||||
|
||||
Args:
|
||||
platform (str): string representation of platform
|
||||
for which compiler compatibility should be determined
|
||||
"""
|
||||
return [
|
||||
name
|
||||
for name in supported_compilers()
|
||||
if class_for_compiler_name(name).is_supported_on_platform(platform)
|
||||
]
|
||||
|
||||
|
||||
def all_compiler_names() -> List[str]:
|
||||
def replace_apple_clang(name):
|
||||
return name if name != "apple_clang" else "apple-clang"
|
||||
|
||||
return [replace_apple_clang(name) for name in all_compiler_module_names()]
|
||||
|
||||
|
||||
def all_compiler_module_names() -> List[str]:
|
||||
return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]
|
||||
|
||||
|
||||
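A usage sketch of the new platform-aware helpers; the printed list depends on the host and on which compiler classes Spack ships.

import spack.compilers
import spack.platforms

host = spack.platforms.real_host()
print(spack.compilers.supported_compilers_for_platform(host))
print(spack.compilers.supported_compilers_for_host_platform())  # same result for the local host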
@_auto_compiler_spec
|
||||
@@ -628,7 +659,7 @@ def arguments_to_detect_version_fn(operating_system, paths):
|
||||
def _default(search_paths):
|
||||
command_arguments = []
|
||||
files_to_be_tested = fs.files_in(*search_paths)
|
||||
for compiler_name in spack.compilers.supported_compilers():
|
||||
for compiler_name in spack.compilers.supported_compilers_for_host_platform():
|
||||
compiler_cls = class_for_compiler_name(compiler_name)
|
||||
|
||||
for language in ("cc", "cxx", "f77", "fc"):
|
||||
@@ -687,9 +718,11 @@ def _default(fn_args):
|
||||
value = fn_args._replace(id=compiler_id._replace(version=version))
|
||||
return value, None
|
||||
|
||||
error = "Couldn't get version for compiler {0}".format(path)
|
||||
error = f"Couldn't get version for compiler {path}".format(path)
|
||||
except spack.util.executable.ProcessError as e:
|
||||
error = "Couldn't get version for compiler {0}\n".format(path) + str(e)
|
||||
error = f"Couldn't get version for compiler {path}\n" + str(e)
|
||||
except spack.util.executable.ProcessTimeoutError as e:
|
||||
error = f"Couldn't get version for compiler {path}\n" + str(e)
|
||||
except Exception as e:
|
||||
# Catching "Exception" here is fine because it just
|
||||
# means something went wrong running a candidate executable.
|
||||
|
||||
@@ -112,6 +112,7 @@ def extract_version_from_output(cls, output):
|
||||
match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
|
||||
if match:
|
||||
return ".".join(match.groups())
|
||||
return "unknown"
|
||||
|
||||
@classmethod
|
||||
def fc_version(cls, fortran_compiler):
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from distutils.version import StrictVersion
|
||||
from typing import Dict, List, Set
|
||||
|
||||
import spack.compiler
|
||||
@@ -115,11 +114,11 @@ def command_str(self):
|
||||
|
||||
def get_valid_fortran_pth(comp_ver):
|
||||
cl_ver = str(comp_ver)
|
||||
sort_fn = lambda fc_ver: StrictVersion(fc_ver)
|
||||
sort_fn = lambda fc_ver: Version(fc_ver)
|
||||
sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
|
||||
for ver in sort_fc_ver:
|
||||
if ver in fortran_mapping:
|
||||
if StrictVersion(cl_ver) <= StrictVersion(fortran_mapping[ver]):
|
||||
if Version(cl_ver) <= Version(fortran_mapping[ver]):
|
||||
return fc_path[ver]
|
||||
return None
|
||||
|
||||
@@ -154,9 +153,12 @@ class Msvc(Compiler):
|
||||
|
||||
#: Regex used to extract version from compiler's output
|
||||
version_regex = r"([1-9][0-9]*\.[0-9]*\.[0-9]*)"
|
||||
# The MSVC compiler class overrides this to prevent instances
|
||||
# of erroneous matching on executable names that cannot be msvc
|
||||
# compilers
|
||||
suffixes = []
|
||||
|
||||
# Initialize, deferring to base class but then adding the vcvarsallfile
|
||||
# file based on compiler executable path.
|
||||
is_supported_on_platform = lambda x: isinstance(x, spack.platforms.Windows)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# This positional argument "paths" is later parsed and process by the base class
|
||||
@@ -167,6 +169,8 @@ def __init__(self, *args, **kwargs):
|
||||
cspec = args[0]
|
||||
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
|
||||
paths[:] = new_pth
|
||||
# Initialize, deferring to base class but then adding the vcvarsallfile
|
||||
# file based on compiler executable path.
|
||||
super().__init__(*args, **kwargs)
|
||||
# To use the MSVC compilers, VCVARS must be invoked
|
||||
# VCVARS is located at a fixed location, referencable
|
||||
|
||||
@@ -744,8 +744,11 @@ def concretize_specs_together(*abstract_specs, **kwargs):
|
||||
def _concretize_specs_together_new(*abstract_specs, **kwargs):
|
||||
import spack.solver.asp
|
||||
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
solver = spack.solver.asp.Solver()
|
||||
result = solver.solve(abstract_specs, tests=kwargs.get("tests", False))
|
||||
result = solver.solve(
|
||||
abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
|
||||
)
|
||||
result.raise_if_unsat()
|
||||
return [s.copy() for s in result.specs]
|
||||
|
||||
|
||||
@@ -4,6 +4,9 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import json
|
||||
import os
|
||||
import traceback
|
||||
import warnings
|
||||
|
||||
import jsonschema
|
||||
import jsonschema.exceptions
|
||||
@@ -11,6 +14,7 @@
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.cmd
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.hash_types as hash_types
|
||||
import spack.platforms
|
||||
@@ -45,9 +49,29 @@ def translated_compiler_name(manifest_compiler_name):
|
||||
)
|
||||
|
||||
|
||||
def compiler_from_entry(entry):
|
||||
def compiler_from_entry(entry: dict, manifest_path: str):
|
||||
# Note that manifest_path is only passed here to compose a
|
||||
# useful warning message when paths appear to be missing.
|
||||
compiler_name = translated_compiler_name(entry["name"])
|
||||
paths = entry["executables"]
|
||||
|
||||
if "prefix" in entry:
|
||||
prefix = entry["prefix"]
|
||||
paths = dict(
|
||||
(lang, os.path.join(prefix, relpath))
|
||||
for (lang, relpath) in entry["executables"].items()
|
||||
)
|
||||
else:
|
||||
paths = entry["executables"]
|
||||
|
||||
# Do a check for missing paths. Note that this isn't possible for
|
||||
# all compiler entries, since their "paths" might actually be
|
||||
# exe names like "cc" that depend on modules being loaded. Cray
|
||||
# manifest entries are always paths though.
|
||||
missing_paths = []
|
||||
for path in paths.values():
|
||||
if not os.path.exists(path):
|
||||
missing_paths.append(path)
|
||||
|
||||
# to instantiate a compiler class we may need a concrete version:
|
||||
version = "={}".format(entry["version"])
|
||||
arch = entry["arch"]
|
||||
@@ -56,8 +80,18 @@ def compiler_from_entry(entry):
|
||||
|
||||
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
|
||||
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
|
||||
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
|
||||
return compiler_cls(spec, operating_system, target, paths)
|
||||
path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
|
||||
|
||||
if missing_paths:
|
||||
warnings.warn(
|
||||
"Manifest entry refers to nonexistent paths:\n\t"
|
||||
+ "\n\t".join(missing_paths)
|
||||
+ f"\nfor {str(spec)}"
|
||||
+ f"\nin {manifest_path}"
|
||||
+ "\nPlease report this issue"
|
||||
)
|
||||
|
||||
return compiler_cls(spec, operating_system, target, path_list)
|
||||
|
||||
|
||||
def spec_from_entry(entry):
|
||||
@@ -158,13 +192,13 @@ def entries_to_specs(entries):
|
||||
dependencies = entry["dependencies"]
|
||||
for name, properties in dependencies.items():
|
||||
dep_hash = properties["hash"]
|
||||
deptypes = properties["type"]
|
||||
depflag = dt.canonicalize(properties["type"])
|
||||
if dep_hash in spec_dict:
|
||||
if entry["hash"] not in spec_dict:
|
||||
continue
|
||||
parent_spec = spec_dict[entry["hash"]]
|
||||
dep_spec = spec_dict[dep_hash]
|
||||
parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
|
||||
parent_spec._add_dependency(dep_spec, depflag=depflag, virtuals=())
|
||||
|
||||
for spec in spec_dict.values():
|
||||
spack.spec.reconstruct_virtuals_on_edges(spec)
|
||||
@@ -186,12 +220,21 @@ def read(path, apply_updates):
|
||||
tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
|
||||
compilers = list()
|
||||
if "compilers" in json_data:
|
||||
compilers.extend(compiler_from_entry(x) for x in json_data["compilers"])
|
||||
compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
|
||||
tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
|
||||
# Filter out the compilers that already appear in the configuration
|
||||
compilers = spack.compilers.select_new_compilers(compilers)
|
||||
if apply_updates and compilers:
|
||||
spack.compilers.add_compilers_to_config(compilers, init_config=False)
|
||||
for compiler in compilers:
|
||||
try:
|
||||
spack.compilers.add_compilers_to_config([compiler], init_config=False)
|
||||
except Exception:
|
||||
warnings.warn(
|
||||
f"Could not add compiler {str(compiler.spec)}: "
|
||||
f"\n\tfrom manifest: {path}"
|
||||
"\nPlease reexecute with 'spack -d' and include the stack trace"
|
||||
)
|
||||
tty.debug(f"Include this\n{traceback.format_exc()}")
|
||||
if apply_updates:
|
||||
for spec in specs.values():
|
||||
spack.store.STORE.db.add(spec, directory_layout=None)
|
||||
|
||||
@@ -27,6 +27,8 @@
|
||||
import time
|
||||
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union
|
||||
|
||||
import spack.deptypes as dt
|
||||
|
||||
try:
|
||||
import uuid
|
||||
|
||||
@@ -89,7 +91,7 @@
|
||||
|
||||
#: Types of dependencies tracked by the database
|
||||
#: We store by DAG hash, so we track the dependencies that the DAG hash includes.
|
||||
_TRACKED_DEPENDENCIES = ht.dag_hash.deptype
|
||||
_TRACKED_DEPENDENCIES = ht.dag_hash.depflag
|
||||
|
||||
#: Default list of fields written for each install record
|
||||
DEFAULT_INSTALL_RECORD_FIELDS = (
|
||||
@@ -795,7 +797,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
|
||||
tty.warn(msg)
|
||||
continue
|
||||
|
||||
spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
|
||||
spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)
|
||||
|
||||
def _read_from_file(self, filename):
|
||||
"""Fill database from file, do not maintain old data.
|
||||
@@ -1146,7 +1148,7 @@ def _add(
|
||||
# Retrieve optional arguments
|
||||
installation_time = installation_time or _now()
|
||||
|
||||
for edge in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
|
||||
for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
|
||||
if edge.spec.dag_hash() in self._data:
|
||||
continue
|
||||
# allow missing build-only deps. This prevents excessive
|
||||
@@ -1154,7 +1156,7 @@ def _add(
|
||||
# is missing a build dep; there's no need to install the
|
||||
# build dep's build dep first, and there's no need to warn
|
||||
# about it missing.
|
||||
dep_allow_missing = allow_missing or edge.deptypes == ("build",)
|
||||
dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
|
||||
self._add(
|
||||
edge.spec,
|
||||
directory_layout,
|
||||
@@ -1198,10 +1200,10 @@ def _add(
|
||||
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
|
||||
|
||||
# Connect dependencies from the DB to the new copy.
|
||||
for dep in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
|
||||
for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
|
||||
dkey = dep.spec.dag_hash()
|
||||
upstream, record = self.query_by_spec_hash(dkey)
|
||||
new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
|
||||
new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
|
||||
if not upstream:
|
||||
record.ref_count += 1
|
||||
|
||||
@@ -1371,7 +1373,13 @@ def deprecate(self, spec, deprecator):
|
||||
return self._deprecate(spec, deprecator)
|
||||
|
||||
@_autospec
|
||||
def installed_relatives(self, spec, direction="children", transitive=True, deptype="all"):
|
||||
def installed_relatives(
|
||||
self,
|
||||
spec,
|
||||
direction="children",
|
||||
transitive=True,
|
||||
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
|
||||
):
|
||||
"""Return installed specs related to this one."""
|
||||
if direction not in ("parents", "children"):
|
||||
raise ValueError("Invalid direction: %s" % direction)
|
||||
|
||||
@@ -3,64 +3,11 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Data structures that represent Spack's dependency relationships."""
|
||||
from typing import Dict, List, Optional, Set, Tuple, Union
|
||||
from typing import Dict, List
|
||||
|
||||
import spack.deptypes as dt
|
||||
import spack.spec
|
||||
|
||||
#: The types of dependency relationships that Spack understands.
|
||||
all_deptypes = ("build", "link", "run", "test")
|
||||
|
||||
#: Default dependency type if none is specified
|
||||
default_deptype = ("build", "link")
|
||||
|
||||
#: Type hint for the arguments accepting a dependency type
|
||||
DependencyArgument = Union[str, List[str], Tuple[str, ...]]
|
||||
|
||||
|
||||
def deptype_chars(*type_tuples: str) -> str:
|
||||
"""Create a string representing deptypes for many dependencies.
|
||||
|
||||
The string will be some subset of 'blrt', like 'bl ', 'b t', or
|
||||
' lr ' where each letter in 'blrt' stands for 'build', 'link',
|
||||
'run', and 'test' (the dependency types).
|
||||
|
||||
For a single dependency, this just indicates that the dependency has
|
||||
the indicated deptypes. For a list of dependencies, this shows
whether ANY dependency in the list has the deptypes (so the deptypes
are merged).
|
||||
"""
|
||||
types: Set[str] = set()
|
||||
for t in type_tuples:
|
||||
if t:
|
||||
types.update(t)
|
||||
|
||||
return "".join(t[0] if t in types else " " for t in all_deptypes)
|
||||
|
||||
|
||||
def canonical_deptype(deptype: DependencyArgument) -> Tuple[str, ...]:
|
||||
"""Convert deptype to a canonical sorted tuple, or raise ValueError.
|
||||
|
||||
Args:
|
||||
deptype: string representing dependency type, or a list/tuple of such strings.
|
||||
Can also be the builtin function ``all`` or the string 'all', which result in
|
||||
a tuple of all dependency types known to Spack.
|
||||
"""
|
||||
if deptype in ("all", all):
|
||||
return all_deptypes
|
||||
|
||||
elif isinstance(deptype, str):
|
||||
if deptype not in all_deptypes:
|
||||
raise ValueError("Invalid dependency type: %s" % deptype)
|
||||
return (deptype,)
|
||||
|
||||
elif isinstance(deptype, (tuple, list, set)):
|
||||
bad = [d for d in deptype if d not in all_deptypes]
|
||||
if bad:
|
||||
raise ValueError("Invalid dependency types: %s" % ",".join(str(t) for t in bad))
|
||||
return tuple(sorted(set(deptype)))
|
||||
|
||||
raise ValueError("Invalid dependency type: %s" % repr(deptype))
|
||||
|
||||
|
||||
class Dependency:
|
||||
"""Class representing metadata for a dependency on a package.
|
||||
@@ -93,7 +40,7 @@ def __init__(
         self,
         pkg: "spack.package_base.PackageBase",
         spec: "spack.spec.Spec",
-        type: Optional[Tuple[str, ...]] = default_deptype,
+        depflag: dt.DepFlag = dt.DEFAULT,
     ):
         """Create a new Dependency.
 
@@ -110,11 +57,7 @@ def __init__(
         # This dict maps condition specs to lists of Patch objects, just
         # as the patches dict on packages does.
         self.patches: Dict[spack.spec.Spec, "List[spack.patch.Patch]"] = {}
-
-        if type is None:
-            self.type = set(default_deptype)
-        else:
-            self.type = set(type)
+        self.depflag = depflag
 
     @property
     def name(self) -> str:
@@ -124,7 +67,7 @@ def name(self) -> str:
     def merge(self, other: "Dependency"):
         """Merge constraints, deptypes, and patches of other into self."""
         self.spec.constrain(other.spec)
-        self.type |= other.type
+        self.depflag |= other.depflag
 
         # concatenate patch lists, or just copy them in
         for cond, p in other.patches.items():
@@ -135,5 +78,5 @@ def merge(self, other: "Dependency"):
                 self.patches[cond] = other.patches[cond]
 
     def __repr__(self) -> str:
-        types = deptype_chars(*self.type)
+        types = dt.flag_to_chars(self.depflag)
         return f"<Dependency: {self.pkg.name} -> {self.spec} [{types}]>"
lib/spack/spack/deptypes.py (new file, 123 lines)
@@ -0,0 +1,123 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's edge types."""

from typing import Iterable, List, Tuple, Union

#: Type hint for the low-level dependency input (enum.Flag is too slow)
DepFlag = int

#: Type hint for the high-level dependency input
DepTypes = Union[str, List[str], Tuple[str, ...]]

#: Individual dependency types
DepType = str  # Python 3.8: Literal["build", "link", "run", "test"]

# Flag values. NOTE: these values are not arbitrary, since hash computation imposes
# the order (link, run, build, test) when depending on the same package multiple times,
# and we rely on default integer comparison to sort dependency types.
# New dependency types should be appended.
LINK = 0b0001
RUN = 0b0010
BUILD = 0b0100
TEST = 0b1000

#: The types of dependency relationships that Spack understands.
ALL_TYPES: Tuple[DepType, ...] = ("build", "link", "run", "test")

#: Default dependency type if none is specified
DEFAULT_TYPES: Tuple[DepType, ...] = ("build", "link")

#: A flag with all dependency types set
ALL: DepFlag = BUILD | LINK | RUN | TEST

#: Default dependency type if none is specified
DEFAULT: DepFlag = BUILD | LINK

#: An iterator of all flag components
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)


def flag_from_string(s: str) -> DepFlag:
    if s == "build":
        return BUILD
    elif s == "link":
        return LINK
    elif s == "run":
        return RUN
    elif s == "test":
        return TEST
    else:
        raise ValueError(f"Invalid dependency type: {s}")


def flag_from_strings(deptype: Iterable[str]) -> DepFlag:
    """Transform an iterable of deptype strings into a flag."""
    flag = 0
    for deptype_str in deptype:
        flag |= flag_from_string(deptype_str)
    return flag


def canonicalize(deptype: DepTypes) -> DepFlag:
    """Convert deptype user input to a DepFlag, or raise ValueError.

    Args:
        deptype: string representing dependency type, or a list/tuple of such strings.
            Can also be the builtin function ``all`` or the string 'all', which result in
            a tuple of all dependency types known to Spack.
    """
    if deptype in ("all", all):
        return ALL

    if isinstance(deptype, str):
        return flag_from_string(deptype)

    if isinstance(deptype, (tuple, list, set)):
        return flag_from_strings(deptype)

    raise ValueError(f"Invalid dependency type: {deptype!r}")


def flag_to_tuple(x: DepFlag) -> Tuple[DepType, ...]:
    deptype: List[DepType] = []
    if x & BUILD:
        deptype.append("build")
    if x & LINK:
        deptype.append("link")
    if x & RUN:
        deptype.append("run")
    if x & TEST:
        deptype.append("test")
    return tuple(deptype)


def flag_to_string(x: DepFlag) -> DepType:
    if x == BUILD:
        return "build"
    elif x == LINK:
        return "link"
    elif x == RUN:
        return "run"
    elif x == TEST:
        return "test"
    else:
        raise ValueError(f"Invalid dependency type flag: {x}")


def flag_to_chars(depflag: DepFlag) -> str:
    """Create a string representing deptypes for many dependencies.

    The string will be some subset of 'blrt', like 'bl ', 'b t', or
    ' lr ' where each letter in 'blrt' stands for 'build', 'link',
    'run', and 'test' (the dependency types).

    For a single dependency, this just indicates that the dependency has
    the indicated deptypes. For a list of dependencies, this shows
    whether ANY dependency in the list has the deptypes (so the deptypes
    are merged)."""
    return "".join(
        t_str[0] if t_flag & depflag else " " for t_str, t_flag in zip(ALL_TYPES, ALL_FLAGS)
    )
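Not part of the diff: a minimal usage sketch of the new module, assuming it is imported as spack.deptypes. Flags compose with |, membership is tested with &, and the helpers above convert between the old tuple form and the flag form.

import spack.deptypes as dt

# "build" and "link" each map to a single bit; together they are the default flag
flag = dt.canonicalize(("build", "link"))
assert flag == dt.BUILD | dt.LINK == dt.DEFAULT

# round-trip back to the tuple form, and render the compact 'blrt' column
assert dt.flag_to_tuple(flag) == ("build", "link")
assert dt.flag_to_chars(flag) == "bl  "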
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from .common import DetectedPackage, executable_prefix, update_configuration
 from .path import by_path, executables_in_path
+from .test import detection_tests
 
 __all__ = [
     "DetectedPackage",
@@ -11,4 +12,5 @@
     "executables_in_path",
     "executable_prefix",
     "update_configuration",
+    "detection_tests",
 ]
@@ -2,7 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Detection of software installed in the system based on paths inspections
+"""Detection of software installed in the system, based on paths inspections
 and running executables.
 """
 import collections
@@ -322,12 +322,14 @@ def by_path(
     path_hints: Optional[List[str]] = None,
     max_workers: Optional[int] = None,
 ) -> Dict[str, List[DetectedPackage]]:
-    """Return the list of packages that have been detected on the system,
-    searching by path.
+    """Return the list of packages that have been detected on the system, keyed by
+    unqualified package name.
 
     Args:
-        packages_to_search: list of package classes to be detected
+        packages_to_search: list of packages to be detected. Each package can be either
+            unqualified or fully qualified
         path_hints: initial list of paths to be searched
         max_workers: maximum number of workers to search for packages in parallel
     """
     # TODO: Packages should be able to define both .libraries and .executables in the future
     # TODO: determine_spec_details should get all relevant libraries and executables in one call
@@ -355,7 +357,8 @@ def by_path(
     try:
         detected = future.result(timeout=DETECTION_TIMEOUT)
         if detected:
-            result[pkg_name].extend(detected)
+            _, unqualified_name = spack.repo.partition_package_name(pkg_name)
+            result[unqualified_name].extend(detected)
     except Exception:
         llnl.util.tty.debug(
             f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
lib/spack/spack/detection/test.py (new file, 187 lines)
@@ -0,0 +1,187 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Create and run mock e2e tests for package detection."""
import collections
import contextlib
import pathlib
import tempfile
from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple

import jinja2

from llnl.util import filesystem

import spack.repo
import spack.spec
from spack.util import spack_yaml

from .path import by_path


class MockExecutables(NamedTuple):
    """Mock executables to be used in detection tests"""

    #: Relative paths for mock executables to be created
    executables: List[str]
    #: Shell script for the mock executable
    script: str


class ExpectedTestResult(NamedTuple):
    """Data structure to model assertions on detection tests"""

    #: Spec to be detected
    spec: str


class DetectionTest(NamedTuple):
    """Data structure to construct detection tests by PATH inspection.

    Packages may have a YAML file containing the description of one or more detection tests
    to be performed. Each test creates a few mock executable scripts in a temporary folder,
    and checks that detection by PATH gives the expected results.
    """

    pkg_name: str
    layout: List[MockExecutables]
    results: List[ExpectedTestResult]


class Runner:
    """Runs an external detection test"""

    def __init__(self, *, test: DetectionTest, repository: spack.repo.RepoPath) -> None:
        self.test = test
        self.repository = repository
        self.tmpdir = tempfile.TemporaryDirectory()

    def execute(self) -> List[spack.spec.Spec]:
        """Executes a test and returns the specs that have been detected.

        This function sets up a test in a temporary directory, according to the prescriptions
        in the test layout, then performs a detection by executables and returns the specs that
        have been detected.
        """
        with self._mock_layout() as path_hints:
            entries = by_path([self.test.pkg_name], path_hints=path_hints)
            _, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
            specs = set(x.spec for x in entries[unqualified_name])
        return list(specs)

    @contextlib.contextmanager
    def _mock_layout(self) -> Generator[List[str], None, None]:
        hints = set()
        try:
            for entry in self.test.layout:
                exes = self._create_executable_scripts(entry)

                for mock_executable in exes:
                    hints.add(str(mock_executable.parent))

            yield list(hints)
        finally:
            self.tmpdir.cleanup()

    def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
        relative_paths = mock_executables.executables
        script = mock_executables.script
        script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
        result = []
        for mock_exe_path in relative_paths:
            rel_path = pathlib.Path(mock_exe_path)
            abs_path = pathlib.Path(self.tmpdir.name) / rel_path
            abs_path.parent.mkdir(parents=True, exist_ok=True)
            abs_path.write_text(script_template.render(script=script))
            filesystem.set_executable(abs_path)
            result.append(abs_path)
        return result

    @property
    def expected_specs(self) -> List[spack.spec.Spec]:
        return [spack.spec.Spec(r.spec) for r in self.test.results]


def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
    """Returns a list of test runners for a given package.

    Currently, detection tests are specified in a YAML file, called ``detection_test.yaml``,
    alongside the ``package.py`` file.

    This function reads that file to create a bunch of ``Runner`` objects.

    Args:
        pkg_name: name of the package to test
        repository: repository where the package lives
    """
    result = []
    detection_tests_content = read_detection_tests(pkg_name, repository)

    tests_by_path = detection_tests_content.get("paths", [])
    for single_test_data in tests_by_path:
        mock_executables = []
        for layout in single_test_data["layout"]:
            mock_executables.append(
                MockExecutables(executables=layout["executables"], script=layout["script"])
            )
        expected_results = []
        for assertion in single_test_data["results"]:
            expected_results.append(ExpectedTestResult(spec=assertion["spec"]))

        current_test = DetectionTest(
            pkg_name=pkg_name, layout=mock_executables, results=expected_results
        )
        result.append(Runner(test=current_test, repository=repository))

    return result


def read_detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> Dict[str, Any]:
    """Returns the normalized content of the detection_test.yaml associated with the package
    passed in input.

    The content is merged with that of any package that is transitively included using the
    "includes" attribute.

    Args:
        pkg_name: name of the package to test
        repository: repository in which to search for packages
    """
    content_stack, seen = [], set()
    included_packages: Deque[str] = collections.deque()

    root_detection_yaml, result = _detection_tests_yaml(pkg_name, repository)
    included_packages.extend(result.get("includes", []))
    seen |= set(result.get("includes", []))

    while included_packages:
        current_package = included_packages.popleft()
        try:
            current_detection_yaml, content = _detection_tests_yaml(current_package, repository)
        except FileNotFoundError as e:
            msg = (
                f"cannot read the detection tests from the '{current_package}' package, "
                f"included by {root_detection_yaml}"
            )
            raise FileNotFoundError(msg + f"\n\n\t{e}\n")

        content_stack.append((current_package, content))
        included_packages.extend(x for x in content.get("includes", []) if x not in seen)
        seen |= set(content.get("includes", []))

    result.setdefault("paths", [])
    for pkg_name, content in content_stack:
        result["paths"].extend(content.get("paths", []))

    return result


def _detection_tests_yaml(
    pkg_name: str, repository: spack.repo.RepoPath
) -> Tuple[pathlib.Path, Dict[str, Any]]:
    pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
    detection_tests_yaml = pkg_dir / "detection_test.yaml"
    with open(str(detection_tests_yaml)) as f:
        content = spack_yaml.load(f)
    return detection_tests_yaml, content
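Not part of the diff: a hedged sketch of how the new detection-test machinery is driven, based on the reader above. Each package may ship a detection_test.yaml whose "paths" entries hold a "layout" (mock executables plus a shell script) and "results" (expected specs). The package name below is only an example.

import spack.repo
from spack.detection import detection_tests

# run every detection test declared for a package and compare against expectations
for runner in detection_tests("gcc", repository=spack.repo.PATH):
    detected = runner.execute()
    assert set(detected) == set(runner.expected_specs)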
@@ -38,13 +38,14 @@ class OpenMpi(Package):
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty.color
|
||||
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.patch
|
||||
import spack.spec
|
||||
import spack.url
|
||||
import spack.util.crypto
|
||||
import spack.variant
|
||||
from spack.dependency import Dependency, canonical_deptype, default_deptype
|
||||
from spack.dependency import Dependency
|
||||
from spack.fetch_strategy import from_kwargs
|
||||
from spack.resource import Resource
|
||||
from spack.version import (
|
||||
@@ -436,7 +437,7 @@ def _execute_version(pkg, ver, **kwargs):
|
||||
pkg.versions[version] = kwargs
|
||||
|
||||
|
||||
def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
|
||||
def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
|
||||
when_spec = make_when_spec(when)
|
||||
if not when_spec:
|
||||
return
|
||||
@@ -447,7 +448,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
|
||||
if pkg.name == dep_spec.name:
|
||||
raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)
|
||||
|
||||
type = canonical_deptype(type)
|
||||
depflag = dt.canonicalize(type)
|
||||
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
|
||||
|
||||
# call this patches here for clarity -- we want patch to be a list,
|
||||
@@ -477,12 +478,12 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
|
||||
|
||||
# this is where we actually add the dependency to this package
|
||||
if when_spec not in conditions:
|
||||
dependency = Dependency(pkg, dep_spec, type=type)
|
||||
dependency = Dependency(pkg, dep_spec, depflag=depflag)
|
||||
conditions[when_spec] = dependency
|
||||
else:
|
||||
dependency = conditions[when_spec]
|
||||
dependency.spec.constrain(dep_spec, deps=False)
|
||||
dependency.type |= set(type)
|
||||
dependency.depflag |= depflag
|
||||
|
||||
# apply patches to the dependency
|
||||
for execute_patch in patches:
|
||||
@@ -525,7 +526,7 @@ def _execute_conflicts(pkg):
|
||||
|
||||
|
||||
@directive(("dependencies"))
|
||||
def depends_on(spec, when=None, type=default_deptype, patches=None):
|
||||
def depends_on(spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
|
||||
"""Creates a dict of deps with specs defining when they apply.
|
||||
|
||||
Args:
|
||||
|
||||
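Not part of the diff: an illustrative sketch of what the directive change above does with a type= argument. The deptype tuple accepted by depends_on is collapsed into a single DepFlag before being stored on the Dependency.

import spack.deptypes as dt

# e.g. depends_on("cmake", type="build") and depends_on("zlib", type=("link", "run"))
depflag = dt.canonicalize("build")
depflag |= dt.canonicalize(("link", "run"))
assert dt.flag_to_tuple(depflag) == ("build", "link", "run")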
@@ -120,10 +120,8 @@ def write_host_environment(self, spec):
|
||||
versioning. We use it in the case that an analysis later needs to
|
||||
easily access this information.
|
||||
"""
|
||||
from spack.util.environment import get_host_environment_metadata
|
||||
|
||||
env_file = self.env_metadata_path(spec)
|
||||
environ = get_host_environment_metadata()
|
||||
environ = spack.spec.get_host_environment_metadata()
|
||||
with open(env_file, "w") as fd:
|
||||
sjson.dump(environ, fd)
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
|
||||
import spack.deptypes as dt
|
||||
import spack.environment.environment as ev
|
||||
import spack.spec
|
||||
import spack.traverse as traverse
|
||||
@@ -36,7 +37,9 @@ def from_string(s: str) -> "UseBuildCache":
|
||||
def _deptypes(use_buildcache: UseBuildCache):
|
||||
"""What edges should we follow for a given node? If it's a cache-only
|
||||
node, then we can drop build type deps."""
|
||||
return ("link", "run") if use_buildcache == UseBuildCache.ONLY else ("build", "link", "run")
|
||||
return (
|
||||
dt.LINK | dt.RUN if use_buildcache == UseBuildCache.ONLY else dt.BUILD | dt.LINK | dt.RUN
|
||||
)
|
||||
|
||||
|
||||
class DepfileNode:
|
||||
@@ -69,13 +72,13 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
|
||||
self.adjacency_list: List[DepfileNode] = []
|
||||
self.pkg_buildcache = pkg_buildcache
|
||||
self.deps_buildcache = deps_buildcache
|
||||
self.deptypes_root = _deptypes(pkg_buildcache)
|
||||
self.deptypes_deps = _deptypes(deps_buildcache)
|
||||
self.depflag_root = _deptypes(pkg_buildcache)
|
||||
self.depflag_deps = _deptypes(deps_buildcache)
|
||||
|
||||
def neighbors(self, node):
|
||||
"""Produce a list of spec to follow from node"""
|
||||
deptypes = self.deptypes_root if node.depth == 0 else self.deptypes_deps
|
||||
return traverse.sort_edges(node.edge.spec.edges_to_dependencies(deptype=deptypes))
|
||||
depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
|
||||
return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))
|
||||
|
||||
def accept(self, node):
|
||||
self.adjacency_list.append(
|
||||
|
||||
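Not part of the diff: the _deptypes change above in condensed form; follow() is an illustrative stand-in, not a real Spack function. Cache-only nodes drop build edges, all other nodes keep build, link and run.

import spack.deptypes as dt

def follow(cache_only: bool) -> dt.DepFlag:
    # mirrors _deptypes(): no build edges for nodes installed straight from a buildcache
    return dt.LINK | dt.RUN if cache_only else dt.BUILD | dt.LINK | dt.RUN

assert follow(True) == dt.LINK | dt.RUN
assert follow(False) & dt.BUILD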
@@ -28,6 +28,7 @@
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.fetch_strategy
|
||||
import spack.hash_types as ht
|
||||
@@ -403,7 +404,7 @@ def _write_yaml(data, str_or_file):
|
||||
|
||||
def _eval_conditional(string):
|
||||
"""Evaluate conditional definitions using restricted variable scope."""
|
||||
valid_variables = spack.util.environment.get_host_environment()
|
||||
valid_variables = spack.spec.get_host_environment()
|
||||
valid_variables.update({"re": re, "env": os.environ})
|
||||
return eval(string, valid_variables)
|
||||
|
||||
@@ -1395,7 +1396,10 @@ def _concretize_together_where_possible(
|
||||
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
for result in solver.solve_in_rounds(specs_to_concretize, tests=tests):
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
for result in solver.solve_in_rounds(
|
||||
specs_to_concretize, tests=tests, allow_deprecated=allow_deprecated
|
||||
):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
|
||||
result = []
|
||||
@@ -1536,13 +1540,13 @@ def _concretize_separately(self, tests=False):
|
||||
for h in self.specs_by_hash:
|
||||
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
|
||||
for node in computed_spec.traverse():
|
||||
test_edges = node.edges_to_dependencies(deptype="test")
|
||||
test_edges = node.edges_to_dependencies(depflag=dt.TEST)
|
||||
for current_edge in test_edges:
|
||||
test_dependency = current_edge.spec
|
||||
if test_dependency in current_spec[node.name]:
|
||||
continue
|
||||
current_spec[node.name].add_dependency_edge(
|
||||
test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
|
||||
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
|
||||
)
|
||||
|
||||
results = [
|
||||
@@ -2190,7 +2194,7 @@ def _read_lockfile_dict(self, d):
|
||||
name, data = reader.name_and_data(node_dict)
|
||||
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
|
||||
specs_by_hash[lockfile_key]._add_dependency(
|
||||
specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
|
||||
specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
|
||||
)
|
||||
|
||||
# Traverse the root specs one at a time in the order they appear.
|
||||
|
||||
@@ -128,3 +128,7 @@ def __init__(self, provided, required, constraint_type):
|
||||
self.provided = provided
|
||||
self.required = required
|
||||
self.constraint_type = constraint_type
|
||||
|
||||
|
||||
class FetchError(SpackError):
|
||||
"""Superclass for fetch-related errors."""
|
||||
|
||||
@@ -31,9 +31,11 @@
|
||||
import urllib.parse
|
||||
from typing import List, Optional
|
||||
|
||||
import llnl.url
|
||||
import llnl.util
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.string import comma_and, quote
|
||||
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
@@ -46,9 +48,8 @@
|
||||
import spack.util.web as web_util
|
||||
import spack.version
|
||||
import spack.version.git_ref_lookup
|
||||
from spack.util.compression import decompressor_for, extension_from_path
|
||||
from spack.util.compression import decompressor_for
|
||||
from spack.util.executable import CommandNotFoundError, which
|
||||
from spack.util.string import comma_and, quote
|
||||
|
||||
#: List of all fetch strategies, created by FetchStrategy metaclass.
|
||||
all_strategies = []
|
||||
@@ -400,7 +401,7 @@ def _fetch_curl(self, url):
|
||||
|
||||
try:
|
||||
web_util.check_curl_code(curl.returncode)
|
||||
except web_util.FetchError as err:
|
||||
except spack.error.FetchError as err:
|
||||
raise spack.fetch_strategy.FailedDownloadError(url, str(err))
|
||||
|
||||
self._check_headers(headers)
|
||||
@@ -441,7 +442,7 @@ def expand(self):
|
||||
|
||||
# TODO: replace this by mime check.
|
||||
if not self.extension:
|
||||
self.extension = spack.url.determine_url_file_extension(self.url)
|
||||
self.extension = llnl.url.determine_url_file_extension(self.url)
|
||||
|
||||
if self.stage.expanded:
|
||||
tty.debug("Source already staged to %s" % self.stage.source_path)
|
||||
@@ -570,7 +571,7 @@ def expand(self):
|
||||
|
||||
@_needs_stage
|
||||
def archive(self, destination, **kwargs):
|
||||
assert extension_from_path(destination) == "tar.gz"
|
||||
assert llnl.url.extension_from_path(destination) == "tar.gz"
|
||||
assert self.stage.source_path.startswith(self.stage.path)
|
||||
|
||||
tar = which("tar", required=True)
|
||||
@@ -733,7 +734,11 @@ def version_from_git(git_exe):
|
||||
@property
|
||||
def git(self):
|
||||
if not self._git:
|
||||
self._git = spack.util.git.git()
|
||||
try:
|
||||
self._git = spack.util.git.git(required=True)
|
||||
except CommandNotFoundError as exc:
|
||||
tty.error(str(exc))
|
||||
raise
|
||||
|
||||
# Disable advice for a quieter fetch
|
||||
# https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
|
||||
@@ -1289,7 +1294,7 @@ def fetch(self):
|
||||
|
||||
parsed_url = urllib.parse.urlparse(self.url)
|
||||
if parsed_url.scheme != "s3":
|
||||
raise web_util.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
|
||||
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
|
||||
|
||||
tty.debug("Fetching {0}".format(self.url))
|
||||
|
||||
@@ -1336,7 +1341,7 @@ def fetch(self):
|
||||
|
||||
parsed_url = urllib.parse.urlparse(self.url)
|
||||
if parsed_url.scheme != "gs":
|
||||
raise web_util.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
|
||||
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
|
||||
|
||||
tty.debug("Fetching {0}".format(self.url))
|
||||
|
||||
@@ -1430,7 +1435,7 @@ def from_kwargs(**kwargs):
|
||||
on attribute names (e.g., ``git``, ``hg``, etc.)
|
||||
|
||||
Raises:
|
||||
spack.util.web.FetchError: If no ``fetch_strategy`` matches the args.
|
||||
spack.error.FetchError: If no ``fetch_strategy`` matches the args.
|
||||
"""
|
||||
for fetcher in all_strategies:
|
||||
if fetcher.matches(kwargs):
|
||||
@@ -1537,7 +1542,7 @@ def for_package_version(pkg, version=None):
|
||||
# if it's a commit, we must use a GitFetchStrategy
|
||||
if isinstance(version, spack.version.GitVersion):
|
||||
if not hasattr(pkg, "git"):
|
||||
raise web_util.FetchError(
|
||||
raise spack.error.FetchError(
|
||||
f"Cannot fetch git version for {pkg.name}. Package has no 'git' attribute"
|
||||
)
|
||||
# Populate the version with comparisons to other commits
|
||||
@@ -1687,11 +1692,11 @@ def destroy(self):
|
||||
shutil.rmtree(self.root, ignore_errors=True)
|
||||
|
||||
|
||||
class NoCacheError(web_util.FetchError):
|
||||
class NoCacheError(spack.error.FetchError):
|
||||
"""Raised when there is no cached archive for a package."""
|
||||
|
||||
|
||||
class FailedDownloadError(web_util.FetchError):
|
||||
class FailedDownloadError(spack.error.FetchError):
|
||||
"""Raised when a download fails."""
|
||||
|
||||
def __init__(self, url, msg=""):
|
||||
@@ -1699,23 +1704,23 @@ def __init__(self, url, msg=""):
|
||||
self.url = url
|
||||
|
||||
|
||||
class NoArchiveFileError(web_util.FetchError):
|
||||
class NoArchiveFileError(spack.error.FetchError):
|
||||
"""Raised when an archive file is expected but none exists."""
|
||||
|
||||
|
||||
class NoDigestError(web_util.FetchError):
|
||||
class NoDigestError(spack.error.FetchError):
|
||||
"""Raised after attempt to checksum when URL has no digest."""
|
||||
|
||||
|
||||
class ExtrapolationError(web_util.FetchError):
|
||||
class ExtrapolationError(spack.error.FetchError):
|
||||
"""Raised when we can't extrapolate a version for a package."""
|
||||
|
||||
|
||||
class FetcherConflict(web_util.FetchError):
|
||||
class FetcherConflict(spack.error.FetchError):
|
||||
"""Raised for packages with invalid fetch attributes."""
|
||||
|
||||
|
||||
class InvalidArgsError(web_util.FetchError):
|
||||
class InvalidArgsError(spack.error.FetchError):
|
||||
"""Raised when a version can't be deduced from a set of arguments."""
|
||||
|
||||
def __init__(self, pkg=None, version=None, **args):
|
||||
@@ -1728,11 +1733,11 @@ def __init__(self, pkg=None, version=None, **args):
|
||||
super().__init__(msg, long_msg)
|
||||
|
||||
|
||||
class ChecksumError(web_util.FetchError):
|
||||
class ChecksumError(spack.error.FetchError):
|
||||
"""Raised when archive fails to checksum."""
|
||||
|
||||
|
||||
class NoStageError(web_util.FetchError):
|
||||
class NoStageError(spack.error.FetchError):
|
||||
"""Raised when fetch operations are called before set_stage()."""
|
||||
|
||||
def __init__(self, method):
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import urllib.parse
|
||||
import urllib.response
|
||||
from urllib.error import URLError
|
||||
from urllib.request import BaseHandler
|
||||
|
||||
|
||||
def gcs_open(req, *args, **kwargs):
|
||||
"""Open a reader stream to a blob object on GCS"""
|
||||
import spack.util.gcs as gcs_util
|
||||
|
||||
url = urllib.parse.urlparse(req.get_full_url())
|
||||
gcsblob = gcs_util.GCSBlob(url)
|
||||
|
||||
if not gcsblob.exists():
|
||||
raise URLError("GCS blob {0} does not exist".format(gcsblob.blob_path))
|
||||
stream = gcsblob.get_blob_byte_stream()
|
||||
headers = gcsblob.get_blob_headers()
|
||||
|
||||
return urllib.response.addinfourl(stream, headers, url)
|
||||
|
||||
|
||||
class GCSHandler(BaseHandler):
|
||||
def gs_open(self, req):
|
||||
return gcs_open(req)
|
||||
@@ -38,11 +38,12 @@
|
||||
"""
|
||||
import enum
|
||||
import sys
|
||||
from typing import List, Optional, Set, TextIO, Tuple, Union
|
||||
from typing import List, Optional, Set, TextIO, Tuple
|
||||
|
||||
import llnl.util.tty.color
|
||||
|
||||
import spack.dependency
|
||||
import spack.deptypes as dt
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.tengine
|
||||
|
||||
@@ -78,7 +79,7 @@ def __init__(self):
|
||||
self.node_character = "o"
|
||||
self.debug = False
|
||||
self.indent = 0
|
||||
self.deptype = spack.dependency.all_deptypes
|
||||
self.depflag = dt.ALL
|
||||
|
||||
# These are colors in the order they'll be used for edges.
|
||||
# See llnl.util.tty.color for details on color characters.
|
||||
@@ -326,7 +327,7 @@ def write(self, spec, color=None, out=None):
|
||||
nodes_in_topological_order = [
|
||||
edge.spec
|
||||
for edge in spack.traverse.traverse_edges_topo(
|
||||
[spec], direction="children", deptype=self.deptype
|
||||
[spec], direction="children", deptype=self.depflag
|
||||
)
|
||||
]
|
||||
nodes_in_topological_order.reverse()
|
||||
@@ -424,7 +425,7 @@ def write(self, spec, color=None, out=None):
|
||||
|
||||
# Replace node with its dependencies
|
||||
self._frontier.pop(i)
|
||||
edges = sorted(node.edges_to_dependencies(deptype=self.deptype), reverse=True)
|
||||
edges = sorted(node.edges_to_dependencies(depflag=self.depflag), reverse=True)
|
||||
if edges:
|
||||
deps = [e.spec.dag_hash() for e in edges]
|
||||
self._connect_deps(i, deps, "new-deps") # anywhere.
|
||||
@@ -433,13 +434,14 @@ def write(self, spec, color=None, out=None):
|
||||
self._collapse_line(i)
|
||||
|
||||
|
||||
def graph_ascii(spec, node="o", out=None, debug=False, indent=0, color=None, deptype="all"):
|
||||
def graph_ascii(
|
||||
spec, node="o", out=None, debug=False, indent=0, color=None, depflag: dt.DepFlag = dt.ALL
|
||||
):
|
||||
graph = AsciiGraph()
|
||||
graph.debug = debug
|
||||
graph.indent = indent
|
||||
graph.node_character = node
|
||||
if deptype:
|
||||
graph.deptype = spack.dependency.canonical_deptype(deptype)
|
||||
graph.depflag = depflag
|
||||
|
||||
graph.write(spec, color=color, out=out)
|
||||
|
||||
@@ -513,7 +515,7 @@ def __init__(self):
|
||||
|
||||
def visit(self, edge):
|
||||
if edge.parent is None:
|
||||
for node in spack.traverse.traverse_nodes([edge.spec], deptype=("link", "run")):
|
||||
for node in spack.traverse.traverse_nodes([edge.spec], deptype=dt.LINK | dt.RUN):
|
||||
self.main_unified_space.add(node.dag_hash())
|
||||
super().visit(edge)
|
||||
|
||||
@@ -529,40 +531,38 @@ def edge_entry(self, edge):
|
||||
return (
|
||||
edge.parent.dag_hash(),
|
||||
edge.spec.dag_hash(),
|
||||
f"[color=\"{':'.join(colormap[x] for x in edge.deptypes)}\"]",
|
||||
f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\"]",
|
||||
)
|
||||
|
||||
|
||||
def _static_edges(specs, deptype):
|
||||
def _static_edges(specs, depflag):
|
||||
for spec in specs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)
|
||||
possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
|
||||
|
||||
for parent_name, dependencies in possible.items():
|
||||
for dependency_name in dependencies:
|
||||
yield spack.spec.DependencySpec(
|
||||
spack.spec.Spec(parent_name),
|
||||
spack.spec.Spec(dependency_name),
|
||||
deptypes=deptype,
|
||||
depflag=depflag,
|
||||
virtuals=(),
|
||||
)
|
||||
|
||||
|
||||
def static_graph_dot(
|
||||
specs: List[spack.spec.Spec],
|
||||
deptype: Optional[Union[str, Tuple[str, ...]]] = "all",
|
||||
out: Optional[TextIO] = None,
|
||||
specs: List[spack.spec.Spec], depflag: dt.DepFlag = dt.ALL, out: Optional[TextIO] = None
|
||||
):
|
||||
"""Static DOT graph with edges to all possible dependencies.
|
||||
|
||||
Args:
|
||||
specs: abstract specs to be represented
|
||||
deptype: dependency types to consider
|
||||
depflag: dependency types to consider
|
||||
out: optional output stream. If None sys.stdout is used
|
||||
"""
|
||||
out = out or sys.stdout
|
||||
builder = StaticDag()
|
||||
for edge in _static_edges(specs, deptype):
|
||||
for edge in _static_edges(specs, depflag):
|
||||
builder.visit(edge)
|
||||
out.write(builder.render())
|
||||
|
||||
@@ -570,7 +570,7 @@ def static_graph_dot(
|
||||
def graph_dot(
|
||||
specs: List[spack.spec.Spec],
|
||||
builder: Optional[DotGraphBuilder] = None,
|
||||
deptype: spack.dependency.DependencyArgument = "all",
|
||||
depflag: dt.DepFlag = dt.ALL,
|
||||
out: Optional[TextIO] = None,
|
||||
):
|
||||
"""DOT graph of the concrete specs passed as input.
|
||||
@@ -578,7 +578,7 @@ def graph_dot(
|
||||
Args:
|
||||
specs: specs to be represented
|
||||
builder: builder to use to render the graph
|
||||
deptype: dependency types to consider
|
||||
depflag: dependency types to consider
|
||||
out: optional output stream. If None sys.stdout is used
|
||||
"""
|
||||
if not specs:
|
||||
@@ -587,10 +587,9 @@ def graph_dot(
|
||||
if out is None:
|
||||
out = sys.stdout
|
||||
|
||||
deptype = spack.dependency.canonical_deptype(deptype)
|
||||
builder = builder or SimpleDAG()
|
||||
for edge in spack.traverse.traverse_edges(
|
||||
specs, cover="edges", order="breadth", deptype=deptype
|
||||
specs, cover="edges", order="breadth", deptype=depflag
|
||||
):
|
||||
builder.visit(edge)
|
||||
|
||||
|
||||
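Not part of the diff: a sketch of the updated graphing call sites, assuming spec is an already concretized spack.spec.Spec.

import io

import spack.deptypes as dt
from spack.graph import graph_ascii, graph_dot

buf = io.StringIO()
graph_ascii(spec, depflag=dt.LINK | dt.RUN, out=buf)  # was deptype=("link", "run")
graph_dot([spec], depflag=dt.ALL, out=buf)            # was deptype="all"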
@@ -4,7 +4,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Definitions that control how Spack creates Spec hashes."""
|
||||
|
||||
import spack.dependency as dp
|
||||
import spack.deptypes as dt
|
||||
import spack.repo
|
||||
|
||||
hashes = []
|
||||
@@ -20,8 +20,8 @@ class SpecHashDescriptor:
|
||||
|
||||
We currently use different hashes for different use cases."""
|
||||
|
||||
def __init__(self, deptype, package_hash, name, override=None):
|
||||
self.deptype = dp.canonical_deptype(deptype)
|
||||
def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
|
||||
self.depflag = depflag
|
||||
self.package_hash = package_hash
|
||||
self.name = name
|
||||
hashes.append(self)
|
||||
@@ -39,12 +39,12 @@ def __call__(self, spec):
|
||||
|
||||
|
||||
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
|
||||
dag_hash = SpecHashDescriptor(deptype=("build", "link", "run"), package_hash=True, name="hash")
|
||||
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")
|
||||
|
||||
|
||||
#: Hash descriptor used only to transfer a DAG, as is, across processes
|
||||
process_hash = SpecHashDescriptor(
|
||||
deptype=("build", "link", "run", "test"), package_hash=True, name="process_hash"
|
||||
depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
|
||||
)
|
||||
|
||||
|
||||
@@ -56,7 +56,7 @@ def _content_hash_override(spec):
|
||||
|
||||
#: Package hash used as part of dag hash
|
||||
package_hash = SpecHashDescriptor(
|
||||
deptype=(), package_hash=True, name="package_hash", override=_content_hash_override
|
||||
depflag=0, package_hash=True, name="package_hash", override=_content_hash_override
|
||||
)
|
||||
|
||||
|
||||
@@ -64,10 +64,10 @@ def _content_hash_override(spec):
|
||||
# spec formats
|
||||
|
||||
full_hash = SpecHashDescriptor(
|
||||
deptype=("build", "link", "run"), package_hash=True, name="full_hash"
|
||||
depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="full_hash"
|
||||
)
|
||||
|
||||
|
||||
build_hash = SpecHashDescriptor(
|
||||
deptype=("build", "link", "run"), package_hash=False, name="build_hash"
|
||||
depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=False, name="build_hash"
|
||||
)
|
||||
|
||||
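Not part of the diff: how the hash descriptors above read with the flag representation; these relations follow directly from the definitions in the hunk.

import spack.deptypes as dt
import spack.hash_types as ht

# dag_hash follows build, link and run edges; process_hash additionally follows test edges
assert ht.dag_hash.depflag == dt.BUILD | dt.LINK | dt.RUN
assert ht.process_hash.depflag == ht.dag_hash.depflag | dt.TEST
assert dt.flag_to_tuple(ht.dag_hash.depflag) == ("build", "link", "run")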
@@ -79,8 +79,7 @@ class ElfFilesWithRPathVisitor(BaseDirectoryVisitor):
|
||||
"""Visitor that collects all elf files that have an rpath"""
|
||||
|
||||
def __init__(self):
|
||||
# Map from (ino, dev) -> path. We need 1 path per file, if there are hardlinks,
|
||||
# we don't need to store the path multiple times.
|
||||
# Keep track of what hardlinked files we've already visited.
|
||||
self.visited = set()
|
||||
|
||||
def visit_file(self, root, rel_path, depth):
|
||||
@@ -89,10 +88,10 @@ def visit_file(self, root, rel_path, depth):
|
||||
identifier = (s.st_ino, s.st_dev)
|
||||
|
||||
# We're hitting a hardlink or symlink of an excluded lib, no need to parse.
|
||||
if identifier in self.visited:
|
||||
return
|
||||
|
||||
self.visited.add(identifier)
|
||||
if s.st_nlink > 1:
|
||||
if identifier in self.visited:
|
||||
return
|
||||
self.visited.add(identifier)
|
||||
|
||||
result = drop_redundant_rpaths(filepath)
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.string import plural
|
||||
from llnl.util.lang import nullcontext
|
||||
from llnl.util.tty.color import colorize
|
||||
|
||||
@@ -26,7 +27,6 @@
|
||||
from spack.installer import InstallError
|
||||
from spack.spec import Spec
|
||||
from spack.util.prefix import Prefix
|
||||
from spack.util.string import plural
|
||||
|
||||
#: Stand-alone test failure info type
|
||||
TestFailureType = Tuple[BaseException, str]
|
||||
|
||||
@@ -50,6 +50,7 @@
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.database
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.hooks
|
||||
import spack.mirror
|
||||
@@ -313,7 +314,7 @@ def _packages_needed_to_bootstrap_compiler(
|
||||
# mark compiler as depended-on by the packages that use it
|
||||
for pkg in pkgs:
|
||||
dep._dependents.add(
|
||||
spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
|
||||
spack.spec.DependencySpec(pkg.spec, dep, depflag=dt.BUILD, virtuals=())
|
||||
)
|
||||
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
|
||||
|
||||
@@ -788,10 +789,9 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
|
||||
# Save off dependency package ids for quick checks since traversals
|
||||
# are not able to return full dependents for all packages across
|
||||
# environment specs.
|
||||
deptypes = self.get_deptypes(self.pkg)
|
||||
self.dependencies = set(
|
||||
package_id(d.package)
|
||||
for d in self.pkg.spec.dependencies(deptype=deptypes)
|
||||
for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
|
||||
if package_id(d.package) != self.pkg_id
|
||||
)
|
||||
|
||||
@@ -830,7 +830,7 @@ def _add_default_args(self) -> None:
|
||||
]:
|
||||
_ = self.install_args.setdefault(arg, default)
|
||||
|
||||
def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]:
|
||||
def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
|
||||
"""Determine the required dependency types for the associated package.
|
||||
|
||||
Args:
|
||||
@@ -839,7 +839,7 @@ def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]
|
||||
Returns:
|
||||
tuple: required dependency type(s) for the package
|
||||
"""
|
||||
deptypes = ["link", "run"]
|
||||
depflag = dt.LINK | dt.RUN
|
||||
include_build_deps = self.install_args.get("include_build_deps")
|
||||
|
||||
if self.pkg_id == package_id(pkg):
|
||||
@@ -847,14 +847,15 @@ def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]
|
||||
else:
|
||||
cache_only = self.install_args.get("dependencies_cache_only")
|
||||
|
||||
# Include build dependencies if pkg is not installed and cache_only
|
||||
# is False, or if build depdencies are explicitly called for
|
||||
# by include_build_deps.
|
||||
if include_build_deps or not (cache_only or pkg.spec.installed):
|
||||
deptypes.append("build")
|
||||
# Include build dependencies if pkg is going to be built from sources, or
|
||||
# if build deps are explicitly requested.
|
||||
if include_build_deps or not (
|
||||
cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
|
||||
):
|
||||
depflag |= dt.BUILD
|
||||
if self.run_tests(pkg):
|
||||
deptypes.append("test")
|
||||
return tuple(sorted(deptypes))
|
||||
depflag |= dt.TEST
|
||||
return depflag
|
||||
|
||||
def has_dependency(self, dep_id) -> bool:
|
||||
"""Returns ``True`` if the package id represents a known dependency
|
||||
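Not part of the diff: the decision encoded by get_depflags() above, condensed into a free function for illustration; the boolean parameters stand in for the real install_args and spec lookups.

import spack.deptypes as dt

def depflags_for(include_build_deps: bool, cache_only: bool,
                 installed: bool, overwrite: bool, run_tests: bool) -> dt.DepFlag:
    flags = dt.LINK | dt.RUN
    # build deps are needed when building from sources, or when explicitly requested
    if include_build_deps or not (cache_only or installed and not overwrite):
        flags |= dt.BUILD
    if run_tests:
        flags |= dt.TEST
    return flags

# a cached, already installed dependency only needs link and run edges
assert depflags_for(False, True, True, False, False) == dt.LINK | dt.RUN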
@@ -887,9 +888,8 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec
|
||||
spec = self.spec
|
||||
if visited is None:
|
||||
visited = set()
|
||||
deptype = self.get_deptypes(spec.package)
|
||||
|
||||
for dep in spec.dependencies(deptype=deptype):
|
||||
for dep in spec.dependencies(deptype=self.get_depflags(spec.package)):
|
||||
hash = dep.dag_hash()
|
||||
if hash in visited:
|
||||
continue
|
||||
@@ -973,10 +973,9 @@ def __init__(
|
||||
# Be consistent wrt use of dependents and dependencies. That is,
|
||||
# if use traverse for transitive dependencies, then must remove
|
||||
# transitive dependents on failure.
|
||||
deptypes = self.request.get_deptypes(self.pkg)
|
||||
self.dependencies = set(
|
||||
package_id(d.package)
|
||||
for d in self.pkg.spec.dependencies(deptype=deptypes)
|
||||
for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
|
||||
if package_id(d.package) != self.pkg_id
|
||||
)
|
||||
|
||||
|
||||
@@ -30,7 +30,6 @@
|
||||
import llnl.util.tty.color as color
|
||||
from llnl.util.tty.log import log_output
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
@@ -716,7 +715,7 @@ def __call__(self, *argv, **kwargs):
|
||||
|
||||
out = io.StringIO()
|
||||
try:
|
||||
with log_output(out):
|
||||
with log_output(out, echo=True):
|
||||
self.returncode = _invoke_command(self.command, self.parser, args, unknown)
|
||||
|
||||
except SystemExit as e:
|
||||
@@ -775,7 +774,7 @@ def _profile_wrapper(command, parser, args, unknown_args):
|
||||
pr.disable()
|
||||
|
||||
# print out profile stats.
|
||||
stats = pstats.Stats(pr)
|
||||
stats = pstats.Stats(pr, stream=sys.stderr)
|
||||
stats.sort_stats(*sortby)
|
||||
stats.print_stats(nlines)
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
import urllib.parse
|
||||
from typing import Optional, Union
|
||||
|
||||
import llnl.url
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import mkdirp
|
||||
|
||||
@@ -29,7 +30,6 @@
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.mirror
|
||||
import spack.spec
|
||||
import spack.url as url
|
||||
import spack.util.path
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -375,7 +375,7 @@ def _determine_extension(fetcher):
|
||||
if isinstance(fetcher, fs.URLFetchStrategy):
|
||||
if fetcher.expand_archive:
|
||||
# If we fetch with a URLFetchStrategy, use URL's archive type
|
||||
ext = url.determine_url_file_extension(fetcher.url)
|
||||
ext = llnl.url.determine_url_file_extension(fetcher.url)
|
||||
|
||||
if ext:
|
||||
# Remove any leading dots
|
||||
|
||||
@@ -142,6 +142,7 @@ def __init__(self):
|
||||
"11": "bigsur",
|
||||
"12": "monterey",
|
||||
"13": "ventura",
|
||||
"14": "sonoma",
|
||||
}
|
||||
|
||||
version = macos_version()
|
||||
|
||||
@@ -67,7 +67,7 @@
|
||||
from spack.build_systems.waf import WafPackage
|
||||
from spack.build_systems.xorg import XorgPackage
|
||||
from spack.builder import run_after, run_before
|
||||
from spack.dependency import all_deptypes
|
||||
from spack.deptypes import ALL_TYPES as all_deptypes
|
||||
from spack.directives import *
|
||||
from spack.install_test import (
|
||||
SkipTest,
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.dependency
|
||||
import spack.deptypes as dt
|
||||
import spack.directives
|
||||
import spack.directory_layout
|
||||
import spack.environment
|
||||
@@ -66,7 +66,6 @@
|
||||
from spack.stage import DIYStage, ResourceStage, Stage, StageComposite, compute_stage_name
|
||||
from spack.util.executable import ProcessError, which
|
||||
from spack.util.package_hash import package_hash
|
||||
from spack.util.web import FetchError
|
||||
from spack.version import GitVersion, StandardVersion, Version
|
||||
|
||||
FLAG_HANDLER_RETURN_TYPE = Tuple[
|
||||
@@ -175,7 +174,7 @@ def windows_establish_runtime_linkage(self):
|
||||
detectable_packages = collections.defaultdict(list)
|
||||
|
||||
|
||||
class DetectablePackageMeta:
|
||||
class DetectablePackageMeta(type):
|
||||
"""Check if a package is detectable and add default implementations
|
||||
for the detection function.
|
||||
"""
|
||||
@@ -525,6 +524,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
|
||||
# This allows analysis tools to correctly interpret the class attributes.
|
||||
versions: dict
|
||||
|
||||
# Same for dependencies
|
||||
dependencies: dict
|
||||
|
||||
#: By default, packages are not virtual
|
||||
#: Virtual packages override this attribute
|
||||
virtual = False
|
||||
@@ -682,7 +684,7 @@ def possible_dependencies(
|
||||
cls,
|
||||
transitive=True,
|
||||
expand_virtuals=True,
|
||||
deptype="all",
|
||||
depflag: dt.DepFlag = dt.ALL,
|
||||
visited=None,
|
||||
missing=None,
|
||||
virtuals=None,
|
||||
@@ -694,7 +696,7 @@ def possible_dependencies(
|
||||
True, only direct dependencies if False (default True)..
|
||||
expand_virtuals (bool or None): expand virtual dependencies into
|
||||
all possible implementations (default True)
|
||||
deptype (str or tuple or None): dependency types to consider
|
||||
depflag: dependency types to consider
|
||||
visited (dict or None): dict of names of dependencies visited so
|
||||
far, mapped to their immediate dependencies' names.
|
||||
missing (dict or None): dict to populate with packages and their
|
||||
@@ -720,8 +722,6 @@ def possible_dependencies(
|
||||
Note: the returned dict *includes* the package itself.
|
||||
|
||||
"""
|
||||
deptype = spack.dependency.canonical_deptype(deptype)
|
||||
|
||||
visited = {} if visited is None else visited
|
||||
missing = {} if missing is None else missing
|
||||
|
||||
@@ -729,9 +729,10 @@ def possible_dependencies(
|
||||
|
||||
for name, conditions in cls.dependencies.items():
|
||||
# check whether this dependency could be of the type asked for
|
||||
deptypes = [dep.type for cond, dep in conditions.items()]
|
||||
deptypes = set.union(*deptypes)
|
||||
if not any(d in deptypes for d in deptype):
|
||||
depflag_union = 0
|
||||
for dep in conditions.values():
|
||||
depflag_union |= dep.depflag
|
||||
if not (depflag & depflag_union):
|
||||
continue
|
||||
|
||||
# expand virtuals if enabled, otherwise just stop at virtuals
|
||||
@@ -770,7 +771,7 @@ def possible_dependencies(
|
||||
continue
|
||||
|
||||
dep_cls.possible_dependencies(
|
||||
transitive, expand_virtuals, deptype, visited, missing, virtuals
|
||||
transitive, expand_virtuals, depflag, visited, missing, virtuals
|
||||
)
|
||||
|
||||
return visited
|
||||
@@ -1203,7 +1204,7 @@ def fetcher(self, f):
|
||||
self._fetcher.set_package(self)
|
||||
|
||||
@classmethod
|
||||
def dependencies_of_type(cls, *deptypes):
|
||||
def dependencies_of_type(cls, deptypes: dt.DepFlag):
|
||||
"""Get dependencies that can possibly have these deptypes.
|
||||
|
||||
This analyzes the package and determines which dependencies *can*
|
||||
@@ -1215,7 +1216,7 @@ def dependencies_of_type(cls, *deptypes):
|
||||
return dict(
|
||||
(name, conds)
|
||||
for name, conds in cls.dependencies.items()
|
||||
if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
|
||||
if any(deptypes & cls.dependencies[name][cond].depflag for cond in conds)
|
||||
)
|
||||
|
||||
# TODO: allow more than one active extendee.
|
||||
@@ -1392,7 +1393,7 @@ def do_fetch(self, mirror_only=False):
|
||||
tty.debug("Fetching with no checksum. {0}".format(ck_msg))
|
||||
|
||||
if not ignore_checksum:
|
||||
raise FetchError(
|
||||
raise spack.error.FetchError(
|
||||
"Will not fetch %s" % self.spec.format("{name}{@version}"), ck_msg
|
||||
)
|
||||
|
||||
@@ -1418,7 +1419,7 @@ def do_fetch(self, mirror_only=False):
|
||||
tty.debug("Fetching deprecated version. {0}".format(dp_msg))
|
||||
|
||||
if not ignore_deprecation:
|
||||
raise FetchError(
|
||||
raise spack.error.FetchError(
|
||||
"Will not fetch {0}".format(self.spec.format("{name}{@version}")), dp_msg
|
||||
)
|
||||
|
||||
@@ -1445,7 +1446,7 @@ def do_stage(self, mirror_only=False):
|
||||
self.stage.expand_archive()
|
||||
|
||||
if not os.listdir(self.stage.path):
|
||||
raise FetchError("Archive was empty for %s" % self.name)
|
||||
raise spack.error.FetchError("Archive was empty for %s" % self.name)
|
||||
else:
|
||||
# Support for post-install hooks requires a stage.source_path
|
||||
fsys.mkdirp(self.stage.source_path)
|
||||
@@ -2363,7 +2364,7 @@ def all_urls(self):
|
||||
urls.append(args["url"])
|
||||
return urls
|
||||
|
||||
def fetch_remote_versions(self, concurrency=128):
|
||||
def fetch_remote_versions(self, concurrency=None):
|
||||
"""Find remote versions of this package.
|
||||
|
||||
Uses ``list_url`` and any other URLs listed in the package file.
|
||||
@@ -2375,7 +2376,7 @@ def fetch_remote_versions(self, concurrency=128):
|
||||
return {}
|
||||
|
||||
try:
|
||||
return spack.util.web.find_versions_of_archive(
|
||||
return spack.url.find_versions_of_archive(
|
||||
self.all_urls, self.list_url, self.list_depth, concurrency, reference_package=self
|
||||
)
|
||||
except spack.util.web.NoNetworkConnectionError as e:
|
||||
|
||||
@@ -288,7 +288,7 @@ def next_spec(
|
||||
)
|
||||
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
|
||||
|
||||
root_spec._add_dependency(dependency, deptypes=(), virtuals=())
|
||||
root_spec._add_dependency(dependency, depflag=0, virtuals=())
|
||||
|
||||
else:
|
||||
break
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
|
||||
import llnl.util.filesystem
|
||||
import llnl.util.lang
|
||||
from llnl.url import allowed_archive
|
||||
|
||||
import spack
|
||||
import spack.error
|
||||
@@ -19,7 +20,6 @@
|
||||
import spack.repo
|
||||
import spack.stage
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.util.compression import allowed_archive
|
||||
from spack.util.crypto import Checker, checksum
|
||||
from spack.util.executable import which, which_string
|
||||
|
||||
@@ -76,7 +76,7 @@ def __init__(self, pkg, path_or_url, level, working_dir):
|
||||
self.level = level
|
||||
self.working_dir = working_dir
|
||||
|
||||
def apply(self, stage: spack.stage.Stage):
|
||||
def apply(self, stage: "spack.stage.Stage"):
|
||||
"""Apply a patch to source in a stage.
|
||||
|
||||
Arguments:
|
||||
@@ -190,7 +190,7 @@ def __init__(self, pkg, url, level=1, working_dir=".", ordering_key=None, **kwar
|
||||
if not self.sha256:
|
||||
raise PatchDirectiveError("URL patches require a sha256 checksum")
|
||||
|
||||
def apply(self, stage: spack.stage.Stage):
|
||||
def apply(self, stage: "spack.stage.Stage"):
|
||||
assert self.stage.expanded, "Stage must be expanded before applying patches"
|
||||
|
||||
# Get the patch file.
|
||||
|
||||
@@ -24,8 +24,9 @@
|
||||
import traceback
|
||||
import types
|
||||
import uuid
|
||||
from typing import Any, Dict, List, Union
|
||||
from typing import Any, Dict, List, Tuple, Union
|
||||
|
||||
import llnl.path
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
@@ -563,7 +564,7 @@ def __init__(
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
if sys.platform == "win32":
|
||||
self.packages_path = spack.util.path.convert_to_posix_path(self.packages_path)
|
||||
self.packages_path = llnl.path.convert_to_posix_path(self.packages_path)
|
||||
self.namespace = namespace
|
||||
|
||||
self.indexers: Dict[str, Indexer] = {}
|
||||
@@ -744,10 +745,18 @@ def all_package_paths(self):
|
||||
for name in self.all_package_names():
|
||||
yield self.package_path(name)
|
||||
|
||||
def packages_with_tags(self, *tags):
|
||||
def packages_with_tags(self, *tags, full=False):
|
||||
"""Returns a list of packages matching any of the tags in input.
|
||||
|
||||
Args:
|
||||
full: if True the package names in the output are fully-qualified
|
||||
"""
|
||||
r = set()
|
||||
for repo in self.repos:
|
||||
r |= set(repo.packages_with_tags(*tags))
|
||||
current = repo.packages_with_tags(*tags)
|
||||
if full:
|
||||
current = [f"{repo.namespace}.{x}" for x in current]
|
||||
r |= set(current)
|
||||
return sorted(r)
|
||||
|
||||
def all_package_classes(self):
|
||||
@@ -1123,7 +1132,8 @@ def extensions_for(self, extendee_spec):
|
||||
def dirname_for_package_name(self, pkg_name):
|
||||
"""Get the directory name for a particular package. This is the
|
||||
directory that contains its package.py file."""
|
||||
return os.path.join(self.packages_path, pkg_name)
|
||||
_, unqualified_name = self.partition_package_name(pkg_name)
|
||||
return os.path.join(self.packages_path, unqualified_name)
|
||||
|
||||
def filename_for_package_name(self, pkg_name):
|
||||
"""Get the filename for the module we should load for a particular
|
||||
@@ -1221,15 +1231,10 @@ def get_pkg_class(self, pkg_name):
|
||||
package. Then extracts the package class from the module
|
||||
according to Spack's naming convention.
|
||||
"""
|
||||
namespace, _, pkg_name = pkg_name.rpartition(".")
|
||||
if namespace and (namespace != self.namespace):
|
||||
raise InvalidNamespaceError(
|
||||
"Invalid namespace for %s repo: %s" % (self.namespace, namespace)
|
||||
)
|
||||
|
||||
namespace, pkg_name = self.partition_package_name(pkg_name)
|
||||
class_name = nm.mod_to_class(pkg_name)
|
||||
fullname = f"{self.full_namespace}.{pkg_name}"
|
||||
|
||||
fullname = "{0}.{1}".format(self.full_namespace, pkg_name)
|
||||
try:
|
||||
module = importlib.import_module(fullname)
|
||||
except ImportError:
|
||||
@@ -1240,7 +1245,7 @@ def get_pkg_class(self, pkg_name):
|
||||
|
||||
cls = getattr(module, class_name)
|
||||
if not inspect.isclass(cls):
|
||||
tty.die("%s.%s is not a class" % (pkg_name, class_name))
|
||||
tty.die(f"{pkg_name}.{class_name} is not a class")
|
||||
|
||||
new_cfg_settings = (
|
||||
spack.config.get("packages").get(pkg_name, {}).get("package_attributes", {})
|
||||
@@ -1279,6 +1284,15 @@ def get_pkg_class(self, pkg_name):
|
||||
|
||||
return cls
|
||||
|
||||
def partition_package_name(self, pkg_name: str) -> Tuple[str, str]:
|
||||
namespace, pkg_name = partition_package_name(pkg_name)
|
||||
if namespace and (namespace != self.namespace):
|
||||
raise InvalidNamespaceError(
|
||||
f"Invalid namespace for the '{self.namespace}' repo: {namespace}"
|
||||
)
|
||||
|
||||
return namespace, pkg_name
|
||||
|
||||
def __str__(self):
|
||||
return "[Repo '%s' at '%s']" % (self.namespace, self.root)
|
||||
|
||||
@@ -1292,6 +1306,20 @@ def __contains__(self, pkg_name):
|
||||
RepoType = Union[Repo, RepoPath]
|
||||
|
||||
|
||||
def partition_package_name(pkg_name: str) -> Tuple[str, str]:
|
||||
"""Given a package name that might be fully-qualified, returns the namespace part,
|
||||
if present and the unqualified package name.
|
||||
|
||||
If the package name is unqualified, the namespace is an empty string.
|
||||
|
||||
Args:
|
||||
pkg_name: a package name, either unqualified like "llvm", or
|
||||
fully-qualified, like "builtin.llvm"
|
||||
"""
|
||||
namespace, _, pkg_name = pkg_name.rpartition(".")
|
||||
return namespace, pkg_name
|
||||
|
||||
|
||||
def create_repo(root, namespace=None, subdir=packages_dir_name):
|
||||
"""Create a new repository in root with the specified namespace.
|
||||
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import urllib.response
|
||||
from io import BufferedReader, BytesIO, IOBase
|
||||
|
||||
import spack.util.s3 as s3_util
|
||||
|
||||
|
||||
# NOTE(opadron): Workaround issue in boto where its StreamingBody
|
||||
# implementation is missing several APIs expected from IOBase. These missing
|
||||
# APIs prevent the streams returned by boto from being passed as-is along to
|
||||
# urllib.
|
||||
#
|
||||
# https://github.com/boto/botocore/issues/879
|
||||
# https://github.com/python/cpython/pull/3249
|
||||
class WrapStream(BufferedReader):
|
||||
def __init__(self, raw):
|
||||
# In botocore >=1.23.47, StreamingBody inherits from IOBase, so we
|
||||
# only add missing attributes in older versions.
|
||||
# https://github.com/boto/botocore/commit/a624815eabac50442ed7404f3c4f2664cd0aa784
|
||||
if not isinstance(raw, IOBase):
|
||||
raw.readable = lambda: True
|
||||
raw.writable = lambda: False
|
||||
raw.seekable = lambda: False
|
||||
raw.closed = False
|
||||
raw.flush = lambda: None
|
||||
super().__init__(raw)
|
||||
|
||||
def detach(self):
|
||||
self.raw = None
|
||||
|
||||
def read(self, *args, **kwargs):
|
||||
return self.raw.read(*args, **kwargs)
|
||||
|
||||
def __getattr__(self, key):
|
||||
return getattr(self.raw, key)
|
||||
|
||||
|
||||
def _s3_open(url, method="GET"):
|
||||
parsed = urllib.parse.urlparse(url)
|
||||
s3 = s3_util.get_s3_session(url, method="fetch")
|
||||
|
||||
bucket = parsed.netloc
|
||||
key = parsed.path
|
||||
|
||||
if key.startswith("/"):
|
||||
key = key[1:]
|
||||
|
||||
if method not in ("GET", "HEAD"):
|
||||
raise urllib.error.URLError(
|
||||
"Only GET and HEAD verbs are currently supported for the s3:// scheme"
|
||||
)
|
||||
|
||||
try:
|
||||
if method == "GET":
|
||||
obj = s3.get_object(Bucket=bucket, Key=key)
|
||||
# NOTE(opadron): Apply workaround here (see above)
|
||||
stream = WrapStream(obj["Body"])
|
||||
elif method == "HEAD":
|
||||
obj = s3.head_object(Bucket=bucket, Key=key)
|
||||
stream = BytesIO()
|
||||
except s3.ClientError as e:
|
||||
raise urllib.error.URLError(e) from e
|
||||
|
||||
headers = obj["ResponseMetadata"]["HTTPHeaders"]
|
||||
|
||||
return url, headers, stream
|
||||
|
||||
|
||||
class UrllibS3Handler(urllib.request.BaseHandler):
|
||||
def s3_open(self, req):
|
||||
orig_url = req.get_full_url()
|
||||
url, headers, stream = _s3_open(orig_url, method=req.get_method())
|
||||
return urllib.response.addinfourl(stream, headers, url)
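# Sketch of how a urllib scheme handler like the one above is typically wired up.
# Where Spack actually installs this handler is not shown in this diff; the bucket
# and key below are placeholders.
import urllib.request

opener = urllib.request.build_opener(UrllibS3Handler())
with opener.open("s3://example-bucket/path/to/object") as response:
    data = response.read()  # urllib dispatches s3:// URLs to UrllibS3Handler.s3_open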
|
||||
@@ -13,10 +13,12 @@
|
||||
import re
|
||||
import types
|
||||
import warnings
|
||||
from typing import List, NamedTuple, Tuple, Union
|
||||
from typing import List, NamedTuple, Optional, Sequence, Tuple, Union
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
import spack.deptypes as dt
|
||||
|
||||
try:
|
||||
import clingo # type: ignore[import]
|
||||
|
||||
@@ -34,7 +36,6 @@
|
||||
import spack.cmd
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.dependency
|
||||
import spack.directives
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
@@ -137,7 +138,15 @@ class RequirementKind(enum.Enum):
|
||||
PACKAGE = enum.auto()
|
||||
|
||||
|
||||
DeclaredVersion = collections.namedtuple("DeclaredVersion", ["version", "idx", "origin"])
|
||||
class DeclaredVersion(NamedTuple):
|
||||
"""Data class to contain information on declared versions used in the solve"""
|
||||
|
||||
#: String representation of the version
|
||||
version: str
|
||||
#: Unique index assigned to this version
|
||||
idx: int
|
||||
#: Provenance of the version
|
||||
origin: Provenance
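# The typed NamedTuple keeps the positional behavior of the old collections.namedtuple
# while adding field types. Minimal sketch (Provenance.PACKAGE_PY is used elsewhere in
# this module; the version string is an arbitrary example):
v = DeclaredVersion(version="1.2.3", idx=0, origin=Provenance.PACKAGE_PY)
version_str, index, origin = v       # still unpacks like the old namedtuple
assert v.version == "1.2.3" and v.idx == 0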
|
||||
|
||||
|
||||
# Below numbers are used to map names of criteria to the order
|
||||
@@ -784,7 +793,7 @@ def fact(self, head):
|
||||
if choice:
|
||||
self.assumptions.append(atom)
|
||||
|
||||
def solve(self, setup, specs, reuse=None, output=None, control=None):
|
||||
def solve(self, setup, specs, reuse=None, output=None, control=None, allow_deprecated=False):
|
||||
"""Set up the input and solve for dependencies of ``specs``.
|
||||
|
||||
Arguments:
|
||||
@@ -795,6 +804,7 @@ def solve(self, setup, specs, reuse=None, output=None, control=None):
|
||||
the output of this solve.
|
||||
control (clingo.Control): configuration for the solver. If None,
|
||||
default values will be used
|
||||
allow_deprecated: if True, allow deprecated versions in the solve
|
||||
|
||||
Return:
|
||||
A tuple of the solve result, the timer for the different phases of the
|
||||
@@ -814,7 +824,7 @@ def solve(self, setup, specs, reuse=None, output=None, control=None):
|
||||
timer.start("setup")
|
||||
with self.control.backend() as backend:
|
||||
self.backend = backend
|
||||
setup.setup(self, specs, reuse=reuse)
|
||||
setup.setup(self, specs, reuse=reuse, allow_deprecated=allow_deprecated)
|
||||
timer.stop("setup")
|
||||
|
||||
timer.start("load")
|
||||
@@ -1462,18 +1472,18 @@ def package_dependencies_rules(self, pkg):
|
||||
"""Translate 'depends_on' directives into ASP logic."""
|
||||
for _, conditions in sorted(pkg.dependencies.items()):
|
||||
for cond, dep in sorted(conditions.items()):
|
||||
deptypes = dep.type.copy()
|
||||
depflag = dep.depflag
|
||||
# Skip test dependencies if they're not requested
|
||||
if not self.tests:
|
||||
deptypes.discard("test")
|
||||
depflag &= ~dt.TEST
|
||||
|
||||
# ... or if they are requested only for certain packages
|
||||
if not isinstance(self.tests, bool) and pkg.name not in self.tests:
|
||||
deptypes.discard("test")
|
||||
elif not isinstance(self.tests, bool) and pkg.name not in self.tests:
|
||||
depflag &= ~dt.TEST
|
||||
|
||||
# if there are no dependency types to be considered
|
||||
# anymore, don't generate the dependency
|
||||
if not deptypes:
|
||||
if not depflag:
|
||||
continue
|
||||
|
||||
msg = "%s depends on %s" % (pkg.name, dep.spec.name)
|
||||
@@ -1487,9 +1497,10 @@ def package_dependencies_rules(self, pkg):
|
||||
fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name))
|
||||
)
|
||||
|
||||
for t in sorted(deptypes):
|
||||
# there is a declared dependency of type t
|
||||
self.gen.fact(fn.dependency_type(condition_id, t))
|
||||
for t in dt.ALL_FLAGS:
|
||||
if t & depflag:
|
||||
# there is a declared dependency of type t
|
||||
self.gen.fact(fn.dependency_type(condition_id, dt.flag_to_string(t)))
|
||||
|
||||
self.gen.newline()
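# Self-contained sketch of the bit-flag encoding this hunk switches to. The real
# constants and helpers live in spack.deptypes (dt); the values below are assumed
# for illustration only.
BUILD, LINK, RUN, TEST = 1, 2, 4, 8
ALL_FLAGS = (BUILD, LINK, RUN, TEST)
NAMES = {BUILD: "build", LINK: "link", RUN: "run", TEST: "test"}

depflag = BUILD | LINK        # a dependency that is both build- and link-type
depflag &= ~TEST              # dropping test dependencies is a single bit operation
for t in ALL_FLAGS:
    if t & depflag:
        print(NAMES[t])       # prints "build" then "link"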
|
||||
|
||||
@@ -1558,7 +1569,9 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
|
||||
)
|
||||
except Exception as e:
|
||||
if rule.kind != RequirementKind.DEFAULT:
|
||||
raise RuntimeError("cannot emit requirements for the solver") from e
|
||||
raise RuntimeError(
|
||||
"cannot emit requirements for the solver: " + str(e)
|
||||
) from e
|
||||
continue
|
||||
|
||||
self.gen.fact(
|
||||
@@ -1590,7 +1603,9 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
|
||||
# would be impaired. If a rule does not apply for a specific package, just
|
||||
# discard it.
|
||||
if rule.kind != RequirementKind.DEFAULT:
|
||||
raise RuntimeError("cannot emit requirements for the solver") from e
|
||||
raise RuntimeError(
|
||||
"cannot emit requirements for the solver: " + str(e)
|
||||
) from e
|
||||
continue
|
||||
|
||||
self.gen.fact(fn.requirement_group_member(member_id, pkg_name, requirement_grp_id))
|
||||
@@ -1863,9 +1878,11 @@ class Body:
|
||||
if spec.concrete:
|
||||
# We know dependencies are real for concrete specs. For abstract
|
||||
# specs they just mean the dep is somehow in the DAG.
|
||||
for dtype in dspec.deptypes:
|
||||
for dtype in dt.ALL_FLAGS:
|
||||
if not dspec.depflag & dtype:
|
||||
continue
|
||||
# skip build dependencies of already-installed specs
|
||||
if concrete_build_deps or dtype != "build":
|
||||
if concrete_build_deps or dtype != dt.BUILD:
|
||||
clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))
|
||||
for virtual_name in dspec.virtuals:
|
||||
clauses.append(
|
||||
@@ -1875,7 +1892,7 @@ class Body:
|
||||
|
||||
# imposing hash constraints for all but pure build deps of
|
||||
# already-installed concrete specs.
|
||||
if concrete_build_deps or dspec.deptypes != ("build",):
|
||||
if concrete_build_deps or dspec.depflag != dt.BUILD:
|
||||
clauses.append(fn.attr("hash", dep.name, dep.dag_hash()))
|
||||
|
||||
# if the spec is abstract, descend into dependencies.
|
||||
@@ -1894,7 +1911,7 @@ class Body:
|
||||
return clauses
|
||||
|
||||
def define_package_versions_and_validate_preferences(
|
||||
self, possible_pkgs, require_checksum: bool
|
||||
self, possible_pkgs, *, require_checksum: bool, allow_deprecated: bool
|
||||
):
|
||||
"""Declare any versions in specs not declared in packages."""
|
||||
packages_yaml = spack.config.get("packages")
|
||||
@@ -1914,13 +1931,15 @@ def define_package_versions_and_validate_preferences(
|
||||
]
|
||||
|
||||
for idx, (v, version_info) in enumerate(package_py_versions):
|
||||
if version_info.get("deprecated", False):
|
||||
self.deprecated_versions[pkg_name].add(v)
|
||||
if not allow_deprecated:
|
||||
continue
|
||||
|
||||
self.possible_versions[pkg_name].add(v)
|
||||
self.declared_versions[pkg_name].append(
|
||||
DeclaredVersion(version=v, idx=idx, origin=Provenance.PACKAGE_PY)
|
||||
)
|
||||
deprecated = version_info.get("deprecated", False)
|
||||
if deprecated:
|
||||
self.deprecated_versions[pkg_name].add(v)
|
||||
|
||||
if pkg_name not in packages_yaml or "version" not in packages_yaml[pkg_name]:
|
||||
continue
|
||||
@@ -1949,7 +1968,9 @@ def define_package_versions_and_validate_preferences(
|
||||
)
|
||||
self.possible_versions[pkg_name].add(vdef)
|
||||
|
||||
def define_ad_hoc_versions_from_specs(self, specs, origin, require_checksum: bool):
|
||||
def define_ad_hoc_versions_from_specs(
|
||||
self, specs, origin, *, allow_deprecated: bool, require_checksum: bool
|
||||
):
|
||||
"""Add concrete versions to possible versions from lists of CLI/dev specs."""
|
||||
for s in traverse.traverse_nodes(specs):
|
||||
# If there is a concrete version on the CLI *that we know nothing
|
||||
@@ -1965,6 +1986,9 @@ def define_ad_hoc_versions_from_specs(self, specs, origin, require_checksum: boo
|
||||
s.format("No matching version for constraint {name}{@versions}")
|
||||
)
|
||||
|
||||
if not allow_deprecated and version in self.deprecated_versions[s.name]:
|
||||
continue
|
||||
|
||||
declared = DeclaredVersion(version=version, idx=0, origin=origin)
|
||||
self.declared_versions[s.name].append(declared)
|
||||
self.possible_versions[s.name].add(version)
|
||||
@@ -2329,7 +2353,14 @@ def define_concrete_input_specs(self, specs, possible):
|
||||
if spec.concrete:
|
||||
self._facts_from_concrete_spec(spec, possible)
|
||||
|
||||
def setup(self, driver, specs, reuse=None):
|
||||
def setup(
|
||||
self,
|
||||
driver: PyclingoDriver,
|
||||
specs: Sequence[spack.spec.Spec],
|
||||
*,
|
||||
reuse: Optional[List[spack.spec.Spec]] = None,
|
||||
allow_deprecated: bool = False,
|
||||
):
|
||||
"""Generate an ASP program with relevant constraints for specs.
|
||||
|
||||
This calls methods on the solve driver to set up the problem with
|
||||
@@ -2337,9 +2368,10 @@ def setup(self, driver, specs, reuse=None):
|
||||
specs, as well as constraints from the specs themselves.
|
||||
|
||||
Arguments:
|
||||
driver (PyclingoDriver): driver instance of this solve
|
||||
specs (list): list of Specs to solve
|
||||
reuse (None or list): list of concrete specs that can be reused
|
||||
driver: driver instance of this solve
|
||||
specs: list of Specs to solve
|
||||
reuse: list of concrete specs that can be reused
|
||||
allow_deprecated: if True adds deprecated versions into the solve
|
||||
"""
|
||||
self._condition_id_counter = itertools.count()
|
||||
|
||||
@@ -2365,10 +2397,13 @@ def setup(self, driver, specs, reuse=None):
|
||||
# rules to generate an ASP program.
|
||||
self.gen = driver
|
||||
|
||||
if not allow_deprecated:
|
||||
self.gen.fact(fn.deprecated_versions_not_allowed())
|
||||
|
||||
# Calculate develop specs
|
||||
# they will be used in addition to command line specs
|
||||
# in determining known versions/targets/os
|
||||
dev_specs = ()
|
||||
dev_specs: Tuple[spack.spec.Spec, ...] = ()
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
dev_specs = tuple(
|
||||
@@ -2414,11 +2449,22 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.external_packages()
|
||||
|
||||
# TODO: make a config option for this undocumented feature
|
||||
require_checksum = "SPACK_CONCRETIZER_REQUIRE_CHECKSUM" in os.environ
|
||||
self.define_package_versions_and_validate_preferences(self.pkgs, require_checksum)
|
||||
self.define_ad_hoc_versions_from_specs(specs, Provenance.SPEC, require_checksum)
|
||||
self.define_ad_hoc_versions_from_specs(dev_specs, Provenance.DEV_SPEC, require_checksum)
|
||||
self.validate_and_define_versions_from_requirements(require_checksum)
|
||||
checksummed = "SPACK_CONCRETIZER_REQUIRE_CHECKSUM" in os.environ
|
||||
self.define_package_versions_and_validate_preferences(
|
||||
self.pkgs, allow_deprecated=allow_deprecated, require_checksum=checksummed
|
||||
)
|
||||
self.define_ad_hoc_versions_from_specs(
|
||||
specs, Provenance.SPEC, allow_deprecated=allow_deprecated, require_checksum=checksummed
|
||||
)
|
||||
self.define_ad_hoc_versions_from_specs(
|
||||
dev_specs,
|
||||
Provenance.DEV_SPEC,
|
||||
allow_deprecated=allow_deprecated,
|
||||
require_checksum=checksummed,
|
||||
)
|
||||
self.validate_and_define_versions_from_requirements(
|
||||
allow_deprecated=allow_deprecated, require_checksum=checksummed
|
||||
)
|
||||
|
||||
self.gen.h1("Package Constraints")
|
||||
for pkg in sorted(self.pkgs):
|
||||
@@ -2467,7 +2513,9 @@ def literal_specs(self, specs):
|
||||
if self.concretize_everything:
|
||||
self.gen.fact(fn.solve_literal(idx))
|
||||
|
||||
def validate_and_define_versions_from_requirements(self, require_checksum: bool):
|
||||
def validate_and_define_versions_from_requirements(
|
||||
self, *, allow_deprecated: bool, require_checksum: bool
|
||||
):
|
||||
"""If package requirements mention concrete versions that are not mentioned
|
||||
elsewhere, then we need to collect those to mark them as possible
|
||||
versions. If they are abstract and statically have no match, then we
|
||||
@@ -2500,6 +2548,9 @@ def validate_and_define_versions_from_requirements(self, require_checksum: bool)
|
||||
if v in self.possible_versions[name]:
|
||||
continue
|
||||
|
||||
if not allow_deprecated and v in self.deprecated_versions[name]:
|
||||
continue
|
||||
|
||||
# If concrete and not yet defined, conditionally define it, like we do for specs
|
||||
# from the command line.
|
||||
if not require_checksum or _is_checksummed_git_version(v):
|
||||
@@ -2658,13 +2709,14 @@ def depends_on(self, parent_node, dependency_node, type):
|
||||
dependency_spec = self._specs[dependency_node]
|
||||
edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
|
||||
edges = [x for x in edges if id(x.spec) == id(dependency_spec)]
|
||||
depflag = dt.flag_from_string(type)
|
||||
|
||||
if not edges:
|
||||
self._specs[parent_node].add_dependency_edge(
|
||||
self._specs[dependency_node], deptypes=(type,), virtuals=()
|
||||
self._specs[dependency_node], depflag=depflag, virtuals=()
|
||||
)
|
||||
else:
|
||||
edges[0].update_deptypes(deptypes=(type,))
|
||||
edges[0].update_deptypes(depflag=depflag)
|
||||
|
||||
def virtual_on_edge(self, parent_node, provider_node, virtual):
|
||||
dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
|
||||
@@ -2730,9 +2782,8 @@ def reorder_flags(self):
|
||||
|
||||
spec.compiler_flags.update({flag_type: ordered_compiler_flags})
|
||||
|
||||
def deprecated(self, pkg, version):
|
||||
msg = 'using "{0}@{1}" which is a deprecated version'
|
||||
tty.warn(msg.format(pkg, version))
|
||||
def deprecated(self, node: NodeArgument, version: str) -> None:
|
||||
tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version')
|
||||
|
||||
@staticmethod
|
||||
def sort_fn(function_tuple):
|
||||
@@ -2933,7 +2984,16 @@ def _reusable_specs(self, specs):
|
||||
|
||||
return reusable_specs
|
||||
|
||||
def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_only=False):
|
||||
def solve(
|
||||
self,
|
||||
specs,
|
||||
out=None,
|
||||
timers=False,
|
||||
stats=False,
|
||||
tests=False,
|
||||
setup_only=False,
|
||||
allow_deprecated=False,
|
||||
):
|
||||
"""
|
||||
Arguments:
|
||||
specs (list): List of ``Spec`` objects to solve for.
|
||||
@@ -2944,6 +3004,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
|
||||
If a tuple of package names, concretize test dependencies for named
|
||||
packages (defaults to False: do not concretize test dependencies).
|
||||
setup_only (bool): if True, stop after setup and don't solve (default False).
|
||||
allow_deprecated (bool): allow deprecated versions in the solve
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
@@ -2951,10 +3012,14 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
|
||||
reusable_specs.extend(self._reusable_specs(specs))
|
||||
setup = SpackSolverSetup(tests=tests)
|
||||
output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
|
||||
result, _, _ = self.driver.solve(setup, specs, reuse=reusable_specs, output=output)
|
||||
result, _, _ = self.driver.solve(
|
||||
setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
|
||||
)
|
||||
return result
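# Hedged usage sketch for the new keyword, with the signature taken from this hunk.
# Running it requires a working Spack installation; "zlib" is just an example spec.
import spack.spec
import spack.solver.asp

solver = spack.solver.asp.Solver()
result = solver.solve([spack.spec.Spec("zlib")], tests=False, allow_deprecated=True)
result.raise_if_unsat()  # raises if the solver found no model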
|
||||
|
||||
def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=False):
|
||||
def solve_in_rounds(
|
||||
self, specs, out=None, timers=False, stats=False, tests=False, allow_deprecated=False
|
||||
):
|
||||
"""Solve for a stable model of specs in multiple rounds.
|
||||
|
||||
This relaxes the assumption of solve that everything must be consistent and
|
||||
@@ -2969,6 +3034,7 @@ def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=Fals
|
||||
timers (bool): print timing if set to True
|
||||
stats (bool): print internal statistics if set to True
|
||||
tests (bool): add test dependencies to the solve
|
||||
allow_deprecated (bool): allow deprecated versions in the solve
|
||||
"""
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
|
||||
@@ -2982,7 +3048,11 @@ def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=Fals
|
||||
output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=False)
|
||||
while True:
|
||||
result, _, _ = self.driver.solve(
|
||||
setup, input_specs, reuse=reusable_specs, output=output
|
||||
setup,
|
||||
input_specs,
|
||||
reuse=reusable_specs,
|
||||
output=output,
|
||||
allow_deprecated=allow_deprecated,
|
||||
)
|
||||
yield result
|
||||
|
||||
|
||||
@@ -196,6 +196,13 @@ attr("deprecated", node(ID, Package), Version) :-
|
||||
attr("version", node(ID, Package), Version),
|
||||
pkg_fact(Package, deprecated_version(Version)).
|
||||
|
||||
error(100, "Package '{0}' needs the deprecated version '{1}', and this is not allowed", Package, Version)
|
||||
:- deprecated_versions_not_allowed(),
|
||||
attr("version", node(ID, Package), Version),
|
||||
not external(node(ID, Package)),
|
||||
not concrete(node(ID, Package)),
|
||||
pkg_fact(Package, deprecated_version(Version)).
|
||||
|
||||
possible_version_weight(node(ID, Package), Weight)
|
||||
:- attr("version", node(ID, Package), Version),
|
||||
pkg_fact(Package, version_declared(Version, Weight)).
|
||||
@@ -252,6 +259,7 @@ attr("node_version_satisfies", node(ID, Package), Constraint)
|
||||
pkg_fact(Package, version_satisfies(Constraint, Version)).
|
||||
|
||||
#defined version_satisfies/3.
|
||||
#defined deprecated_versions_not_allowed/0.
|
||||
#defined deprecated_version/2.
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
@@ -3,10 +3,11 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import collections
|
||||
from typing import List, Set, Tuple
|
||||
from typing import List, Set
|
||||
|
||||
import spack.dependency
|
||||
import spack.deptypes as dt
|
||||
import spack.package_base
|
||||
import spack.repo
|
||||
|
||||
PossibleDependencies = Set[str]
|
||||
|
||||
@@ -23,11 +24,11 @@ class Counter:
|
||||
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
|
||||
self.specs = specs
|
||||
|
||||
self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
|
||||
self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
|
||||
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
|
||||
self.all_types: dt.DepFlag = dt.ALL
|
||||
if not tests:
|
||||
self.link_run_types = ("link", "run")
|
||||
self.all_types = ("link", "run", "build")
|
||||
self.link_run_types = dt.LINK | dt.RUN
|
||||
self.all_types = dt.LINK | dt.RUN | dt.BUILD
|
||||
|
||||
self._possible_dependencies: PossibleDependencies = set()
|
||||
self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)
|
||||
@@ -59,7 +60,7 @@ def _compute_cache_values(self):
|
||||
class NoDuplicatesCounter(Counter):
|
||||
def _compute_cache_values(self):
|
||||
result = spack.package_base.possible_dependencies(
|
||||
*self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
|
||||
*self.specs, virtuals=self._possible_virtuals, depflag=self.all_types
|
||||
)
|
||||
self._possible_dependencies = set(result)
|
||||
|
||||
@@ -89,17 +90,17 @@ def __init__(self, specs, tests):
|
||||
def _compute_cache_values(self):
|
||||
self._link_run = set(
|
||||
spack.package_base.possible_dependencies(
|
||||
*self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
|
||||
*self.specs, virtuals=self._possible_virtuals, depflag=self.link_run_types
|
||||
)
|
||||
)
|
||||
self._link_run_virtuals.update(self._possible_virtuals)
|
||||
for x in self._link_run:
|
||||
current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type("build")
|
||||
current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD)
|
||||
self._direct_build.update(current)
|
||||
|
||||
self._total_build = set(
|
||||
spack.package_base.possible_dependencies(
|
||||
*self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
|
||||
*self._direct_build, virtuals=self._possible_virtuals, depflag=self.all_types
|
||||
)
|
||||
)
|
||||
self._possible_dependencies = set(self._link_run) | set(self._total_build)
|
||||
|
||||
@@ -54,10 +54,14 @@
|
||||
import io
|
||||
import itertools
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import socket
|
||||
import warnings
|
||||
from typing import List, Tuple, Union
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
||||
|
||||
import llnl.path
|
||||
import llnl.string
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang as lang
|
||||
import llnl.util.tty as tty
|
||||
@@ -67,6 +71,7 @@
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.dependency as dp
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.hash_types as ht
|
||||
import spack.paths
|
||||
@@ -81,11 +86,9 @@
|
||||
import spack.util.executable
|
||||
import spack.util.hash
|
||||
import spack.util.module_cmd as md
|
||||
import spack.util.path as pth
|
||||
import spack.util.prefix
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.string
|
||||
import spack.variant as vt
|
||||
import spack.version as vn
|
||||
import spack.version.git_ref_lookup
|
||||
@@ -174,9 +177,12 @@
|
||||
SPECFILE_FORMAT_VERSION = 4
|
||||
|
||||
|
||||
# InstallStatus is used to map install statuses to symbols for display
|
||||
# Options are artificially disjoint for dispay purposes
|
||||
class InstallStatus(enum.Enum):
|
||||
"""Maps install statuses to symbols for display.
|
||||
|
||||
Options are artificially disjoint for display purposes
|
||||
"""
|
||||
|
||||
installed = "@g{[+]} "
|
||||
upstream = "@g{[^]} "
|
||||
external = "@g{[e]} "
|
||||
@@ -724,81 +730,54 @@ class DependencySpec:
|
||||
Args:
|
||||
parent: starting node of the edge
|
||||
spec: ending node of the edge.
|
||||
deptypes: list of strings, representing dependency relationships.
|
||||
depflag: represents dependency relationships.
|
||||
virtuals: virtual packages provided from child to parent node.
|
||||
"""
|
||||
|
||||
__slots__ = "parent", "spec", "parameters"
|
||||
__slots__ = "parent", "spec", "depflag", "virtuals"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
parent: "Spec",
|
||||
spec: "Spec",
|
||||
*,
|
||||
deptypes: dp.DependencyArgument,
|
||||
virtuals: Tuple[str, ...],
|
||||
self, parent: "Spec", spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]
|
||||
):
|
||||
self.parent = parent
|
||||
self.spec = spec
|
||||
self.parameters = {
|
||||
"deptypes": dp.canonical_deptype(deptypes),
|
||||
"virtuals": tuple(sorted(set(virtuals))),
|
||||
}
|
||||
self.depflag = depflag
|
||||
self.virtuals = tuple(sorted(set(virtuals)))
|
||||
|
||||
@property
|
||||
def deptypes(self) -> Tuple[str, ...]:
|
||||
return self.parameters["deptypes"]
|
||||
|
||||
@property
|
||||
def virtuals(self) -> Tuple[str, ...]:
|
||||
return self.parameters["virtuals"]
|
||||
|
||||
def _update_edge_multivalued_property(
|
||||
self, property_name: str, value: Tuple[str, ...]
|
||||
) -> bool:
|
||||
current = self.parameters[property_name]
|
||||
update = set(current) | set(value)
|
||||
update = tuple(sorted(update))
|
||||
changed = current != update
|
||||
|
||||
if not changed:
|
||||
return False
|
||||
|
||||
self.parameters[property_name] = update
|
||||
return True
|
||||
|
||||
def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
|
||||
def update_deptypes(self, depflag: dt.DepFlag) -> bool:
|
||||
"""Update the current dependency types"""
|
||||
return self._update_edge_multivalued_property("deptypes", deptypes)
|
||||
old = self.depflag
|
||||
new = depflag | old
|
||||
if new == old:
|
||||
return False
|
||||
self.depflag = new
|
||||
return True
|
||||
|
||||
def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
|
||||
"""Update the list of provided virtuals"""
|
||||
return self._update_edge_multivalued_property("virtuals", virtuals)
|
||||
old = self.virtuals
|
||||
self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals)))
|
||||
return old != self.virtuals
|
||||
|
||||
def copy(self) -> "DependencySpec":
|
||||
"""Return a copy of this edge"""
|
||||
return DependencySpec(
|
||||
self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
|
||||
)
|
||||
return DependencySpec(self.parent, self.spec, depflag=self.depflag, virtuals=self.virtuals)
|
||||
|
||||
def _cmp_iter(self):
|
||||
yield self.parent.name if self.parent else None
|
||||
yield self.spec.name if self.spec else None
|
||||
yield self.deptypes
|
||||
yield self.depflag
|
||||
yield self.virtuals
|
||||
|
||||
def __str__(self) -> str:
|
||||
parent = self.parent.name if self.parent else None
|
||||
child = self.spec.name if self.spec else None
|
||||
return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}"
|
||||
|
||||
def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
|
||||
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals
|
||||
return f"{parent} {self.depflag}[virtuals={','.join(self.virtuals)}] --> {child}"
|
||||
|
||||
def flip(self) -> "DependencySpec":
|
||||
"""Flip the dependency, and drop virtual information"""
|
||||
return DependencySpec(
|
||||
parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=()
|
||||
parent=self.spec, spec=self.parent, depflag=self.depflag, virtuals=()
|
||||
)
|
||||
|
||||
|
||||
@@ -943,9 +922,8 @@ def __str__(self):
|
||||
)
|
||||
|
||||
|
||||
def _sort_by_dep_types(dspec):
|
||||
# Use negation since False < True for sorting
|
||||
return tuple(t not in dspec.deptypes for t in ("link", "run", "build", "test"))
|
||||
def _sort_by_dep_types(dspec: DependencySpec):
|
||||
return dspec.depflag
|
||||
|
||||
|
||||
#: Enum for edge directions
|
||||
@@ -1011,7 +989,7 @@ def copy(self):
|
||||
|
||||
return clone
|
||||
|
||||
def select(self, parent=None, child=None, deptypes=dp.all_deptypes):
|
||||
def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Select a list of edges and return them.
|
||||
|
||||
If an edge:
|
||||
@@ -1019,18 +997,18 @@ def select(self, parent=None, child=None, deptypes=dp.all_deptypes):
|
||||
- Matches the parent and/or child name, if passed
|
||||
then it is selected.
|
||||
|
||||
The deptypes argument needs to be canonical, since the method won't
|
||||
The depflag argument needs to be a flag, since the method won't
|
||||
convert it, for performance reasons.
|
||||
|
||||
Args:
|
||||
parent (str): name of the parent package
|
||||
child (str): name of the child package
|
||||
deptypes (tuple): allowed dependency types in canonical form
|
||||
depflag: allowed dependency types in flag form
|
||||
|
||||
Returns:
|
||||
List of DependencySpec objects
|
||||
"""
|
||||
if not deptypes:
|
||||
if not depflag:
|
||||
return []
|
||||
|
||||
# Start from all the edges we store
|
||||
@@ -1045,12 +1023,7 @@ def select(self, parent=None, child=None, deptypes=dp.all_deptypes):
|
||||
selected = (d for d in selected if d.spec.name == child)
|
||||
|
||||
# Filter by allowed dependency types
|
||||
if deptypes:
|
||||
selected = (
|
||||
dep
|
||||
for dep in selected
|
||||
if not dep.deptypes or any(d in deptypes for d in dep.deptypes)
|
||||
)
|
||||
selected = (dep for dep in selected if not dep.depflag or (depflag & dep.depflag))
|
||||
|
||||
return list(selected)
|
||||
|
||||
@@ -1419,7 +1392,7 @@ def _format_module_list(modules):
|
||||
|
||||
@property
|
||||
def external_path(self):
|
||||
return pth.path_to_os_path(self._external_path)[0]
|
||||
return llnl.path.path_to_os_path(self._external_path)[0]
|
||||
|
||||
@external_path.setter
|
||||
def external_path(self, ext_path):
|
||||
@@ -1470,47 +1443,49 @@ def _get_dependency(self, name):
|
||||
raise spack.error.SpecError(err_msg.format(name, len(deps)))
|
||||
return deps[0]
|
||||
|
||||
def edges_from_dependents(self, name=None, deptype="all"):
|
||||
def edges_from_dependents(self, name=None, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to parents.
|
||||
|
||||
Args:
|
||||
name (str): filter dependents by package name
|
||||
deptype (str or tuple): allowed dependency types
|
||||
depflag: allowed dependency types
|
||||
"""
|
||||
deptype = dp.canonical_deptype(deptype)
|
||||
return [d for d in self._dependents.select(parent=name, deptypes=deptype)]
|
||||
return [d for d in self._dependents.select(parent=name, depflag=depflag)]
|
||||
|
||||
def edges_to_dependencies(self, name=None, deptype="all"):
|
||||
def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to children.
|
||||
|
||||
Args:
|
||||
name (str): filter dependencies by package name
|
||||
deptype (str or tuple): allowed dependency types
|
||||
depflag: allowed dependency types
|
||||
"""
|
||||
deptype = dp.canonical_deptype(deptype)
|
||||
return [d for d in self._dependencies.select(child=name, deptypes=deptype)]
|
||||
return [d for d in self._dependencies.select(child=name, depflag=depflag)]
|
||||
|
||||
def dependencies(self, name=None, deptype="all"):
|
||||
def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
|
||||
"""Return a list of direct dependencies (nodes in the DAG).
|
||||
|
||||
Args:
|
||||
name (str): filter dependencies by package name
|
||||
deptype (str or tuple): allowed dependency types
|
||||
deptype: allowed dependency types
|
||||
"""
|
||||
return [d.spec for d in self.edges_to_dependencies(name, deptype=deptype)]
|
||||
if not isinstance(deptype, dt.DepFlag):
|
||||
deptype = dt.canonicalize(deptype)
|
||||
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
|
||||
|
||||
def dependents(self, name=None, deptype="all"):
|
||||
def dependents(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
|
||||
"""Return a list of direct dependents (nodes in the DAG).
|
||||
|
||||
Args:
|
||||
name (str): filter dependents by package name
|
||||
deptype (str or tuple): allowed dependency types
|
||||
deptype: allowed dependency types
|
||||
"""
|
||||
return [d.parent for d in self.edges_from_dependents(name, deptype=deptype)]
|
||||
if not isinstance(deptype, dt.DepFlag):
|
||||
deptype = dt.canonicalize(deptype)
|
||||
return [d.parent for d in self.edges_from_dependents(name, depflag=deptype)]
|
||||
|
||||
def _dependencies_dict(self, deptype="all"):
|
||||
def _dependencies_dict(self, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Return a dictionary, keyed by package name, of the direct
|
||||
dependencies.
|
||||
|
||||
@@ -1519,10 +1494,9 @@ def _dependencies_dict(self, deptype="all"):
|
||||
Args:
|
||||
deptype: allowed dependency types
|
||||
"""
|
||||
_sort_fn = lambda x: (x.spec.name,) + _sort_by_dep_types(x)
|
||||
_sort_fn = lambda x: (x.spec.name, _sort_by_dep_types(x))
|
||||
_group_fn = lambda x: x.spec.name
|
||||
deptype = dp.canonical_deptype(deptype)
|
||||
selected_edges = self._dependencies.select(deptypes=deptype)
|
||||
selected_edges = self._dependencies.select(depflag=depflag)
|
||||
result = {}
|
||||
for key, group in itertools.groupby(sorted(selected_edges, key=_sort_fn), key=_group_fn):
|
||||
result[key] = list(group)
|
||||
@@ -1618,19 +1592,17 @@ def _set_compiler(self, compiler):
|
||||
)
|
||||
self.compiler = compiler
|
||||
|
||||
def _add_dependency(
|
||||
self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
|
||||
):
|
||||
def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]):
|
||||
"""Called by the parser to add another spec as a dependency."""
|
||||
if spec.name not in self._dependencies or not spec.name:
|
||||
self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals)
|
||||
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
|
||||
return
|
||||
|
||||
# Keep the intersection of constraints when a dependency is added
|
||||
# multiple times. Currently, we only allow identical edge types.
|
||||
orig = self._dependencies[spec.name]
|
||||
try:
|
||||
dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes)
|
||||
dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
|
||||
except StopIteration:
|
||||
raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec)
|
||||
|
||||
@@ -1642,11 +1614,7 @@ def _add_dependency(
|
||||
)
|
||||
|
||||
def add_dependency_edge(
|
||||
self,
|
||||
dependency_spec: "Spec",
|
||||
*,
|
||||
deptypes: dp.DependencyArgument,
|
||||
virtuals: Tuple[str, ...],
|
||||
self, dependency_spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]
|
||||
):
|
||||
"""Add a dependency edge to this spec.
|
||||
|
||||
@@ -1655,19 +1623,17 @@ def add_dependency_edge(
|
||||
depflag: dependency types for this edge
|
||||
virtuals: virtuals provided by this edge
|
||||
"""
|
||||
deptypes = dp.canonical_deptype(deptypes)
|
||||
|
||||
# Check if we need to update edges that are already present
|
||||
selected = self._dependencies.select(child=dependency_spec.name)
|
||||
for edge in selected:
|
||||
has_errors, details = False, []
|
||||
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
|
||||
if any(d in edge.deptypes for d in deptypes):
|
||||
if edge.depflag & depflag:
|
||||
has_errors = True
|
||||
details.append(
|
||||
(
|
||||
f"{edge.parent.name} has already an edge matching any"
|
||||
f" of these types {str(deptypes)}"
|
||||
f" of these types {depflag}"
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1676,7 +1642,7 @@ def add_dependency_edge(
|
||||
details.append(
|
||||
(
|
||||
f"{edge.parent.name} has already an edge matching any"
|
||||
f" of these virtuals {str(virtuals)}"
|
||||
f" of these virtuals {virtuals}"
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1688,11 +1654,11 @@ def add_dependency_edge(
|
||||
# If we are here, it means the edge object was previously added to
|
||||
# both the parent and the child. When we update this object they'll
|
||||
# both see the deptype modification.
|
||||
edge.update_deptypes(deptypes=deptypes)
|
||||
edge.update_deptypes(depflag=depflag)
|
||||
edge.update_virtuals(virtuals=virtuals)
|
||||
return
|
||||
|
||||
edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals)
|
||||
edge = DependencySpec(self, dependency_spec, depflag=depflag, virtuals=virtuals)
|
||||
self._dependencies.add(edge)
|
||||
dependency_spec._dependents.add(edge)
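# Hedged sketch of adding an edge with the flag-based API (the spec names are
# placeholders; any two Spec instances would do):
import spack.deptypes as dt

parent, child = Spec("hdf5"), Spec("zlib")
parent.add_dependency_edge(child, depflag=dt.BUILD | dt.LINK, virtuals=())
assert child in parent.dependencies(deptype=dt.LINK)   # edge is visible from the parent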
|
||||
|
||||
@@ -1835,7 +1801,7 @@ def prefix(self):
|
||||
|
||||
@prefix.setter
|
||||
def prefix(self, value):
|
||||
self._prefix = spack.util.prefix.Prefix(pth.convert_to_platform_path(value))
|
||||
self._prefix = spack.util.prefix.Prefix(llnl.path.convert_to_platform_path(value))
|
||||
|
||||
def spec_hash(self, hash):
|
||||
"""Utility method for computing different types of Spec hashes.
|
||||
@@ -1959,12 +1925,12 @@ def lookup_hash(self):
|
||||
# Get dependencies that need to be replaced
|
||||
for node in self.traverse(root=False):
|
||||
if node.abstract_hash:
|
||||
spec._add_dependency(node._lookup_hash(), deptypes=(), virtuals=())
|
||||
spec._add_dependency(node._lookup_hash(), depflag=0, virtuals=())
|
||||
|
||||
# reattach nodes that were not otherwise satisfied by new dependencies
|
||||
for node in self.traverse(root=False):
|
||||
if not any(n.satisfies(node) for n in spec.traverse()):
|
||||
spec._add_dependency(node.copy(), deptypes=(), virtuals=())
|
||||
spec._add_dependency(node.copy(), depflag=0, virtuals=())
|
||||
|
||||
return spec
|
||||
|
||||
@@ -2090,7 +2056,7 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
d["package_hash"] = package_hash
|
||||
|
||||
# Note: Relies on sorting dict by keys later in algorithm.
|
||||
deps = self._dependencies_dict(deptype=hash.deptype)
|
||||
deps = self._dependencies_dict(depflag=hash.depflag)
|
||||
if deps:
|
||||
deps_list = []
|
||||
for name, edges_for_name in sorted(deps.items()):
|
||||
@@ -2100,7 +2066,10 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
parameters_tuple = (
|
||||
"parameters",
|
||||
syaml.syaml_dict(
|
||||
(key, dspec.parameters[key]) for key in sorted(dspec.parameters)
|
||||
(
|
||||
("deptypes", dt.flag_to_tuple(dspec.depflag)),
|
||||
("virtuals", dspec.virtuals),
|
||||
)
|
||||
),
|
||||
)
|
||||
ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
|
||||
@@ -2198,7 +2167,7 @@ def to_dict(self, hash=ht.dag_hash):
|
||||
"""
|
||||
node_list = [] # Using a list to preserve preorder traversal for hash.
|
||||
hash_set = set()
|
||||
for s in self.traverse(order="pre", deptype=hash.deptype):
|
||||
for s in self.traverse(order="pre", deptype=hash.depflag):
|
||||
spec_hash = s._cached_hash(hash)
|
||||
|
||||
if spec_hash not in hash_set:
|
||||
@@ -2382,13 +2351,12 @@ def spec_builder(d):
|
||||
if dep_like is None:
|
||||
return spec
|
||||
|
||||
def name_and_dependency_types(s):
|
||||
def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
|
||||
"""Given a key in the dictionary containing the literal,
|
||||
extracts the name of the spec and its dependency types.
|
||||
|
||||
Args:
|
||||
s (str): key in the dictionary containing the literal
|
||||
|
||||
s: key in the dictionary containing the literal
|
||||
"""
|
||||
t = s.split(":")
|
||||
|
||||
@@ -2396,39 +2364,37 @@ def name_and_dependency_types(s):
|
||||
msg = 'more than one ":" separator in key "{0}"'
|
||||
raise KeyError(msg.format(s))
|
||||
|
||||
n = t[0]
|
||||
name = t[0]
|
||||
if len(t) == 2:
|
||||
dtypes = tuple(dt.strip() for dt in t[1].split(","))
|
||||
depflag = dt.flag_from_strings(dep_str.strip() for dep_str in t[1].split(","))
|
||||
else:
|
||||
dtypes = ()
|
||||
depflag = 0
|
||||
return name, depflag
|
||||
|
||||
return n, dtypes
|
||||
|
||||
def spec_and_dependency_types(s):
|
||||
def spec_and_dependency_types(
|
||||
s: Union[Spec, Tuple[Spec, str]]
|
||||
) -> Tuple[Spec, dt.DepFlag]:
|
||||
"""Given a non-string key in the literal, extracts the spec
|
||||
and its dependency types.
|
||||
|
||||
Args:
|
||||
s (spec or tuple): either a Spec object or a tuple
|
||||
composed of a Spec object and a string with the
|
||||
dependency types
|
||||
|
||||
s: either a Spec object, or a tuple of Spec and string of dependency types
|
||||
"""
|
||||
if isinstance(s, Spec):
|
||||
return s, ()
|
||||
return s, 0
|
||||
|
||||
spec_obj, dtypes = s
|
||||
return spec_obj, tuple(dt.strip() for dt in dtypes.split(","))
|
||||
return spec_obj, dt.flag_from_strings(dt.strip() for dt in dtypes.split(","))
|
||||
|
||||
# Recurse on dependencies
|
||||
for s, s_dependencies in dep_like.items():
|
||||
if isinstance(s, str):
|
||||
dag_node, dependency_types = name_and_dependency_types(s)
|
||||
dag_node, dep_flag = name_and_dependency_types(s)
|
||||
else:
|
||||
dag_node, dependency_types = spec_and_dependency_types(s)
|
||||
dag_node, dep_flag = spec_and_dependency_types(s)
|
||||
|
||||
dependency_spec = spec_builder({dag_node: s_dependencies})
|
||||
spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=())
|
||||
spec._add_dependency(dependency_spec, depflag=dep_flag, virtuals=())
|
||||
|
||||
return spec
|
||||
|
||||
@@ -2601,7 +2567,7 @@ def _replace_with(self, concrete):
|
||||
virtuals = (self.name,)
|
||||
for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
|
||||
dependent = dep_spec.parent
|
||||
deptypes = dep_spec.deptypes
|
||||
depflag = dep_spec.depflag
|
||||
|
||||
# remove self from all dependents, unless it is already removed
|
||||
if self.name in dependent._dependencies:
|
||||
@@ -2609,7 +2575,7 @@ def _replace_with(self, concrete):
|
||||
|
||||
# add the replacement, unless it is already a dep of dependent.
|
||||
if concrete.name not in dependent._dependencies:
|
||||
dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals)
|
||||
dependent._add_dependency(concrete, depflag=depflag, virtuals=virtuals)
|
||||
else:
|
||||
dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
|
||||
virtuals=virtuals
|
||||
@@ -2960,8 +2926,9 @@ def _new_concretize(self, tests=False):
|
||||
if self._concrete:
|
||||
return
|
||||
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
solver = spack.solver.asp.Solver()
|
||||
result = solver.solve([self], tests=tests)
|
||||
result = solver.solve([self], tests=tests, allow_deprecated=allow_deprecated)
|
||||
result.raise_if_unsat()
|
||||
|
||||
# take the best answer
|
||||
@@ -3171,7 +3138,7 @@ def _evaluate_dependency_conditions(self, name):
|
||||
for when_spec, dependency in conditions.items():
|
||||
if self.satisfies(when_spec):
|
||||
if dep is None:
|
||||
dep = dp.Dependency(self.name, Spec(name), type=())
|
||||
dep = dp.Dependency(self.name, Spec(name), depflag=0)
|
||||
try:
|
||||
dep.merge(dependency)
|
||||
except spack.error.UnsatisfiableSpecError as e:
|
||||
@@ -3315,7 +3282,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
|
||||
# Add merged spec to my deps and recurse
|
||||
spec_dependency = spec_deps[dep.name]
|
||||
if dep.name not in self._dependencies:
|
||||
self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals)
|
||||
self._add_dependency(spec_dependency, depflag=dependency.depflag, virtuals=virtuals)
|
||||
|
||||
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
|
||||
return changed
|
||||
@@ -3356,7 +3323,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index, tests):
|
||||
or (tests and self.name in tests)
|
||||
or
|
||||
# this is not a test-only dependency
|
||||
dep.type - set(["test"])
|
||||
(dep.depflag & ~dt.TEST)
|
||||
)
|
||||
|
||||
if merge:
|
||||
@@ -3650,9 +3617,7 @@ def _constrain_dependencies(self, other):
|
||||
# WARNING: using index 0 i.e. we assume that we have only
|
||||
# WARNING: one edge from package "name"
|
||||
edges_from_name = self._dependencies[name]
|
||||
changed |= edges_from_name[0].update_deptypes(
|
||||
other._dependencies[name][0].deptypes
|
||||
)
|
||||
changed |= edges_from_name[0].update_deptypes(other._dependencies[name][0].depflag)
|
||||
changed |= edges_from_name[0].update_virtuals(
|
||||
other._dependencies[name][0].virtuals
|
||||
)
|
||||
@@ -3664,7 +3629,7 @@ def _constrain_dependencies(self, other):
|
||||
dep_spec_copy = other._get_dependency(name)
|
||||
self._add_dependency(
|
||||
dep_spec_copy.spec.copy(),
|
||||
deptypes=dep_spec_copy.deptypes,
|
||||
depflag=dep_spec_copy.depflag,
|
||||
virtuals=dep_spec_copy.virtuals,
|
||||
)
|
||||
changed = True
|
||||
@@ -3939,7 +3904,7 @@ def patches(self):
|
||||
|
||||
return self._patches
|
||||
|
||||
def _dup(self, other, deps=True, cleardeps=True):
|
||||
def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, cleardeps=True):
|
||||
"""Copy the spec other into self. This is an overwriting
|
||||
copy. It does not copy any dependents (parents), but by default
|
||||
copies dependencies.
|
||||
@@ -3948,9 +3913,8 @@ def _dup(self, other, deps=True, cleardeps=True):
|
||||
|
||||
Args:
|
||||
other (Spec): spec to be copied onto ``self``
|
||||
deps (bool or Sequence): if True copies all the dependencies. If
|
||||
False copies None. If a sequence of dependency types copy
|
||||
only those types.
|
||||
deps: if True copies all the dependencies. If
|
||||
False copies none. If a deptype or depflag value is given, copy only matching types.
|
||||
cleardeps (bool): if True clears the dependencies of ``self``,
|
||||
before possibly copying the dependencies of ``other`` onto
|
||||
``self``
|
||||
@@ -4010,10 +3974,10 @@ def _dup(self, other, deps=True, cleardeps=True):
|
||||
if deps:
|
||||
# If caller restricted deptypes to be copied, adjust that here.
|
||||
# By default, just copy all deptypes
|
||||
deptypes = dp.all_deptypes
|
||||
if isinstance(deps, (tuple, list)):
|
||||
deptypes = deps
|
||||
self._dup_deps(other, deptypes)
|
||||
depflag = dt.ALL
|
||||
if isinstance(deps, (tuple, list, str)):
|
||||
depflag = dt.canonicalize(deps)
|
||||
self._dup_deps(other, depflag)
|
||||
|
||||
self._concrete = other._concrete
|
||||
|
||||
@@ -4034,13 +3998,13 @@ def _dup(self, other, deps=True, cleardeps=True):
|
||||
|
||||
return changed
|
||||
|
||||
def _dup_deps(self, other, deptypes):
|
||||
def _dup_deps(self, other, depflag: dt.DepFlag):
|
||||
def spid(spec):
|
||||
return id(spec)
|
||||
|
||||
new_specs = {spid(other): self}
|
||||
for edge in other.traverse_edges(cover="edges", root=False):
|
||||
if edge.deptypes and not any(d in deptypes for d in edge.deptypes):
|
||||
if edge.depflag and not depflag & edge.depflag:
|
||||
continue
|
||||
|
||||
if spid(edge.parent) not in new_specs:
|
||||
@@ -4050,17 +4014,16 @@ def spid(spec):
|
||||
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
|
||||
|
||||
new_specs[spid(edge.parent)].add_dependency_edge(
|
||||
new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
new_specs[spid(edge.spec)], depflag=edge.depflag, virtuals=edge.virtuals
|
||||
)
|
||||
|
||||
def copy(self, deps=True, **kwargs):
|
||||
def copy(self, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, **kwargs):
|
||||
"""Make a copy of this spec.
|
||||
|
||||
Args:
|
||||
deps (bool or tuple): Defaults to True. If boolean, controls
|
||||
deps: Defaults to True. If boolean, controls
|
||||
whether dependencies are copied (copied if True). If a
|
||||
tuple is provided, *only* dependencies of types matching
|
||||
those in the tuple are copied.
|
||||
DepTypes or DepFlag is provided, *only* matching dependencies are copied.
|
||||
kwargs: additional arguments for internal use (passed to ``_dup``).
|
||||
|
||||
Returns:
|
||||
@@ -4120,7 +4083,7 @@ def __getitem__(self, name):
|
||||
# only when we don't find the package do we consider the full DAG.
|
||||
order = lambda: itertools.chain(
|
||||
self.traverse(deptype="link"),
|
||||
self.dependencies(deptype=("build", "run", "test")),
|
||||
self.dependencies(deptype=dt.BUILD | dt.RUN | dt.TEST),
|
||||
self.traverse(), # fall back to a full search
|
||||
)
|
||||
|
||||
@@ -4178,7 +4141,7 @@ def eq_dag(self, other, deptypes=True, vs=None, vo=None):
|
||||
for s_dspec, o_dspec in zip(
|
||||
itertools.chain.from_iterable(ssorted), itertools.chain.from_iterable(osorted)
|
||||
):
|
||||
if deptypes and s_dspec.deptypes != o_dspec.deptypes:
|
||||
if deptypes and s_dspec.depflag != o_dspec.depflag:
|
||||
return False
|
||||
|
||||
s, o = s_dspec.spec, o_dspec.spec
|
||||
@@ -4236,7 +4199,7 @@ def _cmp_iter(self):
|
||||
def deps():
|
||||
for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
|
||||
yield dep.spec.name
|
||||
yield tuple(sorted(dep.deptypes))
|
||||
yield dep.depflag
|
||||
yield hash(dep.spec)
|
||||
|
||||
yield deps
|
||||
@@ -4509,29 +4472,51 @@ def _installed_explicitly(self):
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
def tree(self, **kwargs):
|
||||
def tree(
|
||||
self,
|
||||
*,
|
||||
color: Optional[bool] = None,
|
||||
depth: bool = False,
|
||||
hashes: bool = False,
|
||||
hashlen: Optional[int] = None,
|
||||
cover: str = "nodes",
|
||||
indent: int = 0,
|
||||
format: str = DEFAULT_FORMAT,
|
||||
deptypes: Union[Tuple[str, ...], str] = "all",
|
||||
show_types: bool = False,
|
||||
depth_first: bool = False,
|
||||
recurse_dependencies: bool = True,
|
||||
status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
|
||||
prefix: Optional[Callable[["Spec"], str]] = None,
|
||||
) -> str:
|
||||
"""Prints out this spec and its dependencies, tree-formatted
|
||||
with indentation.
|
||||
|
||||
The status function may return either a boolean or an InstallStatus
|
||||
"""
|
||||
color = kwargs.pop("color", clr.get_color_when())
|
||||
depth = kwargs.pop("depth", False)
|
||||
hashes = kwargs.pop("hashes", False)
|
||||
hlen = kwargs.pop("hashlen", None)
|
||||
status_fn = kwargs.pop("status_fn", False)
|
||||
cover = kwargs.pop("cover", "nodes")
|
||||
indent = kwargs.pop("indent", 0)
|
||||
fmt = kwargs.pop("format", DEFAULT_FORMAT)
|
||||
prefix = kwargs.pop("prefix", None)
|
||||
show_types = kwargs.pop("show_types", False)
|
||||
deptypes = kwargs.pop("deptypes", "all")
|
||||
recurse_dependencies = kwargs.pop("recurse_dependencies", True)
|
||||
depth_first = kwargs.pop("depth_first", False)
|
||||
lang.check_kwargs(kwargs, self.tree)
|
||||
|
||||
Args:
|
||||
color: if True, always colorize the tree. If False, don't colorize the tree. If None,
|
||||
use the default from llnl.tty.color
|
||||
depth: print the depth from the root
|
||||
hashes: if True, print the hash of each node
|
||||
hashlen: length of the hash to be printed
|
||||
cover: either "nodes" or "edges"
|
||||
indent: extra indentation for the tree being printed
|
||||
format: format to be used to print each node
|
||||
deptypes: dependency types to be represented in the tree
|
||||
show_types: if True, show the (merged) dependency type of a node
|
||||
depth_first: if True, traverse the DAG depth first when representing it as a tree
|
||||
recurse_dependencies: if True, recurse on dependencies
|
||||
status_fn: optional callable that takes a node as an argument and returns its
|
||||
installation status
|
||||
prefix: optional callable that takes a node as an argument and returns its
|
||||
installation prefix
|
||||
"""
|
||||
out = ""
|
||||
|
||||
if color is None:
|
||||
color = clr.get_color_when()
|
||||
|
||||
for d, dep_spec in traverse.traverse_tree(
|
||||
[self], cover=cover, deptype=deptypes, depth_first=depth_first
|
||||
):
|
||||
@@ -4554,25 +4539,27 @@ def tree(self, **kwargs):
|
||||
out += clr.colorize("@r{[-]} ", color=color)
|
||||
|
||||
if hashes:
|
||||
out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hlen)
|
||||
out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)
|
||||
|
||||
if show_types:
|
||||
if cover == "nodes":
|
||||
# when only covering nodes, we merge dependency types
|
||||
# from all dependents before showing them.
|
||||
types = [ds.deptypes for ds in node.edges_from_dependents()]
|
||||
depflag = 0
|
||||
for ds in node.edges_from_dependents():
|
||||
depflag |= ds.depflag
|
||||
else:
|
||||
# when covering edges or paths, we show dependency
|
||||
# types only for the edge through which we visited
|
||||
types = [dep_spec.deptypes]
|
||||
depflag = dep_spec.depflag
|
||||
|
||||
type_chars = dp.deptype_chars(*types)
|
||||
type_chars = dt.flag_to_chars(depflag)
|
||||
out += "[%s] " % type_chars
|
||||
|
||||
out += " " * d
|
||||
if d > 0:
|
||||
out += "^"
|
||||
out += node.format(fmt, color=color) + "\n"
|
||||
out += node.format(format, color=color) + "\n"
|
||||
|
||||
# Check if we wanted just the first line
|
||||
if not recurse_dependencies:
|
||||
@@ -4728,14 +4715,14 @@ def from_self(name, transitive):
|
||||
for edge in self[name].edges_to_dependencies():
|
||||
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
|
||||
nodes[name].add_dependency_edge(
|
||||
nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
nodes[dep_name], depflag=edge.depflag, virtuals=edge.virtuals
|
||||
)
|
||||
if any(dep not in self_nodes for dep in self[name]._dependencies):
|
||||
nodes[name].build_spec = self[name].build_spec
|
||||
else:
|
||||
for edge in other[name].edges_to_dependencies():
|
||||
nodes[name].add_dependency_edge(
|
||||
nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
nodes[edge.spec.name], depflag=edge.depflag, virtuals=edge.virtuals
|
||||
)
|
||||
if any(dep not in other_nodes for dep in other[name]._dependencies):
|
||||
nodes[name].build_spec = other[name].build_spec
|
||||
@@ -4826,8 +4813,9 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
|
||||
# Update with additional constraints from other spec
|
||||
for name in current_spec_constraint.direct_dep_difference(merged_spec):
|
||||
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
|
||||
|
||||
merged_spec._add_dependency(
|
||||
edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals
|
||||
edge.spec.copy(), depflag=edge.depflag, virtuals=edge.virtuals
|
||||
)
|
||||
|
||||
return merged_spec
|
||||
@@ -4974,9 +4962,11 @@ def _load(cls, data):
|
||||
# Pass 2: Finish construction of all DAG edges (including build specs)
|
||||
for node_hash, node in hash_dict.items():
|
||||
node_spec = node["node_spec"]
|
||||
for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node):
|
||||
for _, dhash, dtype, _, virtuals in cls.dependencies_from_node_dict(node):
|
||||
node_spec._add_dependency(
|
||||
hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals
|
||||
hash_dict[dhash]["node_spec"],
|
||||
depflag=dt.canonicalize(dtype),
|
||||
virtuals=virtuals,
|
||||
)
|
||||
if "build_spec" in node.keys():
|
||||
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
|
||||
@@ -5012,7 +5002,9 @@ def load(cls, data):
|
||||
# get dependency dict from the node.
|
||||
name, data = cls.name_and_data(node)
|
||||
for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
|
||||
deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals)
|
||||
deps[name]._add_dependency(
|
||||
deps[dname], depflag=dt.canonicalize(dtypes), virtuals=virtuals
|
||||
)
|
||||
|
||||
reconstruct_virtuals_on_edges(result)
|
||||
return result
|
||||
@@ -5158,6 +5150,43 @@ def save_dependency_specfiles(root: Spec, output_directory: str, dependencies: L
|
||||
fd.write(spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
|
||||
def get_host_environment_metadata() -> Dict[str, str]:
|
||||
"""Get the host environment, reduce to a subset that we can store in
|
||||
the install directory, and add the spack version.
|
||||
"""
|
||||
import spack.main
|
||||
|
||||
environ = get_host_environment()
|
||||
return {
|
||||
"host_os": environ["os"],
|
||||
"platform": environ["platform"],
|
||||
"host_target": environ["target"],
|
||||
"hostname": environ["hostname"],
|
||||
"spack_version": spack.main.get_version(),
|
||||
"kernel_version": platform.version(),
|
||||
}
|
||||
|
||||
|
||||
def get_host_environment() -> Dict[str, Any]:
|
||||
"""Return a dictionary (lookup) with host information (not including the
|
||||
os.environ).
|
||||
"""
|
||||
host_platform = spack.platforms.host()
|
||||
host_target = host_platform.target("default_target")
|
||||
host_os = host_platform.operating_system("default_os")
|
||||
arch_fmt = "platform={0} os={1} target={2}"
|
||||
arch_spec = Spec(arch_fmt.format(host_platform, host_os, host_target))
|
||||
return {
|
||||
"target": str(host_target),
|
||||
"os": str(host_os),
|
||||
"platform": str(host_platform),
|
||||
"arch": arch_spec,
|
||||
"architecture": arch_spec,
|
||||
"arch_str": str(arch_spec),
|
||||
"hostname": socket.gethostname(),
|
||||
}
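A brief, hypothetical usage sketch of the two helpers above; it assumes they are importable from spack.spec (where this diff adds them) and uses only the dictionary keys they define.

from spack.spec import get_host_environment, get_host_environment_metadata

env = get_host_environment()
print(env["platform"], env["os"], env["target"])      # e.g. linux centos8 haswell

meta = get_host_environment_metadata()
print(meta["spack_version"], meta["kernel_version"])  # recorded in the install directory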
|
||||
|
||||
|
||||
class SpecParseError(spack.error.SpecError):
|
||||
"""Wrapper for ParseError for when we're parsing specs."""
|
||||
|
||||
@@ -5218,7 +5247,7 @@ class InvalidDependencyError(spack.error.SpecError):
|
||||
def __init__(self, pkg, deps):
|
||||
self.invalid_deps = deps
|
||||
super().__init__(
|
||||
"Package {0} does not depend on {1}".format(pkg, spack.util.string.comma_or(deps))
|
||||
"Package {0} does not depend on {1}".format(pkg, llnl.string.comma_or(deps))
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import concurrent.futures
|
||||
import errno
|
||||
import getpass
|
||||
import glob
|
||||
@@ -12,8 +12,9 @@
|
||||
import stat
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Dict, Iterable
|
||||
from typing import Callable, Dict, Iterable, Optional
|
||||
|
||||
import llnl.string
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import (
|
||||
@@ -39,7 +40,6 @@
|
||||
import spack.util.pattern as pattern
|
||||
import spack.util.url as url_util
|
||||
from spack.util.crypto import bit_length, prefix_bits
|
||||
from spack.util.web import FetchError
|
||||
|
||||
# The well-known stage source subdirectory name.
|
||||
_source_path_subdir = "spack-src"
|
||||
@@ -241,10 +241,7 @@ class Stage:
|
||||
similar, and are intended to persist for only one run of spack.
|
||||
"""
|
||||
|
||||
"""Shared dict of all stage locks."""
|
||||
stage_locks: Dict[str, spack.util.lock.Lock] = {}
|
||||
|
||||
"""Most staging is managed by Spack. DIYStage is one exception."""
|
||||
#: Most staging is managed by Spack. DIYStage is one exception.
|
||||
managed_by_spack = True
|
||||
|
||||
def __init__(
|
||||
@@ -330,17 +327,12 @@ def __init__(
|
||||
# details on this approach.
|
||||
self._lock = None
|
||||
if lock:
|
||||
if self.name not in Stage.stage_locks:
|
||||
sha1 = hashlib.sha1(self.name.encode("utf-8")).digest()
|
||||
lock_id = prefix_bits(sha1, bit_length(sys.maxsize))
|
||||
stage_lock_path = os.path.join(get_stage_root(), ".lock")
|
||||
|
||||
tty.debug("Creating stage lock {0}".format(self.name))
|
||||
Stage.stage_locks[self.name] = spack.util.lock.Lock(
|
||||
stage_lock_path, start=lock_id, length=1, desc=self.name
|
||||
)
|
||||
|
||||
self._lock = Stage.stage_locks[self.name]
|
||||
sha1 = hashlib.sha1(self.name.encode("utf-8")).digest()
|
||||
lock_id = prefix_bits(sha1, bit_length(sys.maxsize))
|
||||
stage_lock_path = os.path.join(get_stage_root(), ".lock")
|
||||
self._lock = spack.util.lock.Lock(
|
||||
stage_lock_path, start=lock_id, length=1, desc=self.name
|
||||
)
|
||||
|
||||
# When stages are reused, we need to know whether to re-create
|
||||
# it. This marks whether it has been created/destroyed.
|
||||
@@ -522,7 +514,7 @@ def print_errors(errors):
|
||||
|
||||
self.fetcher = self.default_fetcher
|
||||
default_msg = "All fetchers failed for {0}".format(self.name)
|
||||
raise FetchError(err_msg or default_msg, None)
|
||||
raise spack.error.FetchError(err_msg or default_msg, None)
|
||||
|
||||
print_errors(errors)
|
||||
|
||||
@@ -868,45 +860,47 @@ def purge():
|
||||
os.remove(stage_path)
|
||||
|
||||
|
||||
def get_checksums_for_versions(url_dict, name, **kwargs):
|
||||
"""Fetches and checksums archives from URLs.
|
||||
def get_checksums_for_versions(
|
||||
url_by_version: Dict[str, str],
|
||||
package_name: str,
|
||||
*,
|
||||
batch: bool = False,
|
||||
first_stage_function: Optional[Callable[[Stage, str], None]] = None,
|
||||
keep_stage: bool = False,
|
||||
concurrency: Optional[int] = None,
|
||||
fetch_options: Optional[Dict[str, str]] = None,
|
||||
) -> Dict[str, str]:
|
||||
"""Computes the checksums for each version passed in input, and returns the results.
|
||||
|
||||
This function is called by both ``spack checksum`` and ``spack
|
||||
create``. The ``first_stage_function`` argument allows the caller to
|
||||
inspect the first downloaded archive, e.g., to determine the build
|
||||
system.
|
||||
Archives are fetched according to the URL dictionary passed as input.
|
||||
|
||||
The ``first_stage_function`` argument allows the caller to inspect the first downloaded
|
||||
archive, e.g., to determine the build system.
|
||||
|
||||
Args:
|
||||
url_dict (dict): A dictionary of the form: version -> URL
|
||||
name (str): The name of the package
|
||||
first_stage_function (typing.Callable): function that takes a Stage and a URL;
|
||||
this is run on the stage of the first URL downloaded
|
||||
keep_stage (bool): whether to keep staging area when command completes
|
||||
batch (bool): whether to ask user how many versions to fetch (false)
|
||||
or fetch all versions (true)
|
||||
fetch_options (dict): Options used for the fetcher (such as timeout
|
||||
or cookies)
|
||||
url_by_version: URLs keyed by version
|
||||
package_name: name of the package
|
||||
first_stage_function: function that takes a Stage and a URL; this is run on the stage
|
||||
of the first URL downloaded
|
||||
keep_stage: whether to keep staging area when command completes
|
||||
batch: whether to ask user how many versions to fetch (false) or fetch all versions (true)
|
||||
fetch_options: options used for the fetcher (such as timeout or cookies)
|
||||
concurrency: maximum number of workers to use for retrieving archives
|
||||
|
||||
Returns:
|
||||
(dict): A dictionary of the form: version -> checksum
|
||||
|
||||
A dictionary mapping each version to the corresponding checksum
|
||||
"""
|
||||
batch = kwargs.get("batch", False)
|
||||
fetch_options = kwargs.get("fetch_options", None)
|
||||
first_stage_function = kwargs.get("first_stage_function", None)
|
||||
keep_stage = kwargs.get("keep_stage", False)
|
||||
|
||||
sorted_versions = sorted(url_dict.keys(), reverse=True)
|
||||
sorted_versions = sorted(url_by_version.keys(), reverse=True)
|
||||
|
||||
# Find length of longest string in the list for padding
|
||||
max_len = max(len(str(v)) for v in sorted_versions)
|
||||
num_ver = len(sorted_versions)
|
||||
|
||||
tty.msg(
|
||||
"Found {0} version{1} of {2}:".format(num_ver, "" if num_ver == 1 else "s", name),
|
||||
f"Found {llnl.string.plural(num_ver, 'version')} of {package_name}:",
|
||||
"",
|
||||
*llnl.util.lang.elide_list(
|
||||
["{0:{1}} {2}".format(str(v), max_len, url_dict[v]) for v in sorted_versions]
|
||||
["{0:{1}} {2}".format(str(v), max_len, url_by_version[v]) for v in sorted_versions]
|
||||
),
|
||||
)
|
||||
print()
|
||||
@@ -922,50 +916,76 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
|
||||
tty.die("Aborted.")
|
||||
|
||||
versions = sorted_versions[:archives_to_fetch]
|
||||
urls = [url_dict[v] for v in versions]
|
||||
search_arguments = [(url_by_version[v], v) for v in versions]
|
||||
|
||||
tty.debug("Downloading...")
|
||||
version_hashes = {}
|
||||
i = 0
|
||||
errors = []
|
||||
for url, version in zip(urls, versions):
|
||||
try:
|
||||
if fetch_options:
|
||||
url_or_fs = fs.URLFetchStrategy(url, fetch_options=fetch_options)
|
||||
else:
|
||||
url_or_fs = url
|
||||
with Stage(url_or_fs, keep=keep_stage) as stage:
|
||||
# Fetch the archive
|
||||
stage.fetch()
|
||||
if i == 0 and first_stage_function:
|
||||
# Only run first_stage_function the first time,
|
||||
# no need to run it every time
|
||||
first_stage_function(stage, url)
|
||||
version_hashes, errors = {}, []
|
||||
|
||||
# Checksum the archive and add it to the list
|
||||
version_hashes[version] = spack.util.crypto.checksum(
|
||||
hashlib.sha256, stage.archive_file
|
||||
)
|
||||
i += 1
|
||||
except FailedDownloadError:
|
||||
errors.append("Failed to fetch {0}".format(url))
|
||||
except Exception as e:
|
||||
tty.msg("Something failed on {0}, skipping. ({1})".format(url, e))
|
||||
# Don't spawn 16 processes when we need to fetch 2 urls
|
||||
if concurrency is not None:
|
||||
concurrency = min(concurrency, len(search_arguments))
|
||||
else:
|
||||
concurrency = min(os.cpu_count() or 1, len(search_arguments))
|
||||
|
||||
for msg in errors:
|
||||
tty.debug(msg)
|
||||
# The function might have side effects in memory that would not be reflected in the
|
||||
# parent process if run in a child process. If this pattern happens frequently, we
|
||||
# can move this function call *after* having distributed the work to executors.
|
||||
if first_stage_function is not None:
|
||||
(url, version), search_arguments = search_arguments[0], search_arguments[1:]
|
||||
checksum, error = _fetch_and_checksum(url, fetch_options, keep_stage, first_stage_function)
|
||||
if error is not None:
|
||||
errors.append(error)
|
||||
|
||||
if checksum is not None:
|
||||
version_hashes[version] = checksum
|
||||
|
||||
with concurrent.futures.ProcessPoolExecutor(max_workers=concurrency) as executor:
|
||||
results = []
|
||||
for url, version in search_arguments:
|
||||
future = executor.submit(_fetch_and_checksum, url, fetch_options, keep_stage)
|
||||
results.append((version, future))
|
||||
|
||||
for version, future in results:
|
||||
checksum, error = future.result()
|
||||
if error is not None:
|
||||
errors.append(error)
|
||||
continue
|
||||
version_hashes[version] = checksum
|
||||
|
||||
for msg in errors:
|
||||
tty.debug(msg)
|
||||
|
||||
if not version_hashes:
|
||||
tty.die("Could not fetch any versions for {0}".format(name))
|
||||
tty.die(f"Could not fetch any versions for {package_name}")
|
||||
|
||||
num_hash = len(version_hashes)
|
||||
tty.debug(
|
||||
"Checksummed {0} version{1} of {2}:".format(num_hash, "" if num_hash == 1 else "s", name)
|
||||
)
|
||||
tty.debug(f"Checksummed {num_hash} version{'' if num_hash == 1 else 's'} of {package_name}:")
|
||||
|
||||
return version_hashes
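To make the new keyword-only signature concrete, here is a hypothetical call; the package name and URLs are made up for illustration, and all keyword arguments shown are ones defined by the signature above.

url_by_version = {
    "1.2.0": "https://example.com/foo-1.2.0.tar.gz",
    "1.1.0": "https://example.com/foo-1.1.0.tar.gz",
}
checksums = get_checksums_for_versions(
    url_by_version, "foo", batch=True, keep_stage=False, concurrency=2
)
for version, sha256 in sorted(checksums.items(), reverse=True):
    print(f"{version}  {sha256}")

With batch=True the user is not prompted and every version is fetched; each worker returns a (checksum, error) pair, and only successful fetches end up in the returned dictionary.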
|
||||
|
||||
|
||||
def _fetch_and_checksum(url, options, keep_stage, action_fn=None):
|
||||
try:
|
||||
url_or_fs = url
|
||||
if options:
|
||||
url_or_fs = fs.URLFetchStrategy(url, fetch_options=options)
|
||||
|
||||
with Stage(url_or_fs, keep=keep_stage) as stage:
|
||||
# Fetch the archive
|
||||
stage.fetch()
|
||||
if action_fn is not None:
|
||||
# Only run first_stage_function the first time,
|
||||
# no need to run it every time
|
||||
action_fn(stage, url)
|
||||
|
||||
# Checksum the archive and add it to the list
|
||||
checksum = spack.util.crypto.checksum(hashlib.sha256, stage.archive_file)
|
||||
return checksum, None
|
||||
except FailedDownloadError:
|
||||
return None, f"[WORKER] Failed to fetch {url}"
|
||||
except Exception as e:
|
||||
return None, f"[WORKER] Something failed on {url}, skipping. ({e})"
|
||||
|
||||
|
||||
class StageError(spack.error.SpackError):
|
||||
""" "Superclass for all errors encountered during staging."""
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.path import Path, convert_to_platform_path
|
||||
from llnl.util.filesystem import HeaderList, LibraryList
|
||||
|
||||
import spack.build_environment
|
||||
@@ -21,7 +22,6 @@
|
||||
from spack.util.cpus import determine_number_of_jobs
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.path import Path, convert_to_platform_path
|
||||
|
||||
|
||||
def os_pathsep_join(path, *pths):
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.deptypes as dt
|
||||
import spack.installer as inst
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
@@ -59,10 +60,10 @@ def test_build_request_strings(install_mockery):
|
||||
@pytest.mark.parametrize(
|
||||
"package_cache_only,dependencies_cache_only,package_deptypes,dependencies_deptypes",
|
||||
[
|
||||
(False, False, ["build", "link", "run"], ["build", "link", "run"]),
|
||||
(True, False, ["link", "run"], ["build", "link", "run"]),
|
||||
(False, True, ["build", "link", "run"], ["link", "run"]),
|
||||
(True, True, ["link", "run"], ["link", "run"]),
|
||||
(False, False, dt.BUILD | dt.LINK | dt.RUN, dt.BUILD | dt.LINK | dt.RUN),
|
||||
(True, False, dt.LINK | dt.RUN, dt.BUILD | dt.LINK | dt.RUN),
|
||||
(False, True, dt.BUILD | dt.LINK | dt.RUN, dt.LINK | dt.RUN),
|
||||
(True, True, dt.LINK | dt.RUN, dt.LINK | dt.RUN),
|
||||
],
|
||||
)
|
||||
def test_build_request_deptypes(
|
||||
@@ -82,8 +83,8 @@ def test_build_request_deptypes(
|
||||
},
|
||||
)
|
||||
|
||||
actual_package_deptypes = build_request.get_deptypes(s.package)
|
||||
actual_dependency_deptypes = build_request.get_deptypes(s["dependency-install"].package)
|
||||
actual_package_deptypes = build_request.get_depflags(s.package)
|
||||
actual_dependency_deptypes = build_request.get_depflags(s["dependency-install"].package)
|
||||
|
||||
assert sorted(actual_package_deptypes) == package_deptypes
|
||||
assert sorted(actual_dependency_deptypes) == dependencies_deptypes
|
||||
assert actual_package_deptypes == package_deptypes
|
||||
assert actual_dependency_deptypes == dependencies_deptypes
|
||||
|
||||
@@ -7,13 +7,14 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.path import convert_to_posix_path
|
||||
|
||||
import spack.bootstrap
|
||||
import spack.bootstrap.core
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
import spack.mirror
|
||||
from spack.util.path import convert_to_posix_path
|
||||
|
||||
_bootstrap = spack.main.SpackCommand("bootstrap")
|
||||
|
||||
|
||||
@@ -120,8 +120,9 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
|
||||
"names,tags,exclude,expected",
|
||||
[
|
||||
# find --all
|
||||
(None, ["detectable"], [], ["find-externals1"]),
|
||||
(None, ["detectable"], [], ["builtin.mock.find-externals1"]),
|
||||
# find --all --exclude find-externals1
|
||||
(None, ["detectable"], ["builtin.mock.find-externals1"], []),
|
||||
(None, ["detectable"], ["find-externals1"], []),
|
||||
# find cmake (and cmake is not detectable)
|
||||
(["cmake"], ["detectable"], [], []),
|
||||
@@ -202,19 +203,6 @@ def fail():
|
||||
assert "Skipping manifest and continuing" in output
|
||||
|
||||
|
||||
def test_find_external_nonempty_default_manifest_dir(
|
||||
mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
|
||||
):
|
||||
"""The user runs 'spack external find'; the default manifest directory
|
||||
contains a manifest file. Ensure that the specs are read.
|
||||
"""
|
||||
monkeypatch.setenv("PATH", "")
|
||||
monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
|
||||
external("find")
|
||||
specs = spack.store.STORE.db.query("hwloc")
|
||||
assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)
|
||||
|
||||
|
||||
def test_find_external_merge(mutable_config, mutable_mock_repo):
|
||||
"""Check that 'spack find external' doesn't overwrite an existing spec
|
||||
entry in packages.yaml.
|
||||
|
||||
@@ -14,7 +14,6 @@
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
from spack.util.web import FetchError
|
||||
|
||||
pytestmark = pytest.mark.usefixtures("config", "mutable_mock_repo")
|
||||
|
||||
@@ -208,7 +207,7 @@ def test_env_aware_spec(mutable_mock_env_path):
|
||||
[
|
||||
("develop-branch-version", "f3c7206350ac8ee364af687deaae5c574dcfca2c=develop", None),
|
||||
("develop-branch-version", "git." + "a" * 40 + "=develop", None),
|
||||
("callpath", "f3c7206350ac8ee364af687deaae5c574dcfca2c=1.0", FetchError),
|
||||
("callpath", "f3c7206350ac8ee364af687deaae5c574dcfca2c=1.0", spack.error.FetchError),
|
||||
("develop-branch-version", "git.foo=0.2.15", None),
|
||||
],
|
||||
)
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.detection
|
||||
import spack.error
|
||||
import spack.hash_types as ht
|
||||
@@ -235,13 +236,13 @@ def test_concretize_mention_build_dep(self):
|
||||
# Check parent's perspective of child
|
||||
to_dependencies = spec.edges_to_dependencies(name="cmake")
|
||||
assert len(to_dependencies) == 1
|
||||
assert set(to_dependencies[0].deptypes) == set(["build"])
|
||||
assert to_dependencies[0].depflag == dt.BUILD
|
||||
|
||||
# Check child's perspective of parent
|
||||
cmake = spec["cmake"]
|
||||
from_dependents = cmake.edges_from_dependents(name="cmake-client")
|
||||
assert len(from_dependents) == 1
|
||||
assert set(from_dependents[0].deptypes) == set(["build"])
|
||||
assert from_dependents[0].depflag == dt.BUILD
|
||||
|
||||
def test_concretize_preferred_version(self):
|
||||
spec = check_concretize("python")
|
||||
@@ -1355,15 +1356,15 @@ def test_multivalued_variants_from_cli(self, spec_str, expected_dict):
|
||||
[
|
||||
# Version 1.1.0 is deprecated and should not be selected, unless we
|
||||
# explicitly asked for that
|
||||
("deprecated-versions", ["deprecated-versions@1.0.0"]),
|
||||
("deprecated-versions@1.1.0", ["deprecated-versions@1.1.0"]),
|
||||
("deprecated-versions", "deprecated-versions@1.0.0"),
|
||||
("deprecated-versions@=1.1.0", "deprecated-versions@1.1.0"),
|
||||
],
|
||||
)
|
||||
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
|
||||
def test_deprecated_versions_not_selected(self, spec_str, expected):
|
||||
s = Spec(spec_str).concretized()
|
||||
for abstract_spec in expected:
|
||||
assert abstract_spec in s
|
||||
with spack.config.override("config:deprecated", True):
|
||||
s = Spec(spec_str).concretized()
|
||||
s.satisfies(expected)
|
||||
|
||||
@pytest.mark.regression("24196")
|
||||
def test_version_badness_more_important_than_default_mv_variants(self):
|
||||
|
||||
@@ -543,21 +543,37 @@ def test_reuse_oneof(concretize_scope, create_test_repo, mutable_database, fake_
|
||||
assert not s2.satisfies("@2.5 %gcc")
|
||||
|
||||
|
||||
def test_requirements_are_higher_priority_than_deprecation(concretize_scope, test_repo):
|
||||
"""Test that users can override a deprecated version with a requirement."""
|
||||
# @2.3 is a deprecated versions. Ensure that any_of picks both constraints,
|
||||
@pytest.mark.parametrize(
|
||||
"allow_deprecated,expected,not_expected",
|
||||
[(True, ["@=2.3", "%gcc"], []), (False, ["%gcc"], ["@=2.3"])],
|
||||
)
|
||||
def test_requirements_and_deprecated_versions(
|
||||
allow_deprecated, expected, not_expected, concretize_scope, test_repo
|
||||
):
|
||||
"""Tests the expected behavior of requirements and deprecated versions.
|
||||
|
||||
If deprecated versions are not allowed, concretization should just pick
|
||||
the other requirement.
|
||||
|
||||
If deprecated versions are allowed, both requirements are honored.
|
||||
"""
|
||||
# 2.3 is a deprecated version. Ensure that any_of picks both constraints,
|
||||
# since they are possible
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
require:
|
||||
- any_of: ["@2.3", "%gcc"]
|
||||
- any_of: ["@=2.3", "%gcc"]
|
||||
"""
|
||||
update_packages_config(conf_str)
|
||||
|
||||
s1 = Spec("y").concretized()
|
||||
assert s1.satisfies("@2.3")
|
||||
assert s1.satisfies("%gcc")
|
||||
with spack.config.override("config:deprecated", allow_deprecated):
|
||||
s1 = Spec("y").concretized()
|
||||
for constraint in expected:
|
||||
assert s1.satisfies(constraint)
|
||||
|
||||
for constraint in not_expected:
|
||||
assert not s1.satisfies(constraint)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("spec_str,requirement_str", [("x", "%gcc"), ("x", "%clang")])
|
||||
|
||||
@@ -36,6 +36,7 @@
|
||||
import spack.database
|
||||
import spack.directory_layout
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
import spack.package_prefs
|
||||
import spack.paths
|
||||
@@ -52,7 +53,6 @@
|
||||
import spack.util.url as url_util
|
||||
from spack.fetch_strategy import URLFetchStrategy
|
||||
from spack.util.pattern import Bunch
|
||||
from spack.util.web import FetchError
|
||||
|
||||
|
||||
def ensure_configuration_fixture_run_before(request):
|
||||
@@ -472,7 +472,7 @@ def fetcher(self, target_path, digest, **kwargs):
|
||||
|
||||
class MockCacheFetcher:
|
||||
def fetch(self):
|
||||
raise FetchError("Mock cache always fails for tests")
|
||||
raise spack.error.FetchError("Mock cache always fails for tests")
|
||||
|
||||
def __str__(self):
|
||||
return "[mock fetch cache]"
|
||||
@@ -1714,17 +1714,6 @@ def brand_new_binary_cache():
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def directory_with_manifest(tmpdir):
|
||||
"""Create a manifest file in a directory. Used by 'spack external'."""
|
||||
with tmpdir.as_cwd():
|
||||
test_db_fname = "external-db.json"
|
||||
with open(test_db_fname, "w") as db_file:
|
||||
json.dump(spack.test.cray_manifest.create_manifest_content(), db_file)
|
||||
|
||||
yield str(tmpdir)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def noncyclical_dir_structure(tmpdir):
|
||||
"""
|
||||
|
||||
@@ -23,53 +23,6 @@
|
||||
import spack.store
|
||||
from spack.cray_manifest import compiler_from_entry, entries_to_specs
|
||||
|
||||
example_x_json_str = """\
|
||||
{
|
||||
"name": "packagex",
|
||||
"hash": "hash-of-x",
|
||||
"prefix": "/path/to/packagex-install/",
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "linux",
|
||||
"platform_os": "centos8",
|
||||
"target": {
|
||||
"name": "haswell"
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "gcc",
|
||||
"version": "10.2.0.cray"
|
||||
},
|
||||
"dependencies": {
|
||||
"packagey": {
|
||||
"hash": "hash-of-y",
|
||||
"type": ["link"]
|
||||
}
|
||||
},
|
||||
"parameters": {
|
||||
"precision": ["double", "float"]
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
example_compiler_entry = """\
|
||||
{
|
||||
"name": "gcc",
|
||||
"prefix": "/path/to/compiler/",
|
||||
"version": "7.5.0",
|
||||
"arch": {
|
||||
"os": "centos8",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"executables": {
|
||||
"cc": "/path/to/compiler/cc",
|
||||
"cxx": "/path/to/compiler/cxx",
|
||||
"fc": "/path/to/compiler/fc"
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
class JsonSpecEntry:
|
||||
def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters):
|
||||
@@ -104,16 +57,19 @@ def __init__(self, platform, os, target):
|
||||
self.os = os
|
||||
self.target = target
|
||||
|
||||
def to_dict(self):
|
||||
def spec_json(self):
|
||||
return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}}
|
||||
|
||||
def compiler_json(self):
|
||||
return {"os": self.os, "target": self.target}
|
||||
|
||||
|
||||
class JsonCompilerEntry:
|
||||
def __init__(self, name, version, arch=None, executables=None):
|
||||
self.name = name
|
||||
self.version = version
|
||||
if not arch:
|
||||
arch = {"os": "centos8", "target": "x86_64"}
|
||||
arch = JsonArchEntry("anyplatform", "anyos", "anytarget")
|
||||
if not executables:
|
||||
executables = {
|
||||
"cc": "/path/to/compiler/cc",
|
||||
@@ -127,7 +83,7 @@ def compiler_json(self):
|
||||
return {
|
||||
"name": self.name,
|
||||
"version": self.version,
|
||||
"arch": self.arch,
|
||||
"arch": self.arch.compiler_json(),
|
||||
"executables": self.executables,
|
||||
}
|
||||
|
||||
@@ -138,22 +94,58 @@ def spec_json(self):
|
||||
return {"name": self.name, "version": self.version}
|
||||
|
||||
|
||||
_common_arch = JsonArchEntry(platform="linux", os="centos8", target="haswell").to_dict()
|
||||
|
||||
# Intended to match example_compiler_entry above
|
||||
_common_compiler = JsonCompilerEntry(
|
||||
name="gcc",
|
||||
version="10.2.0.cray",
|
||||
arch={"os": "centos8", "target": "x86_64"},
|
||||
executables={
|
||||
"cc": "/path/to/compiler/cc",
|
||||
"cxx": "/path/to/compiler/cxx",
|
||||
"fc": "/path/to/compiler/fc",
|
||||
},
|
||||
)
|
||||
@pytest.fixture
|
||||
def _common_arch(test_platform):
|
||||
return JsonArchEntry(
|
||||
platform=test_platform.name,
|
||||
os=test_platform.front_os,
|
||||
target=test_platform.target("fe").name,
|
||||
)
|
||||
|
||||
|
||||
def test_compatibility():
|
||||
@pytest.fixture
|
||||
def _common_compiler(_common_arch):
|
||||
return JsonCompilerEntry(
|
||||
name="gcc",
|
||||
version="10.2.0.2112",
|
||||
arch=_common_arch,
|
||||
executables={
|
||||
"cc": "/path/to/compiler/cc",
|
||||
"cxx": "/path/to/compiler/cxx",
|
||||
"fc": "/path/to/compiler/fc",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _other_compiler(_common_arch):
|
||||
return JsonCompilerEntry(
|
||||
name="clang",
|
||||
version="3.0.0",
|
||||
arch=_common_arch,
|
||||
executables={
|
||||
"cc": "/path/to/compiler/clang",
|
||||
"cxx": "/path/to/compiler/clang++",
|
||||
"fc": "/path/to/compiler/flang",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _raw_json_x(_common_arch):
|
||||
return {
|
||||
"name": "packagex",
|
||||
"hash": "hash-of-x",
|
||||
"prefix": "/path/to/packagex-install/",
|
||||
"version": "1.0",
|
||||
"arch": _common_arch.spec_json(),
|
||||
"compiler": {"name": "gcc", "version": "10.2.0.2112"},
|
||||
"dependencies": {"packagey": {"hash": "hash-of-y", "type": ["link"]}},
|
||||
"parameters": {"precision": ["double", "float"]},
|
||||
}
|
||||
|
||||
|
||||
def test_manifest_compatibility(_common_arch, _common_compiler, _raw_json_x):
|
||||
"""Make sure that JsonSpecEntry outputs the expected JSON structure
|
||||
by comparing it with JSON parsed from an example string. This
|
||||
ensures that the testing objects like JsonSpecEntry produce the
|
||||
@@ -164,7 +156,7 @@ def test_compatibility():
|
||||
hash="hash-of-y",
|
||||
prefix="/path/to/packagey-install/",
|
||||
version="1.0",
|
||||
arch=_common_arch,
|
||||
arch=_common_arch.spec_json(),
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={},
|
||||
@@ -175,23 +167,44 @@ def test_compatibility():
|
||||
hash="hash-of-x",
|
||||
prefix="/path/to/packagex-install/",
|
||||
version="1.0",
|
||||
arch=_common_arch,
|
||||
arch=_common_arch.spec_json(),
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies=dict([y.as_dependency(deptypes=["link"])]),
|
||||
parameters={"precision": ["double", "float"]},
|
||||
)
|
||||
|
||||
x_from_entry = x.to_dict()
|
||||
x_from_str = json.loads(example_x_json_str)
|
||||
assert x_from_entry == x_from_str
|
||||
assert x_from_entry == _raw_json_x
|
||||
|
||||
|
||||
def test_compiler_from_entry():
|
||||
compiler_data = json.loads(example_compiler_entry)
|
||||
compiler_from_entry(compiler_data)
|
||||
compiler_data = json.loads(
|
||||
"""\
|
||||
{
|
||||
"name": "gcc",
|
||||
"prefix": "/path/to/compiler/",
|
||||
"version": "7.5.0",
|
||||
"arch": {
|
||||
"os": "centos8",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"executables": {
|
||||
"cc": "/path/to/compiler/cc",
|
||||
"cxx": "/path/to/compiler/cxx",
|
||||
"fc": "/path/to/compiler/fc"
|
||||
}
|
||||
}
|
||||
"""
|
||||
)
|
||||
compiler = compiler_from_entry(compiler_data, "/example/file")
|
||||
assert compiler.cc == "/path/to/compiler/cc"
|
||||
assert compiler.cxx == "/path/to/compiler/cxx"
|
||||
assert compiler.fc == "/path/to/compiler/fc"
|
||||
assert compiler.operating_system == "centos8"
|
||||
|
||||
|
||||
def generate_openmpi_entries():
|
||||
@pytest.fixture
|
||||
def generate_openmpi_entries(_common_arch, _common_compiler):
|
||||
"""Generate two example JSON entries that refer to an OpenMPI
|
||||
installation and a hwloc dependency.
|
||||
"""
|
||||
@@ -202,7 +215,7 @@ def generate_openmpi_entries():
|
||||
hash="hwlocfakehashaaa",
|
||||
prefix="/path/to/hwloc-install/",
|
||||
version="2.0.3",
|
||||
arch=_common_arch,
|
||||
arch=_common_arch.spec_json(),
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={},
|
||||
@@ -216,26 +229,25 @@ def generate_openmpi_entries():
|
||||
hash="openmpifakehasha",
|
||||
prefix="/path/to/openmpi-install/",
|
||||
version="4.1.0",
|
||||
arch=_common_arch,
|
||||
arch=_common_arch.spec_json(),
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies=dict([hwloc.as_dependency(deptypes=["link"])]),
|
||||
parameters={"internal-hwloc": False, "fabrics": ["psm"], "missing_variant": True},
|
||||
)
|
||||
|
||||
return [openmpi, hwloc]
|
||||
return list(x.to_dict() for x in [openmpi, hwloc])
|
||||
|
||||
|
||||
def test_generate_specs_from_manifest():
|
||||
def test_generate_specs_from_manifest(generate_openmpi_entries):
|
||||
"""Given JSON entries, check that we can form a set of Specs
|
||||
including dependency references.
|
||||
"""
|
||||
entries = list(x.to_dict() for x in generate_openmpi_entries())
|
||||
specs = entries_to_specs(entries)
|
||||
specs = entries_to_specs(generate_openmpi_entries)
|
||||
(openmpi_spec,) = list(x for x in specs.values() if x.name == "openmpi")
|
||||
assert openmpi_spec["hwloc"]
|
||||
|
||||
|
||||
def test_translate_cray_platform_to_linux(monkeypatch):
|
||||
def test_translate_cray_platform_to_linux(monkeypatch, _common_compiler):
|
||||
"""Manifests might list specs on newer Cray platforms as being "cray",
|
||||
but Spack identifies such platforms as "linux". Make sure we
|
||||
automatically transform these entries.
|
||||
@@ -247,13 +259,13 @@ def the_host_is_linux():
|
||||
|
||||
monkeypatch.setattr(spack.platforms, "host", the_host_is_linux)
|
||||
|
||||
cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64").to_dict()
|
||||
cray_arch = JsonArchEntry(platform="cray", os="rhel8", target="x86_64")
|
||||
spec_json = JsonSpecEntry(
|
||||
name="cray-mpich",
|
||||
hash="craympichfakehashaaa",
|
||||
prefix="/path/to/cray-mpich/",
|
||||
version="1.0.0",
|
||||
arch=cray_arch,
|
||||
arch=cray_arch.spec_json(),
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={},
|
||||
@@ -263,14 +275,15 @@ def the_host_is_linux():
|
||||
assert spec.architecture.platform == "linux"
|
||||
|
||||
|
||||
def test_translate_compiler_name():
|
||||
def test_translate_compiler_name(_common_arch):
|
||||
nvidia_compiler = JsonCompilerEntry(
|
||||
name="nvidia",
|
||||
version="19.1",
|
||||
arch=_common_arch,
|
||||
executables={"cc": "/path/to/compiler/nvc", "cxx": "/path/to/compiler/nvc++"},
|
||||
)
|
||||
|
||||
compiler = compiler_from_entry(nvidia_compiler.compiler_json())
|
||||
compiler = compiler_from_entry(nvidia_compiler.compiler_json(), "/example/file")
|
||||
assert compiler.name == "nvhpc"
|
||||
|
||||
spec_json = JsonSpecEntry(
|
||||
@@ -278,7 +291,7 @@ def test_translate_compiler_name():
|
||||
hash="hwlocfakehashaaa",
|
||||
prefix="/path/to/hwloc-install/",
|
||||
version="2.0.3",
|
||||
arch=_common_arch,
|
||||
arch=_common_arch.spec_json(),
|
||||
compiler=nvidia_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={},
|
||||
@@ -288,18 +301,18 @@ def test_translate_compiler_name():
|
||||
assert spec.compiler.name == "nvhpc"
|
||||
|
||||
|
||||
def test_failed_translate_compiler_name():
|
||||
def test_failed_translate_compiler_name(_common_arch):
|
||||
unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0")
|
||||
|
||||
with pytest.raises(spack.compilers.UnknownCompilerError):
|
||||
compiler_from_entry(unknown_compiler.compiler_json())
|
||||
compiler_from_entry(unknown_compiler.compiler_json(), "/example/file")
|
||||
|
||||
spec_json = JsonSpecEntry(
|
||||
name="packagey",
|
||||
hash="hash-of-y",
|
||||
prefix="/path/to/packagey-install/",
|
||||
version="1.0",
|
||||
arch=_common_arch,
|
||||
arch=_common_arch.spec_json(),
|
||||
compiler=unknown_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={},
|
||||
@@ -309,7 +322,8 @@ def test_failed_translate_compiler_name():
|
||||
entries_to_specs([spec_json])
|
||||
|
||||
|
||||
def create_manifest_content():
|
||||
@pytest.fixture
|
||||
def manifest_content(generate_openmpi_entries, _common_compiler, _other_compiler):
|
||||
return {
|
||||
# Note: the cray_manifest module doesn't use the _meta section right
|
||||
# now, but it is anticipated to be useful
|
||||
@@ -319,43 +333,70 @@ def create_manifest_content():
|
||||
"schema-version": "1.3",
|
||||
"cpe-version": "22.06",
|
||||
},
|
||||
"specs": list(x.to_dict() for x in generate_openmpi_entries()),
|
||||
"compilers": [_common_compiler.compiler_json()],
|
||||
"specs": generate_openmpi_entries,
|
||||
"compilers": [_common_compiler.compiler_json(), _other_compiler.compiler_json()],
|
||||
}
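For orientation, the fixtures above assemble a manifest shaped roughly as follows. Field names are taken from the fixtures and the JSON entries earlier in this file; the concrete values are placeholders, since the real arch values now come from the test platform fixture.

manifest = {
    "_meta": {"schema-version": "1.3", "cpe-version": "22.06"},
    "specs": [
        {
            "name": "openmpi",
            "hash": "openmpifakehasha",
            "prefix": "/path/to/openmpi-install/",
            "version": "4.1.0",
            "arch": {"platform": "linux", "platform_os": "centos8", "target": {"name": "haswell"}},
            "compiler": {"name": "gcc", "version": "10.2.0.2112"},
            "dependencies": {"hwloc": {"hash": "hwlocfakehashaaa", "type": ["link"]}},
            "parameters": {"internal-hwloc": False, "fabrics": ["psm"]},
        }
    ],
    "compilers": [
        {
            "name": "gcc",
            "version": "10.2.0.2112",
            "arch": {"os": "centos8", "target": "x86_64"},
            "executables": {"cc": "/path/to/compiler/cc"},
        }
    ],
}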
|
||||
|
||||
|
||||
@pytest.mark.only_original(
|
||||
"The ASP-based concretizer is currently picky about OS matching and will fail."
|
||||
)
|
||||
def test_read_cray_manifest(tmpdir, mutable_config, mock_packages, mutable_database):
|
||||
def test_read_cray_manifest(
|
||||
tmpdir, mutable_config, mock_packages, mutable_database, manifest_content
|
||||
):
|
||||
"""Check that (a) we can read the cray manifest and add it to the Spack
|
||||
Database and (b) we can concretize specs based on that.
|
||||
"""
|
||||
with tmpdir.as_cwd():
|
||||
test_db_fname = "external-db.json"
|
||||
with open(test_db_fname, "w") as db_file:
|
||||
json.dump(create_manifest_content(), db_file)
|
||||
json.dump(manifest_content, db_file)
|
||||
cray_manifest.read(test_db_fname, True)
|
||||
query_specs = spack.store.STORE.db.query("openmpi")
|
||||
assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)
|
||||
|
||||
concretized_specs = spack.cmd.parse_specs(
|
||||
"depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64" " ^/openmpifakehasha".split(),
|
||||
concretize=True,
|
||||
"depends-on-openmpi ^/openmpifakehasha".split(), concretize=True
|
||||
)
|
||||
assert concretized_specs[0]["hwloc"].dag_hash() == "hwlocfakehashaaa"
|
||||
|
||||
|
||||
@pytest.mark.only_original(
|
||||
"The ASP-based concretizer is currently picky about OS matching and will fail."
|
||||
)
|
||||
def test_read_cray_manifest_add_compiler_failure(
|
||||
tmpdir, mutable_config, mock_packages, mutable_database, manifest_content, monkeypatch
|
||||
):
|
||||
"""Check that cray manifest can be read even if some compilers cannot
|
||||
be added.
|
||||
"""
|
||||
orig_add_compilers_to_config = spack.compilers.add_compilers_to_config
|
||||
|
||||
class fail_for_clang:
|
||||
def __init__(self):
|
||||
self.called_with_clang = False
|
||||
|
||||
def __call__(self, compilers, **kwargs):
|
||||
if any(x.name == "clang" for x in compilers):
|
||||
self.called_with_clang = True
|
||||
raise Exception()
|
||||
return orig_add_compilers_to_config(compilers, **kwargs)
|
||||
|
||||
checker = fail_for_clang()
|
||||
monkeypatch.setattr(spack.compilers, "add_compilers_to_config", checker)
|
||||
|
||||
with tmpdir.as_cwd():
|
||||
test_db_fname = "external-db.json"
|
||||
with open(test_db_fname, "w") as db_file:
|
||||
json.dump(manifest_content, db_file)
|
||||
cray_manifest.read(test_db_fname, True)
|
||||
query_specs = spack.store.STORE.db.query("openmpi")
|
||||
assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)
|
||||
|
||||
assert checker.called_with_clang
|
||||
|
||||
|
||||
def test_read_cray_manifest_twice_no_compiler_duplicates(
|
||||
tmpdir, mutable_config, mock_packages, mutable_database
|
||||
tmpdir, mutable_config, mock_packages, mutable_database, manifest_content
|
||||
):
|
||||
with tmpdir.as_cwd():
|
||||
test_db_fname = "external-db.json"
|
||||
with open(test_db_fname, "w") as db_file:
|
||||
json.dump(create_manifest_content(), db_file)
|
||||
json.dump(manifest_content, db_file)
|
||||
|
||||
# Read the manifest twice
|
||||
cray_manifest.read(test_db_fname, True)
|
||||
@@ -363,7 +404,7 @@ def test_read_cray_manifest_twice_no_compiler_duplicates(
|
||||
|
||||
compilers = spack.compilers.all_compilers()
|
||||
filtered = list(
|
||||
c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.cray")
|
||||
c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@=10.2.0.2112")
|
||||
)
|
||||
assert len(filtered) == 1
|
||||
|
||||
@@ -423,3 +464,27 @@ def test_convert_validation_error(tmpdir, mutable_config, mock_packages, mutable
|
||||
with pytest.raises(cray_manifest.ManifestValidationError) as e:
|
||||
cray_manifest.read(invalid_schema_path, True)
|
||||
str(e)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def directory_with_manifest(tmpdir, manifest_content):
|
||||
"""Create a manifest file in a directory. Used by 'spack external'."""
|
||||
with tmpdir.as_cwd():
|
||||
test_db_fname = "external-db.json"
|
||||
with open(test_db_fname, "w") as db_file:
|
||||
json.dump(manifest_content, db_file)
|
||||
|
||||
yield str(tmpdir)
|
||||
|
||||
|
||||
def test_find_external_nonempty_default_manifest_dir(
|
||||
mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
|
||||
):
|
||||
"""The user runs 'spack external find'; the default manifest directory
|
||||
contains a manifest file. Ensure that the specs are read.
|
||||
"""
|
||||
monkeypatch.setenv("PATH", "")
|
||||
monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
|
||||
spack.cmd.external._collect_and_consume_cray_manifest_files(ignore_default_dir=False)
|
||||
specs = spack.store.STORE.db.query("hwloc")
|
||||
assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)
|
||||
|
||||
@@ -12,11 +12,12 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.path import path_to_os_path
|
||||
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
from spack.directory_layout import DirectoryLayout, InvalidDirectoryLayoutParametersError
|
||||
from spack.spec import Spec
|
||||
from spack.util.path import path_to_os_path
|
||||
|
||||
# number of packages to test (to reduce test time)
|
||||
max_packages = 10
|
||||
|
||||
@@ -8,9 +8,9 @@
|
||||
import pytest
|
||||
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.fetch_strategy
|
||||
import spack.stage
|
||||
from spack.util.web import FetchError
|
||||
|
||||
|
||||
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
|
||||
@@ -33,7 +33,7 @@ def test_gcsfetchstrategy_bad_url(tmpdir, _fetch_method):
|
||||
with spack.stage.Stage(fetcher, path=testpath) as stage:
|
||||
assert stage is not None
|
||||
assert fetcher.archive_file is None
|
||||
with pytest.raises(FetchError):
|
||||
with pytest.raises(spack.error.FetchError):
|
||||
fetcher.fetch()
|
||||
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.database
|
||||
import spack.deptypes as dt
|
||||
import spack.installer as inst
|
||||
import spack.package_base
|
||||
import spack.package_prefs as prefs
|
||||
@@ -1388,6 +1389,26 @@ def test_single_external_implicit_install(install_mockery, explicit_args, is_exp
|
||||
assert spack.store.STORE.db.get_record(pkg).explicit == is_explicit
|
||||
|
||||
|
||||
def test_overwrite_install_does_install_build_deps(install_mockery, mock_fetch):
|
||||
"""When overwrite installing something from sources, build deps should be installed."""
|
||||
s = spack.spec.Spec("dtrun3").concretized()
|
||||
create_installer([(s, {})]).install()
|
||||
|
||||
# Verify there is a pure build dep
|
||||
edge = s.edges_to_dependencies(name="dtbuild3").pop()
|
||||
assert edge.depflag == dt.BUILD
|
||||
build_dep = edge.spec
|
||||
|
||||
# Uninstall the build dep
|
||||
build_dep.package.do_uninstall()
|
||||
|
||||
# Overwrite install the root dtrun3
|
||||
create_installer([(s, {"overwrite": [s.dag_hash()]})]).install()
|
||||
|
||||
# Verify that the build dep was also installed.
|
||||
assert build_dep.installed
|
||||
|
||||
|
||||
@pytest.mark.parametrize("run_tests", [True, False])
|
||||
def test_print_install_test_log_skipped(install_mockery, mock_packages, capfd, run_tests):
|
||||
"""Confirm printing of install log skipped if not run/no failures."""
|
||||
|
||||
43
lib/spack/spack/test/llnl/llnl_string.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import pytest
|
||||
|
||||
import llnl.string
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"arguments,expected",
|
||||
[
|
||||
((0, "thing"), "0 things"),
|
||||
((1, "thing"), "1 thing"),
|
||||
((2, "thing"), "2 things"),
|
||||
((1, "thing", "wombats"), "1 thing"),
|
||||
((2, "thing", "wombats"), "2 wombats"),
|
||||
((2, "thing", "wombats", False), "wombats"),
|
||||
],
|
||||
)
|
||||
def test_plural(arguments, expected):
|
||||
assert llnl.string.plural(*arguments) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"arguments,expected",
|
||||
[((["one", "two"],), ["'one'", "'two'"]), ((["one", "two"], "^"), ["^one^", "^two^"])],
|
||||
)
|
||||
def test_quote(arguments, expected):
|
||||
assert llnl.string.quote(*arguments) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"input,expected_and,expected_or",
|
||||
[
|
||||
(["foo"], "foo", "foo"),
|
||||
(["foo", "bar"], "foo and bar", "foo or bar"),
|
||||
(["foo", "bar", "baz"], "foo, bar, and baz", "foo, bar, or baz"),
|
||||
],
|
||||
)
|
||||
def test_comma_and_or(input, expected_and, expected_or):
|
||||
assert llnl.string.comma_and(input) == expected_and
|
||||
assert llnl.string.comma_or(input) == expected_or
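A quick illustration of the llnl.string helpers exercised above; the expected results are copied directly from the parametrized cases.

import llnl.string

llnl.string.plural(2, "thing")                  # "2 things"
llnl.string.plural(1, "thing", "wombats")       # "1 thing"
llnl.string.plural(2, "thing", "wombats")       # "2 wombats"
llnl.string.quote(["one", "two"])               # ["'one'", "'two'"]
llnl.string.comma_and(["foo", "bar", "baz"])    # "foo, bar, and baz"
llnl.string.comma_or(["foo", "bar"])            # "foo or bar"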
|
||||
167
lib/spack/spack/test/llnl/url.py
Normal file
@@ -0,0 +1,167 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Tests for llnl.url functions"""
|
||||
import itertools
|
||||
|
||||
import pytest
|
||||
|
||||
import llnl.url
|
||||
|
||||
|
||||
@pytest.fixture(params=llnl.url.ALLOWED_ARCHIVE_TYPES)
|
||||
def archive_and_expected(request):
|
||||
archive_name = ".".join(["Foo", request.param])
|
||||
return archive_name, request.param
|
||||
|
||||
|
||||
def test_get_extension(archive_and_expected):
|
||||
"""Tests that we can predict correctly known extensions for simple cases."""
|
||||
archive, expected = archive_and_expected
|
||||
result = llnl.url.extension_from_path(archive)
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_get_bad_extension():
|
||||
"""Tests that a bad extension returns None"""
|
||||
result = llnl.url.extension_from_path("Foo.cxx")
|
||||
assert result is None
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"url,expected",
|
||||
[
|
||||
# No suffix
|
||||
("rgb-1.0.6", "rgb-1.0.6"),
|
||||
# Misleading prefix
|
||||
("jpegsrc.v9b", "jpegsrc.v9b"),
|
||||
("turbolinux702", "turbolinux702"),
|
||||
("converge_install_2.3.16", "converge_install_2.3.16"),
|
||||
# Download type - code, source
|
||||
("cistem-1.0.0-beta-source-code", "cistem-1.0.0-beta"),
|
||||
# Download type - src
|
||||
("apache-ant-1.9.7-src", "apache-ant-1.9.7"),
|
||||
("go1.7.4.src", "go1.7.4"),
|
||||
# Download type - source
|
||||
("bowtie2-2.2.5-source", "bowtie2-2.2.5"),
|
||||
("grib_api-1.17.0-Source", "grib_api-1.17.0"),
|
||||
# Download type - full
|
||||
("julia-0.4.3-full", "julia-0.4.3"),
|
||||
# Download type - bin
|
||||
("apache-maven-3.3.9-bin", "apache-maven-3.3.9"),
|
||||
# Download type - binary
|
||||
("Jmol-14.8.0-binary", "Jmol-14.8.0"),
|
||||
# Download type - gem
|
||||
("rubysl-date-2.0.9.gem", "rubysl-date-2.0.9"),
|
||||
# Download type - tar
|
||||
("gromacs-4.6.1-tar", "gromacs-4.6.1"),
|
||||
# Download type - sh
|
||||
("Miniconda2-4.3.11-Linux-x86_64.sh", "Miniconda2-4.3.11"),
|
||||
# Download version - release
|
||||
("v1.0.4-release", "v1.0.4"),
|
||||
# Download version - stable
|
||||
("libevent-2.0.21-stable", "libevent-2.0.21"),
|
||||
# Download version - final
|
||||
("2.6.7-final", "2.6.7"),
|
||||
# Download version - rel
|
||||
("v1.9.5.1rel", "v1.9.5.1"),
|
||||
# Download version - orig
|
||||
("dash_0.5.5.1.orig", "dash_0.5.5.1"),
|
||||
# Download version - plus
|
||||
("ncbi-blast-2.6.0+-src", "ncbi-blast-2.6.0"),
|
||||
# License
|
||||
("cppad-20170114.gpl", "cppad-20170114"),
|
||||
# Arch
|
||||
("pcraster-4.1.0_x86-64", "pcraster-4.1.0"),
|
||||
("dislin-11.0.linux.i586_64", "dislin-11.0"),
|
||||
("PAGIT.V1.01.64bit", "PAGIT.V1.01"),
|
||||
# OS - linux
|
||||
("astyle_2.04_linux", "astyle_2.04"),
|
||||
# OS - unix
|
||||
("install-tl-unx", "install-tl"),
|
||||
# OS - macos
|
||||
("astyle_1.23_macosx", "astyle_1.23"),
|
||||
("haxe-2.08-osx", "haxe-2.08"),
|
||||
# PyPI - wheel
|
||||
("entrypoints-0.2.2-py2.py3-none-any.whl", "entrypoints-0.2.2"),
|
||||
(
|
||||
"numpy-1.12.0-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel."
|
||||
"macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl",
|
||||
"numpy-1.12.0",
|
||||
),
|
||||
# PyPI - exe
|
||||
("PyYAML-3.12.win-amd64-py3.5.exe", "PyYAML-3.12"),
|
||||
# Combinations of multiple patterns - bin, release
|
||||
("rocketmq-all-4.5.2-bin-release", "rocketmq-all-4.5.2"),
|
||||
# Combinations of multiple patterns - all
|
||||
("p7zip_9.04_src_all", "p7zip_9.04"),
|
||||
# Combinations of multiple patterns - run
|
||||
("cuda_8.0.44_linux.run", "cuda_8.0.44"),
|
||||
# Combinations of multiple patterns - file
|
||||
("ack-2.14-single-file", "ack-2.14"),
|
||||
# Combinations of multiple patterns - jar
|
||||
("antlr-3.4-complete.jar", "antlr-3.4"),
|
||||
# Combinations of multiple patterns - oss
|
||||
("tbb44_20160128oss_src_0", "tbb44_20160128"),
|
||||
# Combinations of multiple patterns - darwin
|
||||
("ghc-7.0.4-x86_64-apple-darwin", "ghc-7.0.4"),
|
||||
("ghc-7.0.4-i386-apple-darwin", "ghc-7.0.4"),
|
||||
# Combinations of multiple patterns - centos
|
||||
("sratoolkit.2.8.2-1-centos_linux64", "sratoolkit.2.8.2-1"),
|
||||
# Combinations of multiple patterns - arch
|
||||
(
|
||||
"VizGlow_v2.2alpha17-R21November2016-Linux-x86_64-Install",
|
||||
"VizGlow_v2.2alpha17-R21November2016",
|
||||
),
|
||||
("jdk-8u92-linux-x64", "jdk-8u92"),
|
||||
("cuda_6.5.14_linux_64.run", "cuda_6.5.14"),
|
||||
("Mathematica_12.0.0_LINUX.sh", "Mathematica_12.0.0"),
|
||||
("trf407b.linux64", "trf407b"),
|
||||
# Combinations of multiple patterns - with
|
||||
("mafft-7.221-with-extensions-src", "mafft-7.221"),
|
||||
("spark-2.0.0-bin-without-hadoop", "spark-2.0.0"),
|
||||
("conduit-v0.3.0-src-with-blt", "conduit-v0.3.0"),
|
||||
# Combinations of multiple patterns - rock
|
||||
("bitlib-23-2.src.rock", "bitlib-23-2"),
|
||||
# Combinations of multiple patterns - public
|
||||
("dakota-6.3-public.src", "dakota-6.3"),
|
||||
# Combinations of multiple patterns - universal
|
||||
("synergy-1.3.6p2-MacOSX-Universal", "synergy-1.3.6p2"),
|
||||
# Combinations of multiple patterns - dynamic
|
||||
("snptest_v2.5.2_linux_x86_64_dynamic", "snptest_v2.5.2"),
|
||||
# Combinations of multiple patterns - other
|
||||
("alglib-3.11.0.cpp.gpl", "alglib-3.11.0"),
|
||||
("hpcviewer-2019.08-linux.gtk.x86_64", "hpcviewer-2019.08"),
|
||||
("apache-mxnet-src-1.3.0-incubating", "apache-mxnet-src-1.3.0"),
|
||||
],
|
||||
)
|
||||
def test_url_strip_version_suffixes(url, expected):
|
||||
stripped = llnl.url.strip_version_suffixes(url)
|
||||
assert stripped == expected
|
||||
|
||||
|
||||
def test_strip_compression_extension(archive_and_expected):
|
||||
archive, extension = archive_and_expected
|
||||
stripped = llnl.url.strip_compression_extension(archive)
|
||||
if extension == "zip":
|
||||
assert stripped == "Foo.zip"
|
||||
stripped = llnl.url.strip_compression_extension(archive, "zip")
|
||||
assert stripped == "Foo"
|
||||
elif (
|
||||
extension.lower() == "tar"
|
||||
or extension in llnl.url.CONTRACTION_MAP
|
||||
or extension
|
||||
in [
|
||||
".".join(ext)
|
||||
for ext in itertools.product(llnl.url.PREFIX_EXTENSIONS, llnl.url.EXTENSIONS)
|
||||
]
|
||||
):
|
||||
assert stripped == "Foo.tar" or stripped == "Foo.TAR"
|
||||
else:
|
||||
assert stripped == "Foo"
|
||||
|
||||
|
||||
def test_allowed_archive(archive_and_expected):
|
||||
archive, _ = archive_and_expected
|
||||
assert llnl.url.allowed_archive(archive)
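Similarly, an illustration of the llnl.url helpers covered by these tests; the results mirror the parametrized cases above, assuming "tar.gz" and "zip" are among llnl.url.ALLOWED_ARCHIVE_TYPES.

import llnl.url

llnl.url.extension_from_path("Foo.tar.gz")               # "tar.gz"
llnl.url.extension_from_path("Foo.cxx")                  # None
llnl.url.strip_version_suffixes("apache-ant-1.9.7-src")  # "apache-ant-1.9.7"
llnl.url.strip_compression_extension("Foo.tar.gz")       # "Foo.tar"
llnl.url.strip_compression_extension("Foo.zip", "zip")   # "Foo"
llnl.url.allowed_archive("Foo.zip")                      # True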
|
||||
Some files were not shown because too many files have changed in this diff.