Compare commits
hs/fix/for...develop-20 (188 commits)
Commit SHA1s, in order:

ceba90bde2, c0bdc37226, 8bad9fb804, 2df7cc0087, 40d40ccc52, afe7d6c39e, 113733d9fb, a8e2da5bb8,
97750189b6, bcd40835a0, 2c3f2c5733, 302d74394b, cf94dc7823, 4411ee3382, f790ce0f72, 64d53037db,
4aef50739b, a6e966f6f2, 1f428c4188, 731e48b1bd, 74ff9ad821, 16a4eff689, d0b0d8db50, 54f591cce5,
8677bb4d43, b66b80a96a, 10e21f399c, 56892f6140, 7eddc4b1f8, 3c7392bbcc, bb0517f4d9, c8994ee50f,
4b2f5638f2, 31312a379f, b0d5f272b0, 1c93fef160, 8bb5f4faf4, f76ab5f72f, 49c831edc3, c943c8c1d2,
e0e6f29584, 72bc3bb803, dba8fe2b96, 4487598d60, 495537cf56, 22c3b4099f, 13978d11a0, a22114b20b,
c10624390f, fb3d9de80b, fbb688af07, d34b709425, cb0b188cf6, 9a2b0aca66, 89a8ab3233, 5d87166c07,
15c989b3fe, b7f556e4b4, 36f32ceda3, 01d77ed915, 0049f8332d, 39c10c3116, 71d1901831, 41e0863b86,
a75d83f65c, f2f13964fb, 9b032018d6, 7d470c05be, 664fe9e9e6, 2745a519e2, 4348ee1c75, 8e39fb1e54,
09458312a3, 5fd0693df4, f58684429d, 409611a479, dd98cfb839, 5c91667dab, 9efd6f3f11, a8f5289801,
ac635aa777, 45dcddf9c3, f1660722e7, 04b44d841c, 7f30502297, 61b1586c51, 8579efcadf, 1c3e2b5425,
011ef0aaaf, 9642f3f49a, a6c9b55fad, 608ed967e1, 742eaa32b7, 763b35a2e0, 12280f864c, 253ba05732,
195b869e1c, 393961ffd6, 392a58e9be, 0e8e97a811, 43a0cbe7a2, bb35a98079, fa7e0e8230, 2c128751f5,
fb0493a366, 6d1b6e7087, 759518182c, 7ebabfcf0e, 6203ae31d2, 6b13017ded, 2c51b5853f, d0cbd056a8,
e1b579a8b4, b02dcf697d, 6e046b04c7, d196795437, 0d444fb4e7, 467e631260, f21de698f7, 59532986be,
36fd547b40, b5f9dea6d0, 5904834295, 2da8a1d1e3, d50eba40d9, 8d3a733b77, dfa86dce08, 3d82e5c573,
a77f903f4d, 92260b179d, 196c912b8a, 0f54995e53, 9d1332f1a1, 40a1da4a73, 82e091e2c2, c86112b0e8,
bb25c04845, d69d26d9ce, 06d660b9ba, 40b3196412, 7e893da4a6, 13aa8b6867, b0afb619de, 7a82c703c7,
0d3667175a, a754341f6c, a50c45f00c, 87e65e5377, 50fe96aaf6, 56495a8cd8, c054cb818d, bc28ec35d1,
e47a6059a7, 0d170b9ef3, 5174cb9180, 22ba366e85, 13558269b5, 615b7a6ddb, 0415b21d3d, 053c9d2846,
1e763629f6, 7568687f1e, 3b81c0e6b7, c764400338, 4e8a6eec1a, ebc9f03dda, 8ac0bd2825, cc9e0137df,
b8e448afa0, 209d670bf3, c6202842ed, b2a75db030, 0ec00a9c9a, 5e3020ad02, a0d0e6321f, 0afac0beaa,
6155be8548, 611cb98b02, ea5742853f, 25a3e8ba59, 7fbb3df6b0, a728db95de, 7bc4069b9e, 51fc195d14,
27a0593104, f95e27a159, effe433c96, 21988fbb18
.github/workflows/build-containers.yml (vendored): 18 changes
@@ -40,17 +40,17 @@ jobs:
         # 1: Platforms to build for
         # 2: Base image (e.g. ubuntu:22.04)
         dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
+                     [centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
-                     [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
+                     [leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
-                     [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
+                     [ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
-                     [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
+                     [ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
-                     [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
+                     [ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
-                     [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
+                     [almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
-                     [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
+                     [almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
                      [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                      [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-                     [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
+                     [fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
-                     [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
+                     [fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
.github/workflows/ci.yaml (vendored): 4 changes
@@ -81,6 +81,10 @@ jobs:
     with:
       with_coverage: ${{ needs.changes.outputs.core }}

+  import-check:
+    needs: [ changes ]
+    uses: ./.github/workflows/import-check.yaml
+
   all-prechecks:
     needs: [ prechecks ]
     if: ${{ always() }}
.github/workflows/coverage.yml (vendored): 1 change
@@ -33,3 +33,4 @@ jobs:
       with:
         verbose: true
         fail_ci_if_error: false
+        token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/import-check.yaml (vendored, new file): 49 additions
@@ -0,0 +1,49 @@
+name: import-check
+
+on:
+  workflow_call:
+
+jobs:
+  # Check we don't make the situation with circular imports worse
+  import-check:
+    runs-on: ubuntu-latest
+    steps:
+    - uses: julia-actions/setup-julia@v2
+      with:
+        version: '1.10'
+    - uses: julia-actions/cache@v2
+
+    # PR: use the base of the PR as the old commit
+    - name: Checkout PR base commit
+      if: github.event_name == 'pull_request'
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        ref: ${{ github.event.pull_request.base.sha }}
+        path: old
+    # not a PR: use the previous commit as the old commit
+    - name: Checkout previous commit
+      if: github.event_name != 'pull_request'
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        fetch-depth: 2
+        path: old
+    - name: Checkout previous commit
+      if: github.event_name != 'pull_request'
+      run: git -C old reset --hard HEAD^
+
+    - name: Checkout new commit
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        path: new
+    - name: Install circular import checker
+      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      with:
+        repository: haampie/circular-import-fighter
+        ref: e38bcd0aa46368e30648b61b7f0d8c1ca68aadff
+        path: circular-import-fighter
+    - name: Install dependencies
+      working-directory: circular-import-fighter
+      run: make -j dependencies
+    - name: Circular import check
+      working-directory: circular-import-fighter
+      run: make -j compare "SPACK_ROOT=../old ../new"
.github/workflows/valid-style.yml (vendored): 60 changes
@@ -86,66 +86,6 @@ jobs:
           spack -d bootstrap now --dev
           spack -d style -t black
           spack unit-test -V
-  # Check we don't make the situation with circular imports worse
-  import-check:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: julia-actions/setup-julia@v2
-      with:
-        version: '1.10'
-    - uses: julia-actions/cache@v2
-
-    # PR: use the base of the PR as the old commit
-    - name: Checkout PR base commit
-      if: github.event_name == 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        ref: ${{ github.event.pull_request.base.sha }}
-        path: old
-    # not a PR: use the previous commit as the old commit
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 2
-        path: old
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      run: git -C old reset --hard HEAD^
-
-    - name: Checkout new commit
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        path: new
-    - name: Install circular import checker
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        repository: haampie/circular-import-fighter
-        ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
-        path: circular-import-fighter
-    - name: Install dependencies
-      working-directory: circular-import-fighter
-      run: make -j dependencies
-    - name: Problematic imports before
-      working-directory: circular-import-fighter
-      run: make SPACK_ROOT=../old SUFFIX=.old
-    - name: Problematic imports after
-      working-directory: circular-import-fighter
-      run: make SPACK_ROOT=../new SUFFIX=.new
-    - name: Compare import cycles
-      working-directory: circular-import-fighter
-      run: |
-        edges_before="$(head -n1 solution.old)"
-        edges_after="$(head -n1 solution.new)"
-        if [ "$edges_after" -gt "$edges_before" ]; then
-          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
-          printf 'previously this was %s\033[0m\n' "$edges_before"
-          printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
-          printf '\033[1;97m"Problematic imports after"\033[0m.\n'
-          exit 1
-        else
-          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
-        fi
-
   # Further style checks from pylint
   pylint:
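The job deleted here reappears as the reusable workflow above; the final comparison collapses from two `make` runs plus the shell arithmetic in the removed `Compare import cycles` step into a single `make -j compare`. A minimal Python sketch of that comparison, assuming, as the deleted shell does, that each solution file carries the edge count on its first line (file names from the workflow; everything else is illustrative):

    from pathlib import Path

    def import_check(old: Path, new: Path) -> bool:
        """Fail when the new import graph needs more deletions than the old."""
        edges_before = int(old.read_text().splitlines()[0])
        edges_after = int(new.read_text().splitlines()[0])
        if edges_after > edges_before:
            print(f"Import check failed: {edges_after} imports need to be deleted, "
                  f"previously this was {edges_before}")
            return False
        print(f"Import check passed: {edges_after} <= {edges_before}")
        return True

    # usage: import_check(Path("solution.old"), Path("solution.new"))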
@@ -25,7 +25,6 @@ exit 1
 # The code above runs this file with our preferred python interpreter.

 import os
-import os.path
 import sys

 min_python3 = (3, 6)
@@ -36,7 +36,7 @@ packages:
     go-or-gccgo-bootstrap: [go-bootstrap, gcc]
     iconv: [libiconv]
     ipp: [intel-oneapi-ipp]
-    java: [openjdk, jdk, ibm-java]
+    java: [openjdk, jdk]
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
@@ -73,15 +73,27 @@ packages:
   permissions:
     read: world
     write: user
+  cray-fftw:
+    buildable: false
+  cray-libsci:
+    buildable: false
   cray-mpich:
     buildable: false
   cray-mvapich2:
     buildable: false
+  cray-pmi:
+    buildable: false
   egl:
     buildable: false
+  essl:
+    buildable: false
   fujitsu-mpi:
     buildable: false
+  fujitsu-ssl2:
+    buildable: false
   hpcx-mpi:
     buildable: false
+  mpt:
+    buildable: false
   spectrum-mpi:
     buildable: false
@@ -1,5 +1,5 @@
 config:
   locks: false
   build_stage::
-    - '$spack/.staging'
+    - '$user_cache_path/stage'
   stage_name: '{name}-{version}-{hash:7}'
@@ -170,7 +170,7 @@ bootstrapping.
 To register the mirror on the platform where it's supposed to be used run the following command(s):
 % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
 % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
+% spack buildcache update-index /opt/bootstrap/bootstrap_cache

 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
 "environment variables" section lists environment variables that the
 build system uses to pass flags to the compiler and linker.

-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Addings flags to configure
-^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Adding flags to configure
+^^^^^^^^^^^^^^^^^^^^^^^^^

 For most of the flags you encounter, you will want a variant to
 optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:

     def configure_args(self):
         args = []
+        ...
         if self.spec.satisfies("+mpi"):
             args.append("--enable-mpi")
         else:
@@ -299,7 +299,10 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
 .. code-block:: python

     def configure_args(self):
-        return [self.enable_or_disable("mpi")]
+        args = []
+        ...
+        args.extend(self.enable_or_disable("mpi"))
+        return args

 Note that we are explicitly disabling MPI support if it is not
@@ -344,7 +347,14 @@ typically used to enable or disable some feature within the package.
         default=False,
         description="Memchecker support for debugging [degrades performance]"
     )
-    config_args.extend(self.enable_or_disable("memchecker"))
+    ...
+
+    def configure_args(self):
+        args = []
+        ...
+        args.extend(self.enable_or_disable("memchecker"))
+
+        return args

 In this example, specifying the variant ``+memchecker`` will generate
 the following configuration options:
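Assembled from the corrected snippets above, a complete `configure_args` could look like the following; a minimal sketch assuming an `AutotoolsPackage` subclass with the `mpi` and `memchecker` variants from the examples (the package name is hypothetical):

    from spack.package import *  # conventional wildcard import in Spack package files

    class Mypkg(AutotoolsPackage):  # hypothetical package
        variant("mpi", default=True, description="Enable MPI support")
        variant(
            "memchecker",
            default=False,
            description="Memchecker support for debugging [degrades performance]",
        )

        def configure_args(self):
            args = []
            # +mpi yields --enable-mpi, ~mpi yields --disable-mpi; likewise memchecker
            args.extend(self.enable_or_disable("mpi"))
            args.extend(self.enable_or_disable("memchecker"))
            return args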
@@ -361,7 +361,6 @@ and the tags associated with the class of runners to build on.
 * ``.linux_neoverse_n1``
 * ``.linux_neoverse_v1``
 * ``.linux_neoverse_v2``
-* ``.linux_power``
 * ``.linux_skylake``
 * ``.linux_x86_64``
 * ``.linux_x86_64_v4``
@@ -112,6 +112,19 @@ the original but may concretize differently in the presence of different
 explicit or default configuration settings (e.g., a different version of
 Spack or for a different user account).

+Environments created from a manifest will copy any included configs
+from relative paths inside the environment. Relative paths from
+outside the environment will cause errors, and absolute paths will be
+kept absolute. For example, if ``spack.yaml`` includes:
+
+.. code-block:: yaml
+
+   spack:
+     include: [./config.yaml]
+
+then the created environment will have its own copy of the file
+``config.yaml`` copied from the location in the original environment.
+
 Create an environment from a ``spack.lock`` file using:

 .. code-block:: console
@@ -160,7 +173,7 @@ accepts. If an environment already exists then spack will simply activate it
 and ignore the create-specific flags.

 .. code-block:: console

    $ spack env activate --create -p myenv
    # ...
    # [creates if myenv does not exist yet]
@@ -424,8 +437,8 @@ Developing Packages in a Spack Environment

 The ``spack develop`` command allows one to develop Spack packages in
 an environment. It requires a spec containing a concrete version, and
 will configure Spack to install the package from local source.
 If a version is not provided from the command line interface then spack
 will automatically pick the highest version the package has defined.
 This means any infinity versions (``develop``, ``main``, ``stable``) will be
 preferred in this selection process.
@@ -435,9 +448,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
 any time the environment is installed if the package's local source
 code has been modified. Spack's native implementation to check for modifications
 is to check if ``mtime`` is newer than the installation.
 A custom check can be created by overriding the ``detect_dev_src_change`` method
 in your package class. This is particularly useful for projects using custom spack repo's
 to drive development and want to optimize performance.

 Spack ensures that all instances of a
 developed package in the environment are concretized to match the
@@ -453,7 +466,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
 and eventually committed and pushed to the upstream git repo.

 If the package being developed supports out-of-source builds then users can use the
 ``--build_directory`` flag to control the location and name of the build directory.
 This is a shortcut to set the ``package_attributes:build_directory`` in the
 ``packages`` configuration (see :ref:`assigning-package-attributes`).
 The supplied location will become the build-directory for that package in all future builds.
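For the ``detect_dev_src_change`` override mentioned above, a rough sketch; the no-argument signature and the stamp-file heuristic are assumptions for illustration, not Spack's implementation (the documented default compares ``mtime`` against the installation):

    import os

    class Mypkg(Package):  # hypothetical package in a custom repo
        def detect_dev_src_change(self) -> bool:
            # Assumption: consult only a build stamp the project maintains,
            # instead of walking every file's mtime.
            stamp = os.path.join(self.stage.source_path, ".build-stamp")
            if not os.path.exists(stamp):
                return True  # never built: treat as changed
            return os.stat(stamp).st_mtime > os.stat(self.prefix).st_mtime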
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """URL primitives that just require Python standard library."""
 import itertools
-import os.path
+import os
 import re
 from typing import Optional, Set, Tuple
 from urllib.parse import urlsplit, urlunsplit
@@ -668,7 +668,7 @@ def copy(src, dest, _permissions=False):
     _permissions (bool): for internal use only

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -679,7 +679,7 @@ def copy(src, dest, _permissions=False):

     files = glob.glob(src)
     if not files:
-        raise IOError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src))
     if len(files) > 1 and not os.path.isdir(dest):
         raise ValueError(
             "'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
@@ -710,7 +710,7 @@ def install(src, dest):
     dest (str): the destination file or directory

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -748,7 +748,7 @@ def copy_tree(
     _permissions (bool): for internal use only

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     if _permissions:
@@ -762,7 +762,7 @@ def copy_tree(

     files = glob.glob(src)
     if not files:
-        raise IOError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src))

 # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
 # all symlinks to this list while traversing the tree, then when finished, make all
@@ -843,7 +843,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
     ignore (typing.Callable): function indicating which files to ignore

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
@@ -1472,7 +1472,7 @@ def set_executable(path):
 def recursive_mtime_greater_than(path: str, time: float) -> bool:
     """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
     # use bfs order to increase likelihood of early return
-    queue: Deque[str] = collections.deque()
+    queue: Deque[str] = collections.deque([path])

     if os.stat(path).st_mtime > time:
         return True
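The final hunk above is a genuine bug fix rather than renaming: the BFS queue was constructed empty, so the loop that walks the tree never ran. A condensed sketch of the corrected traversal (signature and seeding from the diff; the loop body is an abridged reconstruction, not Spack's exact code):

    import collections
    import os
    from typing import Deque

    def recursive_mtime_greater_than(path: str, time: float) -> bool:
        """True if any file or dir under `path` has mtime greater than `time`."""
        # bfs order increases the likelihood of an early return; the queue
        # must be seeded with the root or nothing is ever visited
        queue: Deque[str] = collections.deque([path])
        if os.stat(path).st_mtime > time:
            return True
        while queue:
            with os.scandir(queue.popleft()) as entries:
                for entry in entries:
                    if entry.stat(follow_symlinks=False).st_mtime > time:
                        return True
                    if entry.is_dir(follow_symlinks=False):
                        queue.append(entry.path)
        return False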
@@ -308,7 +308,7 @@ class LinkTree:

     def __init__(self, source_root):
         if not os.path.exists(source_root):
-            raise IOError("No such file or directory: '%s'", source_root)
+            raise OSError("No such file or directory: '%s'", source_root)

         self._root = source_root

@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:

             return True

-        except IOError as e:
+        except OSError as e:
             # EAGAIN and EACCES == locked by another process (so try again)
             if e.errno not in (errno.EAGAIN, errno.EACCES):
                 raise
@@ -918,7 +918,7 @@ def _writer_daemon(
             try:
                 if stdin_file.read(1) == "v":
                     echo = not echo
-            except IOError as e:
+            except OSError as e:
                 # If SIGTTIN is ignored, the system gives EIO
                 # to let the caller know the read failed b/c it
                 # was in the bg. Ignore that too.
@@ -1013,7 +1013,7 @@ def wrapped(*args, **kwargs):
         while True:
             try:
                 return function(*args, **kwargs)
-            except IOError as e:
+            except OSError as e:
                 if e.errno == errno.EINTR:
                     continue
                 raise
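The `IOError` to `OSError` churn across these files is mechanical: `IOError` has been a plain alias of `OSError` since Python 3.3, and `urllib.error.URLError` is an `OSError` subclass, which is why the broader `except OSError` clauses later in this diff can replace longer exception tuples. A quick demonstration:

    import errno
    import urllib.error

    # IOError is just another name for OSError on Python 3
    assert IOError is OSError
    # URLError (and HTTPError) derive from OSError, so `except OSError`
    # also covers network failures raised by urllib
    assert issubclass(urllib.error.URLError, OSError)

    try:
        open("/nonexistent/path")
    except OSError as e:
        assert e.errno == errno.ENOENT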
@@ -10,7 +10,7 @@
 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.24.0.dev0"
+__version__ = "1.0.0.dev0"
 spack_version = __version__
@@ -5,6 +5,7 @@
 import codecs
 import collections
 import concurrent.futures
+import contextlib
 import copy
 import hashlib
 import io
@@ -801,7 +802,7 @@ def url_read_method(url):
     try:
         _, _, spec_file = web_util.read_from_url(url)
         contents = codecs.getreader("utf-8")(spec_file).read()
-    except web_util.SpackWebError as e:
+    except (web_util.SpackWebError, OSError) as e:
         tty.error(f"Error reading specfile: {url}: {e}")
     return contents
@@ -2009,7 +2010,7 @@ def fetch_url_to_mirror(url):

     # Download the config = spec.json and the relevant tarball
     try:
-        manifest = json.loads(response.read())
+        manifest = json.load(response)
         spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
         tarball_digest = spack.oci.image.Digest.from_string(
             manifest["layers"][-1]["digest"]
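`json.load(response)` parses straight from the HTTP response object (anything with a `.read()` method works), avoiding the intermediate bytes object that `json.loads(response.read())` builds. For example (the URL is hypothetical):

    import json
    import urllib.request

    with urllib.request.urlopen("https://example.com/manifest.json") as response:
        manifest = json.load(response)  # read and parse in one step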
@@ -2270,6 +2271,24 @@ def relocate_package(spec: spack.spec.Spec) -> None:
         with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
             codesign("-fs-", tmp_binary)

+    install_manifest = os.path.join(
+        spec.prefix,
+        spack.store.STORE.layout.metadata_dir,
+        spack.store.STORE.layout.manifest_file_name,
+    )
+    if not os.path.exists(install_manifest):
+        spec_id = spec.format("{name}/{hash:7}")
+        tty.warn("No manifest file in tarball for spec %s" % spec_id)
+
+    # overwrite old metadata with new
+    if spec.spliced:
+        # rewrite spec on disk
+        spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
+
+    # de-cache the install manifest
+    with contextlib.suppress(FileNotFoundError):
+        os.unlink(install_manifest)
+

 def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
     stagepath = os.path.dirname(filename)
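The new de-caching step leans on `contextlib.suppress` to make the unlink idempotent; it is equivalent to the familiar try/except form (the path below is a placeholder):

    import contextlib
    import os

    # these two are equivalent; suppress() reads better for one statement
    with contextlib.suppress(FileNotFoundError):
        os.unlink("install_manifest.json")

    try:
        os.unlink("install_manifest.json")
    except FileNotFoundError:
        pass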
@@ -2436,15 +2455,6 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
         except Exception as e:
             shutil.rmtree(spec.prefix, ignore_errors=True)
             raise e
-        else:
-            manifest_file = os.path.join(
-                spec.prefix,
-                spack.store.STORE.layout.metadata_dir,
-                spack.store.STORE.layout.manifest_file_name,
-            )
-            if not os.path.exists(manifest_file):
-                spec_id = spec.format("{name}/{hash:7}")
-                tty.warn("No manifest file in tarball for spec %s" % spec_id)
         finally:
             if tmpdir:
                 shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2549,10 +2559,6 @@ def install_root_node(
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, force)
     spec.package.windows_establish_runtime_linkage()
-    if spec.spliced:  # overwrite old metadata with new
-        spack.store.STORE.layout.write_spec(
-            spec, spack.store.STORE.layout.spec_file_path(spec)
-        )
     spack.hooks.post_install(spec, False)
     spack.store.STORE.db.add(spec, allow_missing=allow_missing)
@@ -2590,11 +2596,14 @@ def try_direct_fetch(spec, mirrors=None):
     )
     try:
         _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
+        specfile_contents = codecs.getreader("utf-8")(fs).read()
         specfile_is_signed = True
-    except web_util.SpackWebError as e1:
+    except (web_util.SpackWebError, OSError) as e1:
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-        except web_util.SpackWebError as e2:
+            specfile_contents = codecs.getreader("utf-8")(fs).read()
+            specfile_is_signed = False
+        except (web_util.SpackWebError, OSError) as e2:
             tty.debug(
                 f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                 e1,
@@ -2604,7 +2613,6 @@ def try_direct_fetch(spec, mirrors=None):
                 f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
             )
             continue
-        specfile_contents = codecs.getreader("utf-8")(fs).read()

     # read the spec from the build cache file. All specs in build caches
     # are concrete (as they are built) so we need to mark this spec
@@ -2698,8 +2706,9 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

     try:
         _, _, json_file = web_util.read_from_url(keys_index)
-        json_index = sjson.load(codecs.getreader("utf-8")(json_file))
+        json_index = sjson.load(json_file)
-    except web_util.SpackWebError as url_err:
+    except (web_util.SpackWebError, OSError, ValueError) as url_err:
+        # TODO: avoid repeated request
         if web_util.url_exists(keys_index):
             tty.error(
                 f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -2949,11 +2958,11 @@ def get_remote_hash(self):
     url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
     try:
         response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-    except (TimeoutError, urllib.error.URLError):
+        remote_hash = response.read(64)
+    except OSError:
         return None

     # Validate the hash
-    remote_hash = response.read(64)
     if not re.match(rb"[a-f\d]{64}$", remote_hash):
         return None
     return remote_hash.decode("utf-8")
@@ -2971,13 +2980,13 @@ def conditional_fetch(self) -> FetchIndexResult:

     try:
         response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-    except (TimeoutError, urllib.error.URLError) as e:
-        raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
+    except OSError as e:
+        raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e

     try:
         result = codecs.getreader("utf-8")(response).read()
-    except ValueError as e:
-        raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
+    except (ValueError, OSError) as e:
+        raise FetchIndexError(f"Remote index {url_index} is invalid") from e

     computed_hash = compute_hash(result)
@@ -3021,12 +3030,12 @@ def conditional_fetch(self) -> FetchIndexResult:
             # Not modified; that means fresh.
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
         raise FetchIndexError(f"Could not fetch index {url}", e) from e
-    except (TimeoutError, urllib.error.URLError) as e:
+    except OSError as e:  # URLError, socket.timeout, etc.
         raise FetchIndexError(f"Could not fetch index {url}", e) from e

     try:
         result = codecs.getreader("utf-8")(response).read()
-    except ValueError as e:
+    except (ValueError, OSError) as e:
         raise FetchIndexError(f"Remote index {url} is invalid", e) from e

     headers = response.headers
@@ -3058,11 +3067,11 @@ def conditional_fetch(self) -> FetchIndexResult:
                 headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
             )
         )
-    except (TimeoutError, urllib.error.URLError) as e:
+    except OSError as e:
         raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e

     try:
-        manifest = json.loads(response.read())
+        manifest = json.load(response)
     except Exception as e:
         raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

@@ -3077,14 +3086,16 @@ def conditional_fetch(self) -> FetchIndexResult:
         return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

     # Otherwise fetch the blob / index.json
-    response = self.urlopen(
-        urllib.request.Request(
-            url=self.ref.blob_url(index_digest),
-            headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
-        )
-    )
-    result = codecs.getreader("utf-8")(response).read()
+    try:
+        response = self.urlopen(
+            urllib.request.Request(
+                url=self.ref.blob_url(index_digest),
+                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+            )
+        )
+        result = codecs.getreader("utf-8")(response).read()
+    except (OSError, ValueError) as e:
+        raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

     # Make sure the blob we download has the advertised hash
     if compute_hash(result) != index_digest.digest:
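The tail of this hunk fetches a blob and then checks it against the digest advertised by the manifest. The same pattern, sketched without Spack's OCI classes; `compute_hash` here is a stand-in for the helper the diff calls, assumed to be a sha256 hex digest:

    import hashlib
    import urllib.request

    def compute_hash(data: str) -> str:
        # stand-in for the helper used in the diff
        return hashlib.sha256(data.encode("utf-8")).hexdigest()

    def fetch_verified_blob(blob_url: str, expected_digest: str) -> str:
        try:
            with urllib.request.urlopen(blob_url) as response:
                result = response.read().decode("utf-8")
        except (OSError, ValueError) as e:
            raise RuntimeError(f"could not fetch blob from {blob_url}") from e
        if compute_hash(result) != expected_digest:
            raise RuntimeError("blob does not match its advertised digest")
        return result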
@@ -5,7 +5,7 @@
 import fnmatch
 import glob
 import importlib
-import os.path
+import os
 import re
 import sys
 import sysconfig
@@ -27,9 +27,9 @@
 class ClingoBootstrapConcretizer:
     def __init__(self, configuration):
         self.host_platform = spack.platforms.host()
-        self.host_os = self.host_platform.operating_system("frontend")
+        self.host_os = self.host_platform.default_operating_system()
         self.host_target = archspec.cpu.host().family
-        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
+        self.host_architecture = spack.spec.ArchSpec.default_arch()
         self.host_architecture.target = str(self.host_target)
         self.host_compiler = self._valid_compiler_or_raise()
         self.host_python = self.python_external_spec()
@@ -4,7 +4,7 @@
 """Manage configuration swapping for bootstrapping purposes"""

 import contextlib
-import os.path
+import os
 import sys
 from typing import Any, Dict, Generator, MutableSequence, Sequence

@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:


 def _add_compilers_if_missing() -> None:
-    arch = spack.spec.ArchSpec.frontend_arch()
+    arch = spack.spec.ArchSpec.default_arch()
     if not spack.compilers.compilers_for_arch(arch):
         spack.compilers.find_compilers()
@@ -25,7 +25,6 @@
 import functools
 import json
 import os
-import os.path
 import sys
 import uuid
 from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -46,6 +45,7 @@
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
+import spack.util.url
 import spack.version
 from spack.installer import PackageInstaller
@@ -97,8 +97,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
     self.name = conf["name"]
     self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

-    # Promote (relative) paths to file urls
-    self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
+    # Check for relative paths, and turn them into absolute paths
+    # root is the metadata_dir
+    maybe_url = conf["info"]["url"]
+    if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
+        maybe_url = os.path.join(self.metadata_dir, maybe_url)
+    self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url

 @property
 def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import stat
 import subprocess
 from typing import Callable, List, Optional, Set, Tuple, Union
@@ -11,6 +11,7 @@
 from typing import Any, List, Optional, Tuple

 import llnl.util.filesystem as fs
+from llnl.util import tty
 from llnl.util.lang import stable_partition

 import spack.builder
@@ -458,11 +459,23 @@ def cmake(
 ) -> None:
     """Runs ``cmake`` in the build directory"""

-    # skip cmake phase if it is an incremental develop build
-    if spec.is_develop and os.path.isfile(
-        os.path.join(self.build_directory, "CMakeCache.txt")
-    ):
-        return
+    if spec.is_develop:
+        # skip cmake phase if it is an incremental develop build
+
+        # Determine the files that will re-run CMake that are generated from a successful
+        # configure step based on state
+        primary_generator = _extract_primary_generator(self.generator)
+        configure_artifact = "Makefile"
+        if primary_generator == "Ninja":
+            configure_artifact = "ninja.build"
+
+        if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
+            tty.msg(
+                "Incremental build criteria satisfied."
+                "Skipping CMake configure step. To force configuration run"
+                f" `spack clean {pkg.name}`"
+            )
+            return

     options = self.std_cmake_args
     options += self.cmake_args()
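The rewritten skip logic keys on the artifact a successful configure leaves behind for each primary generator rather than on `CMakeCache.txt`. Distilled into a standalone sketch (note the diff spells the Ninja artifact `ninja.build`, though Ninja conventionally writes `build.ninja`):

    import os

    def configure_artifact_for(primary_generator: str) -> str:
        # "Unix Makefiles" leaves a Makefile; Ninja leaves its build file,
        # spelled "ninja.build" in the diff above
        return "ninja.build" if primary_generator == "Ninja" else "Makefile"

    def should_skip_cmake(build_dir: str, primary_generator: str, is_develop: bool) -> bool:
        artifact = configure_artifact_for(primary_generator)
        return is_develop and os.path.isfile(os.path.join(build_dir, artifact))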
@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
     """Auxiliary class which contains CUDA variant, dependencies and conflicts
     and is meant to unify and facilitate its usage.

-    Maintainers: ax3l, Rombur, davidbeckingsale
+    Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
     """

     # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,6 +47,12 @@ class CudaPackage(PackageBase):
         "89",
         "90",
         "90a",
+        "100",
+        "100a",
+        "101",
+        "101a",
+        "120",
+        "120a",
     )

     # FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -99,39 +105,56 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     # CUDA version vs Architecture
     # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
     # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
+    # Tesla support:
     depends_on("cuda@:6.0", when="cuda_arch=10")
     depends_on("cuda@:6.5", when="cuda_arch=11")
     depends_on("cuda@2.1:6.5", when="cuda_arch=12")
     depends_on("cuda@2.1:6.5", when="cuda_arch=13")
+
+    # Fermi support:
     depends_on("cuda@3.0:8.0", when="cuda_arch=20")
     depends_on("cuda@3.2:8.0", when="cuda_arch=21")
+
+    # Kepler support:
     depends_on("cuda@5.0:10.2", when="cuda_arch=30")
     depends_on("cuda@5.0:10.2", when="cuda_arch=32")
     depends_on("cuda@5.0:11.8", when="cuda_arch=35")
     depends_on("cuda@6.5:11.8", when="cuda_arch=37")
+
+    # Maxwell support:
     depends_on("cuda@6.0:", when="cuda_arch=50")
     depends_on("cuda@6.5:", when="cuda_arch=52")
     depends_on("cuda@6.5:", when="cuda_arch=53")
+
+    # Pascal support:
     depends_on("cuda@8.0:", when="cuda_arch=60")
     depends_on("cuda@8.0:", when="cuda_arch=61")
     depends_on("cuda@8.0:", when="cuda_arch=62")
+
+    # Volta support:
     depends_on("cuda@9.0:", when="cuda_arch=70")
+    # Turing support:
     depends_on("cuda@9.0:", when="cuda_arch=72")
     depends_on("cuda@10.0:", when="cuda_arch=75")
+
+    # Ampere support:
     depends_on("cuda@11.0:", when="cuda_arch=80")
     depends_on("cuda@11.1:", when="cuda_arch=86")
     depends_on("cuda@11.4:", when="cuda_arch=87")
+    # Ada support:
     depends_on("cuda@11.8:", when="cuda_arch=89")
+
+    # Hopper support:
     depends_on("cuda@12.0:", when="cuda_arch=90")
     depends_on("cuda@12.0:", when="cuda_arch=90a")
+
+    # Blackwell support:
+    depends_on("cuda@12.8:", when="cuda_arch=100")
+    depends_on("cuda@12.8:", when="cuda_arch=100a")
+    depends_on("cuda@12.8:", when="cuda_arch=101")
+    depends_on("cuda@12.8:", when="cuda_arch=101a")
+    depends_on("cuda@12.8:", when="cuda_arch=120")
+    depends_on("cuda@12.8:", when="cuda_arch=120a")
     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
     # (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -163,6 +186,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
     conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
+    conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -171,6 +195,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
     conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
     conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
+    conflicts("%clang@20:", when="+cuda ^cuda@:12.8")

     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
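A sketch of how a package consumes these constraints, assuming a hypothetical package that mixes in `CudaPackage` as shown in this diff; with `+cuda cuda_arch=120`, the new directives above pull in `cuda@12.8:` automatically:

    from spack.package import *  # conventional wildcard import in Spack package files

    class MySolver(CMakePackage, CudaPackage):  # hypothetical package
        def cmake_args(self):
            args = []
            if self.spec.satisfies("+cuda"):
                archs = self.spec.variants["cuda_arch"].value
                args.append(f"-DCMAKE_CUDA_ARCHITECTURES={';'.join(archs)}")
            return args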
```diff
@@ -9,7 +9,7 @@
 import spack.phase_callbacks
 import spack.spec
 import spack.util.prefix
-from spack.directives import build_system, extends
+from spack.directives import build_system, depends_on
 from spack.multimethod import when

 from ._checks import BuilderWithDefaults, execute_install_time_tests
@@ -28,9 +28,7 @@ class GoPackage(spack.package_base.PackageBase):
     build_system("go")

     with when("build_system=go"):
-        # TODO: this seems like it should be depends_on, see
-        # setup_dependent_build_environment in go for why I kept it like this
-        extends("go@1.14:", type="build")
+        depends_on("go", type="build")


 @spack.builder.builder("go")
@@ -73,6 +71,7 @@ class GoBuilder(BuilderWithDefaults):
     def setup_build_environment(self, env):
         env.set("GO111MODULE", "on")
         env.set("GOTOOLCHAIN", "local")
+        env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))

     @property
     def build_directory(self):
@@ -83,7 +82,15 @@ def build_directory(self):
     def build_args(self):
         """Arguments for ``go build``."""
         # Pass ldflags -s = --strip-all and -w = --no-warnings by default
-        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
+        return [
+            "-p",
+            str(self.pkg.module.make_jobs),
+            "-modcacherw",
+            "-ldflags",
+            "-s -w",
+            "-o",
+            f"{self.pkg.name}",
+        ]

     @property
     def check_args(self):
```
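With the new `build_args`, a `GoPackage` build effectively runs `go build -p <make_jobs> -modcacherw -ldflags "-s -w" -o <name>` inside a `GOPATH` isolated under the stage directory. A rough sketch of a package using this build system (all metadata below is a made-up placeholder):

```python
# Hypothetical package.py for a Go tool built with the GoPackage build
# system shown above; name, URL, and checksum are placeholders.
from spack.package import *


class DemoCli(GoPackage):
    """Hypothetical Go command-line tool, for illustration only."""

    homepage = "https://example.com/demo-cli"
    url = "https://example.com/demo-cli-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    # GoPackage itself now declares depends_on("go", type="build"), so a
    # package only adds extra build tools (e.g. git for module vendoring).
```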
```diff
@@ -264,16 +264,17 @@ def update_external_dependencies(self, extendee_spec=None):
         # Ensure architecture information is present
         if not python.architecture:
             host_platform = spack.platforms.host()
-            host_os = host_platform.operating_system("default_os")
-            host_target = host_platform.target("default_target")
+            host_os = host_platform.default_operating_system()
+            host_target = host_platform.default_target()
             python.architecture = spack.spec.ArchSpec(
                 (str(host_platform), str(host_os), str(host_target))
             )
         else:
             if not python.architecture.platform:
                 python.architecture.platform = spack.platforms.host()
+            platform = spack.platforms.by_name(python.architecture.platform)
             if not python.architecture.os:
-                python.architecture.os = "default_os"
+                python.architecture.os = platform.default_operating_system()
             if not python.architecture.target:
                 python.architecture.target = archspec.cpu.host().family.name
```
```diff
@@ -43,6 +43,7 @@ class SIPPackage(spack.package_base.PackageBase):
     with when("build_system=sip"):
         extends("python", type=("build", "link", "run"))
         depends_on("py-sip", type="build")
+        depends_on("gmake", type="build")

     @property
     def import_modules(self):
```
```diff
@@ -14,8 +14,7 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
-from urllib.error import HTTPError, URLError
-from urllib.request import HTTPHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -63,6 +62,8 @@

 PushResult = namedtuple("PushResult", "success url")

+urlopen = web_util.urlopen  # alias for mocking in tests
+

 def get_change_revisions():
     """If this is a git repo get the revisions to use when checking
@@ -472,12 +473,9 @@ def generate_pipeline(env: ev.Environment, args) -> None:
     # Use all unpruned specs to populate the build group for this set
     cdash_config = cfg.get("cdash")
     if options.cdash_handler and options.cdash_handler.auth_token:
-        try:
-            options.cdash_handler.populate_buildgroup(
-                [options.cdash_handler.build_name(s) for s in pipeline_specs]
-            )
-        except (SpackError, HTTPError, URLError, TimeoutError) as err:
-            tty.warn(f"Problem populating buildgroup: {err}")
+        options.cdash_handler.populate_buildgroup(
+            [options.cdash_handler.build_name(s) for s in pipeline_specs]
+        )
     elif cdash_config:
         # warn only if there was actually a CDash configuration.
         tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -631,29 +629,19 @@ def download_and_extract_artifacts(url, work_dir):
     if token:
         headers["PRIVATE-TOKEN"] = token

-    opener = build_opener(HTTPHandler)
-
-    request = Request(url, headers=headers)
-    request.get_method = lambda: "GET"
-
-    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-    response_code = response.getcode()
-
-    if response_code != 200:
-        msg = f"Error response code ({response_code}) in reproduce_ci_job"
-        raise SpackError(msg)
+    request = Request(url, headers=headers, method="GET")

     artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
+    os.makedirs(work_dir, exist_ok=True)

-    if not os.path.exists(work_dir):
-        os.makedirs(work_dir)
+    try:
+        response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
+        with open(artifacts_zip_path, "wb") as out_file:
+            shutil.copyfileobj(response, out_file)
+    except OSError as e:
+        raise SpackError(f"Error fetching artifacts: {e}")

-    with open(artifacts_zip_path, "wb") as out_file:
-        shutil.copyfileobj(response, out_file)
-
-    zip_file = zipfile.ZipFile(artifacts_zip_path)
-    zip_file.extractall(work_dir)
-    zip_file.close()
+    with zipfile.ZipFile(artifacts_zip_path) as zip_file:
+        zip_file.extractall(work_dir)

     os.remove(artifacts_zip_path)
```
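Two standard-library idioms in this hunk are worth noting: `urllib.request.Request` has accepted a `method=` keyword since Python 3.3, which replaces the old `request.get_method = lambda: ...` hack, and `zipfile.ZipFile` is a context manager, so the explicit `close()` can go. A standalone sketch of the same fetch-and-extract pattern (URL, token, and paths are placeholders):

```python
# Standalone sketch of the download-and-extract pattern above; the URL,
# token, and directory names are placeholders.
import os
import shutil
import zipfile
from urllib.request import Request, urlopen


def download_and_extract(url: str, work_dir: str, token: str = "") -> None:
    headers = {"PRIVATE-TOKEN": token} if token else {}
    request = Request(url, headers=headers, method="GET")  # no get_method hack

    zip_path = os.path.join(work_dir, "artifacts.zip")
    os.makedirs(work_dir, exist_ok=True)  # replaces the exists()/makedirs() dance

    with urlopen(request, timeout=10) as response, open(zip_path, "wb") as out:
        shutil.copyfileobj(response, out)

    with zipfile.ZipFile(zip_path) as zf:  # context manager closes the archive
        zf.extractall(work_dir)
    os.remove(zip_path)
```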
```diff
@@ -1,23 +1,21 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import codecs
 import copy
 import json
 import os
 import re
-import ssl
 import sys
 import time
 from collections import deque
 from enum import Enum
 from typing import Dict, Generator, List, Optional, Set, Tuple
 from urllib.parse import quote, urlencode, urlparse
-from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import Singleton, memoized
+from llnl.util.lang import memoized

 import spack.binary_distribution as bindist
 import spack.config as cfg
@@ -35,32 +33,11 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp

-
-def _urlopen():
-    error_handler = web_util.SpackHTTPDefaultErrorHandler()
-
-    # One opener with HTTPS ssl enabled
-    with_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
-    )
-
-    # One opener with HTTPS ssl disabled
-    without_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
-    )
-
-    # And dynamically dispatch based on the config:verify_ssl.
-    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
-        opener = with_ssl if verify_ssl else without_ssl
-        timeout = timeout or cfg.get("config:connect_timeout", 1)
-        return opener.open(fullurl, data, timeout)
-
-    return dispatch_open
-
-
 IS_WINDOWS = sys.platform == "win32"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
-_dyn_mapping_urlopener = Singleton(_urlopen)
+
+# this exists purely for testing purposes
+_urlopen = web_util.urlopen


 def copy_files_to_artifacts(src, artifacts_dir):
@@ -279,26 +256,25 @@ def copy_test_results(self, source, dest):
         reports = fs.join_path(source, "*_Test*.xml")
         copy_files_to_artifacts(reports, dest)

-    def create_buildgroup(self, opener, headers, url, group_name, group_type):
+    def create_buildgroup(self, headers, url, group_name, group_type):
         data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}

         enc_data = json.dumps(data).encode("utf-8")

         request = Request(url, data=enc_data, headers=headers)

-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code not in [200, 201]:
-            msg = f"Creating buildgroup failed (response code = {response_code})"
-            tty.warn(msg)
+        try:
+            response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
+        except OSError as e:
+            tty.warn(f"Failed to create CDash buildgroup: {e}")
             return None

-        response_text = response.read()
-        response_json = json.loads(response_text)
-        build_group_id = response_json["id"]
-
-        return build_group_id
+        try:
+            response_json = json.loads(response_text)
+            return response_json["id"]
+        except (json.JSONDecodeError, KeyError) as e:
+            tty.warn(f"Failed to parse CDash response: {e}")
+            return None

     def populate_buildgroup(self, job_names):
         url = f"{self.url}/api/v1/buildgroup.php"
@@ -308,16 +284,11 @@ def populate_buildgroup(self, job_names):
             "Content-Type": "application/json",
         }

-        opener = build_opener(HTTPHandler)
-
-        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
-        group_id = self.create_buildgroup(
-            opener, headers, url, f"Latest {self.build_group}", "Latest"
-        )
+        parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
+        group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")

         if not parent_group_id or not group_id:
-            msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
-            tty.warn(msg)
+            tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
             return

         data = {
@@ -329,15 +300,12 @@ def populate_buildgroup(self, job_names):

         enc_data = json.dumps(data).encode("utf-8")

-        request = Request(url, data=enc_data, headers=headers)
-        request.get_method = lambda: "PUT"
-
-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code != 200:
-            msg = f"Error response code ({response_code}) in populate_buildgroup"
-            tty.warn(msg)
+        request = Request(url, data=enc_data, headers=headers, method="PUT")
+
+        try:
+            _urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
+        except OSError as e:
+            tty.warn(f"Failed to populate CDash buildgroup: {e}")

     def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
         """Explicitly report skipping testing of a spec (e.g., it's CI
@@ -735,9 +703,6 @@ def _apply_section(dest, src):
         for value in header.values():
             value = os.path.expandvars(value)

-        verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
-        timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
-
         required = mapping.get("require", [])
         allowed = mapping.get("allow", [])
         ignored = mapping.get("ignore", [])
@@ -771,19 +736,15 @@ def job_query(job):
             endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
         )
         try:
-            response = _dyn_mapping_urlopener(
-                request, verify_ssl=verify_ssl, timeout=timeout
-            )
+            response = _urlopen(request)
+            config = json.load(response)
         except Exception as e:
             # For now just ignore any errors from dynamic mapping and continue
             # This is still experimental, and failures should not stop CI
             # from running normally
-            tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
-            tty.warn(f"{e}")
+            tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
             continue

-        config = json.load(codecs.getreader("utf-8")(response))
-
         # Strip ignore keys
         if ignored:
             for key in ignored:
```
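The module-level `_urlopen = web_util.urlopen` alias exists so tests can stub out HTTP without touching `urllib` globally. A self-contained pytest sketch of that pattern (all names invented; this is not Spack's actual test code):

```python
# Self-contained pytest sketch: production code reads a module-level
# _urlopen alias, and the test monkeypatches that alias so no real HTTP
# request is made. Names are invented for illustration.
import io
import json
import types

ci = types.SimpleNamespace()  # stands in for the Spack module patched above
ci._urlopen = None  # in Spack: _urlopen = web_util.urlopen


def create_buildgroup(request):
    response_text = ci._urlopen(request, timeout=5).read()
    return json.loads(response_text)["id"]


def test_create_buildgroup(monkeypatch):
    monkeypatch.setattr(
        ci, "_urlopen", lambda req, timeout=None: io.BytesIO(b'{"id": 42}')
    )
    assert create_buildgroup(request="ignored") == 42
```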
```diff
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import collections
+import warnings

 import archspec.cpu

@@ -51,10 +52,10 @@ def setup_parser(subparser):
         "-t", "--target", action="store_true", default=False, help="print only the target"
     )
     parts2.add_argument(
-        "-f", "--frontend", action="store_true", default=False, help="print frontend"
+        "-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
     )
     parts2.add_argument(
-        "-b", "--backend", action="store_true", default=False, help="print backend"
+        "-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
     )


@@ -98,15 +99,14 @@ def arch(parser, args):
         display_targets(archspec.cpu.TARGETS)
         return

-    os_args, target_args = "default_os", "default_target"
     if args.frontend:
-        os_args, target_args = "frontend", "frontend"
+        warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
     elif args.backend:
-        os_args, target_args = "backend", "backend"
+        warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")

     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system(os_args)
-    host_target = host_platform.target(target_args)
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     if args.family:
         host_target = host_target.family
     elif args.generic:
```
```diff
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 import shutil
 import sys
 import tempfile
@@ -436,6 +436,7 @@ def write_metadata(subdir, metadata):
     shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
     shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
     instructions += cmd.format("local-binaries", rel_directory)
+    instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
     print(instructions)
```
```diff
@@ -4,7 +4,7 @@

 import argparse
-import os.path
+import os
 import textwrap

 from llnl.util.lang import stable_partition
```
```diff
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path

 import llnl.util.tty
@@ -86,8 +86,8 @@ def create_db_tarball(args):

 def report(args):
     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("frontend")
-    host_target = host_platform.target("frontend")
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
     print("* **Spack:**", spack.get_version())
     print("* **Python:**", platform.python_version())
```
```diff
@@ -110,10 +110,7 @@ def external_find(args):
         # Note that KeyboardInterrupt does not subclass Exception
         # (so CTRL-C will terminate the program as expected).
         skip_msg = "Skipping manifest and continuing with other external checks"
-        if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
-            errno.EPERM,
-            errno.EACCES,
-        ]:
+        if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
             # The manifest file does not have sufficient permissions enabled:
             # print a warning and keep going
             tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
```
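The collapsed condition works because checking `IOError` and `OSError` separately was redundant, and membership in a tuple reads better than a multi-line list. A deterministic, self-contained demonstration of the errno check:

```python
# Deterministic demonstration of the simplified permission check above.
import errno

e = PermissionError(errno.EACCES, "permission denied")  # stand-in error

if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
    print("insufficient permissions; skipping manifest")
```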
```diff
@@ -54,10 +54,6 @@
     @m{target=target}              specific <target> processor
     @m{arch=platform-os-target}    shortcut for all three above

-  cross-compiling:
-    @m{os=backend} or @m{os=be}    build for compute node (backend)
-    @m{os=frontend} or @m{os=fe}   build for login node (frontend)
-
   dependencies:
     ^dependency [constraints]      specify constraints on dependencies
     ^@K{/hash}                     build with a specific installed
```
```diff
@@ -5,7 +5,7 @@
 """Implementation details of the ``spack module`` command."""

 import collections
-import os.path
+import os
 import shutil
 import sys
```
```diff
@@ -136,20 +136,7 @@ def solve(parser, args):
     setup_only = set(show) == {"asp"}
     unify = spack.config.get("concretizer:unify")
     allow_deprecated = spack.config.get("config:deprecated", False)
-    if unify != "when_possible":
-        # set up solver parameters
-        # Note: reuse and other concretizer prefs are passed as configuration
-        result = solver.solve(
-            specs,
-            out=output,
-            timers=args.timers,
-            stats=args.stats,
-            setup_only=setup_only,
-            allow_deprecated=allow_deprecated,
-        )
-        if not setup_only:
-            _process_result(result, show, required_format, kwargs)
-    else:
+    if unify == "when_possible":
         for idx, result in enumerate(
             solver.solve_in_rounds(
                 specs,
@@ -166,3 +153,29 @@ def solve(parser, args):
             print("% END ROUND {0}\n".format(idx))
         if not setup_only:
             _process_result(result, show, required_format, kwargs)
+    elif unify:
+        # set up solver parameters
+        # Note: reuse and other concretizer prefs are passed as configuration
+        result = solver.solve(
+            specs,
+            out=output,
+            timers=args.timers,
+            stats=args.stats,
+            setup_only=setup_only,
+            allow_deprecated=allow_deprecated,
+        )
+        if not setup_only:
+            _process_result(result, show, required_format, kwargs)
+    else:
+        for spec in specs:
+            print("SOLVING SPEC:", spec)
+            result = solver.solve(
+                [spec],
+                out=output,
+                timers=args.timers,
+                stats=args.stats,
+                setup_only=setup_only,
+                allow_deprecated=allow_deprecated,
+            )
+            if not setup_only:
+                _process_result(result, show, required_format, kwargs)
```
```diff
@@ -177,16 +177,15 @@ def test_run(args):
         matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
         if spec and not matching:
             tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
-            """
-            TODO: Need to write out a log message and/or CDASH Testing
-            output that package not installed IF continue to process
-            these issues here.
-
-            if args.log_format:
-                # Proceed with the spec assuming the test process
-                # to ensure report package as skipped (e.g., for CI)
-                specs_to_test.append(spec)
-            """
+
+            # TODO: Need to write out a log message and/or CDASH Testing
+            # output that package not installed IF continue to process
+            # these issues here.
+            # if args.log_format:
+            #     # Proceed with the spec assuming the test process
+            #     # to ensure report package as skipped (e.g., for CI)
+            #     specs_to_test.append(spec)
+
         specs_to_test.extend(matching)
```
```diff
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os.path
+import os
 import shutil

 import llnl.util.tty as tty
```
```diff
@@ -5,7 +5,7 @@
 import argparse
 import collections
 import io
-import os.path
+import os
 import re
 import sys
```
```diff
@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
 def _extract_os_and_target(spec: "spack.spec.Spec"):
     if not spec.architecture:
         host_platform = spack.platforms.host()
-        operating_system = host_platform.operating_system("default_os")
-        target = host_platform.target("default_target")
+        operating_system = host_platform.default_operating_system()
+        target = host_platform.default_target()
     else:
         target = spec.architecture.target
         if not target:
-            target = spack.platforms.host().target("default_target")
+            target = spack.platforms.host().default_target()

         operating_system = spec.os
         if not operating_system:
             host_platform = spack.platforms.host()
-            operating_system = host_platform.operating_system("default_os")
+            operating_system = host_platform.default_operating_system()
     return operating_system, target
```
```diff
@@ -53,6 +53,7 @@
 import spack.schema.definitions
 import spack.schema.develop
 import spack.schema.env
+import spack.schema.env_vars
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages
@@ -70,6 +71,7 @@
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
+    "env_vars": spack.schema.env_vars.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,
```
```diff
@@ -57,7 +57,7 @@ def validate(configuration_file):
     # Set the default value of the concretization strategy to unify and
     # warn if the user explicitly set another value
     env_dict.setdefault("concretizer", {"unify": True})
-    if not env_dict["concretizer"]["unify"] is True:
+    if env_dict["concretizer"]["unify"] is not True:
         warnings.warn(
             '"concretizer:unify" is not set to "true", which means the '
             "generated image may contain different variants of the same "
```
```diff
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Manages the details on the images used in the various stages."""
 import json
-import os.path
+import os
 import shlex
 import sys
```
```diff
@@ -15,7 +15,6 @@
 import glob
 import itertools
 import os
-import os.path
 import pathlib
 import re
 import sys
```
```diff
@@ -7,7 +7,6 @@
 import collections
 import concurrent.futures
 import os
-import os.path
 import re
 import sys
 import traceback
```
```diff
@@ -32,7 +32,7 @@ class OpenMpi(Package):
 """
 import collections
 import collections.abc
-import os.path
+import os
 import re
 from typing import Any, Callable, List, Optional, Tuple, Type, Union
```
```diff
@@ -581,7 +581,7 @@ def _error_on_nonempty_view_dir(new_root):
     # Check if the target path lexists
     try:
         st = os.lstat(new_root)
-    except (IOError, OSError):
+    except OSError:
         return

     # Empty directories are fine
@@ -861,7 +861,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
         ):
             try:
                 shutil.rmtree(old_root)
-            except (IOError, OSError) as e:
+            except OSError as e:
                 msg = "Failed to remove old view at %s\n" % old_root
                 msg += str(e)
                 tty.warn(msg)
@@ -2554,7 +2554,7 @@ def is_latest_format(manifest):
     try:
         with open(manifest, encoding="utf-8") as f:
             data = syaml.load(f)
-    except (OSError, IOError):
+    except OSError:
         return True
     top_level_key = _top_level_key(data)
     changed = spack.schema.env.update(data[top_level_key])
@@ -2634,6 +2634,32 @@ def _ensure_env_dir():

     shutil.copy(envfile, target_manifest)

+    # Copy relative path includes that live inside the environment dir
+    try:
+        manifest = EnvironmentManifestFile(environment_dir)
+    except Exception:
+        # error handling for bad manifests is handled on other code paths
+        return
+
+    includes = manifest[TOP_LEVEL_KEY].get("include", [])
+    for include in includes:
+        if os.path.isabs(include):
+            continue
+
+        abspath = pathlib.Path(os.path.normpath(environment_dir / include))
+        common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
+        if common_path != environment_dir:
+            tty.debug(f"Will not copy relative include from outside environment: {include}")
+            continue
+
+        orig_abspath = os.path.normpath(envfile.parent / include)
+        if not os.path.exists(orig_abspath):
+            tty.warn(f"Included file does not exist; will not copy: '{include}'")
+            continue
+
+        fs.touchp(abspath)
+        shutil.copy(orig_abspath, abspath)
+
+
 class EnvironmentManifestFile(collections.abc.Mapping):
     """Manages the in-memory representation of a manifest file, and its synchronization
```
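The new include-copying block guards against path escapes by comparing `os.path.commonpath` of the environment root and the resolved include. A self-contained sketch of that containment test (paths invented):

```python
# Self-contained sketch of the path-containment check used above.
import os
import pathlib


def is_inside(env_dir: pathlib.Path, include: str) -> bool:
    """True if a relative include resolves to a path under env_dir."""
    abspath = pathlib.Path(os.path.normpath(env_dir / include))
    common = pathlib.Path(os.path.commonpath([env_dir, abspath]))
    return common == env_dir


env_dir = pathlib.Path("/tmp/env")
assert is_inside(env_dir, "configs/packages.yaml")
assert not is_inside(env_dir, "../outside.yaml")  # rejected with a tty.debug
```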
```diff
@@ -10,6 +10,7 @@

 import spack.environment as ev
 import spack.repo
+import spack.schema.environment
 import spack.store
 from spack.util.environment import EnvironmentModifications

@@ -156,6 +157,11 @@ def activate(
     # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
     # become PATH variables.
     #
+
+    env_vars_yaml = env.manifest.configuration.get("env_vars", None)
+    if env_vars_yaml:
+        env_mods.extend(spack.schema.environment.parse(env_vars_yaml))
+
     try:
         if view and env.has_view(view):
             with spack.store.STORE.db.read_transaction():
@@ -189,6 +195,10 @@ def deactivate() -> EnvironmentModifications:
     if active is None:
         return env_mods

+    env_vars_yaml = active.manifest.configuration.get("env_vars", None)
+    if env_vars_yaml:
+        env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
+
     active_view = os.getenv(ev.spack_env_view_var)

     if active_view and active.has_view(active_view):
```
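The activate/deactivate pair depends on the modifications being invertible: whatever `parse(env_vars_yaml)` sets on activation is undone by `.reversed()` on deactivation. A toy model of that symmetry, with invented class names (Spack's real `EnvironmentModifications` is richer but follows the same idea):

```python
# Toy model (invented names) of the set-on-activate / reverse-on-deactivate
# symmetry used above.
import os


class SetEnv:
    def __init__(self, name, value):
        self.name, self.value = name, value

    def apply(self):
        os.environ[self.name] = self.value

    def reversed(self):
        return UnsetEnv(self.name)


class UnsetEnv:
    def __init__(self, name):
        self.name = name

    def apply(self):
        os.environ.pop(self.name, None)


mods = [SetEnv("DEMO_VAR", "1")]  # activation: parse(env_vars_yaml)
for m in mods:
    m.apply()
assert os.environ["DEMO_VAR"] == "1"

for m in [m.reversed() for m in mods]:  # deactivation: parse(...).reversed()
    m.apply()
assert "DEMO_VAR" not in os.environ
```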
```diff
@@ -187,7 +187,7 @@ def path_for_extension(target_name: str, *, paths: List[str]) -> str:
         if name == target_name:
             return path
     else:
-        raise IOError('extension "{0}" not found'.format(target_name))
+        raise OSError('extension "{0}" not found'.format(target_name))


 def get_module(cmd_name):
```
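Raising `OSError` here is behavior-preserving: `IOError` has been an alias of `OSError` since Python 3.3, so legacy `except IOError` callers still catch the new exception:

```python
# IOError is an alias of OSError since Python 3.3, so the raise above is
# caught unchanged by legacy handlers.
assert IOError is OSError

try:
    raise OSError('extension "demo" not found')  # message shape from above
except IOError as e:  # legacy handler still works
    print(e)
```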
```diff
@@ -25,7 +25,6 @@
 import functools
 import http.client
 import os
-import os.path
 import re
 import shutil
 import urllib.error
@@ -321,9 +320,15 @@ def _fetch_urllib(self, url):

         request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})

+        if os.path.lexists(save_file):
+            os.remove(save_file)
+
         try:
             response = web_util.urlopen(request)
-        except (TimeoutError, urllib.error.URLError) as e:
+            tty.msg(f"Fetching {url}")
+            with open(save_file, "wb") as f:
+                shutil.copyfileobj(response, f)
+        except OSError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
@@ -331,14 +336,6 @@ def _fetch_urllib(self, url):
                 os.remove(save_file)
             raise FailedDownloadError(e) from e

-        tty.msg(f"Fetching {url}")
-
-        if os.path.lexists(save_file):
-            os.remove(save_file)
-
-        with open(save_file, "wb") as f:
-            shutil.copyfileobj(response, f)
-
         # Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
         # mirror that is broken, leading to spurious download failures. In that case it's helpful
         # for users to know which URL was actually fetched.
@@ -535,11 +532,16 @@ def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
     @_needs_stage
     def fetch(self):
         file = self.stage.save_filename
-        tty.msg(f"Fetching {self.url}")
+
+        if os.path.lexists(file):
+            os.remove(file)

         try:
             response = self._urlopen(self.url)
-        except (TimeoutError, urllib.error.URLError) as e:
+            tty.msg(f"Fetching {self.url}")
+            with open(file, "wb") as f:
+                shutil.copyfileobj(response, f)
+        except OSError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
@@ -547,12 +549,6 @@ def fetch(self):
                 os.remove(file)
             raise FailedDownloadError(e) from e

-        if os.path.lexists(file):
-            os.remove(file)
-
-        with open(file, "wb") as f:
-            shutil.copyfileobj(response, f)
-

 class VCSFetchStrategy(FetchStrategy):
     """Superclass for version control system fetch strategies.
```
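The restructured fetch deletes any stale target up front and keeps the write inside the `try`, so a single `except OSError` covers both network and filesystem failures (`urllib.error.URLError` and `TimeoutError` are both `OSError` subclasses). A condensed standalone sketch (URL and file names are placeholders):

```python
# Condensed sketch of the download-with-cleanup flow above; the URL and
# file names are placeholders. URLError and TimeoutError both derive
# from OSError, so one except clause covers every failure mode here.
import os
import shutil
from urllib.request import urlopen


def fetch(url: str, save_file: str) -> None:
    if os.path.lexists(save_file):  # clear stale partial downloads first
        os.remove(save_file)
    try:
        response = urlopen(url, timeout=10)
        print(f"Fetching {url}")
        with open(save_file, "wb") as f:
            shutil.copyfileobj(response, f)
    except OSError as e:
        if os.path.lexists(save_file):  # don't leave a truncated file behind
            os.remove(save_file)
        raise RuntimeError(f"download failed: {e}") from e
```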
```diff
@@ -427,7 +427,7 @@ def needs_file(spec, file):
         try:
             with open(manifest_file, "r", encoding="utf-8") as f:
                 manifest = s_json.load(f)
-        except (OSError, IOError):
+        except OSError:
             # if we can't load it, assume it doesn't know about the file.
             manifest = {}
         return test_path in manifest
@@ -831,7 +831,7 @@ def get_spec_from_file(filename):
     try:
         with open(filename, "r", encoding="utf-8") as f:
             return spack.spec.Spec.from_yaml(f)
-    except IOError:
+    except OSError:
         return None
```
```diff
@@ -26,7 +26,7 @@ def is_shared_library_elf(filepath):
         with open(filepath, "rb") as f:
             elf = parse_elf(f, interpreter=True, dynamic_section=True)
             return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp)
-    except (IOError, OSError, ElfParsingError):
+    except (OSError, ElfParsingError):
         return False
```
```diff
@@ -166,7 +166,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
         # Only look at executable, non-symlink files.
         try:
             st = os.lstat(path)
-        except (IOError, OSError):
+        except OSError:
             continue

         if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:
```
```diff
@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
         # Include build dependencies if pkg is going to be built from sources, or
         # if build deps are explicitly requested.
         if include_build_deps or not (
-            cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
+            cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
         ):
             depflag |= dt.BUILD
         if self.run_tests(pkg):
```
```diff
@@ -14,7 +14,6 @@
 import io
 import operator
 import os
-import os.path
 import pstats
 import re
 import shlex
@@ -164,7 +163,7 @@ def format_help_sections(self, level):
         # lazily add all commands to the parser when needed.
         add_all_commands(self)

-        """Print help on subcommands in neatly formatted sections."""
+        # Print help on subcommands in neatly formatted sections.
         formatter = self._get_formatter()

         # Create a list of subcommand actions. Argparse internals are nasty!
@@ -729,7 +728,7 @@ def _compatible_sys_types():
     with the current host.
     """
     host_platform = spack.platforms.host()
-    host_os = str(host_platform.operating_system("default_os"))
+    host_os = str(host_platform.default_operating_system())
     host_target = archspec.cpu.host()
     compatible_targets = [host_target] + host_target.ancestors
```
```diff
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 from typing import Optional

 import llnl.url
@@ -64,7 +64,7 @@ def from_local_path(path: str):
     @staticmethod
     def from_url(url: str):
         """Create an anonymous mirror by URL. This method validates the URL."""
-        if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
+        if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
             raise ValueError(
                 f'"{url}" is not a valid mirror URL. '
                 f"Scheme must be one of {supported_url_schemes}."
```
```diff
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import traceback

 import llnl.util.tty as tty
```
```diff
@@ -31,7 +31,7 @@
 import copy
 import datetime
 import inspect
-import os.path
+import os
 import re
 import string
 from typing import List, Optional
```
```diff
@@ -4,7 +4,7 @@

 import collections
 import itertools
-import os.path
+import os
 from typing import Dict, List, Optional, Tuple

 import llnl.util.filesystem as fs
```
```diff
@@ -5,7 +5,7 @@
 """This module implements the classes necessary to generate Tcl
 non-hierarchical modules.
 """
-import os.path
+import os
 from typing import Dict, Optional, Tuple

 import spack.config
```
```diff
@@ -7,6 +7,7 @@
 import base64
 import json
 import re
+import socket
 import time
 import urllib.error
 import urllib.parse
@@ -382,6 +383,7 @@ def create_opener():
     """Create an opener that can handle OCI authentication."""
     opener = urllib.request.OpenerDirector()
     for handler in [
+        urllib.request.ProxyHandler(),
         urllib.request.UnknownHandler(),
         urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
         spack.util.web.SpackHTTPDefaultErrorHandler(),
@@ -410,7 +412,7 @@ def wrapper(*args, **kwargs):
         for i in range(retries):
             try:
                 return f(*args, **kwargs)
-            except (urllib.error.URLError, TimeoutError) as e:
+            except OSError as e:
                 # Retry on internal server errors, and rate limit errors
                 # Potentially this could take into account the Retry-After header
                 # if registries support it
@@ -420,9 +422,10 @@ def wrapper(*args, **kwargs):
                         and (500 <= e.code < 600 or e.code == 429)
                     )
                     or (
-                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
+                        isinstance(e, urllib.error.URLError)
+                        and isinstance(e.reason, socket.timeout)
                     )
-                    or isinstance(e, TimeoutError)
+                    or isinstance(e, socket.timeout)
                 ):
                     # Exponential backoff
                     sleep(2**i)
```
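The retry predicate leans on the exception hierarchy: `urllib.error.URLError` and `socket.timeout` are both `OSError` subclasses, and on Python 3.10+ `socket.timeout` is literally the same class as `TimeoutError`, which is why the broad `except OSError` is safe here. A quick check:

```python
# Quick demonstration of the exception hierarchy the retry logic relies on.
import socket
import sys
import urllib.error

assert issubclass(urllib.error.URLError, OSError)
assert issubclass(socket.timeout, OSError)
if sys.version_info >= (3, 10):
    assert socket.timeout is TimeoutError  # merged in Python 3.10

try:
    raise socket.timeout("simulated registry timeout")
except OSError as e:  # one handler gates retries on status codes or timeouts
    retry = isinstance(e, socket.timeout) or (
        isinstance(e, urllib.error.URLError) and isinstance(e.reason, socket.timeout)
    )
    print("retry:", retry)
```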
```diff
@@ -2,31 +2,64 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-# flake8: noqa: F401
-"""spack.util.package is a set of useful build tools and directives for packages.
-
-Everything in this module is automatically imported into Spack package files.
-"""
+# flake8: noqa: F401, E402
+"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
+other modules. Packages should import this module, instead of importing from spack.* directly
+to ensure forward compatibility with future versions of Spack."""
+
 from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
 from shutil import move, rmtree

-from spack.error import InstallError, NoHeadersError, NoLibrariesError
-
-# Emulate some shell commands for convenience
-env = environ
-cd = chdir
-pwd = getcwd
-
 # import most common types used in packages
 from typing import Dict, List, Optional

-import llnl.util.filesystem
-from llnl.util.filesystem import *
+
+class tty:
+    import llnl.util.tty as _tty
+
+    debug = _tty.debug
+    error = _tty.error
+    info = _tty.info
+    msg = _tty.msg
+    warn = _tty.warn
+
+
+from llnl.util.filesystem import (
+    FileFilter,
+    FileList,
+    HeaderList,
+    LibraryList,
+    ancestor,
+    can_access,
+    change_sed_delimiter,
+    copy,
+    copy_tree,
+    filter_file,
+    find,
+    find_all_headers,
+    find_first,
+    find_headers,
+    find_libraries,
+    find_system_libraries,
+    force_remove,
+    force_symlink,
+    install,
+    install_tree,
+    is_exe,
+    join_path,
+    keep_modification_time,
+    library_extensions,
+    mkdirp,
+    remove_directory_contents,
+    remove_linked_tree,
+    rename,
+    set_executable,
+    set_install_permissions,
+    touch,
+    working_dir,
+)
 from llnl.util.symlink import symlink

-import spack.util.executable
-
-# These props will be overridden when the build env is set up.
 from spack.build_environment import MakeExecutable
 from spack.build_systems.aspell_dict import AspellDictPackage
 from spack.build_systems.autotools import AutotoolsPackage
@@ -76,7 +109,24 @@
 from spack.builder import BaseBuilder
 from spack.config import determine_number_of_jobs
 from spack.deptypes import ALL_TYPES as all_deptypes
-from spack.directives import *
+from spack.directives import (
+    build_system,
+    can_splice,
+    conditional,
+    conflicts,
+    depends_on,
+    extends,
+    license,
+    maintainers,
+    patch,
+    provides,
+    redistribute,
+    requires,
+    resource,
+    variant,
+    version,
+)
+from spack.error import InstallError, NoHeadersError, NoLibrariesError
 from spack.install_test import (
     SkipTest,
     cache_extra_test_sources,
@@ -86,26 +136,28 @@
     install_test_root,
     test_part,
 )
-from spack.installer import ExternalPackageError, InstallLockError, UpstreamPackageError
 from spack.mixins import filter_compiler_wrappers
 from spack.multimethod import default_args, when
-from spack.package_base import (
-    DependencyConflictError,
-    build_system_flags,
-    env_flags,
-    flatten_dependencies,
-    inject_flags,
-    install_dependency_symlinks,
-    on_package_attributes,
-)
-from spack.package_completions import *
+from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes
+from spack.package_completions import (
+    bash_completion_path,
+    fish_completion_path,
+    zsh_completion_path,
+)
 from spack.phase_callbacks import run_after, run_before
-from spack.spec import InvalidSpecDetected, Spec
-from spack.util.executable import *
+from spack.spec import Spec
+from spack.util.environment import EnvironmentModifications
+from spack.util.executable import Executable, ProcessError, which, which_string
 from spack.util.filesystem import fix_darwin_install_name
+from spack.util.prefix import Prefix
 from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
 from spack.version import Version, ver

+# Emulate some shell commands for convenience
+env = environ
+cd = chdir
+pwd = getcwd
+
 # These are just here for editor support; they may be set when the build env is set up.
 configure: Executable
 make_jobs: int
```
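The net effect of this rewrite is that `spack.package` exposes an explicit, curated namespace instead of wildcard re-exports. A hypothetical `package.py` written against that API (the name, URL, and checksum are placeholders, and the availability of each name is assumed from the import list above):

```python
# Hypothetical package.py (placeholder metadata) using names re-exported by
# spack.package above: directives (version, depends_on), filesystem helpers
# (install_tree), and the class-style tty logger shim.
from spack.package import *


class DemoLib(Package):
    """Hypothetical library, for illustration only."""

    homepage = "https://example.com/demo-lib"
    url = "https://example.com/demo-lib-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum
    depends_on("gmake", type="build")

    def install(self, spec, prefix):
        tty.msg(f"Installing {self.name} into {prefix}")  # tty shim from above
        install_tree(".", prefix)  # re-exported llnl.util.filesystem helper
```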
@@ -30,7 +30,6 @@
|
|||||||
import llnl.util.filesystem as fsys
|
import llnl.util.filesystem as fsys
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.lang import classproperty, memoized
|
from llnl.util.lang import classproperty, memoized
|
||||||
from llnl.util.link_tree import LinkTree
|
|
||||||
|
|
||||||
import spack.compilers
|
import spack.compilers
|
||||||
import spack.config
|
import spack.config
|
||||||
@@ -67,10 +66,6 @@
 ]
 FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
 
-"""Allowed URL schemes for spack packages."""
-_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
-
-
 #: Filename for the Spack build/install log.
 _spack_build_logfile = "spack-build-out.txt"
 
@@ -702,9 +697,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: Verbosity level, preserved across installs.
     _verbose = None
 
-    #: index of patches by sha256 sum, built lazily
-    _patches_by_hash = None
-
     #: Package homepage where users can find more information about the package
     homepage: Optional[str] = None
 
@@ -2292,44 +2284,6 @@ def rpath_args(self):
 build_system_flags = PackageBase.build_system_flags
 
 
-def install_dependency_symlinks(pkg, spec, prefix):
-    """
-    Execute a dummy install and flatten dependencies.
-
-    This routine can be used in a ``package.py`` definition by setting
-    ``install = install_dependency_symlinks``.
-
-    This feature comes in handy for creating a common location for the
-    the installation of third-party libraries.
-    """
-    flatten_dependencies(spec, prefix)
-
-
-def use_cray_compiler_names():
-    """Compiler names for builds that rely on cray compiler names."""
-    os.environ["CC"] = "cc"
-    os.environ["CXX"] = "CC"
-    os.environ["FC"] = "ftn"
-    os.environ["F77"] = "ftn"
-
-
-def flatten_dependencies(spec, flat_dir):
-    """Make each dependency of spec present in dir via symlink."""
-    for dep in spec.traverse(root=False):
-        name = dep.name
-
-        dep_path = spack.store.STORE.layout.path_for_spec(dep)
-        dep_files = LinkTree(dep_path)
-
-        os.mkdir(flat_dir + "/" + name)
-
-        conflict = dep_files.find_conflict(flat_dir + "/" + name)
-        if conflict:
-            raise DependencyConflictError(conflict)
-
-        dep_files.merge(flat_dir + "/" + name)
-
-
 def possible_dependencies(
     *pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
     transitive: bool = True,
@@ -4,7 +4,6 @@
 
 import hashlib
 import os
-import os.path
 import pathlib
 import sys
 from typing import Any, Dict, Optional, Tuple, Type, Union
@@ -52,8 +52,7 @@ def use_platform(new_platform):
 
     import spack.config
 
-    msg = '"{0}" must be an instance of Platform'
-    assert isinstance(new_platform, Platform), msg.format(new_platform)
+    assert isinstance(new_platform, Platform), f'"{new_platform}" must be an instance of Platform'
 
     original_host_fn = host
 
@@ -1,42 +1,22 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import warnings
 from typing import Optional
 
 import archspec.cpu
 
 import llnl.util.lang
 
-import spack.error
-
-
-class NoPlatformError(spack.error.SpackError):
-    def __init__(self):
-        msg = "Could not determine a platform for this machine"
-        super().__init__(msg)
-
 
 @llnl.util.lang.lazy_lexicographic_ordering
 class Platform:
     """Platform is an abstract class extended by subclasses.
 
-    To add a new type of platform (such as cray_xe), create a subclass and set all the
-    class attributes such as priority, front_target, back_target, front_os, back_os.
-
     Platform also contain a priority class attribute. A lower number signifies higher
     priority. These numbers are arbitrarily set and can be changed though often there
     isn't much need unless a new platform is added and the user wants that to be
     detected first.
-
-    Targets are created inside the platform subclasses. Most architecture (like linux,
-    and darwin) will have only one target family (x86_64) but in the case of Cray
-    machines, there is both a frontend and backend processor. The user can specify
-    which targets are present on front-end and back-end architecture.
-
-    Depending on the platform, operating systems are either autodetected or are
-    set. The user can set the frontend and backend operating setting by the class
-    attributes front_os and back_os. The operating system will be responsible for
-    compiler detection.
     """
 
     # Subclass sets number. Controls detection order
@@ -45,82 +25,72 @@ class attributes such as priority, front_target, back_target, front_os, back_os.
     #: binary formats used on this platform; used by relocation logic
     binary_formats = ["elf"]
 
-    front_end: Optional[str] = None
-    back_end: Optional[str] = None
-    default: Optional[str] = None  # The default back end target.
-
-    front_os: Optional[str] = None
-    back_os: Optional[str] = None
-    default_os: Optional[str] = None
+    default: str
+    default_os: str
 
     reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
     reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
+    deprecated_names = ["frontend", "fe", "backend", "be"]
 
     def __init__(self, name):
         self.targets = {}
         self.operating_sys = {}
         self.name = name
+        self._init_targets()
 
     def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None:
-        """Used by the platform specific subclass to list available targets.
-        Raises an error if the platform specifies a name
-        that is reserved by spack as an alias.
-        """
         if name in Platform.reserved_targets:
-            msg = "{0} is a spack reserved alias and cannot be the name of a target"
-            raise ValueError(msg.format(name))
+            msg = f"{name} is a spack reserved alias and cannot be the name of a target"
+            raise ValueError(msg)
         self.targets[name] = target
 
-    def _add_archspec_targets(self):
+    def _init_targets(self):
+        self.default = archspec.cpu.host().name
         for name, microarchitecture in archspec.cpu.TARGETS.items():
            self.add_target(name, microarchitecture)
 
     def target(self, name):
-        """This is a getter method for the target dictionary
-        that handles defaulting based on the values provided by default,
-        front-end, and back-end. This can be overwritten
-        by a subclass for which we want to provide further aliasing options.
-        """
-        # TODO: Check if we can avoid using strings here
         name = str(name)
-        if name == "default_target":
+        if name in Platform.deprecated_names:
+            warnings.warn(f"target={name} is deprecated, use target={self.default} instead")
+
+        if name in Platform.reserved_targets:
             name = self.default
-        elif name == "frontend" or name == "fe":
-            name = self.front_end
-        elif name == "backend" or name == "be":
-            name = self.back_end
 
         return self.targets.get(name, None)
 
     def add_operating_system(self, name, os_class):
-        """Add the operating_system class object into the
-        platform.operating_sys dictionary.
-        """
-        if name in Platform.reserved_oss:
-            msg = "{0} is a spack reserved alias and cannot be the name of an OS"
-            raise ValueError(msg.format(name))
+        if name in Platform.reserved_oss + Platform.deprecated_names:
+            msg = f"{name} is a spack reserved alias and cannot be the name of an OS"
+            raise ValueError(msg)
         self.operating_sys[name] = os_class
 
+    def default_target(self):
+        return self.target(self.default)
+
+    def default_operating_system(self):
+        return self.operating_system(self.default_os)
+
     def operating_system(self, name):
-        if name == "default_os":
+        if name in Platform.deprecated_names:
+            warnings.warn(f"os={name} is deprecated, use os={self.default_os} instead")
+
+        if name in Platform.reserved_oss:
             name = self.default_os
-        if name == "frontend" or name == "fe":
-            name = self.front_os
-        if name == "backend" or name == "be":
-            name = self.back_os
 
         return self.operating_sys.get(name, None)
 
     def setup_platform_environment(self, pkg, env):
-        """Subclass can override this method if it requires any
-        platform-specific build environment modifications.
+        """Platform-specific build environment modifications.
+
+        This method is meant to be overridden by subclasses, when needed.
         """
         pass
 
     @classmethod
     def detect(cls):
-        """Return True if the the host platform is detected to be the current
-        Platform class, False otherwise.
+        """Returns True if the host platform is detected to be the current Platform class,
+        False otherwise.
 
         Derived classes are responsible for implementing this method.
         """
@@ -135,11 +105,7 @@ def __str__(self):
     def _cmp_iter(self):
         yield self.name
         yield self.default
-        yield self.front_end
-        yield self.back_end
         yield self.default_os
-        yield self.front_os
-        yield self.back_os
 
         def targets():
             for t in sorted(self.targets.values()):
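
Note: taken together, the `_platform.py` hunks collapse the old frontend/backend split into a single `default` target and `default_os`, exposed through two new accessors. A short sketch of the resulting API, assuming the methods added above:

    import spack.platforms

    host = spack.platforms.host()                      # detected Platform subclass
    target = host.default_target()                     # archspec Microarchitecture
    operating_system = host.default_operating_system()

    # The old aliases still resolve to the defaults, but emit a warning first:
    legacy = host.target("frontend")                   # warns: "target=frontend is deprecated ..."
    assert legacy is target
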
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 
 
 def slingshot_network():
@@ -4,8 +4,6 @@
 
 import platform as py_platform
 
-import archspec.cpu
-
 from spack.operating_systems.mac_os import MacOs
 from spack.version import Version
 
@@ -19,18 +17,8 @@ class Darwin(Platform):
 
     def __init__(self):
         super().__init__("darwin")
-        self._add_archspec_targets()
-
-        self.default = archspec.cpu.host().name
-        self.front_end = self.default
-        self.back_end = self.default
-
         mac_os = MacOs()
-
         self.default_os = str(mac_os)
-        self.front_os = str(mac_os)
-        self.back_os = str(mac_os)
-
         self.add_operating_system(str(mac_os), mac_os)
 
     @classmethod
@@ -3,8 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform
 
-import archspec.cpu
-
 from spack.operating_systems.freebsd import FreeBSDOs
 
 from ._platform import Platform
@@ -15,18 +13,8 @@ class FreeBSD(Platform):
 
     def __init__(self):
         super().__init__("freebsd")
-        self._add_archspec_targets()
-
-        # Get specific default
-        self.default = archspec.cpu.host().name
-        self.front_end = self.default
-        self.back_end = self.default
-
         os = FreeBSDOs()
         self.default_os = str(os)
-        self.front_os = self.default_os
-        self.back_os = self.default_os
-
         self.add_operating_system(str(os), os)
 
     @classmethod
@@ -3,8 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform
 
-import archspec.cpu
-
 from spack.operating_systems.linux_distro import LinuxDistro
 
 from ._platform import Platform
@@ -15,18 +13,8 @@ class Linux(Platform):
 
     def __init__(self):
         super().__init__("linux")
-        self._add_archspec_targets()
-
-        # Get specific default
-        self.default = archspec.cpu.host().name
-        self.front_end = self.default
-        self.back_end = self.default
-
         linux_dist = LinuxDistro()
         self.default_os = str(linux_dist)
-        self.front_os = self.default_os
-        self.back_os = self.default_os
-
         self.add_operating_system(str(linux_dist), linux_dist)
 
     @classmethod
@@ -16,31 +16,19 @@ class Test(Platform):
     if platform.system().lower() == "darwin":
         binary_formats = ["macho"]
 
-    if platform.machine() == "arm64":
-        front_end = "aarch64"
-        back_end = "m1"
-        default = "m1"
-    else:
-        front_end = "x86_64"
-        back_end = "core2"
-        default = "core2"
-
-    front_os = "redhat6"
-    back_os = "debian6"
     default_os = "debian6"
+    default = "m1" if platform.machine() == "arm64" else "core2"
 
     def __init__(self, name=None):
         name = name or "test"
         super().__init__(name)
-        self.add_target(self.default, archspec.cpu.TARGETS[self.default])
-        self.add_target(self.front_end, archspec.cpu.TARGETS[self.front_end])
-
-        self.add_operating_system(
-            self.default_os, spack.operating_systems.OperatingSystem("debian", 6)
-        )
-        self.add_operating_system(
-            self.front_os, spack.operating_systems.OperatingSystem("redhat", 6)
-        )
+        self.add_operating_system("debian6", spack.operating_systems.OperatingSystem("debian", 6))
+        self.add_operating_system("redhat6", spack.operating_systems.OperatingSystem("redhat", 6))
+
+    def _init_targets(self):
+        targets = ("aarch64", "m1") if platform.machine() == "arm64" else ("x86_64", "core2")
+        for t in targets:
+            self.add_target(t, archspec.cpu.TARGETS[t])
 
     @classmethod
     def detect(cls):
@@ -4,8 +4,6 @@
 
 import platform
 
-import archspec.cpu
-
 from spack.operating_systems.windows_os import WindowsOs
 
 from ._platform import Platform
@@ -16,18 +14,8 @@ class Windows(Platform):
 
     def __init__(self):
         super().__init__("windows")
-        self._add_archspec_targets()
-
-        self.default = archspec.cpu.host().name
-        self.front_end = self.default
-        self.back_end = self.default
-
         windows_os = WindowsOs()
-
         self.default_os = str(windows_os)
-        self.front_os = str(windows_os)
-        self.back_os = str(windows_os)
-
         self.add_operating_system(str(windows_os), windows_os)
 
     @classmethod
@@ -236,22 +236,15 @@ def relocate_elf_binaries(binaries: Iterable[str], prefix_to_prefix: Dict[str, s
         _set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
 
 
-def _warn_if_link_cant_be_relocated(link: str, target: str):
-    if not os.path.isabs(target):
-        return
-    tty.warn(f'Symbolic link at "{link}" to "{target}" cannot be relocated')
-
-
 def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
     """Relocate links to a new install prefix."""
     regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
     for link in links:
         old_target = readlink(link)
+        if not os.path.isabs(old_target):
+            continue
         match = regex.match(old_target)
 
-        # No match.
         if match is None:
-            _warn_if_link_cant_be_relocated(link, old_target)
             continue
 
         new_target = prefix_to_prefix[match.group()] + old_target[match.end() :]
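
Note: `relocate_links` now inlines the absolute-path check (relative links survive a prefix move unchanged) and drops the warning helper. A standalone sketch of the same prefix-rewriting technique, with illustrative names rather than Spack's:

    import os
    import re


    def retarget_links(links, prefix_map):
        """Rewrite absolute symlink targets that start with a known old prefix."""
        regex = re.compile("|".join(re.escape(p) for p in prefix_map))
        for link in links:
            old_target = os.readlink(link)
            if not os.path.isabs(old_target):  # relative links need no rewriting
                continue
            match = regex.match(old_target)
            if match is None:                  # points outside every known prefix
                continue
            new_target = prefix_map[match.group()] + old_target[match.end():]
            os.unlink(link)
            os.symlink(new_target, link)
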
@@ -14,7 +14,6 @@
 import inspect
 import itertools
 import os
-import os.path
 import random
 import re
 import shutil
@@ -1042,7 +1041,7 @@ def _read_config(self) -> Dict[str, str]:
 
                 return yaml_data["repo"]
 
-        except IOError:
+        except OSError:
             tty.die(f"Error reading {self.config_file} when opening {self.root}")
 
     def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
@@ -1370,7 +1369,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
             if subdir != packages_dir_name:
                 config.write(f"  subdirectory: '{subdir}'\n")
 
-    except (IOError, OSError) as e:
+    except OSError as e:
         # try to clean up.
         if existed:
             shutil.rmtree(config_path, ignore_errors=True)
@@ -1,9 +1,10 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import codecs
 import collections
 import hashlib
-import os.path
+import os
 import platform
 import posixpath
 import re
@@ -13,7 +14,7 @@
 import xml.sax.saxutils
 from typing import Dict, Optional
 from urllib.parse import urlencode
-from urllib.request import HTTPSHandler, Request, build_opener
+from urllib.request import Request
 
 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir
@@ -24,10 +25,10 @@
 import spack.spec
 import spack.tengine
 import spack.util.git
+import spack.util.web as web_util
 from spack.error import SpackError
 from spack.util.crypto import checksum
 from spack.util.log_parse import parse_log_events
-from spack.util.web import ssl_create_default_context
 
 from .base import Reporter
 from .extract import extract_test_parts
@@ -106,7 +107,7 @@ def __init__(self, configuration: CDashConfiguration):
         self.site = configuration.site or socket.gethostname()
         self.osname = platform.system()
         self.osrelease = platform.release()
-        self.target = spack.platforms.host().target("default_target")
+        self.target = spack.platforms.host().default_target()
         self.starttime = int(time.time())
         self.endtime = self.starttime
         self.buildstamp = (
@@ -433,7 +434,6 @@ def upload(self, filename):
         # Compute md5 checksum for the contents of this file.
         md5sum = checksum(hashlib.md5, filename, block_size=8192)
 
-        opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
         with open(filename, "rb") as f:
             params_dict = {
                 "build": self.buildname,
@@ -443,26 +443,21 @@ def upload(self, filename):
             }
             encoded_params = urlencode(params_dict)
             url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
-            request = Request(url, data=f)
+            request = Request(url, data=f, method="PUT")
             request.add_header("Content-Type", "text/xml")
             request.add_header("Content-Length", os.path.getsize(filename))
             if self.authtoken:
                 request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
             try:
-                # By default, urllib2 only support GET and POST.
-                # CDash expects this file to be uploaded via PUT.
-                request.get_method = lambda: "PUT"
-                response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+                response = web_util.urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
                 if self.current_package_name not in self.buildIds:
-                    resp_value = response.read()
-                    if isinstance(resp_value, bytes):
-                        resp_value = resp_value.decode("utf-8")
+                    resp_value = codecs.getreader("utf-8")(response).read()
                     match = self.buildid_regexp.search(resp_value)
                     if match:
                         buildid = match.group(1)
                         self.buildIds[self.current_package_name] = buildid
             except Exception as e:
-                print("Upload to CDash failed: {0}".format(e))
+                print(f"Upload to CDash failed: {e}")
 
     def finalize_report(self):
         if self.buildIds:
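
Note: on Python 3 the PUT verb can be set directly on `Request`, which is what the hunk above switches to; the `get_method` override was a Python 2 idiom. A minimal stdlib-only sketch of the pattern, where the endpoint and token are placeholders:

    import codecs
    import os
    from urllib.request import Request, urlopen


    def put_file(url, filename, token=None, timeout=10):
        with open(filename, "rb") as f:
            request = Request(url, data=f, method="PUT")
            request.add_header("Content-Type", "text/xml")
            request.add_header("Content-Length", os.path.getsize(filename))
            if token:
                request.add_header("Authorization", f"Bearer {token}")
            response = urlopen(request, timeout=timeout)
            # Stream-decode the response instead of read()-then-decode
            return codecs.getreader("utf-8")(response).read()
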
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 
 import spack.tengine
 
@@ -3,15 +3,11 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import os
-import shutil
 import tempfile
 
 import spack.binary_distribution as bindist
-import spack.deptypes as dt
 import spack.error
 import spack.hooks
-import spack.platforms
-import spack.relocate as relocate
 import spack.store
 
 
@@ -42,63 +38,11 @@ def rewire_node(spec, explicit):
 
     spack.hooks.pre_install(spec)
     bindist.extract_buildcache_tarball(tarball, destination=spec.prefix)
-    buildinfo = bindist.read_buildinfo_file(spec.prefix)
-
-    # compute prefix-to-prefix for every node from the build spec to the spliced
-    # spec
-    prefix_to_prefix = {spec.build_spec.prefix: spec.prefix}
-    build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
-    for s in bindist.specs_to_relocate(spec):
-        analog = s
-        if id(s) not in build_spec_ids:
-            analogs = [
-                d
-                for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
-                if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
-            ]
-            if analogs:
-                # Prefer same-name analogs and prefer higher versions
-                # This matches the preferences in Spec.splice, so we will find same node
-                analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
-
-            prefix_to_prefix[analog.prefix] = s.prefix
-
-    platform = spack.platforms.by_name(spec.platform)
-
-    text_to_relocate = [
-        os.path.join(spec.prefix, rel_path) for rel_path in buildinfo["relocate_textfiles"]
-    ]
-    if text_to_relocate:
-        relocate.relocate_text(files=text_to_relocate, prefix_to_prefix=prefix_to_prefix)
-    links = [os.path.join(spec.prefix, f) for f in buildinfo["relocate_links"]]
-    relocate.relocate_links(links, prefix_to_prefix)
-    bins_to_relocate = [
-        os.path.join(spec.prefix, rel_path) for rel_path in buildinfo["relocate_binaries"]
-    ]
-    if bins_to_relocate:
-        if "macho" in platform.binary_formats:
-            relocate.relocate_macho_binaries(bins_to_relocate, prefix_to_prefix)
-        if "elf" in platform.binary_formats:
-            relocate.relocate_elf_binaries(bins_to_relocate, prefix_to_prefix)
-        relocate.relocate_text_bin(binaries=bins_to_relocate, prefix_to_prefix=prefix_to_prefix)
-    shutil.rmtree(tempdir)
-    install_manifest = os.path.join(
-        spec.prefix,
-        spack.store.STORE.layout.metadata_dir,
-        spack.store.STORE.layout.manifest_file_name,
-    )
-    try:
-        os.unlink(install_manifest)
-    except FileNotFoundError:
-        pass
-    # Write the spliced spec into spec.json. Without this, Database.add would fail because it
-    # checks the spec.json in the prefix against the spec being added to look for mismatches
-    spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
-    # add to database, not sure about explicit
-    spack.store.STORE.db.add(spec, explicit=explicit)
-
-    # run post install hooks
+    bindist.relocate_package(spec)
+
+    # run post install hooks and add to db
     spack.hooks.post_install(spec, explicit)
+    spack.store.STORE.db.add(spec, explicit=explicit)
 
 
 class RewireError(spack.error.SpackError):
lib/spack/spack/schema/env_vars.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+"""Schema for env_vars.yaml configuration file.
+
+.. literalinclude:: _spack_root/lib/spack/spack/schema/env_vars.py
+   :lines: 15-
+"""
+from typing import Any, Dict
+
+import spack.schema.environment
+
+properties: Dict[str, Any] = {"env_vars": spack.schema.environment.definition}
+
+#: Full schema with metadata
+schema = {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Spack env_vars configuration file schema",
+    "type": "object",
+    "additionalProperties": False,
+    "properties": properties,
+}
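
Note: the new schema simply reuses the environment-modification definition under an `env_vars` top-level key. A sketch of validating a hypothetical payload, assuming `spack.schema.environment.definition` accepts the usual set/unset/prepend_path shape:

    import jsonschema

    import spack.schema.env_vars

    payload = {
        "env_vars": {
            "set": {"MY_VAR": "1"},
            "unset": ["NOISY_VAR"],
            "prepend_path": {"PATH": "/opt/tools/bin"},
        }
    }

    # Raises jsonschema.ValidationError if the payload does not conform
    jsonschema.validate(payload, spack.schema.env_vars.schema)
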
@@ -20,6 +20,7 @@
 import spack.schema.container
 import spack.schema.definitions
 import spack.schema.develop
+import spack.schema.env_vars
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages
@@ -38,6 +39,7 @@
     spack.schema.ci.properties,
     spack.schema.definitions.properties,
     spack.schema.develop.properties,
+    spack.schema.env_vars.properties,
     spack.schema.mirrors.properties,
     spack.schema.modules.properties,
     spack.schema.packages.properties,
@@ -237,23 +237,14 @@ def _make_microarchitecture(name: str) -> archspec.cpu.Microarchitecture:
 class ArchSpec:
     """Aggregate the target platform, the operating system and the target microarchitecture."""
 
-    @staticmethod
-    def _return_arch(os_tag, target_tag):
-        platform = spack.platforms.host()
-        default_os = platform.operating_system(os_tag)
-        default_target = platform.target(target_tag)
-        arch_tuple = str(platform), str(default_os), str(default_target)
-        return ArchSpec(arch_tuple)
-
     @staticmethod
     def default_arch():
         """Return the default architecture"""
-        return ArchSpec._return_arch("default_os", "default_target")
-
-    @staticmethod
-    def frontend_arch():
-        """Return the frontend architecture"""
-        return ArchSpec._return_arch("frontend", "frontend")
+        platform = spack.platforms.host()
+        default_os = platform.default_operating_system()
+        default_target = platform.default_target()
+        arch_tuple = str(platform), str(default_os), str(default_target)
+        return ArchSpec(arch_tuple)
 
     __slots__ = "_platform", "_os", "_target"
 
@@ -461,6 +452,9 @@ def _target_satisfies(self, other: "ArchSpec", strict: bool) -> bool:
         return bool(self._target_intersection(other))
 
     def _target_constrain(self, other: "ArchSpec") -> bool:
+        if self.target is None and other.target is None:
+            return False
+
         if not other._target_satisfies(self, strict=False):
             raise UnsatisfiableArchitectureSpecError(self, other)
 
@@ -509,21 +503,56 @@ def _target_intersection(self, other):
                 if (not s_min or o_comp >= s_min) and (not s_max or o_comp <= s_max):
                     results.append(o_min)
             else:
-                # Take intersection of two ranges
-                # Lots of comparisons needed
-                _s_min = _make_microarchitecture(s_min)
-                _s_max = _make_microarchitecture(s_max)
-                _o_min = _make_microarchitecture(o_min)
-                _o_max = _make_microarchitecture(o_max)
-
-                n_min = s_min if _s_min >= _o_min else o_min
-                n_max = s_max if _s_max <= _o_max else o_max
-                _n_min = _make_microarchitecture(n_min)
-                _n_max = _make_microarchitecture(n_max)
-                if _n_min == _n_max:
-                    results.append(n_min)
-                elif not n_min or not n_max or _n_min < _n_max:
-                    results.append("%s:%s" % (n_min, n_max))
+                # Take the "min" of the two max, if there is a partial ordering.
+                n_max = ""
+                if s_max and o_max:
+                    _s_max = _make_microarchitecture(s_max)
+                    _o_max = _make_microarchitecture(o_max)
+                    if _s_max.family != _o_max.family:
+                        continue
+                    if _s_max <= _o_max:
+                        n_max = s_max
+                    elif _o_max < _s_max:
+                        n_max = o_max
+                    else:
+                        continue
+                elif s_max:
+                    n_max = s_max
+                elif o_max:
+                    n_max = o_max
+
+                # Take the "max" of the two min.
+                n_min = ""
+                if s_min and o_min:
+                    _s_min = _make_microarchitecture(s_min)
+                    _o_min = _make_microarchitecture(o_min)
+                    if _s_min.family != _o_min.family:
+                        continue
+                    if _s_min >= _o_min:
+                        n_min = s_min
+                    elif _o_min > _s_min:
+                        n_min = o_min
+                    else:
+                        continue
+                elif s_min:
+                    n_min = s_min
+                elif o_min:
+                    n_min = o_min
+
+                if n_min and n_max:
+                    _n_min = _make_microarchitecture(n_min)
+                    _n_max = _make_microarchitecture(n_max)
+                    if _n_min.family != _n_max.family or not _n_min <= _n_max:
+                        continue
+                    if n_min == n_max:
+                        results.append(n_min)
+                    else:
+                        results.append(f"{n_min}:{n_max}")
+                elif n_min:
+                    results.append(f"{n_min}:")
+                elif n_max:
+                    results.append(f":{n_max}")
 
         return results
 
     def constrain(self, other: "ArchSpec") -> bool:
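
Note: the rewritten intersection treats each bound independently and supports open-ended ranges, rather than assuming both ranges are closed. A toy sketch of the bound logic, with integers standing in for microarchitectures (which are only partially ordered, hence the extra family checks above):

    def intersect(s_min, s_max, o_min, o_max):
        """Bounds may be None, meaning the range is open on that side."""
        n_min = max((b for b in (s_min, o_min) if b is not None), default=None)
        n_max = min((b for b in (s_max, o_max) if b is not None), default=None)
        if n_min is not None and n_max is not None and n_min > n_max:
            return None  # empty intersection
        return n_min, n_max


    assert intersect(4, None, None, 8) == (4, 8)  # "4:" and ":8" give "4:8"
    assert intersect(None, 3, 5, None) is None    # ":3" and "5:" are disjoint
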
@@ -1498,9 +1527,8 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
         self._external_path = external_path
         self.external_modules = Spec._format_module_list(external_modules)
 
-        # This attribute is used to store custom information for
-        # external specs. None signal that it was not set yet.
-        self.extra_attributes = None
+        # This attribute is used to store custom information for external specs.
+        self.extra_attributes: dict = {}
 
         # This attribute holds the original build copy of the spec if it is
         # deployed differently than it was built. None signals that the spec
@@ -2322,15 +2350,10 @@ def to_node_dict(self, hash=ht.dag_hash):
         )
 
         if self.external:
-            if self.extra_attributes:
-                extra_attributes = syaml.sorted_dict(self.extra_attributes)
-            else:
-                extra_attributes = None
-
             d["external"] = {
                 "path": self.external_path,
-                "module": self.external_modules,
-                "extra_attributes": extra_attributes,
+                "module": self.external_modules or None,
+                "extra_attributes": syaml.sorted_dict(self.extra_attributes),
             }
 
         if not self._concrete:
@@ -3151,18 +3174,13 @@ def constrain(self, other, deps=True):
             if not self.variants[v].compatible(other.variants[v]):
                 raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])
 
-        # TODO: Check out the logic here
         sarch, oarch = self.architecture, other.architecture
-        if sarch is not None and oarch is not None:
-            if sarch.platform is not None and oarch.platform is not None:
-                if sarch.platform != oarch.platform:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
-            if sarch.os is not None and oarch.os is not None:
-                if sarch.os != oarch.os:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
-            if sarch.target is not None and oarch.target is not None:
-                if sarch.target != oarch.target:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+        if (
+            sarch is not None
+            and oarch is not None
+            and not self.architecture.intersects(other.architecture)
+        ):
+            raise UnsatisfiableArchitectureSpecError(sarch, oarch)
 
         changed = False
 
@@ -3185,18 +3203,12 @@ def constrain(self, other, deps=True):
 
         changed |= self.compiler_flags.constrain(other.compiler_flags)
 
-        old = str(self.architecture)
         sarch, oarch = self.architecture, other.architecture
-        if sarch is None or other.architecture is None:
-            self.architecture = sarch or oarch
-        else:
-            if sarch.platform is None or oarch.platform is None:
-                self.architecture.platform = sarch.platform or oarch.platform
-            if sarch.os is None or oarch.os is None:
-                sarch.os = sarch.os or oarch.os
-            if sarch.target is None or oarch.target is None:
-                sarch.target = sarch.target or oarch.target
-        changed |= str(self.architecture) != old
+        if sarch is not None and oarch is not None:
+            changed |= self.architecture.constrain(other.architecture)
+        elif oarch is not None:
+            self.architecture = oarch
+            changed = True
 
         if deps:
             changed |= self._constrain_dependencies(other)
@@ -3843,6 +3855,13 @@ def _cmp_iter(self):
         for item in self._cmp_node():
             yield item
 
+        # If there is ever a breaking change to hash computation, whether accidental or purposeful,
+        # two specs can be identical modulo DAG hash, depending on what time they were concretized.
+        # From the perspective of many operations in Spack (database, build cache, etc.) a different
+        # DAG hash means a different spec. Here we ensure that two otherwise identical specs, one
+        # serialized before the hash change and one after, are considered different.
+        yield self.dag_hash() if self.concrete else None
+
         # This needs to be in _cmp_iter so that no specs with different process hashes
         # are considered the same by `__hash__` or `__eq__`.
         #
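
Note: the added `yield` folds the DAG hash into the comparison key, but only for concrete specs. A toy model of that rule:

    class Node:
        """Hash participates in equality only when the node is concrete."""

        def __init__(self, name, concrete, dag_hash):
            self.name, self.concrete, self._hash = name, concrete, dag_hash

        def _cmp_key(self):
            return (self.name, self._hash if self.concrete else None)

        def __eq__(self, other):
            return self._cmp_key() == other._cmp_key()

        def __hash__(self):
            return hash(self._cmp_key())


    assert Node("zlib", True, "abc") != Node("zlib", True, "xyz")    # concrete: distinct
    assert Node("zlib", False, "abc") == Node("zlib", False, "xyz")  # abstract: equal
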
@@ -4708,7 +4727,10 @@ def __str__(self):
         bool_keys = []
         kv_keys = []
         for key in sorted_keys:
-            bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key)
+            if isinstance(self[key].value, bool):
+                bool_keys.append(key)
+            else:
+                kv_keys.append(key)
 
         # add spaces before and after key/value variants.
         string = io.StringIO()
@@ -4887,7 +4909,7 @@ def from_node_dict(cls, node):
             spec.external_modules = node["external"]["module"]
             if spec.external_modules is False:
                 spec.external_modules = None
-            spec.extra_attributes = node["external"].get("extra_attributes", {})
+            spec.extra_attributes = node["external"].get("extra_attributes") or {}
 
         # specs read in are concrete unless marked abstract
         if node.get("concrete", True):
@@ -5164,12 +5186,10 @@ def get_host_environment_metadata() -> Dict[str, str]:
 
 
 def get_host_environment() -> Dict[str, Any]:
-    """Return a dictionary (lookup) with host information (not including the
-    os.environ).
-    """
+    """Returns a dictionary with host information (not including the os.environ)."""
     host_platform = spack.platforms.host()
-    host_target = host_platform.target("default_target")
-    host_os = host_platform.operating_system("default_os")
+    host_target = host_platform.default_target()
+    host_os = host_platform.default_operating_system()
     arch_fmt = "platform={0} os={1} target={2}"
     arch_spec = Spec(arch_fmt.format(host_platform, host_os, host_target))
     return {
@@ -60,8 +60,7 @@ def test_user_input_combination(config, target_str, os_str):
     """Test for all the valid user input combinations that both the target and
     the operating system match.
     """
-    spec_str = "libelf os={} target={}".format(os_str, target_str)
-    spec = Spec(spec_str)
+    spec = Spec(f"libelf os={os_str} target={target_str}")
     assert spec.architecture.os == str(TEST_PLATFORM.operating_system(os_str))
     assert spec.architecture.target == TEST_PLATFORM.target(target_str)
 
@@ -71,8 +70,8 @@ def test_default_os_and_target(default_mock_concretization):
     after concretization.
     """
     spec = default_mock_concretization("libelf")
-    assert spec.architecture.os == str(TEST_PLATFORM.operating_system("default_os"))
-    assert spec.architecture.target == TEST_PLATFORM.target("default_target")
+    assert spec.architecture.os == str(TEST_PLATFORM.default_operating_system())
+    assert spec.architecture.target == TEST_PLATFORM.default_target()
 
 
 def test_operating_system_conversion_to_dict():
@@ -36,6 +36,7 @@
 import spack.mirrors.mirror
 import spack.oci.image
 import spack.paths
+import spack.repo
 import spack.spec
 import spack.store
 import spack.util.gpg
@@ -94,7 +95,7 @@ def config_directory(tmp_path_factory):
 
 
 @pytest.fixture(scope="function")
-def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
+def default_config(tmp_path, config_directory, mock_repo_path, install_mockery):
     # This fixture depends on install_mockery to ensure
     # there is a clear order of initialization. The substitution of the
     # config scopes here is done on top of the substitution that comes with
@@ -109,7 +110,6 @@ def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
     ]
 
     with spack.config.use_configuration(*scopes):
-        spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
         njobs = spack.config.get("config:build_jobs")
         if not njobs:
             spack.config.set("config:build_jobs", 4, scope="user")
@@ -130,8 +130,8 @@ def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
         timeout = spack.config.get("config:connect_timeout")
         if not timeout:
             spack.config.set("config:connect_timeout", 10, scope="user")
-
-        yield spack.config.CONFIG
+        with spack.repo.use_repositories(mock_repo_path):
+            yield spack.config.CONFIG
 
 
 @pytest.fixture(scope="function")
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import os
-import os.path
 
 import pytest
 
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 
 import pytest
 
@@ -14,7 +14,7 @@
 
 
 @pytest.fixture()
-def builder_test_repository():
+def builder_test_repository(config):
     builder_test_path = os.path.join(spack.paths.repos_path, "builder.test")
     with spack.repo.use_repositories(builder_test_path) as mock_repo:
         yield mock_repo
@@ -1,8 +1,10 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import io
 import os
 import subprocess
+from urllib.error import HTTPError
 
 import pytest
 
@@ -15,6 +17,7 @@
 import spack.paths as spack_paths
 import spack.repo as repo
 import spack.util.git
+from spack.test.conftest import MockHTTPResponse
 
 pytestmark = [pytest.mark.usefixtures("mock_packages")]
 
@@ -162,38 +165,8 @@ def test_import_signing_key(mock_gnupghome):
     ci.import_signing_key(signing_key)
 
 
-class FakeWebResponder:
-    def __init__(self, response_code=200, content_to_read=[]):
-        self._resp_code = response_code
-        self._content = content_to_read
-        self._read = [False for c in content_to_read]
-
-    def open(self, request, data=None, timeout=object()):
-        return self
-
-    def getcode(self):
-        return self._resp_code
-
-    def read(self, length=None):
-        if len(self._content) <= 0:
-            return None
-
-        if not self._read[-1]:
-            return_content = self._content[-1]
-            if length:
-                self._read[-1] = True
-            else:
-                self._read.pop()
-                self._content.pop()
-            return return_content
-
-        self._read.pop()
-        self._content.pop()
-        return None
-
-
-def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
-    os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})
+def test_download_and_extract_artifacts(tmpdir, monkeypatch):
+    monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", "faketoken")
 
     url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
     working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -201,10 +174,13 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
         spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
     )
 
-    with open(test_artifacts_path, "rb") as fd:
-        fake_responder = FakeWebResponder(content_to_read=[fd.read()])
+    def _urlopen_OK(*args, **kwargs):
+        with open(test_artifacts_path, "rb") as f:
+            return MockHTTPResponse(
+                "200", "OK", {"Content-Type": "application/zip"}, io.BytesIO(f.read())
+            )
 
-    monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
+    monkeypatch.setattr(ci, "urlopen", _urlopen_OK)
 
     ci.download_and_extract_artifacts(url, working_dir)
 
@@ -214,7 +190,11 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
     found_install = fs.find(working_dir, "install.sh")
     assert len(found_install) == 1
 
-    fake_responder._resp_code = 400
+    def _urlopen_500(*args, **kwargs):
+        raise HTTPError(url, 500, "Internal Server Error", {}, None)
+
+    monkeypatch.setattr(ci, "urlopen", _urlopen_500)
+
     with pytest.raises(spack.error.SpackError):
         ci.download_and_extract_artifacts(url, working_dir)
 
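
Note: both paths of the test now stub the `urlopen` symbol as seen by the module under test, so no sockets are opened. The generic shape of that pattern, with illustrative module and function names:

    import io
    from urllib.error import HTTPError

    import mymodule  # hypothetical: does `from urllib.request import urlopen` internally


    def test_fetch_ok(monkeypatch):
        # Patch the name *in the consuming module*, not in urllib.request
        monkeypatch.setattr(mymodule, "urlopen", lambda *a, **kw: io.BytesIO(b"payload"))
        assert mymodule.fetch("https://example.invalid") == b"payload"


    def test_fetch_error(monkeypatch):
        def boom(*args, **kwargs):
            raise HTTPError("https://example.invalid", 500, "Internal Server Error", {}, None)

        monkeypatch.setattr(mymodule, "urlopen", boom)
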
@@ -328,16 +308,14 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
         e1.add("hypre")
         e1.concretize()
 
-    """
-    Concretizing the above environment results in the following graphs:
-
-    mpileaks -> mpich (provides mpi virtual dep of mpileaks)
-             -> callpath -> dyninst -> libelf
-                         -> libdwarf -> libelf
-             -> mpich (provides mpi dep of callpath)
-
-    hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)
-    """
+    # Concretizing the above environment results in the following graphs:
+
+    # mpileaks -> mpich (provides mpi virtual dep of mpileaks)
+    #          -> callpath -> dyninst -> libelf
+    #                      -> libdwarf -> libelf
+    #          -> mpich (provides mpi dep of callpath)
+
+    # hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)
 
     touched = ["libdwarf"]
 
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 import sys
 
 import pytest
@@ -5,7 +5,6 @@
 import os
 import pathlib
 import shutil
-from io import BytesIO
 from typing import NamedTuple

 import jsonschema
@@ -32,6 +31,7 @@
 from spack.schema.buildcache_spec import schema as specfile_schema
 from spack.schema.database_index import schema as db_idx_schema
 from spack.spec import Spec
+from spack.test.conftest import MockHTTPResponse

 config_cmd = spack.main.SpackCommand("config")
 ci_cmd = spack.main.SpackCommand("ci")
@@ -239,7 +239,7 @@ def test_ci_generate_with_cdash_token(ci_generate_test, tmp_path, mock_binary_in
     # That fake token should have resulted in being unable to
     # register build group with cdash, but the workload should
     # still have been generated.
-    assert "Problem populating buildgroup" in output
+    assert "Failed to create or retrieve buildgroups" in output
     expected_keys = ["rebuild-index", "stages", "variables", "workflow"]
     assert all([key in yaml_contents.keys() for key in expected_keys])
@@ -329,14 +329,14 @@ def test_ci_generate_pkg_with_deps(ci_generate_test, tmp_path, ci_base_environme
         f"""\
 spack:
   specs:
-    - flatten-deps
+    - dependent-install
   mirrors:
     buildcache-destination: {tmp_path / 'ci-mirror'}
   ci:
     pipeline-gen:
     - submapping:
       - match:
-          - flatten-deps
+          - dependent-install
         build-job:
           tags:
             - donotcare
@@ -355,12 +355,12 @@ def test_ci_generate_pkg_with_deps(ci_generate_test, tmp_path, ci_base_environme
             assert "stage" in ci_obj
             assert ci_obj["stage"] == "stage-0"
             found.append("dependency-install")
-        if "flatten-deps" in ci_key:
+        if "dependent-install" in ci_key:
             assert "stage" in ci_obj
             assert ci_obj["stage"] == "stage-1"
-            found.append("flatten-deps")
+            found.append("dependent-install")

-    assert "flatten-deps" in found
+    assert "dependent-install" in found
     assert "dependency-install" in found
@@ -372,14 +372,14 @@ def test_ci_generate_for_pr_pipeline(ci_generate_test, tmp_path, monkeypatch):
         f"""\
 spack:
   specs:
-    - flatten-deps
+    - dependent-install
   mirrors:
     buildcache-destination: {tmp_path / 'ci-mirror'}
   ci:
     pipeline-gen:
     - submapping:
       - match:
-          - flatten-deps
+          - dependent-install
         build-job:
           tags:
             - donotcare
@@ -899,7 +899,7 @@ def test_ci_generate_override_runner_attrs(
         f"""\
 spack:
   specs:
-    - flatten-deps
+    - dependent-install
     - pkg-a
   mirrors:
     buildcache-destination: {tmp_path / "ci-mirror"}
@@ -908,7 +908,7 @@ def test_ci_generate_override_runner_attrs(
     - match_behavior: {match_behavior}
       submapping:
       - match:
-          - flatten-deps
+          - dependent-install
         build-job:
           tags:
             - specific-one
@@ -1006,8 +1006,8 @@ def test_ci_generate_override_runner_attrs(
             assert the_elt["script"][0] == "main step"
             assert len(the_elt["after_script"]) == 1
             assert the_elt["after_script"][0] == "post step one"
-        if "flatten-deps" in ci_key:
-            # The flatten-deps match specifies that we keep the two
+        if "dependent-install" in ci_key:
+            # The dependent-install match specifies that we keep the two
             # top level variables, but add a third specifc one. It
             # also adds a custom tag which should be combined with
             # the top-level tag.
@@ -1182,12 +1182,12 @@ def test_ci_generate_read_broken_specs_url(
     spec_a = spack.concretize.concretize_one("pkg-a")
     a_dag_hash = spec_a.dag_hash()

-    spec_flattendeps = spack.concretize.concretize_one("flatten-deps")
+    spec_flattendeps = spack.concretize.concretize_one("dependent-install")
     flattendeps_dag_hash = spec_flattendeps.dag_hash()

     broken_specs_url = tmp_path.as_uri()

-    # Mark 'a' as broken (but not 'flatten-deps')
+    # Mark 'a' as broken (but not 'dependent-install')
     broken_spec_a_url = "{0}/{1}".format(broken_specs_url, a_dag_hash)
     job_stack = "job_stack"
     a_job_url = "a_job_url"
@@ -1201,7 +1201,7 @@ def test_ci_generate_read_broken_specs_url(
         f"""\
 spack:
   specs:
-    - flatten-deps
+    - dependent-install
     - pkg-a
   mirrors:
     buildcache-destination: {(tmp_path / "ci-mirror").as_uri()}
@@ -1211,7 +1211,7 @@ def test_ci_generate_read_broken_specs_url(
     - submapping:
       - match:
           - pkg-a
-          - flatten-deps
+          - dependent-install
           - pkg-b
           - dependency-install
         build-job:
@@ -1234,7 +1234,7 @@ def test_ci_generate_read_broken_specs_url(
     )
     assert expected in output

-    not_expected = f"flatten-deps/{flattendeps_dag_hash[:7]} (in stack"
+    not_expected = f"dependent-install/{flattendeps_dag_hash[:7]} (in stack"
     assert not_expected not in output
@@ -1447,7 +1447,7 @@ def test_gitlab_config_scopes(ci_generate_test, tmp_path):
   include: [{configs_path}]
   view: false
   specs:
-    - flatten-deps
+    - dependent-install
   mirrors:
     buildcache-destination: {tmp_path / "ci-mirror"}
   ci:
@@ -1548,10 +1548,10 @@ def test_ci_dynamic_mapping_empty(
     ci_base_environment,
 ):
     # The test will always return an empty dictionary
-    def fake_dyn_mapping_urlopener(*args, **kwargs):
-        return BytesIO("{}".encode())
+    def _urlopen(*args, **kwargs):
+        return MockHTTPResponse.with_json(200, "OK", headers={}, body={})

-    monkeypatch.setattr(ci.common, "_dyn_mapping_urlopener", fake_dyn_mapping_urlopener)
+    monkeypatch.setattr(ci.common, "_urlopen", _urlopen)

     _ = dynamic_mapping_setup(tmpdir)
     with tmpdir.as_cwd():
@@ -1572,15 +1572,15 @@ def test_ci_dynamic_mapping_full(
     monkeypatch,
     ci_base_environment,
 ):
-    # The test will always return an empty dictionary
-    def fake_dyn_mapping_urlopener(*args, **kwargs):
-        return BytesIO(
-            json.dumps(
-                {"variables": {"MY_VAR": "hello"}, "ignored_field": 0, "unallowed_field": 0}
-            ).encode()
+    def _urlopen(*args, **kwargs):
+        return MockHTTPResponse.with_json(
+            200,
+            "OK",
+            headers={},
+            body={"variables": {"MY_VAR": "hello"}, "ignored_field": 0, "unallowed_field": 0},
         )

-    monkeypatch.setattr(ci.common, "_dyn_mapping_urlopener", fake_dyn_mapping_urlopener)
+    monkeypatch.setattr(ci.common, "_urlopen", _urlopen)

     label = dynamic_mapping_setup(tmpdir)
     with tmpdir.as_cwd():
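Taken together with the sketch given earlier, the patched _urlopen can be exercised in isolation. This is a hypothetical check against the assumed with_json helper, not output from the real test suite:

import json

resp = MockHTTPResponse.with_json(
    200, "OK", headers={}, body={"variables": {"MY_VAR": "hello"}}
)
assert resp.status == 200
assert json.loads(resp.read()) == {"variables": {"MY_VAR": "hello"}}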
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import os.path
 import platform

 import pytest
@@ -52,8 +51,8 @@ def test_create_db_tarball(tmpdir, database):
 def test_report():
     out = debug("report")
     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("frontend")
-    host_target = host_platform.target("frontend")
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))

     assert spack.get_version() in out
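This hunk, like the target=be to target=default_target edits further down, appears to track the removal of Spack's old frontend/backend split: a platform now exposes one default operating system and target rather than taking a "frontend" argument. A minimal sketch of the updated calls, mirroring the test above:

import spack.platforms
import spack.spec

host_platform = spack.platforms.host()
# default_* accessors replace operating_system("frontend") / target("frontend")
host_os = host_platform.default_operating_system()
host_target = host_platform.default_target()
architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))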
@@ -1038,6 +1038,58 @@ def test_init_from_yaml(environment_from_manifest):
     assert not e2.specs_by_hash


+def test_init_from_yaml_relative_includes(tmp_path):
+    files = [
+        "relative_copied/packages.yaml",
+        "./relative_copied/compilers.yaml",
+        "repos.yaml",
+        "./config.yaml",
+    ]
+
+    manifest = f"""
+spack:
+  specs: []
+  include: {files}
+"""
+
+    e1_path = tmp_path / "e1"
+    e1_manifest = e1_path / "spack.yaml"
+    fs.mkdirp(e1_path)
+    with open(e1_manifest, "w", encoding="utf-8") as f:
+        f.write(manifest)
+
+    for f in files:
+        fs.touchp(e1_path / f)
+
+    e2 = _env_create("test2", init_file=e1_manifest)
+
+    for f in files:
+        assert os.path.exists(os.path.join(e2.path, f))
+
+
+def test_init_from_yaml_relative_includes_outside_env(tmp_path):
+    files = ["../outside_env_not_copied/repos.yaml"]
+
+    manifest = f"""
+spack:
+  specs: []
+  include: {files}
+"""
+
+    # subdir to ensure parent of environment dir is not shared
+    e1_path = tmp_path / "e1_subdir" / "e1"
+    e1_manifest = e1_path / "spack.yaml"
+    fs.mkdirp(e1_path)
+    with open(e1_manifest, "w", encoding="utf-8") as f:
+        f.write(manifest)
+
+    for f in files:
+        fs.touchp(e1_path / f)
+
+    with pytest.raises(spack.config.ConfigFileError, match="Detected 1 missing include"):
+        _ = _env_create("test2", init_file=e1_manifest)
+
+
 def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
     fake_prefix = tmp_path / "a-prefix"
     fake_bin = fake_prefix / "bin"
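Both new tests rely on fs.touchp to materialize the include files before _env_create copies (or refuses to copy) them. Assuming touchp behaves like touch plus mkdir -p for missing parents, the setup reduces to:

import os
from llnl.util import filesystem as fs

# touchp is assumed to create intermediate directories before touching the file
fs.touchp("/tmp/demo_env/relative_copied/packages.yaml")
assert os.path.isfile("/tmp/demo_env/relative_copied/packages.yaml")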
@@ -2614,7 +2666,7 @@ def test_stack_yaml_remove_from_matrix_no_effect(tmpdir):
   - packages:
       - matrix:
           - [mpileaks, callpath]
-          - [target=be]
+          - [target=default_target]
   specs:
     - $packages
 """
@@ -2639,7 +2691,7 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):
   - packages:
       - matrix:
           - [mpileaks, callpath]
-          - [target=be]
+          - [target=default_target]
   specs:
     - $packages
 """
@@ -2659,7 +2711,7 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):

     assert before_user == after_user

-    mpileaks_spec = Spec("mpileaks target=be")
+    mpileaks_spec = Spec("mpileaks target=default_target")
     assert mpileaks_spec in before_conc
     assert mpileaks_spec not in after_conc
@@ -3023,6 +3075,35 @@ def test_stack_view_activate_from_default(
     assert "FOOBAR=mpileaks" in shell


+def test_envvar_set_in_activate(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
+    filename = str(tmpdir.join("spack.yaml"))
+    with open(filename, "w", encoding="utf-8") as f:
+        f.write(
+            """\
+spack:
+  specs:
+  - cmake%gcc
+  env_vars:
+    set:
+      ENVAR_SET_IN_ENV_LOAD: "True"
+"""
+        )
+    with tmpdir.as_cwd():
+        env("create", "test", "./spack.yaml")
+        with ev.read("test"):
+            install()
+
+        test_env = ev.read("test")
+        output = env("activate", "--sh", "test")
+
+        assert "ENVAR_SET_IN_ENV_LOAD=True" in output
+
+        with test_env:
+            with spack.util.environment.set_env(ENVAR_SET_IN_ENV_LOAD="True"):
+                output = env("deactivate", "--sh")
+                assert "unset ENVAR_SET_IN_ENV_LOAD" in output


 def test_stack_view_no_activate_without_default(
     installed_environment, template_combinatorial_env, tmp_path
 ):
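The deactivate half of the new test wraps itself in spack.util.environment.set_env(...) so the variable is actually present when the deactivation script is generated. A minimal sketch of that context manager's apparent contract (set on entry, restore on exit), assuming the variable was unset beforehand:

import os

import spack.util.environment

with spack.util.environment.set_env(ENVAR_SET_IN_ENV_LOAD="True"):
    # inside the block the variable is visible to the process
    assert os.environ["ENVAR_SET_IN_ENV_LOAD"] == "True"
# afterwards the previous state is restored
assert "ENVAR_SET_IN_ENV_LOAD" not in os.environ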
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import sys

 import pytest
@@ -139,7 +139,7 @@ def test_gc_except_specific_environments(mutable_database, mutable_mock_env_path
 def test_gc_except_nonexisting_dir_env(mutable_database, mutable_mock_env_path, tmpdir):
     output = gc("-ye", tmpdir.strpath, fail_on_error=False)
     assert "No such environment" in output
-    gc.returncode == 1
+    assert gc.returncode == 1


 @pytest.mark.db
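The one-word fix matters: the old line was a bare comparison, which builds a boolean and immediately discards it, so the test could never fail on the return code (flake8-bugbear flags this pattern as B015). In general:

def broken_check(value):
    value == 1  # pointless comparison: the result is discarded, never fails

def fixed_check(value):
    assert value == 1  # raises AssertionError whenever value != 1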
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os.path
+import os
 import re

 import pytest
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os.path
+import os
 import sys
 from textwrap import dedent
@@ -26,9 +26,9 @@ def test_manpath_trailing_colon(
         else ("--sh", "export %s=%s", ";")
     )

-    """Test that the commands generated by load add the MANPATH prefix
-    inspections. Also test that Spack correctly preserves the default/existing
-    manpath search path via a trailing colon"""
+    # Test that the commands generated by load add the MANPATH prefix
+    # inspections. Also test that Spack correctly preserves the default/existing
+    # manpath search path via a trailing colon
     install("mpileaks")

     sh_out = load(shell, "mpileaks")
@@ -81,7 +81,9 @@ def extract_value(output, variable):

     # Finally, do we list them in topo order?
     for i, pkg in enumerate(pkgs):
-        set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i])
+        assert {s.name for s in mpileaks_spec[pkg].traverse(direction="parents")}.issubset(
+            pkgs[: i + 1]
+        )

     # Lastly, do we keep track that mpileaks was loaded?
     assert (
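Same bug class as the gc fix above, plus an off-by-one: the old line built a membership expression and threw the result away, and pkgs[:i] excluded the package itself even though traverse(direction="parents") yields the starting spec too. The new assertion states the real invariant: everything that depends on pkgs[i] must already appear in the first i + 1 entries. A hypothetical mini-example using the mpileaks chain from the graphs earlier in this diff:

# Load order under test: dependents come before their dependencies
pkgs = ["mpileaks", "callpath", "dyninst", "libdwarf", "libelf"]

# For each package, the set of its dependents (including itself)
parents = {
    "mpileaks": {"mpileaks"},
    "callpath": {"callpath", "mpileaks"},
    "dyninst": {"dyninst", "callpath", "mpileaks"},
    "libdwarf": {"libdwarf", "dyninst", "callpath", "mpileaks"},
    "libelf": {"libelf", "libdwarf", "dyninst", "callpath", "mpileaks"},
}

for i, pkg in enumerate(pkgs):
    # every dependent of pkg must be listed at or before position i
    assert parents[pkg].issubset(pkgs[: i + 1])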
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os.path
+import os
 import re

 import pytest
Some files were not shown because too many files have changed in this diff.