Compare commits
81 Commits
hs/feature...v1.0.0-alp
Commit SHA1s:

0cdece73b8  d8447a4c28  e9f537fcde  fc683b87f1  337ab120b3  8f1be225dd
dc96b3cbc1  c43673f63b  791c35ae5b  ca6f74d3bf  8b2809b4ac  179a05bc5b
5f808db261  b38ab78765  7841978488  396d7ab2a9  d0d77d253d  762d4fd7d5
90d2034d8b  f1958fbd7e  254f5c920b  23bd090436  19a4171c38  d3176767a0
41bb68a623  eaf4483db7  37704c5708  de24715e05  35dea05048  a69cf5b9ad
1da005f6bd  787f7d7729  bb90d02d90  cf34db4676  bddd41aa26  db2d077e2c
8f96ac3d03  2fd89f7e59  c6fb85eed5  33cf8d3f5e  c595a7008d  591b659242
99d8f2ddbd  673703a17d  2017dfea4c  df464681bf  b6ce34607e  10479101fa
eb270afa0c  7edcc4da88  a316f23d79  92d96d747c  96b54ec59c  da06ad3303
058e19458d  575483d041  e6729320f7  9ca0bd5d97  cdc8236169  f8ab75244b
f211789087  09ccccbec5  45e1573a17  352442a383  543f830f57  94fdd3dc5b
403e2db50f  7832c56537  bec58a1554  67b04f1b8d  8c66a1699e  86a8ec5bbc
4f9be7b00a  dc7df70b81  6f5e6f3a5b  414b323e01  0705a5f468  e432641166
cf2b5a1e4f  510623fe47  ae6c1a7abe
.github/workflows/build-containers.yml (18 changes, vendored)

@@ -40,17 +40,17 @@ jobs:
        # 1: Platforms to build for
        # 2: Base image (e.g. ubuntu:22.04)
        dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
                     [centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
                     [leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
                     [ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
                     [ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
                     [ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
                     [almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
                     [almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
                     [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                     [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                     [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
                     [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
                     [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
                     [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
                     [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                     [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
                     [fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
                     [fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
                     [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
                     [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
    name: Build ${{ matrix.dockerfile[0] }}
    if: github.repository == 'spack/spack'
    steps:
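Each three-element row in the matrix above is `[Dockerfile name, comma-separated platform list, base image]`; the hunk's only change is adding `linux/ppc64le` to most platform lists. A minimal sketch of how such rows fan out into per-platform builds (the expansion function is illustrative, not the Actions runner's implementation):

```python
# Illustrative expansion of the strategy matrix rows shown above:
# each row is [dockerfile-name, "platform,platform,...", base-image].
rows = [
    ["ubuntu-jammy", "linux/amd64,linux/arm64,linux/ppc64le", "ubuntu:22.04"],
    ["rockylinux9", "linux/amd64,linux/arm64", "rockylinux:9"],
]

for name, platforms, base in rows:
    for platform in platforms.split(","):
        # One multi-arch manifest per row; one build per platform within it.
        print(f"build {name} (base {base}) for {platform}")
```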
.github/workflows/ci.yaml (4 changes, vendored)

@@ -81,10 +81,6 @@ jobs:
    with:
      with_coverage: ${{ needs.changes.outputs.core }}

  import-check:
    needs: [ changes ]
    uses: ./.github/workflows/import-check.yaml

  all-prechecks:
    needs: [ prechecks ]
    if: ${{ always() }}

.github/workflows/coverage.yml (1 change, vendored)

@@ -33,4 +33,3 @@ jobs:
        with:
          verbose: true
          fail_ci_if_error: false
          token: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/import-check.yaml (49 changes, vendored)

@@ -1,49 +0,0 @@
name: import-check

on:
  workflow_call:

jobs:
  # Check we don't make the situation with circular imports worse
  import-check:
    runs-on: ubuntu-latest
    steps:
    - uses: julia-actions/setup-julia@v2
      with:
        version: '1.10'
    - uses: julia-actions/cache@v2

    # PR: use the base of the PR as the old commit
    - name: Checkout PR base commit
      if: github.event_name == 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        ref: ${{ github.event.pull_request.base.sha }}
        path: old
    # not a PR: use the previous commit as the old commit
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        fetch-depth: 2
        path: old
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      run: git -C old reset --hard HEAD^

    - name: Checkout new commit
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        path: new
    - name: Install circular import checker
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        repository: haampie/circular-import-fighter
        ref: e38bcd0aa46368e30648b61b7f0d8c1ca68aadff
        path: circular-import-fighter
    - name: Install dependencies
      working-directory: circular-import-fighter
      run: make -j dependencies
    - name: Circular import check
      working-directory: circular-import-fighter
      run: make -j compare "SPACK_ROOT=../old ../new"
.github/workflows/unit_tests.yaml (7 changes, vendored)

@@ -61,7 +61,7 @@ jobs:
        run: "brew install kcov"
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
          pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov clingo
          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |

@@ -185,7 +185,6 @@ jobs:
          spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.6
          spack bootstrap status
          spack solve zlib
          spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
      - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
        with:

@@ -223,7 +222,7 @@
          . share/spack/setup-env.sh
          $(which spack) bootstrap disable spack-install
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python')
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
      - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
        with:

@@ -254,7 +253,7 @@
        env:
          COVERAGE_FILE: coverage/.coverage-windows
        run: |
          spack unit-test -x --verbose --cov --cov-config=pyproject.toml
          spack unit-test --verbose --cov --cov-config=pyproject.toml
          ./share/spack/qa/validate_last_exit.ps1
      - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
        with:

.github/workflows/valid-style.yml (60 changes, vendored)

@@ -86,6 +86,66 @@ jobs:
          spack -d bootstrap now --dev
          spack -d style -t black
          spack unit-test -V
  # Check we don't make the situation with circular imports worse
  import-check:
    runs-on: ubuntu-latest
    steps:
    - uses: julia-actions/setup-julia@v2
      with:
        version: '1.10'
    - uses: julia-actions/cache@v2

    # PR: use the base of the PR as the old commit
    - name: Checkout PR base commit
      if: github.event_name == 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        ref: ${{ github.event.pull_request.base.sha }}
        path: old
    # not a PR: use the previous commit as the old commit
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        fetch-depth: 2
        path: old
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      run: git -C old reset --hard HEAD^

    - name: Checkout new commit
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        path: new
    - name: Install circular import checker
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        repository: haampie/circular-import-fighter
        ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
        path: circular-import-fighter
    - name: Install dependencies
      working-directory: circular-import-fighter
      run: make -j dependencies
    - name: Problematic imports before
      working-directory: circular-import-fighter
      run: make SPACK_ROOT=../old SUFFIX=.old
    - name: Problematic imports after
      working-directory: circular-import-fighter
      run: make SPACK_ROOT=../new SUFFIX=.new
    - name: Compare import cycles
      working-directory: circular-import-fighter
      run: |
        edges_before="$(head -n1 solution.old)"
        edges_after="$(head -n1 solution.new)"
        if [ "$edges_after" -gt "$edges_before" ]; then
          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
          printf 'previously this was %s\033[0m\n' "$edges_before"
          printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
          printf '\033[1;97m"Problematic imports after"\033[0m.\n'
          exit 1
        else
          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
        fi

  # Further style checks from pylint
  pylint:
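The "Compare import cycles" step above boils down to one comparison: the checker writes the number of import edges that would have to be deleted to break all cycles as the first line of `solution.old`/`solution.new`, and the gate fails only if that number grew. A minimal Python sketch of the same logic (file names as in the workflow; the function name is hypothetical):

```python
from pathlib import Path
import sys

def import_check_passes(old_solution: str, new_solution: str) -> bool:
    """Pass unless the new revision needs *more* edge deletions than the old one."""
    edges_before = int(Path(old_solution).read_text().splitlines()[0])
    edges_after = int(Path(new_solution).read_text().splitlines()[0])
    return edges_after <= edges_before

if __name__ == "__main__":
    sys.exit(0 if import_check_passes("solution.old", "solution.new") else 1)
```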
@@ -25,6 +25,7 @@ exit 1
# The code above runs this file with our preferred python interpreter.

import os
import os.path
import sys

min_python3 = (3, 6)

@@ -19,7 +19,7 @@ config:
  install_tree:
    root: $spack/opt/spack
    projections:
      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
      all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"
  # install_tree can include an optional padded length (int or boolean)
  # default is False (do not pad)
  # if padded_length is True, Spack will pad as close to the system max path

@@ -15,12 +15,11 @@
# -------------------------------------------------------------------------
packages:
  all:
    compiler:
    - apple-clang
    - clang
    - gcc
    providers:
      c: [apple-clang, llvm, gcc]
      cxx: [apple-clang, llvm, gcc]
      elf: [libelf]
      fortran: [gcc]
      fuse: [macfuse]
      gl: [apple-gl]
      glu: [apple-glu]

@@ -15,19 +15,18 @@
# -------------------------------------------------------------------------
packages:
  all:
    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
    providers:
      awk: [gawk]
      armci: [armcimpi]
      blas: [openblas, amdblis]
      c: [gcc]
      cxx: [gcc]
      c: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
      cxx: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
      D: [ldc]
      daal: [intel-oneapi-daal]
      elf: [elfutils]
      fftw-api: [fftw, amdfftw]
      flame: [libflame, amdlibflame]
      fortran: [gcc]
      fortran: [gcc, llvm]
      fortran-rt: [gcc-runtime, intel-oneapi-runtime]
      fuse: [libfuse]
      gl: [glx, osmesa]

@@ -36,7 +35,7 @@ packages:
      go-or-gccgo-bootstrap: [go-bootstrap, gcc]
      iconv: [libiconv]
      ipp: [intel-oneapi-ipp]
      java: [openjdk, jdk]
      java: [openjdk, jdk, ibm-java]
      jpeg: [libjpeg-turbo, libjpeg]
      lapack: [openblas, amdlibflame]
      libc: [glibc, musl]

@@ -73,27 +72,15 @@ packages:
    permissions:
      read: world
      write: user
  cray-fftw:
    buildable: false
  cray-libsci:
    buildable: false
  cray-mpich:
    buildable: false
  cray-mvapich2:
    buildable: false
  cray-pmi:
    buildable: false
  egl:
    buildable: false
  essl:
    buildable: false
  fujitsu-mpi:
    buildable: false
  fujitsu-ssl2:
    buildable: false
  hpcx-mpi:
    buildable: false
  mpt:
    buildable: false
  spectrum-mpi:
    buildable: false

@@ -15,8 +15,8 @@
# -------------------------------------------------------------------------
packages:
  all:
    compiler:
    - msvc
    providers:
      c : [msvc]
      cxx: [msvc]
      mpi: [msmpi]
      gl: [wgl]

@@ -170,7 +170,7 @@ bootstrapping.
   To register the mirror on the platform where it's supposed to be used run the following command(s):
   % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
   % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
   % spack buildcache update-index /opt/bootstrap/bootstrap_cache

This command needs to be run on a machine with internet access and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the

@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
"environment variables" section lists environment variables that the
build system uses to pass flags to the compiler and linker.

^^^^^^^^^^^^^^^^^^^^^^^^^
Adding flags to configure
^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^
Addings flags to configure
^^^^^^^^^^^^^^^^^^^^^^^^^^

For most of the flags you encounter, you will want a variant to
optionally enable/disable them. You can then optionally pass these

@@ -285,7 +285,7 @@ function like so:

   def configure_args(self):
       args = []
       ...

       if self.spec.satisfies("+mpi"):
           args.append("--enable-mpi")
       else:

@@ -299,10 +299,7 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
.. code-block:: python

   def configure_args(self):
       args = []
       ...
       args.extend(self.enable_or_disable("mpi"))
       return args
       return [self.enable_or_disable("mpi")]

Note that we are explicitly disabling MPI support if it is not

@@ -347,14 +344,7 @@ typically used to enable or disable some feature within the package.
       default=False,
       description="Memchecker support for debugging [degrades performance]"
   )
   ...

   def configure_args(self):
       args = []
       ...
       args.extend(self.enable_or_disable("memchecker"))

       return args
       config_args.extend(self.enable_or_disable("memchecker"))

In this example, specifying the variant ``+memchecker`` will generate
the following configuration options:
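For readers following the documentation hunks above: `enable_or_disable` maps a boolean variant onto the matching `--enable-X`/`--disable-X` configure flag, which is the pattern both versions of the docs converge on. A minimal sketch of a package using it (the package itself is hypothetical; the API calls are the ones the hunks quote):

```python
from spack.package import *


class Foo(AutotoolsPackage):  # hypothetical package, for illustration only
    """Sketch of the documented enable_or_disable pattern."""

    variant("memchecker", default=False,
            description="Memchecker support for debugging [degrades performance]")

    def configure_args(self):
        args = []
        # +memchecker -> ["--enable-memchecker"], ~memchecker -> ["--disable-memchecker"]
        args.extend(self.enable_or_disable("memchecker"))
        return args
```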
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:

.. code-block:: python

   phases = ("configure", "build", "install")
   phases = ["configure", "build", "install"]

Similarly, ``cmake`` defines:

.. code-block:: python

   phases = ("bootstrap", "build", "install")
   phases = ["bootstrap", "build", "install"]

If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
class to run the ``bootstrap``, ``build``, and ``install`` functions
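As a concrete illustration of what the ``phases`` attribute drives: Spack calls one method per named phase, in order. A hedged sketch of a custom package (class name and build commands are invented; the mechanism is the one the docs describe):

```python
from spack.package import *


class Mytool(Package):  # hypothetical package
    """Sketch: PackageBase runs one method per entry in `phases`, in order."""

    phases = ["configure", "build", "install"]

    def configure(self, spec, prefix):
        configure_script = Executable("./configure")
        configure_script(f"--prefix={prefix}")

    def build(self, spec, prefix):
        make()  # `make` is injected into the build environment

    def install(self, spec, prefix):
        make("install")
```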
@@ -361,6 +361,7 @@ and the tags associated with the class of runners to build on.
* ``.linux_neoverse_n1``
* ``.linux_neoverse_v1``
* ``.linux_neoverse_v2``
* ``.linux_power``
* ``.linux_skylake``
* ``.linux_x86_64``
* ``.linux_x86_64_v4``

@@ -543,10 +543,10 @@ With either interpreter you can run a single command:

.. code-block:: console

   $ spack python -c 'from spack.concretize import concretize_one; concretize_one("python")'
   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
   ...

   $ spack python -i ipython -c 'from spack.concretize import concretize_one; concretize_one("python")'
   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
   Out[1]: ...

or a file:

@@ -112,19 +112,6 @@ the original but may concretize differently in the presence of different
explicit or default configuration settings (e.g., a different version of
Spack or for a different user account).

Environments created from a manifest will copy any included configs
from relative paths inside the environment. Relative paths from
outside the environment will cause errors, and absolute paths will be
kept absolute. For example, if ``spack.yaml`` includes:

.. code-block:: yaml

   spack:
     include: [./config.yaml]

then the created environment will have its own copy of the file
``config.yaml`` copied from the location in the original environment.

Create an environment from a ``spack.lock`` file using:

.. code-block:: console

@@ -173,7 +160,7 @@ accepts. If an environment already exists then spack will simply activate it
and ignore the create-specific flags.

.. code-block:: console

   $ spack env activate --create -p myenv
   # ...
   # [creates if myenv does not exist yet]

@@ -437,8 +424,8 @@ Developing Packages in a Spack Environment

The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will automatically pick the highest version the package has defined.
This means any infinity versions (``develop``, ``main``, ``stable``) will be
preferred in this selection process.

@@ -448,9 +435,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native implementation to check for modifications
is to check if ``mtime`` is newer than the installation.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects using custom spack repo's
to drive development and want to optimize performance.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects using custom spack repo's
to drive development and want to optimize performance.

Spack ensures that all instances of a
developed package in the environment are concretized to match the

@@ -466,7 +453,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
and eventually committed and pushed to the upstream git repo.

If the package being developed supports out-of-source builds then users can use the
``--build_directory`` flag to control the location and name of the build directory.
``--build_directory`` flag to control the location and name of the build directory.
This is a shortcut to set the ``package_attributes:build_directory`` in the
``packages`` configuration (see :ref:`assigning-package-attributes`).
The supplied location will become the build-directory for that package in all future builds.

@@ -456,13 +456,14 @@ For instance, the following config options,

   tcl:
     all:
       suffixes:
         ^python@3: 'python{^python.version.up_to_2}'
         ^python@3: 'python{^python.version}'
         ^openblas: 'openblas'

will add a ``python3.12`` to module names of packages compiled with Python 3.12, and similarly for
all specs depending on ``python@3``. This is useful to know which version of Python a set of Python
extensions is associated with. Likewise, the ``openblas`` string is attached to any program that
has openblas in the spec, most likely via the ``+blas`` variant specification.
will add a ``python-3.12.1`` version string to any packages compiled with
Python matching the spec, ``python@3``. This is useful to know which
version of Python a set of Python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification.

The most heavyweight solution to module naming is to change the entire
naming convention for module files. This uses the projections format
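A sketch of how the two suffix templates in the hunk above differ: a spec depending on a matching `python` gets the rendered string appended to its module name, with `up_to_2` truncating the version to major.minor. The helper below is illustrative only, not Spack's implementation:

```python
# Illustrative only: shows what the two templates above render to for a
# package built against python@3.12.1.
def render_suffix(template: str, python_version: str) -> str:
    major, minor, *_ = python_version.split(".")
    return (template
            .replace("{^python.version.up_to_2}", f"{major}.{minor}")
            .replace("{^python.version}", python_version))

assert render_suffix("python{^python.version.up_to_2}", "3.12.1") == "python3.12"
assert render_suffix("python{^python.version}", "3.12.1") == "python3.12.1"
```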
@@ -330,7 +330,7 @@ that ``--tests`` is passed to ``spack ci rebuild`` as part of the
    - spack --version
    - cd ${SPACK_CONCRETE_ENV_DIR}
    - spack env activate --without-view .
    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture.platform}-{architecture.target}/{name}-{version}-{hash}'"
    - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
    - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
    - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
Deleted compiler wrapper symlinks under lib/spack/env (each is a one-line vendored file with hunk `@@ -1 +0,0 @@`; the removed link target follows each path):

lib/spack/env/aocc/clang -> ../cc
lib/spack/env/aocc/clang++ -> ../cpp
lib/spack/env/aocc/flang -> ../fc
lib/spack/env/arm/armclang -> ../cc
lib/spack/env/arm/armclang++ -> ../cc
lib/spack/env/arm/armflang -> ../cc
lib/spack/env/c++ -> cc
lib/spack/env/c89 -> cc
lib/spack/env/c99 -> cc
lib/spack/env/case-insensitive/CC -> ../cc
lib/spack/env/cce/case-insensitive/CC -> ../../cc
lib/spack/env/cce/case-insensitive/crayCC -> ../../cc
lib/spack/env/cce/cc -> ../cc
lib/spack/env/cce/craycc -> ../cc
lib/spack/env/cce/crayftn -> ../cc
lib/spack/env/cce/ftn -> ../cc
lib/spack/env/clang/clang -> ../cc
lib/spack/env/clang/clang++ -> ../cc
lib/spack/env/clang/flang -> ../cc
lib/spack/env/clang/gfortran -> ../cc
lib/spack/env/cpp -> cc
lib/spack/env/f77 -> cc
lib/spack/env/f90 -> cc
lib/spack/env/f95 -> cc
lib/spack/env/fc -> cc
lib/spack/env/fj/case-insensitive/FCC -> ../../cc
lib/spack/env/fj/fcc -> ../cc
lib/spack/env/fj/frt -> ../cc
lib/spack/env/ftn -> cc
lib/spack/env/gcc/g++ -> ../cc
lib/spack/env/gcc/gcc -> ../cc
lib/spack/env/gcc/gfortran -> ../cc
lib/spack/env/intel/icc -> ../cc
lib/spack/env/intel/icpc -> ../cc
lib/spack/env/intel/ifort -> ../cc
lib/spack/env/ld -> cc
lib/spack/env/ld.gold -> cc
lib/spack/env/ld.lld -> cc
lib/spack/env/nag/nagfor -> ../cc
lib/spack/env/nvhpc/nvc -> ../cc
lib/spack/env/nvhpc/nvc++ -> ../cc
lib/spack/env/nvhpc/nvfortran -> ../cc
lib/spack/env/oneapi/dpcpp -> ../cc
lib/spack/env/oneapi/icpx -> ../cc
lib/spack/env/oneapi/icx -> ../cc
lib/spack/env/oneapi/ifx -> ../cc
lib/spack/env/pgi/pgc++ -> ../cc
lib/spack/env/pgi/pgcc -> ../cc
lib/spack/env/pgi/pgfortran -> ../cc
lib/spack/env/rocmcc/amdclang -> ../cc
lib/spack/env/rocmcc/amdclang++ -> ../cpp
lib/spack/env/rocmcc/amdflang -> ../fc
lib/spack/env/xl/xlc -> ../cc
lib/spack/env/xl/xlc++ -> ../cc
lib/spack/env/xl/xlf -> ../cc
lib/spack/env/xl/xlf90 -> ../cc
lib/spack/env/xl_r/xlc++_r -> ../cc
lib/spack/env/xl_r/xlc_r -> ../cc
lib/spack/env/xl_r/xlf90_r -> ../cc
lib/spack/env/xl_r/xlf_r -> ../cc
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""URL primitives that just require Python standard library."""
import itertools
import os
import os.path
import re
from typing import Optional, Set, Tuple
from urllib.parse import urlsplit, urlunsplit

@@ -75,6 +75,7 @@
    "install_tree",
    "is_exe",
    "join_path",
    "last_modification_time_recursive",
    "library_extensions",
    "mkdirp",
    "partition_path",

@@ -668,7 +669,7 @@ def copy(src, dest, _permissions=False):
        _permissions (bool): for internal use only

    Raises:
        OSError: if *src* does not match any files or directories
        IOError: if *src* does not match any files or directories
        ValueError: if *src* matches multiple files but *dest* is
            not a directory
    """
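Several hunks in this file swap `OSError` and `IOError` in docstrings and `raise`/`except` statements. Since Python 3.3 (PEP 3151), `IOError` is an alias of `OSError`, so the two spellings are behaviorally identical and the change is purely cosmetic, as this snippet confirms:

```python
# IOError has been an alias of OSError since Python 3.3, so
# `except IOError` and `except OSError` catch exactly the same exceptions.
assert IOError is OSError

try:
    raise IOError("No such file or directory: 'x'")
except OSError as e:
    print(type(e).__name__)  # prints "OSError"
```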
@@ -679,7 +680,7 @@ def copy(src, dest, _permissions=False):

    files = glob.glob(src)
    if not files:
        raise OSError("No such file or directory: '{0}'".format(src))
        raise IOError("No such file or directory: '{0}'".format(src))
    if len(files) > 1 and not os.path.isdir(dest):
        raise ValueError(
            "'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)

@@ -710,7 +711,7 @@ def install(src, dest):
        dest (str): the destination file or directory

    Raises:
        OSError: if *src* does not match any files or directories
        IOError: if *src* does not match any files or directories
        ValueError: if *src* matches multiple files but *dest* is
            not a directory
    """

@@ -748,7 +749,7 @@ def copy_tree(
        _permissions (bool): for internal use only

    Raises:
        OSError: if *src* does not match any files or directories
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    if _permissions:

@@ -762,7 +763,7 @@ def copy_tree(

    files = glob.glob(src)
    if not files:
        raise OSError("No such file or directory: '{0}'".format(src))
        raise IOError("No such file or directory: '{0}'".format(src))

    # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
    # all symlinks to this list while traversing the tree, then when finished, make all

@@ -843,7 +844,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
        ignore (typing.Callable): function indicating which files to ignore

    Raises:
        OSError: if *src* does not match any files or directories
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)

@@ -1469,36 +1470,15 @@ def set_executable(path):


@system_path_filter
def recursive_mtime_greater_than(path: str, time: float) -> bool:
    """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
    # use bfs order to increase likelihood of early return
    queue: Deque[str] = collections.deque([path])

    if os.stat(path).st_mtime > time:
        return True

    while queue:
        current = queue.popleft()

        try:
            entries = os.scandir(current)
        except OSError:
            continue

        with entries:
            for entry in entries:
                try:
                    st = entry.stat(follow_symlinks=False)
                except OSError:
                    continue

                if st.st_mtime > time:
                    return True

                if entry.is_dir(follow_symlinks=False):
                    queue.append(entry.path)

    return False
def last_modification_time_recursive(path):
    path = os.path.abspath(path)
    times = [os.stat(path).st_mtime]
    times.extend(
        os.lstat(os.path.join(root, name)).st_mtime
        for root, dirs, files in os.walk(path)
        for name in dirs + files
    )
    return max(times)


@system_path_filter
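The two implementations juxtaposed in the hunk above answer the same question ("did anything under `path` change after time `t`?") with different costs: the BFS version can return as soon as it finds one newer mtime, while the max-based version always stats the entire tree. A usage sketch of the equivalence (only one of the two functions exists in a given revision; the path and cutoff are invented):

```python
import os
import time

root = os.path.expanduser("~/some/project")  # hypothetical path
cutoff = time.time() - 3600

# Early-exit form: stops at the first entry newer than `cutoff`.
changed = recursive_mtime_greater_than(root, cutoff)

# Full-scan form: walks everything, then compares once.
changed_too = last_modification_time_recursive(root) > cutoff

assert changed == changed_too  # same answer, different traversal cost
```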
@@ -1760,7 +1740,8 @@ def find(


def _log_file_access_issue(e: OSError, path: str) -> None:
    tty.debug(f"find must skip {path}: {e}")
    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
    tty.debug(f"find must skip {path}: {errno_name} {e}")


def _file_id(s: os.stat_result) -> Tuple[int, int]:

@@ -72,7 +72,7 @@ def index_by(objects, *funcs):
    if isinstance(f, str):
        f = lambda x: getattr(x, funcs[0])
    elif isinstance(f, tuple):
        f = lambda x: tuple(getattr(x, p) for p in funcs[0])
        f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])

    result = {}
    for o in objects:
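The one-line change above makes tuple-keyed indexing tolerant of missing attributes: `getattr` with a `None` default yields a `(..., None)` key instead of raising `AttributeError`. A small sketch of just that lambda (the record class is invented for illustration):

```python
# Mirrors the tuple branch of index_by: build a composite key with getattr,
# substituting None for any attribute an object lacks.
class Rec:
    def __init__(self, **kw):
        self.__dict__.update(kw)

key = lambda x: tuple(getattr(x, p, None) for p in ("os", "target"))

print(key(Rec(os="ubuntu22", target="x86_64")))  # ('ubuntu22', 'x86_64')
print(key(Rec(os="ubuntu22")))                   # ('ubuntu22', None), no AttributeError
```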
@@ -996,11 +996,8 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
    def grouped_message(self, with_tracebacks: bool = True) -> str:
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            "{0} raised {1}: {2}{3}".format(
                context,
                exc.__class__.__name__,
                exc,
                "\n{0}".format("".join(tb)) if with_tracebacks else "",
            "\n\t{0} raised {1}: {2}\n{3}".format(
                context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
            )
            for context, exc, tb in self.exceptions
        ]

@@ -308,7 +308,7 @@ class LinkTree:

    def __init__(self, source_root):
        if not os.path.exists(source_root):
            raise OSError("No such file or directory: '%s'", source_root)
            raise IOError("No such file or directory: '%s'", source_root)

        self._root = source_root

@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:

            return True

        except OSError as e:
        except IOError as e:
            # EAGAIN and EACCES == locked by another process (so try again)
            if e.errno not in (errno.EAGAIN, errno.EACCES):
                raise

@@ -918,7 +918,7 @@ def _writer_daemon(
            try:
                if stdin_file.read(1) == "v":
                    echo = not echo
            except OSError as e:
            except IOError as e:
                # If SIGTTIN is ignored, the system gives EIO
                # to let the caller know the read failed b/c it
                # was in the bg. Ignore that too.

@@ -1013,7 +1013,7 @@ def wrapped(*args, **kwargs):
        while True:
            try:
                return function(*args, **kwargs)
            except OSError as e:
            except IOError as e:
                if e.errno == errno.EINTR:
                    continue
                raise

@@ -10,7 +10,7 @@
import spack.util.git

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0.dev0"
__version__ = "1.0.0-alpha.3"
spack_version = __version__
@@ -1356,8 +1356,14 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):

    def _compare_extra_attribute(_expected, _detected, *, _spec):
        result = []
        # Check items are of the same type
        if not isinstance(_detected, type(_expected)):
            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
            _details = [f"{_detected} was detected instead"]
            return [error_cls(summary=_summary, details=_details)]

        # If they are string expected is a regex
        if isinstance(_expected, str) and isinstance(_detected, str):
        if isinstance(_expected, str):
            try:
                _regex = re.compile(_expected)
            except re.error:

@@ -1373,7 +1379,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                _details = [f"{_detected} does not match the regex"]
                return [error_cls(summary=_summary, details=_details)]

        elif isinstance(_expected, dict) and isinstance(_detected, dict):
        if isinstance(_expected, dict):
            _not_detected = set(_expected.keys()) - set(_detected.keys())
            if _not_detected:
                _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"

@@ -1388,10 +1394,6 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                    result.extend(
                        _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
                    )
        else:
            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
            _details = [f"{_detected} was detected instead"]
            return [error_cls(summary=_summary, details=_details)]

        return result
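For context on what this audit compares: detected externals carry nested `extra_attributes`, where string leaves are treated as regexes and dict levels are compared key by key, recursively. A toy illustration of those two matching rules (the data is invented):

```python
import re

expected = {"compilers": {"c": r".*/clang-\d+$"}}    # regexes at the leaves
detected = {"compilers": {"c": "/usr/bin/clang-15"}}

# Dict level: every expected key must be present in the detected dict...
assert set(expected) <= set(detected)
# ...and string leaves are regex-matched against the detected value.
assert re.match(expected["compilers"]["c"], detected["compilers"]["c"])
```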
@@ -5,7 +5,6 @@
import codecs
import collections
import concurrent.futures
import contextlib
import copy
import hashlib
import io

@@ -24,7 +23,7 @@
import urllib.request
import warnings
from contextlib import closing
from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union

import llnl.util.filesystem as fsys
import llnl.util.lang

@@ -92,9 +91,6 @@
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2


INDEX_HASH_FILE = "index.json.hash"


class BuildCacheDatabase(spack_db.Database):
    """A database for binary buildcaches.

@@ -506,7 +502,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
        scheme = urllib.parse.urlparse(mirror_url).scheme

        if scheme != "oci" and not web_util.url_exists(
            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
        ):
            return False

@@ -629,7 +625,14 @@ def tarball_directory_name(spec):
    Return name of the tarball directory according to the convention
    <os>-<architecture>/<compiler>/<package>-<version>/
    """
    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")
    if spec.original_spec_format() < 5:
        compiler = spec.annotations.compiler_node_attribute
        assert compiler is not None, "a compiler spec is expected"
        return spec.format_path(
            f"{spec.architecture}/{compiler.name}-{compiler.version}/{spec.name}-{spec.version}"
        )

    return spec.format_path(f"{spec.architecture.platform}/{spec.name}-{spec.version}")

@@ -637,9 +640,17 @@ def tarball_name(spec, ext):
    Return the name of the tarfile according to the convention
    <os>-<architecture>-<package>-<dag_hash><ext>
    """
    spec_formatted = spec.format_path(
        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
    )
    if spec.original_spec_format() < 5:
        compiler = spec.annotations.compiler_node_attribute
        assert compiler is not None, "a compiler spec is expected"
        spec_formatted = (
            f"{spec.architecture}-{compiler.name}-{compiler.version}-{spec.name}"
            f"-{spec.version}-{spec.dag_hash()}"
        )
    else:
        spec_formatted = (
            f"{spec.architecture.platform}-{spec.name}-{spec.version}-{spec.dag_hash()}"
        )
    return f"{spec_formatted}{ext}"
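To make the naming convention concrete, here is roughly what the two branches of `tarball_name` produce. All of the spec values and the extension below are invented for illustration:

```python
# Illustrative values only; not taken from a real spec.
architecture = "linux-ubuntu22.04-x86_64"
name, version, dag_hash = "zlib", "1.3.1", "abcdef1234567890"
ext = ".spack"

# Old-format specs (original_spec_format() < 5) keep the compiler in the name:
old_style = f"{architecture}-gcc-12.3.0-{name}-{version}-{dag_hash}{ext}"

# New-format specs drop the compiler and use only the platform:
new_style = f"linux-{name}-{version}-{dag_hash}{ext}"

print(old_style)  # linux-ubuntu22.04-x86_64-gcc-12.3.0-zlib-1.3.1-abcdef1234567890.spack
print(new_style)  # linux-zlib-1.3.1-abcdef1234567890.spack
```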
@@ -673,24 +684,19 @@ def sign_specfile(key: str, specfile_path: str) -> str:


def _read_specs_and_push_index(
    file_list: List[str],
    read_method: Callable,
    cache_prefix: str,
    db: BuildCacheDatabase,
    temp_dir: str,
    concurrency: int,
    file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
):
    """Read all the specs listed in the provided list, using thread given thread parallelism,
    generate the index, and push it to the mirror.

    Args:
        file_list: List of urls or file paths pointing at spec files to read
        file_list (list(str)): List of urls or file paths pointing at spec files to read
        read_method: A function taking a single argument, either a url or a file path,
            and which reads the spec file at that location, and returns the spec.
        cache_prefix: prefix of the build cache on s3 where index should be pushed.
        cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
        db: A spack database used for adding specs and then writing the index.
        temp_dir: Location to write index.json and hash for pushing
        concurrency: Number of parallel processes to use when fetching
        temp_dir (str): Location to write index.json and hash for pushing
        concurrency (int): Number of parallel processes to use when fetching
    """
    for file in file_list:
        contents = read_method(file)

@@ -708,7 +714,7 @@ def _read_specs_and_push_index(

    # Now generate the index, compute its hash, and push the two files to
    # the mirror.
    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
    index_json_path = os.path.join(temp_dir, "index.json")
    with open(index_json_path, "w", encoding="utf-8") as f:
        db._write_to_file(f)

@@ -718,14 +724,14 @@ def _read_specs_and_push_index(
    index_hash = compute_hash(index_string)

    # Write the hash out to a local file
    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
    index_hash_path = os.path.join(temp_dir, "index.json.hash")
    with open(index_hash_path, "w", encoding="utf-8") as f:
        f.write(index_hash)

    # Push the index itself
    web_util.push_to_url(
        index_json_path,
        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
        url_util.join(cache_prefix, "index.json"),
        keep_original=False,
        extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
    )

@@ -733,7 +739,7 @@ def _read_specs_and_push_index(
    # Push the hash
    web_util.push_to_url(
        index_hash_path,
        url_util.join(cache_prefix, INDEX_HASH_FILE),
        url_util.join(cache_prefix, "index.json.hash"),
        keep_original=False,
        extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
    )

@@ -802,7 +808,7 @@ def url_read_method(url):
        try:
            _, _, spec_file = web_util.read_from_url(url)
            contents = codecs.getreader("utf-8")(spec_file).read()
        except (web_util.SpackWebError, OSError) as e:
        except web_util.SpackWebError as e:
            tty.error(f"Error reading specfile: {url}: {e}")
        return contents

@@ -870,12 +876,9 @@ def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
    tty.debug(f"Retrieving spec descriptor files from {url} to build index")

    db = BuildCacheDatabase(tmpdir)
    db._write()

    try:
        _read_specs_and_push_index(
            file_list, read_fn, url, db, str(db.database_directory), concurrency
        )
        _read_specs_and_push_index(file_list, read_fn, url, db, db.database_directory, concurrency)
    except Exception as e:
        raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e

@@ -1789,7 +1792,7 @@ def _oci_update_index(
        db.mark(spec, "in_buildcache", True)

    # Create the index.json file
    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
    index_json_path = os.path.join(tmpdir, "index.json")
    with open(index_json_path, "w", encoding="utf-8") as f:
        db._write_to_file(f)

@@ -2010,7 +2013,7 @@ def fetch_url_to_mirror(url):

            # Download the config = spec.json and the relevant tarball
            try:
                manifest = json.load(response)
                manifest = json.loads(response.read())
                spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
                tarball_digest = spack.oci.image.Digest.from_string(
                    manifest["layers"][-1]["digest"]

@@ -2137,9 +2140,10 @@ def fetch_url_to_mirror(url):


def dedupe_hardlinks_if_necessary(root, buildinfo):
    """Updates a buildinfo dict for old archives that did not dedupe hardlinks. De-duping hardlinks
    is necessary when relocating files in parallel and in-place. This means we must preserve inodes
    when relocating."""
    """Updates a buildinfo dict for old archives that did
    not dedupe hardlinks. De-duping hardlinks is necessary
    when relocating files in parallel and in-place. This
    means we must preserve inodes when relocating."""

    # New archives don't need this.
    if buildinfo.get("hardlinks_deduped", False):
def relocate_package(spec: spack.spec.Spec) -> None:
|
||||
"""Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
|
||||
spec_prefix = str(spec.prefix)
|
||||
buildinfo = read_buildinfo_file(spec_prefix)
|
||||
old_layout_root = str(buildinfo["buildpath"])
|
||||
def relocate_package(spec):
|
||||
"""
|
||||
Relocate the given package
|
||||
"""
|
||||
workdir = str(spec.prefix)
|
||||
buildinfo = read_buildinfo_file(workdir)
|
||||
new_layout_root = str(spack.store.STORE.layout.root)
|
||||
new_prefix = str(spec.prefix)
|
||||
new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
|
||||
new_spack_prefix = str(spack.paths.prefix)
|
||||
|
||||
# Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
|
||||
old_sbang_install_path = None
|
||||
if "sbang_install_path" in buildinfo:
|
||||
old_sbang_install_path = str(buildinfo["sbang_install_path"])
|
||||
old_layout_root = str(buildinfo["buildpath"])
|
||||
old_spack_prefix = str(buildinfo.get("spackprefix"))
|
||||
old_rel_prefix = buildinfo.get("relative_prefix")
|
||||
old_prefix = os.path.join(old_layout_root, old_rel_prefix)
|
||||
|
||||
# Warn about old style tarballs created with the now removed --rel flag.
|
||||
if buildinfo.get("relative_rpaths", False):
|
||||
tty.warn(
|
||||
f"Tarball for {spec} uses relative rpaths, which can cause library loading issues."
|
||||
f"Tarball for {spec} uses relative rpaths, " "which can cause library loading issues."
|
||||
)
|
||||
|
||||
# In Spack 0.19 and older prefix_to_hash was the default and externals were not dropped, so
|
||||
# prefixes were not unique.
|
||||
# In the past prefix_to_hash was the default and externals were not dropped, so prefixes
|
||||
# were not unique.
|
||||
if "hash_to_prefix" in buildinfo:
|
||||
hash_to_old_prefix = buildinfo["hash_to_prefix"]
|
||||
elif "prefix_to_hash" in buildinfo:
|
||||
hash_to_old_prefix = {v: k for (k, v) in buildinfo["prefix_to_hash"].items()}
|
||||
hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
|
||||
else:
|
||||
raise NewLayoutException(
|
||||
"Package tarball was created from an install prefix with a different directory layout "
|
||||
"and an older buildcache create implementation. It cannot be relocated."
|
||||
)
|
||||
hash_to_old_prefix = dict()
|
||||
|
||||
prefix_to_prefix: Dict[str, str] = {}
|
||||
if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
|
||||
msg = "Package tarball was created from an install "
|
||||
msg += "prefix with a different directory layout and an older "
|
||||
msg += "buildcache create implementation. It cannot be relocated."
|
||||
raise NewLayoutException(msg)
|
||||
|
||||
if "sbang_install_path" in buildinfo:
|
||||
old_sbang_install_path = str(buildinfo["sbang_install_path"])
|
||||
prefix_to_prefix[old_sbang_install_path] = spack.hooks.sbang.sbang_install_path()
|
||||
# Spurious replacements (e.g. sbang) will cause issues with binaries
|
||||
# For example, the new sbang can be longer than the old one.
|
||||
# Hence 2 dictionaries are maintained here.
|
||||
prefix_to_prefix_text = collections.OrderedDict()
|
||||
prefix_to_prefix_bin = collections.OrderedDict()
|
||||
|
||||
# First match specific prefix paths. Possibly the *local* install prefix of some dependency is
|
||||
# in an upstream, so we cannot assume the original spack store root can be mapped uniformly to
|
||||
# the new spack store root.
|
||||
if old_sbang_install_path:
|
||||
install_path = spack.hooks.sbang.sbang_install_path()
|
||||
prefix_to_prefix_text[old_sbang_install_path] = install_path
|
||||
|
||||
# If the spec is spliced, we need to handle the simultaneous mapping from the old install_tree
|
||||
# to the new install_tree and from the build_spec to the spliced spec. Because foo.build_spec
|
||||
# is foo for any non-spliced spec, we can simplify by checking for spliced-in nodes by checking
|
||||
# for nodes not in the build_spec without any explicit check for whether the spec is spliced.
|
||||
# An analog in this algorithm is any spec that shares a name or provides the same virtuals in
|
||||
# the context of the relevant root spec. This ensures that the analog for a spec s is the spec
|
||||
# that s replaced when we spliced.
|
||||
# First match specific prefix paths. Possibly the *local* install prefix
|
||||
# of some dependency is in an upstream, so we cannot assume the original
|
||||
# spack store root can be mapped uniformly to the new spack store root.
|
||||
#
|
||||
# If the spec is spliced, we need to handle the simultaneous mapping
|
||||
# from the old install_tree to the new install_tree and from the build_spec
|
||||
# to the spliced spec.
|
||||
# Because foo.build_spec is foo for any non-spliced spec, we can simplify
|
||||
# by checking for spliced-in nodes by checking for nodes not in the build_spec
|
||||
# without any explicit check for whether the spec is spliced.
|
||||
# An analog in this algorithm is any spec that shares a name or provides the same virtuals
|
||||
# in the context of the relevant root spec. This ensures that the analog for a spec s
|
||||
# is the spec that s replaced when we spliced.
|
||||
relocation_specs = specs_to_relocate(spec)
|
||||
build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
|
||||
for s in relocation_specs:
|
||||
@@ -2228,66 +2254,72 @@ def relocate_package(spec: spack.spec.Spec) -> None:
|
||||
lookup_dag_hash = analog.dag_hash()
|
||||
if lookup_dag_hash in hash_to_old_prefix:
|
||||
old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
|
||||
prefix_to_prefix[old_dep_prefix] = str(s.prefix)
|
||||
prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
|
||||
prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)
|
||||
|
||||
# Only then add the generic fallback of install prefix -> install prefix.
|
||||
prefix_to_prefix[old_layout_root] = str(spack.store.STORE.layout.root)
|
||||
prefix_to_prefix_text[old_prefix] = new_prefix
|
||||
prefix_to_prefix_bin[old_prefix] = new_prefix
|
||||
prefix_to_prefix_text[old_layout_root] = new_layout_root
|
||||
prefix_to_prefix_bin[old_layout_root] = new_layout_root
|
||||
|
||||
# Delete identity mappings from prefix_to_prefix
|
||||
prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}
|
||||
# This is vestigial code for the *old* location of sbang. Previously,
|
||||
# sbang was a bash script, and it lived in the spack prefix. It is
|
||||
# now a POSIX script that lives in the install prefix. Old packages
|
||||
# will have the old sbang location in their shebangs.
|
||||
orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
|
||||
new_sbang = spack.hooks.sbang.sbang_shebang_line()
|
||||
prefix_to_prefix_text[orig_sbang] = new_sbang
|
||||
|
||||
# If there's nothing to relocate, we're done.
|
||||
if not prefix_to_prefix:
|
||||
return
|
||||
|
||||
for old, new in prefix_to_prefix.items():
|
||||
tty.debug(f"Relocating: {old} => {new}.")
|
||||
tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
|
||||
|
||||
# Old archives may have hardlinks repeated.
|
||||
dedupe_hardlinks_if_necessary(spec_prefix, buildinfo)
|
||||
dedupe_hardlinks_if_necessary(workdir, buildinfo)
|
||||
|
||||
# Text files containing the prefix text
|
||||
textfiles = [os.path.join(spec_prefix, f) for f in buildinfo["relocate_textfiles"]]
|
||||
binaries = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_binaries")]
|
||||
links = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_links", [])]
|
||||
text_names = [os.path.join(workdir, f) for f in buildinfo["relocate_textfiles"]]
|
||||
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
if "macho" in platform.binary_formats:
|
||||
relocate.relocate_macho_binaries(binaries, prefix_to_prefix)
|
||||
elif "elf" in platform.binary_formats:
|
||||
relocate.relocate_elf_binaries(binaries, prefix_to_prefix)
|
||||
# If we are not installing back to the same install tree do the relocation
|
||||
if old_prefix != new_prefix:
|
||||
files_to_relocate = [
|
||||
os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
|
||||
]
|
||||
# If the buildcache was not created with relativized rpaths
|
||||
# do the relocation of path in binaries
|
||||
platform = spack.platforms.by_name(spec.platform)
|
||||
if "macho" in platform.binary_formats:
|
||||
relocate.relocate_macho_binaries(files_to_relocate, prefix_to_prefix_bin)
|
||||
elif "elf" in platform.binary_formats:
|
||||
# The new ELF dynamic section relocation logic only handles absolute to
|
||||
# absolute relocation.
|
||||
relocate.relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
|
||||
|
||||
relocate.relocate_links(links, prefix_to_prefix)
|
||||
relocate.relocate_text(textfiles, prefix_to_prefix)
|
||||
changed_files = relocate.relocate_text_bin(binaries, prefix_to_prefix)
|
||||
# Relocate links to the new install prefix
|
||||
links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
|
||||
relocate.relocate_links(links, prefix_to_prefix_bin)
|
||||
|
||||
# Add ad-hoc signatures to patched macho files when on macOS.
|
||||
if "macho" in platform.binary_formats and sys.platform == "darwin":
|
||||
codesign = which("codesign")
|
||||
if not codesign:
|
||||
return
|
||||
for binary in changed_files:
|
||||
# preserve the original inode by running codesign on a copy
|
||||
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
|
||||
codesign("-fs-", tmp_binary)
|
||||
# For all buildcaches
|
||||
# relocate the install prefixes in text files including dependencies
|
||||
relocate.relocate_text(text_names, prefix_to_prefix_text)
|
||||
|
||||
install_manifest = os.path.join(
|
||||
spec.prefix,
|
||||
spack.store.STORE.layout.metadata_dir,
|
||||
spack.store.STORE.layout.manifest_file_name,
|
||||
)
|
||||
if not os.path.exists(install_manifest):
|
||||
spec_id = spec.format("{name}/{hash:7}")
|
||||
tty.warn("No manifest file in tarball for spec %s" % spec_id)
|
||||
# relocate the install prefixes in binary files including dependencies
|
||||
changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)

# overwrite old metadata with new
if spec.spliced:
# rewrite spec on disk
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
# Add ad-hoc signatures to patched macho files when on macOS.
if "macho" in platform.binary_formats and sys.platform == "darwin":
codesign = which("codesign")
if not codesign:
return
for binary in changed_files:
# preserve the original inode by running codesign on a copy
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
codesign("-fs-", tmp_binary)

# de-cache the install manifest
with contextlib.suppress(FileNotFoundError):
os.unlink(install_manifest)
# If we are installing back to the same location
# relocate the sbang location if the spack directory changed
else:
if old_spack_prefix != new_spack_prefix:
relocate.relocate_text(text_names, prefix_to_prefix_text)
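The ad-hoc signing step exists because patching a Mach-O file invalidates its code signature, and recent macOS refuses to run binaries with broken signatures. A self-contained sketch of the same `codesign -f -s -` invocation (assuming macOS with the system `codesign` available):

import shutil
import subprocess
import sys

def adhoc_sign(binary: str) -> None:
    """Re-sign a patched Mach-O file with an ad-hoc signature (sketch).

    "-f" forces re-signing, "-s -" selects the ad-hoc identity; this is
    only meaningful on macOS, where `codesign` ships with the system.
    """
    if sys.platform != "darwin" or shutil.which("codesign") is None:
        return
    subprocess.run(["codesign", "-f", "-s", "-", binary], check=True)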


def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
@@ -2455,6 +2487,15 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
except Exception as e:
shutil.rmtree(spec.prefix, ignore_errors=True)
raise e
else:
manifest_file = os.path.join(
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
)
if not os.path.exists(manifest_file):
spec_id = spec.format("{name}/{hash:7}")
tty.warn("No manifest file in tarball for spec %s" % spec_id)
finally:
if tmpdir:
shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2559,6 +2600,10 @@ def install_root_node(
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force)
spec.package.windows_establish_runtime_linkage()
if spec.spliced: # overwrite old metadata with new
spack.store.STORE.layout.write_spec(
spec, spack.store.STORE.layout.spec_file_path(spec)
)
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, allow_missing=allow_missing)

@@ -2596,14 +2641,11 @@ def try_direct_fetch(spec, mirrors=None):
)
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
specfile_contents = codecs.getreader("utf-8")(fs).read()
specfile_is_signed = True
except (web_util.SpackWebError, OSError) as e1:
except web_util.SpackWebError as e1:
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
specfile_contents = codecs.getreader("utf-8")(fs).read()
specfile_is_signed = False
except (web_util.SpackWebError, OSError) as e2:
except web_util.SpackWebError as e2:
tty.debug(
f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
e1,
@@ -2613,6 +2655,7 @@ def try_direct_fetch(spec, mirrors=None):
f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
)
continue
specfile_contents = codecs.getreader("utf-8")(fs).read()

# read the spec from the build cache file. All specs in build caches
# are concrete (as they are built) so we need to mark this spec
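`try_direct_fetch` asks for the signed specfile first and only then falls back to the unsigned one. The fallback pattern in isolation, with plain `urllib` and illustrative parameter names rather than Spack's `web_util` API:

import json
import urllib.error
import urllib.request

def fetch_specfile(signed_url: str, unsigned_url: str):
    """Try the signed specfile first, then the unsigned one (sketch).

    Returns (parsed_json, is_signed), or (None, False) if neither URL works.
    """
    for url, signed in ((signed_url, True), (unsigned_url, False)):
        try:
            with urllib.request.urlopen(url) as response:
                return json.loads(response.read().decode("utf-8")), signed
        except (urllib.error.URLError, OSError):
            continue  # fall through to the next candidate
    return None, False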
@@ -2706,9 +2749,8 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

try:
_, _, json_file = web_util.read_from_url(keys_index)
json_index = sjson.load(json_file)
except (web_util.SpackWebError, OSError, ValueError) as url_err:
# TODO: avoid repeated request
json_index = sjson.load(codecs.getreader("utf-8")(json_file))
except web_util.SpackWebError as url_err:
if web_util.url_exists(keys_index):
tty.error(
f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -2955,14 +2997,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):

def get_remote_hash(self):
# Failure to fetch index.json.hash is not fatal
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
remote_hash = response.read(64)
except OSError:
except (TimeoutError, urllib.error.URLError):
return None

# Validate the hash
remote_hash = response.read(64)
if not re.match(rb"[a-f\d]{64}$", remote_hash):
return None
return remote_hash.decode("utf-8")
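The `read(64)` plus regex check above caps how much is read from the response and rejects anything that is not a plausible sha256 digest; the same validation in isolation:

import re

def valid_sha256_hex(data: bytes) -> bool:
    """True if `data` is exactly 64 lowercase hex characters (sketch)."""
    return re.match(rb"[a-f\d]{64}$", data) is not None

assert valid_sha256_hex(b"a" * 64)
assert not valid_sha256_hex(b"not-a-hash")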
@@ -2976,17 +3018,17 @@ def conditional_fetch(self) -> FetchIndexResult:
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

# Otherwise, download index.json
url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")

try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
except OSError as e:
raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e
except (TimeoutError, urllib.error.URLError) as e:
raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

try:
result = codecs.getreader("utf-8")(response).read()
except (ValueError, OSError) as e:
raise FetchIndexError(f"Remote index {url_index} is invalid") from e
except ValueError as e:
raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e

computed_hash = compute_hash(result)
@@ -3020,7 +3062,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):

def conditional_fetch(self) -> FetchIndexResult:
# Just do a conditional fetch immediately
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}

try:
@@ -3030,12 +3072,12 @@ def conditional_fetch(self) -> FetchIndexResult:
# Not modified; that means fresh.
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
raise FetchIndexError(f"Could not fetch index {url}", e) from e
except OSError as e: # URLError, socket.timeout, etc.
except (TimeoutError, urllib.error.URLError) as e:
raise FetchIndexError(f"Could not fetch index {url}", e) from e

try:
result = codecs.getreader("utf-8")(response).read()
except (ValueError, OSError) as e:
except ValueError as e:
raise FetchIndexError(f"Remote index {url} is invalid", e) from e

headers = response.headers
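The `If-None-Match` header makes this a conditional request: the server answers 304 Not Modified while the ETag still matches, so the index is only downloaded when it actually changed. A standalone sketch of the pattern (illustrative names, not the `FetchIndexResult` API):

import urllib.error
import urllib.request

def conditional_fetch(url: str, etag: str):
    """Fetch `url` only if it no longer matches `etag` (sketch).

    Returns (data, new_etag), or (None, etag) when the server answers
    304 Not Modified, meaning the cached copy is still fresh.
    """
    request = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
    try:
        with urllib.request.urlopen(request) as response:
            return response.read(), response.headers.get("Etag", etag)
    except urllib.error.HTTPError as e:
        if e.code == 304:
            return None, etag  # cached index is still fresh
        raise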
@@ -3067,11 +3109,11 @@ def conditional_fetch(self) -> FetchIndexResult:
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
except OSError as e:
except (TimeoutError, urllib.error.URLError) as e:
raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e

try:
manifest = json.load(response)
manifest = json.loads(response.read())
except Exception as e:
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

@@ -3086,16 +3128,14 @@ def conditional_fetch(self) -> FetchIndexResult:
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

# Otherwise fetch the blob / index.json
try:
response = self.urlopen(
urllib.request.Request(
url=self.ref.blob_url(index_digest),
headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
)
response = self.urlopen(
urllib.request.Request(
url=self.ref.blob_url(index_digest),
headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
)
result = codecs.getreader("utf-8")(response).read()
except (OSError, ValueError) as e:
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
)

result = codecs.getreader("utf-8")(response).read()

# Make sure the blob we download has the advertised hash
if compute_hash(result) != index_digest.digest:
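The final digest check is a plain sha256 comparison between the downloaded blob and the digest advertised in the OCI manifest; sketched here with `hashlib` standing in for Spack's `compute_hash` helper:

import hashlib

def verify_blob(data: bytes, advertised_digest: str) -> None:
    """Raise if the blob's sha256 does not match the advertised digest (sketch).

    `advertised_digest` is the hex part of an OCI "sha256:<hex>" digest.
    """
    computed = hashlib.sha256(data).hexdigest()
    if computed != advertised_digest:
        raise ValueError(f"digest mismatch: got {computed}, expected {advertised_digest}")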
@@ -5,14 +5,12 @@
import fnmatch
import glob
import importlib
import os
import os.path
import re
import sys
import sysconfig
import warnings
from typing import Optional, Sequence, Union

from typing_extensions import TypedDict
from typing import Dict, Optional, Sequence, Union

import archspec.cpu

@@ -20,17 +18,13 @@
from llnl.util import tty

import spack.platforms
import spack.spec
import spack.store
import spack.util.environment
import spack.util.executable

from .config import spec_for_current_python


class QueryInfo(TypedDict, total=False):
spec: spack.spec.Spec
command: spack.util.executable.Executable
QueryInfo = Dict[str, "spack.spec.Spec"]
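One side of this hunk types `QueryInfo` as a `TypedDict` rather than a plain `Dict` alias, so type checkers know the exact keys and that both are optional. A minimal illustration, with string stand-ins for the Spack types:

from typing import Optional

try:
    from typing_extensions import TypedDict  # as imported in this diff
except ImportError:
    from typing import TypedDict  # available on Python >= 3.8

class QueryInfo(TypedDict, total=False):
    # total=False makes both keys optional, mirroring partial query results
    spec: str     # stand-in for spack.spec.Spec
    command: str  # stand-in for spack.util.executable.Executable

def describe(info: QueryInfo) -> Optional[str]:
    # a type checker knows "spec" is a declared, optional key
    return info.get("spec")

info: QueryInfo = {}
info["spec"] = "python@3.11"
print(describe(info))  # -> python@3.11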


def _python_import(module: str) -> bool:
@@ -217,9 +211,7 @@ def _executables_in_store(
):
spack.util.environment.path_put_first("PATH", [bin_dir])
if query_info is not None:
query_info["command"] = spack.util.executable.which(
*executables, path=bin_dir, required=True
)
query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
query_info["spec"] = concrete_spec
return True
return False
@@ -234,12 +226,13 @@ def _root_spec(spec_str: str) -> str:
# Add a compiler and platform requirement to the root spec.
platform = str(spack.platforms.host())

if platform == "darwin":
spec_str += " %apple-clang"
elif platform == "windows":
# FIXME (compiler as nodes): recover the compiler for source bootstrapping
# if platform == "darwin":
# spec_str += " %apple-clang"
if platform == "windows":
spec_str += " %msvc"
elif platform == "linux":
spec_str += " %gcc"
# elif platform == "linux":
# spec_str += " %gcc"
elif platform == "freebsd":
spec_str += " %clang"
spec_str += f" platform={platform}"
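`_root_spec` appends a platform-appropriate compiler token and a platform requirement to the spec string; the same dispatch as a tiny standalone function (mapping taken from the branches in this hunk):

def root_spec(spec_str: str, platform: str) -> str:
    """Append a compiler and platform requirement to a root spec (sketch)."""
    compilers = {"darwin": "%apple-clang", "windows": "%msvc",
                 "linux": "%gcc", "freebsd": "%clang"}
    if platform in compilers:
        spec_str += " " + compilers[platform]
    return spec_str + f" platform={platform}"

print(root_spec("clingo-bootstrap@spack", "linux"))
# -> clingo-bootstrap@spack %gcc platform=linux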
@@ -15,11 +15,13 @@

import archspec.cpu

import spack.compiler
import spack.compilers
import spack.compilers.config
import spack.compilers.libraries
import spack.config
import spack.platforms
import spack.spec
import spack.traverse
import spack.version

from .config import spec_for_current_python

@@ -27,9 +29,9 @@
class ClingoBootstrapConcretizer:
def __init__(self, configuration):
self.host_platform = spack.platforms.host()
self.host_os = self.host_platform.default_operating_system()
self.host_os = self.host_platform.operating_system("frontend")
self.host_target = archspec.cpu.host().family
self.host_architecture = spack.spec.ArchSpec.default_arch()
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
self.host_architecture.target = str(self.host_target)
self.host_compiler = self._valid_compiler_or_raise()
self.host_python = self.python_external_spec()
@@ -38,7 +40,7 @@ def __init__(self, configuration):

self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)

def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
def _valid_compiler_or_raise(self):
if str(self.host_platform) == "linux":
compiler_name = "gcc"
elif str(self.host_platform) == "darwin":
@@ -46,17 +48,30 @@ def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
elif str(self.host_platform) == "windows":
compiler_name = "msvc"
elif str(self.host_platform) == "freebsd":
compiler_name = "clang"
compiler_name = "llvm"
else:
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
candidates = spack.compilers.compilers_for_spec(
compiler_name, arch_spec=self.host_architecture
)

candidates = [
x
for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
if x.name == compiler_name
]
if not candidates:
raise RuntimeError(
f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
)
candidates.sort(key=lambda x: x.spec.version, reverse=True)
candidates.sort(key=lambda x: x.version, reverse=True)
best = candidates[0]
# Get compilers for bootstrapping from the 'builtin' repository
best.namespace = "builtin"
# If the compiler does not support C++ 14, fail with a legible error message
try:
_ = best.package.standard_flag(language="cxx", standard="14")
except RuntimeError as e:
raise RuntimeError(
"cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
) from e
return candidates[0]
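Compiler selection here boils down to filtering candidates by name, sorting by version in descending order, and taking the head of the list. The selection step in isolation, with a plain dataclass in place of Spack's compiler objects:

from dataclasses import dataclass

@dataclass
class Candidate:
    name: str
    version: tuple  # e.g. (13, 2, 0); Spack uses richer version objects

candidates = [
    Candidate("gcc", (9, 4, 0)),
    Candidate("gcc", (13, 2, 0)),
    Candidate("gcc", (11, 1, 0)),
]
if not candidates:
    raise RuntimeError("Cannot find any version of gcc to bootstrap clingo from sources")
candidates.sort(key=lambda x: x.version, reverse=True)
best = candidates[0]  # newest version wins
print(best)  # Candidate(name='gcc', version=(13, 2, 0))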

def _externals_from_yaml(
@@ -75,9 +90,6 @@ def _externals_from_yaml(
if not s.satisfies(requirements[pkg_name]):
continue

if not s.intersects(f"%{self.host_compiler.spec}"):
continue

if not s.intersects(f"arch={self.host_architecture}"):
continue

@@ -110,11 +122,14 @@ def concretize(self) -> "spack.spec.Spec":
# Tweak it to conform to the host architecture
for node in s.traverse():
node.architecture.os = str(self.host_os)
node.compiler = self.host_compiler.spec
node.architecture = self.host_architecture

if node.name == "gcc-runtime":
node.versions = self.host_compiler.spec.versions
node.versions = self.host_compiler.versions

# Can't use re2c@3.1 with Python 3.6
if self.host_python.satisfies("@3.6"):
s["re2c"].versions.versions = [spack.version.from_string("=2.2")]

for edge in spack.traverse.traverse_edges([s], cover="edges"):
if edge.spec.name == "python":
@@ -126,6 +141,9 @@ def concretize(self) -> "spack.spec.Spec":
if edge.spec.name == "cmake" and self.external_cmake:
edge.spec = self.external_cmake

if edge.spec.name == self.host_compiler.name:
edge.spec = self.host_compiler

if "libc" in edge.virtuals:
edge.spec = self.host_libc

@@ -141,12 +159,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
return self._external_spec(result)

def libc_external_spec(self) -> "spack.spec.Spec":
result = self.host_compiler.default_libc
detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
result = detector.default_libc()
return self._external_spec(result)

def _external_spec(self, initial_spec) -> "spack.spec.Spec":
initial_spec.namespace = "builtin"
initial_spec.compiler = self.host_compiler.spec
initial_spec.architecture = self.host_architecture
for flag_type in spack.spec.FlagMap.valid_compiler_flags():
initial_spec.compiler_flags[flag_type] = []
@@ -4,13 +4,12 @@
"""Manage configuration swapping for bootstrapping purposes"""

import contextlib
import os
import os.path
import sys
from typing import Any, Dict, Generator, MutableSequence, Sequence

from llnl.util import tty

import spack.compilers
import spack.compilers.config
import spack.config
import spack.environment
import spack.modules
@@ -141,9 +141,9 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:


def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.default_arch()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.config.compilers_for_arch(arch):
spack.compilers.config.find_compilers()


@contextlib.contextmanager
@@ -25,6 +25,7 @@
import functools
import json
import os
import os.path
import sys
import uuid
from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -33,10 +34,8 @@
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.concretize
import spack.config
import spack.detection
import spack.error
import spack.mirrors.mirror
import spack.platforms
import spack.spec
@@ -45,17 +44,10 @@
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version
from spack.installer import PackageInstaller

from ._common import (
QueryInfo,
_executables_in_store,
_python_import,
_root_spec,
_try_import_from_store,
)
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python

@@ -97,12 +89,8 @@ def __init__(self, conf: ConfigDictionary) -> None:
self.name = conf["name"]
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

# Check for relative paths, and turn them into absolute paths
# root is the metadata_dir
maybe_url = conf["info"]["url"]
if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
maybe_url = os.path.join(self.metadata_dir, maybe_url)
self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url
# Promote (relative) paths to file urls
self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url

@property
def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -146,7 +134,7 @@ class BuildcacheBootstrapper(Bootstrapper):

def __init__(self, conf) -> None:
super().__init__(conf)
self.last_search: Optional[QueryInfo] = None
self.last_search: Optional[ConfigDictionary] = None
self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

@staticmethod
@@ -223,14 +211,14 @@ def _install_and_test(
for _, pkg_hash, pkg_sha256 in item["binaries"]:
self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

info: QueryInfo = {}
info: ConfigDictionary = {}
if test_fn(query_spec=abstract_spec, query_info=info):
self.last_search = info
return True
return False

def try_import(self, module: str, abstract_spec_str: str) -> bool:
info: QueryInfo
info: ConfigDictionary
test_fn, info = functools.partial(_try_import_from_store, module), {}
if test_fn(query_spec=abstract_spec_str, query_info=info):
return True
@@ -243,7 +231,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
info: QueryInfo
info: ConfigDictionary
test_fn, info = functools.partial(_executables_in_store, executables), {}
if test_fn(query_spec=abstract_spec_str, query_info=info):
self.last_search = info
@@ -261,11 +249,11 @@ class SourceBootstrapper(Bootstrapper):

def __init__(self, conf) -> None:
super().__init__(conf)
self.last_search: Optional[QueryInfo] = None
self.last_search: Optional[ConfigDictionary] = None
self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

def try_import(self, module: str, abstract_spec_str: str) -> bool:
info: QueryInfo = {}
info: ConfigDictionary = {}
if _try_import_from_store(module, abstract_spec_str, query_info=info):
self.last_search = info
return True
@@ -282,17 +270,22 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
concrete_spec = bootstrapper.concretize()
else:
abstract_spec = spack.spec.Spec(
concrete_spec = spack.spec.Spec(
abstract_spec_str + " ^" + spec_for_current_python()
)
concrete_spec = spack.concretize.concretize_one(abstract_spec)
concrete_spec.concretize()

msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
tty.debug(msg.format(module, abstract_spec_str))

# Install the spec that should make the module importable
with spack.config.override(self.mirror_scope):
PackageInstaller([concrete_spec.package], fail_fast=True).install()
PackageInstaller(
[concrete_spec.package],
fail_fast=True,
package_use_cache=False,
dependencies_use_cache=False,
).install()

if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
self.last_search = info
@@ -300,7 +293,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
return False

def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
info: QueryInfo = {}
info: ConfigDictionary = {}
if _executables_in_store(executables, abstract_spec_str, query_info=info):
self.last_search = info
return True
@@ -311,7 +304,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
# might reduce compilation time by a fair amount
_add_externals_if_missing()

concrete_spec = spack.concretize.concretize_one(abstract_spec_str)
concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
msg = "[BOOTSTRAP] Try installing '{0}' from sources"
tty.debug(msg.format(abstract_spec_str))
with spack.config.override(self.mirror_scope):
@@ -328,9 +321,10 @@ def create_bootstrapper(conf: ConfigDictionary):
return _bootstrap_methods[btype](conf)

def source_is_enabled(conf: ConfigDictionary) -> bool:
"""Returns True if the source is enabled for bootstrapping"""
return spack.config.get("bootstrap:trusted").get(conf["name"], False)
def source_is_enabled(conf: ConfigDictionary):
"""Returns True if the source is enabled for bootstrapping"""
trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
return trusted.get(name, False)
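Both variants of `source_is_enabled` reduce to a defaulting dictionary lookup on the `bootstrap:trusted` config section; in isolation, with a plain dict and illustrative source names standing in for `spack.config`:

config = {"bootstrap:trusted": {"github-actions-buildcache": True, "spack-install": False}}

def source_is_enabled(conf: dict) -> bool:
    """Returns True if the named source is trusted for bootstrapping (sketch)."""
    return config["bootstrap:trusted"].get(conf["name"], False)

print(source_is_enabled({"name": "github-actions-buildcache"}))  # True
print(source_is_enabled({"name": "unknown-source"}))             # False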


def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
@@ -362,21 +356,20 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
for current_config in bootstrapping_sources():
if not source_is_enabled(current_config):
continue

with exception_handler.forward(current_config["name"], Exception):
if create_bootstrapper(current_config).try_import(module, abstract_spec):
current_bootstrapper = create_bootstrapper(current_config)
if current_bootstrapper.try_import(module, abstract_spec):
return

assert exception_handler, (
f"expected at least one exception to have been raised at this point: "
f"while bootstrapping {module}"
)
msg = f'cannot bootstrap the "{module}" Python module '
if abstract_spec:
msg += f'from spec "{abstract_spec}" '

if not exception_handler:
msg += ": no bootstrapping sources are enabled"
elif spack.error.debug or spack.error.SHOW_BACKTRACE:
msg += exception_handler.grouped_message(with_tracebacks=True)
else:
msg += exception_handler.grouped_message(with_tracebacks=False)
msg += "\nRun `spack --backtrace ...` for more detailed errors"
msg += exception_handler.grouped_message(with_tracebacks=tty.is_debug())
raise ImportError(msg)
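The loop above swallows per-source failures and raises a single grouped error only after every source has been tried. The accumulate-then-raise pattern without Spack's `GroupedExceptionHandler`, as a sketch:

def try_sources(sources, action):
    """Try `action` on each source; raise one grouped error if all fail (sketch)."""
    errors = []
    for source in sources:
        try:
            if action(source):
                return source
        except Exception as e:  # record and keep trying the next source
            errors.append(f"{source}: {e}")
    summary = "\n  ".join(errors) or "no sources are enabled"
    raise ImportError("cannot bootstrap module:\n  " + summary)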
@@ -424,7 +417,6 @@ def ensure_executables_in_path_or_raise(
current_bootstrapper.last_search["spec"],
current_bootstrapper.last_search["command"],
)
assert cmd is not None, "expected an Executable"
cmd.add_default_envmod(
spack.user_environment.environment_modifications_for_specs(
concrete_spec, set_package_py_globals=False
@@ -432,17 +424,18 @@ def ensure_executables_in_path_or_raise(
)
return cmd

assert exception_handler, (
f"expected at least one exception to have been raised at this point: "
f"while bootstrapping {executables_str}"
)
msg = f"cannot bootstrap any of the {executables_str} executables "
if abstract_spec:
msg += f'from spec "{abstract_spec}" '

if not exception_handler:
msg += ": no bootstrapping sources are enabled"
elif spack.error.debug or spack.error.SHOW_BACKTRACE:
if tty.is_debug():
msg += exception_handler.grouped_message(with_tracebacks=True)
else:
msg += exception_handler.grouped_message(with_tracebacks=False)
msg += "\nRun `spack --backtrace ...` for more detailed errors"
msg += "\nRun `spack --debug ...` for more detailed errors"
raise RuntimeError(msg)
File diff suppressed because one or more lines are too long
@@ -63,6 +63,7 @@ def _missing(name: str, purpose: str, system_only: bool = True) -> str:

def _core_requirements() -> List[RequiredResponseType]:
_core_system_exes = {
"make": _missing("make", "required to build software from sources"),
"patch": _missing("patch", "required to patch source code before building"),
"tar": _missing("tar", "required to manage code archives"),
"gzip": _missing("gzip", "required to compress/decompress code archives"),
Some files were not shown because too many files have changed in this diff.