Compare commits: hs/fix/tar...prerelease (81 commits)
Commits (SHA1):
0cdece73b8, d8447a4c28, e9f537fcde, fc683b87f1, 337ab120b3, 8f1be225dd, dc96b3cbc1, c43673f63b,
791c35ae5b, ca6f74d3bf, 8b2809b4ac, 179a05bc5b, 5f808db261, b38ab78765, 7841978488, 396d7ab2a9,
d0d77d253d, 762d4fd7d5, 90d2034d8b, f1958fbd7e, 254f5c920b, 23bd090436, 19a4171c38, d3176767a0,
41bb68a623, eaf4483db7, 37704c5708, de24715e05, 35dea05048, a69cf5b9ad, 1da005f6bd, 787f7d7729,
bb90d02d90, cf34db4676, bddd41aa26, db2d077e2c, 8f96ac3d03, 2fd89f7e59, c6fb85eed5, 33cf8d3f5e,
c595a7008d, 591b659242, 99d8f2ddbd, 673703a17d, 2017dfea4c, df464681bf, b6ce34607e, 10479101fa,
eb270afa0c, 7edcc4da88, a316f23d79, 92d96d747c, 96b54ec59c, da06ad3303, 058e19458d, 575483d041,
e6729320f7, 9ca0bd5d97, cdc8236169, f8ab75244b, f211789087, 09ccccbec5, 45e1573a17, 352442a383,
543f830f57, 94fdd3dc5b, 403e2db50f, 7832c56537, bec58a1554, 67b04f1b8d, 8c66a1699e, 86a8ec5bbc,
4f9be7b00a, dc7df70b81, 6f5e6f3a5b, 414b323e01, 0705a5f468, e432641166, cf2b5a1e4f, 510623fe47,
ae6c1a7abe
.github/workflows/unit_tests.yaml (vendored, 7 changes)

@@ -61,7 +61,7 @@ jobs:
       run: "brew install kcov"
   - name: Install Python packages
     run: |
-      pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
+      pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov clingo
       pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
   - name: Setup git configuration
     run: |
@@ -185,7 +185,6 @@ jobs:
       spack bootstrap disable github-actions-v0.5
       spack bootstrap disable github-actions-v0.6
       spack bootstrap status
      spack solve zlib
      spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
  - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
    with:
@@ -223,7 +222,7 @@ jobs:
       . share/spack/setup-env.sh
       $(which spack) bootstrap disable spack-install
       $(which spack) solve zlib
-      common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
+      common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python')
       $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
   - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
     with:
@@ -254,7 +253,7 @@ jobs:
     env:
       COVERAGE_FILE: coverage/.coverage-windows
     run: |
-      spack unit-test -x --verbose --cov --cov-config=pyproject.toml
+      spack unit-test --verbose --cov --cov-config=pyproject.toml
       ./share/spack/qa/validate_last_exit.ps1
   - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
     with:
@@ -25,6 +25,7 @@ exit 1
 # The code above runs this file with our preferred python interpreter.

 import os
+import os.path
 import sys

 min_python3 = (3, 6)
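For context, a guard built on a `min_python3` tuple like the one above usually takes this shape; a generic sketch of the pattern, not the script's exact logic:

```python
import sys

# Generic interpreter-version guard around a (major, minor) tuple like the
# min_python3 constant in the hunk above.
min_python3 = (3, 6)

if sys.version_info[:2] < min_python3:
    sys.exit("requires Python %d.%d or newer" % min_python3)
```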
@@ -19,7 +19,7 @@ config:
   install_tree:
     root: $spack/opt/spack
     projections:
-      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
+      all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"
   # install_tree can include an optional padded length (int or boolean)
   # default is False (do not pad)
   # if padded_length is True, Spack will pad as close to the system max path
@@ -15,12 +15,11 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler:
-    - apple-clang
-    - clang
-    - gcc
     providers:
+      c: [apple-clang, llvm, gcc]
+      cxx: [apple-clang, llvm, gcc]
       elf: [libelf]
+      fortran: [gcc]
       fuse: [macfuse]
       gl: [apple-gl]
       glu: [apple-glu]
@@ -15,19 +15,18 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
       armci: [armcimpi]
       blas: [openblas, amdblis]
-      c: [gcc]
-      cxx: [gcc]
+      c: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
+      cxx: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
       D: [ldc]
       daal: [intel-oneapi-daal]
       elf: [elfutils]
       fftw-api: [fftw, amdfftw]
       flame: [libflame, amdlibflame]
-      fortran: [gcc]
+      fortran: [gcc, llvm]
       fortran-rt: [gcc-runtime, intel-oneapi-runtime]
       fuse: [libfuse]
       gl: [glx, osmesa]
@@ -36,7 +35,7 @@ packages:
       go-or-gccgo-bootstrap: [go-bootstrap, gcc]
       iconv: [libiconv]
       ipp: [intel-oneapi-ipp]
-      java: [openjdk, jdk]
+      java: [openjdk, jdk, ibm-java]
       jpeg: [libjpeg-turbo, libjpeg]
       lapack: [openblas, amdlibflame]
       libc: [glibc, musl]
@@ -73,27 +72,15 @@ packages:
   permissions:
     read: world
     write: user
-  cray-fftw:
-    buildable: false
-  cray-libsci:
-    buildable: false
-  cray-mpich:
-    buildable: false
-  cray-mvapich2:
-    buildable: false
-  cray-pmi:
-    buildable: false
   egl:
     buildable: false
   essl:
     buildable: false
   fujitsu-mpi:
     buildable: false
   fujitsu-ssl2:
     buildable: false
   hpcx-mpi:
     buildable: false
   mpt:
     buildable: false
   spectrum-mpi:
     buildable: false
@@ -15,8 +15,8 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler:
-    - msvc
     providers:
+      c : [msvc]
+      cxx: [msvc]
       mpi: [msmpi]
       gl: [wgl]
@@ -170,7 +170,7 @@ bootstrapping.
 To register the mirror on the platform where it's supposed to be used run the following command(s):

    % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
    % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
+   % spack buildcache update-index /opt/bootstrap/bootstrap_cache


 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:

 .. code-block:: python

-   phases = ("configure", "build", "install")
+   phases = ["configure", "build", "install"]

 Similarly, ``cmake`` defines:

 .. code-block:: python

-   phases = ("bootstrap", "build", "install")
+   phases = ["bootstrap", "build", "install"]

 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
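As a side note, a `phases` sequence like the ones above drives installation by name lookup; here is a minimal, self-contained sketch of that dispatch idea (a hypothetical class, not Spack's actual `PackageBase` machinery):

```python
# Hypothetical illustration of phase dispatch: each phase name is looked up
# as a method and the methods run in declaration order.
class FakePackage:
    phases = ["configure", "build", "install"]

    def configure(self):
        print("configuring")

    def build(self):
        print("building")

    def install(self):
        print("installing")

    def run_installation(self):
        for phase in self.phases:
            getattr(self, phase)()  # resolve the phase name to a method


FakePackage().run_installation()
```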
@@ -543,10 +543,10 @@ With either interpreter you can run a single command:

 .. code-block:: console

-   $ spack python -c 'from spack.concretize import concretize_one; concretize_one("python")'
+   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
    ...

-   $ spack python -i ipython -c 'from spack.concretize import concretize_one; concretize_one("python")'
+   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
    Out[1]: ...

 or a file:
@@ -456,13 +456,14 @@ For instance, the following config options,
   tcl:
     all:
       suffixes:
-        ^python@3: 'python{^python.version.up_to_2}'
+        ^python@3: 'python{^python.version}'
         ^openblas: 'openblas'

-will add a ``python3.12`` to module names of packages compiled with Python 3.12, and similarly for
-all specs depending on ``python@3``. This is useful to know which version of Python a set of Python
-extensions is associated with. Likewise, the ``openblas`` string is attached to any program that
-has openblas in the spec, most likely via the ``+blas`` variant specification.
+will add a ``python-3.12.1`` version string to any packages compiled with
+Python matching the spec, ``python@3``. This is useful to know which
+version of Python a set of Python extensions is associated with. Likewise, the
+``openblas`` string is attached to any program that has openblas in the spec,
+most likely via the ``+blas`` variant specification.

 The most heavyweight solution to module naming is to change the entire
 naming convention for module files. This uses the projections format
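As a rough illustration of the two suffix projections above, assuming a dependency on Python 3.12.1: `{^python.version.up_to_2}` keeps two version components while `{^python.version}` keeps all of them. The helper below is a stand-in for Spack's formatter, for illustration only:

```python
# Stand-in for Spack's version formatting, showing the difference between
# the two suffix projections in the hunk above.
version = (3, 12, 1)

def up_to(version, n):
    """Join the first n version components with dots."""
    return ".".join(str(c) for c in version[:n])

print("python" + up_to(version, 2))   # python3.12    ({^python.version.up_to_2})
print("python-" + up_to(version, 3))  # python-3.12.1 ({^python.version}, with a separator)
```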
@@ -330,7 +330,7 @@ that ``--tests`` is passed to ``spack ci rebuild`` as part of the
     - spack --version
     - cd ${SPACK_CONCRETE_ENV_DIR}
     - spack env activate --without-view .
-    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
+    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture.platform}-{architecture.target}/{name}-{version}-{hash}'"
     - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
     - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
     - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
Deleted vendored symlinks under lib/spack/env (each file was a one-line
relative symlink, @@ -1 +0,0 @@ in every case); the link target follows each path:

lib/spack/env/aocc/clang -> ../cc
lib/spack/env/aocc/clang++ -> ../cpp
lib/spack/env/aocc/flang -> ../fc
lib/spack/env/arm/armclang -> ../cc
lib/spack/env/arm/armclang++ -> ../cc
lib/spack/env/arm/armflang -> ../cc
lib/spack/env/c++ -> cc
lib/spack/env/c89 -> cc
lib/spack/env/c99 -> cc
lib/spack/env/case-insensitive/CC -> ../cc
lib/spack/env/cce/case-insensitive/CC -> ../../cc
lib/spack/env/cce/case-insensitive/crayCC -> ../../cc
lib/spack/env/cce/cc -> ../cc
lib/spack/env/cce/craycc -> ../cc
lib/spack/env/cce/crayftn -> ../cc
lib/spack/env/cce/ftn -> ../cc
lib/spack/env/clang/clang -> ../cc
lib/spack/env/clang/clang++ -> ../cc
lib/spack/env/clang/flang -> ../cc
lib/spack/env/clang/gfortran -> ../cc
lib/spack/env/cpp -> cc
lib/spack/env/f77 -> cc
lib/spack/env/f90 -> cc
lib/spack/env/f95 -> cc
lib/spack/env/fc -> cc
lib/spack/env/fj/case-insensitive/FCC -> ../../cc
lib/spack/env/fj/fcc -> ../cc
lib/spack/env/fj/frt -> ../cc
lib/spack/env/ftn -> cc
lib/spack/env/gcc/g++ -> ../cc
lib/spack/env/gcc/gcc -> ../cc
lib/spack/env/gcc/gfortran -> ../cc
lib/spack/env/intel/icc -> ../cc
lib/spack/env/intel/icpc -> ../cc
lib/spack/env/intel/ifort -> ../cc
lib/spack/env/ld -> cc
lib/spack/env/ld.gold -> cc
lib/spack/env/ld.lld -> cc
lib/spack/env/nag/nagfor -> ../cc
lib/spack/env/nvhpc/nvc -> ../cc
lib/spack/env/nvhpc/nvc++ -> ../cc
lib/spack/env/nvhpc/nvfortran -> ../cc
lib/spack/env/oneapi/dpcpp -> ../cc
lib/spack/env/oneapi/icpx -> ../cc
lib/spack/env/oneapi/icx -> ../cc
lib/spack/env/oneapi/ifx -> ../cc
lib/spack/env/pgi/pgc++ -> ../cc
lib/spack/env/pgi/pgcc -> ../cc
lib/spack/env/pgi/pgfortran -> ../cc
lib/spack/env/rocmcc/amdclang -> ../cc
lib/spack/env/rocmcc/amdclang++ -> ../cpp
lib/spack/env/rocmcc/amdflang -> ../fc
lib/spack/env/xl/xlc -> ../cc
lib/spack/env/xl/xlc++ -> ../cc
lib/spack/env/xl/xlf -> ../cc
lib/spack/env/xl/xlf90 -> ../cc
lib/spack/env/xl_r/xlc++_r -> ../cc
lib/spack/env/xl_r/xlc_r -> ../cc
lib/spack/env/xl_r/xlf90_r -> ../cc
lib/spack/env/xl_r/xlf_r -> ../cc
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """URL primitives that just require Python standard library."""
-import itertools
 import os
+import os.path
 import re
 from typing import Optional, Set, Tuple
 from urllib.parse import urlsplit, urlunsplit
@@ -75,6 +75,7 @@
     "install_tree",
     "is_exe",
     "join_path",
+    "last_modification_time_recursive",
     "library_extensions",
     "mkdirp",
     "partition_path",
@@ -1469,36 +1470,15 @@ def set_executable(path):


 @system_path_filter
-def recursive_mtime_greater_than(path: str, time: float) -> bool:
-    """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
-    # use bfs order to increase likelihood of early return
-    queue: Deque[str] = collections.deque([path])
-
-    if os.stat(path).st_mtime > time:
-        return True
-
-    while queue:
-        current = queue.popleft()
-
-        try:
-            entries = os.scandir(current)
-        except OSError:
-            continue
-
-        with entries:
-            for entry in entries:
-                try:
-                    st = entry.stat(follow_symlinks=False)
-                except OSError:
-                    continue
-
-                if st.st_mtime > time:
-                    return True
-
-                if entry.is_dir(follow_symlinks=False):
-                    queue.append(entry.path)
-
-    return False
+def last_modification_time_recursive(path):
+    path = os.path.abspath(path)
+    times = [os.stat(path).st_mtime]
+    times.extend(
+        os.lstat(os.path.join(root, name)).st_mtime
+        for root, dirs, files in os.walk(path)
+        for name in dirs + files
+    )
+    return max(times)


 @system_path_filter
@@ -1760,7 +1740,8 @@ def find(


 def _log_file_access_issue(e: OSError, path: str) -> None:
-    tty.debug(f"find must skip {path}: {e}")
+    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
+    tty.debug(f"find must skip {path}: {errno_name} {e}")


 def _file_id(s: os.stat_result) -> Tuple[int, int]:
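A quick aside on the two helpers in the hunk above: the removed `recursive_mtime_greater_than` can stop at the first entry newer than a cutoff, while `last_modification_time_recursive` always walks the whole tree and takes a max. A small self-contained comparison of the two strategies:

```python
import os
import tempfile
import time

# Build a tiny tree to scan.
root = tempfile.mkdtemp()
open(os.path.join(root, "file.txt"), "w").close()
cutoff = time.time() - 60  # one minute ago

# Full walk + max, in the style of last_modification_time_recursive:
times = [os.stat(root).st_mtime]
times.extend(
    os.lstat(os.path.join(dirpath, name)).st_mtime
    for dirpath, dirs, files in os.walk(root)
    for name in dirs + files
)
print(max(times) > cutoff)  # True: the tree was just created

# Early-exit scan, in the style of recursive_mtime_greater_than:
def any_newer(path, t):
    if os.stat(path).st_mtime > t:
        return True
    for dirpath, dirs, files in os.walk(path):
        for name in dirs + files:
            if os.lstat(os.path.join(dirpath, name)).st_mtime > t:
                return True  # stop at the first newer entry
    return False

print(any_newer(root, cutoff))  # True
```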
@@ -72,7 +72,7 @@ def index_by(objects, *funcs):
     if isinstance(f, str):
         f = lambda x: getattr(x, funcs[0])
     elif isinstance(f, tuple):
-        f = lambda x: tuple(getattr(x, p) for p in funcs[0])
+        f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])

     result = {}
     for o in objects:
@@ -996,11 +996,8 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):

     def grouped_message(self, with_tracebacks: bool = True) -> str:
         """Print out an error message coalescing all the forwarded errors."""
         each_exception_message = [
-            "{0} raised {1}: {2}{3}".format(
-                context,
-                exc.__class__.__name__,
-                exc,
-                "\n{0}".format("".join(tb)) if with_tracebacks else "",
+            "\n\t{0} raised {1}: {2}\n{3}".format(
+                context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
             )
             for context, exc, tb in self.exceptions
         ]
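The one-character `index_by` change above swaps `getattr(x, p)` for `getattr(x, p, None)`, so tuple keys tolerate missing attributes instead of raising. A standalone illustration:

```python
from types import SimpleNamespace

obj = SimpleNamespace(name="zlib", version="1.3")

# Old behavior: raises when an attribute named in the tuple is missing.
try:
    key = tuple(getattr(obj, p) for p in ("name", "compiler"))
except AttributeError as e:
    print(f"raises: {e}")

# New behavior: the missing attribute becomes None.
key = tuple(getattr(obj, p, None) for p in ("name", "compiler"))
print(key)  # ('zlib', None)
```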
@@ -10,7 +10,7 @@

 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.24.0.dev0"
+__version__ = "1.0.0-alpha.3"
 spack_version = __version__
@@ -1356,8 +1356,14 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):

     def _compare_extra_attribute(_expected, _detected, *, _spec):
         result = []
+        # Check items are of the same type
+        if not isinstance(_detected, type(_expected)):
+            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
+            _details = [f"{_detected} was detected instead"]
+            return [error_cls(summary=_summary, details=_details)]
+
         # If they are string expected is a regex
-        if isinstance(_expected, str) and isinstance(_detected, str):
+        if isinstance(_expected, str):
             try:
                 _regex = re.compile(_expected)
             except re.error:
@@ -1373,7 +1379,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 _details = [f"{_detected} does not match the regex"]
                 return [error_cls(summary=_summary, details=_details)]

-        elif isinstance(_expected, dict) and isinstance(_detected, dict):
+        if isinstance(_expected, dict):
             _not_detected = set(_expected.keys()) - set(_detected.keys())
             if _not_detected:
                 _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
@@ -1388,10 +1394,6 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 result.extend(
                     _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
                 )
-        else:
-            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
-            _details = [f"{_detected} was detected instead"]
-            return [error_cls(summary=_summary, details=_details)]

         return result
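The audit comparison above treats an expected string as a regex and recurses into expected dicts. A compact, self-contained sketch of that rule (names here are hypothetical, not Spack's API):

```python
import re

def compare_attr(expected, detected):
    """Return None on match, else a short error message."""
    if isinstance(expected, str):
        try:
            regex = re.compile(expected)
        except re.error:
            return f"{expected!r} is not a valid regex"
        if not regex.match(str(detected)):
            return f"{detected!r} does not match the regex"
        return None
    if isinstance(expected, dict):
        missing = set(expected) - set(detected)
        if missing:
            return f"cannot detect attributes: {sorted(missing)}"
        for key in expected:
            error = compare_attr(expected[key], detected[key])
            if error:
                return error
    return None

print(compare_attr({"compilers": {"c": r".*/gcc-?\d*"}},
                   {"compilers": {"c": "/usr/bin/gcc-12"}}))  # None (match)
print(compare_attr({"compilers": {"cxx": r".*g\+\+"}},
                   {"compilers": {}}))  # missing-attribute error
```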
@@ -5,7 +5,6 @@
 import codecs
 import collections
 import concurrent.futures
-import contextlib
 import copy
 import hashlib
 import io
@@ -24,7 +23,7 @@
 import urllib.request
 import warnings
 from contextlib import closing
-from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
+from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -92,9 +91,6 @@
 CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2


-INDEX_HASH_FILE = "index.json.hash"
-
-
 class BuildCacheDatabase(spack_db.Database):
     """A database for binary buildcaches.
@@ -506,7 +502,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
         scheme = urllib.parse.urlparse(mirror_url).scheme

         if scheme != "oci" and not web_util.url_exists(
-            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
         ):
             return False
@@ -629,7 +625,14 @@ def tarball_directory_name(spec):
     Return name of the tarball directory according to the convention
     <os>-<architecture>/<compiler>/<package>-<version>/
     """
-    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")
+    if spec.original_spec_format() < 5:
+        compiler = spec.annotations.compiler_node_attribute
+        assert compiler is not None, "a compiler spec is expected"
+        return spec.format_path(
+            f"{spec.architecture}/{compiler.name}-{compiler.version}/{spec.name}-{spec.version}"
+        )
+
+    return spec.format_path(f"{spec.architecture.platform}/{spec.name}-{spec.version}")


 def tarball_name(spec, ext):
@@ -637,9 +640,17 @@ def tarball_name(spec, ext):
     Return the name of the tarfile according to the convention
     <os>-<architecture>-<package>-<dag_hash><ext>
     """
-    spec_formatted = spec.format_path(
-        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
-    )
+    if spec.original_spec_format() < 5:
+        compiler = spec.annotations.compiler_node_attribute
+        assert compiler is not None, "a compiler spec is expected"
+        spec_formatted = (
+            f"{spec.architecture}-{compiler.name}-{compiler.version}-{spec.name}"
+            f"-{spec.version}-{spec.dag_hash()}"
+        )
+    else:
+        spec_formatted = (
+            f"{spec.architecture.platform}-{spec.name}-{spec.version}-{spec.dag_hash()}"
+        )
     return f"{spec_formatted}{ext}"
@@ -673,24 +684,19 @@ def sign_specfile(key: str, specfile_path: str) -> str:


 def _read_specs_and_push_index(
-    file_list: List[str],
-    read_method: Callable,
-    cache_prefix: str,
-    db: BuildCacheDatabase,
-    temp_dir: str,
-    concurrency: int,
+    file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
 ):
     """Read all the specs listed in the provided list, using thread given thread parallelism,
     generate the index, and push it to the mirror.

     Args:
-        file_list: List of urls or file paths pointing at spec files to read
+        file_list (list(str)): List of urls or file paths pointing at spec files to read
         read_method: A function taking a single argument, either a url or a file path,
            and which reads the spec file at that location, and returns the spec.
-        cache_prefix: prefix of the build cache on s3 where index should be pushed.
+        cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
         db: A spack database used for adding specs and then writing the index.
-        temp_dir: Location to write index.json and hash for pushing
-        concurrency: Number of parallel processes to use when fetching
+        temp_dir (str): Location to write index.json and hash for pushing
+        concurrency (int): Number of parallel processes to use when fetching
     """
     for file in file_list:
         contents = read_method(file)
@@ -708,7 +714,7 @@ def _read_specs_and_push_index(

     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
-    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
+    index_json_path = os.path.join(temp_dir, "index.json")
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
@@ -718,14 +724,14 @@ def _read_specs_and_push_index(
     index_hash = compute_hash(index_string)

     # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
+    index_hash_path = os.path.join(temp_dir, "index.json.hash")
     with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)

     # Push the index itself
     web_util.push_to_url(
         index_json_path,
-        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
+        url_util.join(cache_prefix, "index.json"),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )
@@ -733,7 +739,7 @@ def _read_specs_and_push_index(
     # Push the hash
     web_util.push_to_url(
         index_hash_path,
-        url_util.join(cache_prefix, INDEX_HASH_FILE),
+        url_util.join(cache_prefix, "index.json.hash"),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )
@@ -802,7 +808,7 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except (web_util.SpackWebError, OSError) as e:
+        except web_util.SpackWebError as e:
             tty.error(f"Error reading specfile: {url}: {e}")
         return contents
@@ -870,12 +876,9 @@ def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
     tty.debug(f"Retrieving spec descriptor files from {url} to build index")

     db = BuildCacheDatabase(tmpdir)
-    db._write()

     try:
-        _read_specs_and_push_index(
-            file_list, read_fn, url, db, str(db.database_directory), concurrency
-        )
+        _read_specs_and_push_index(file_list, read_fn, url, db, db.database_directory, concurrency)
     except Exception as e:
         raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e
@@ -1789,7 +1792,7 @@ def _oci_update_index(
         db.mark(spec, "in_buildcache", True)

     # Create the index.json file
-    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
+    index_json_path = os.path.join(tmpdir, "index.json")
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
@@ -2010,7 +2013,7 @@ def fetch_url_to_mirror(url):

             # Download the config = spec.json and the relevant tarball
             try:
-                manifest = json.load(response)
+                manifest = json.loads(response.read())
                 spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
                 tarball_digest = spack.oci.image.Digest.from_string(
                     manifest["layers"][-1]["digest"]
@@ -2137,9 +2140,10 @@


 def dedupe_hardlinks_if_necessary(root, buildinfo):
-    """Updates a buildinfo dict for old archives that did not dedupe hardlinks. De-duping hardlinks
-    is necessary when relocating files in parallel and in-place. This means we must preserve inodes
-    when relocating."""
+    """Updates a buildinfo dict for old archives that did
+    not dedupe hardlinks. De-duping hardlinks is necessary
+    when relocating files in parallel and in-place. This
+    means we must preserve inodes when relocating."""

     # New archives don't need this.
     if buildinfo.get("hardlinks_deduped", False):
@@ -2168,47 +2172,69 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
         buildinfo[key] = new_list


-def relocate_package(spec: spack.spec.Spec) -> None:
-    """Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
-    spec_prefix = str(spec.prefix)
-    buildinfo = read_buildinfo_file(spec_prefix)
-    old_layout_root = str(buildinfo["buildpath"])
+def relocate_package(spec):
+    """
+    Relocate the given package
+    """
+    workdir = str(spec.prefix)
+    buildinfo = read_buildinfo_file(workdir)
+    new_layout_root = str(spack.store.STORE.layout.root)
+    new_prefix = str(spec.prefix)
+    new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
+    new_spack_prefix = str(spack.paths.prefix)

-    # Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
+    old_sbang_install_path = None
+    if "sbang_install_path" in buildinfo:
+        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+    old_layout_root = str(buildinfo["buildpath"])
+    old_spack_prefix = str(buildinfo.get("spackprefix"))
+    old_rel_prefix = buildinfo.get("relative_prefix")
+    old_prefix = os.path.join(old_layout_root, old_rel_prefix)
+
+    # Warn about old style tarballs created with the now removed --rel flag.
     if buildinfo.get("relative_rpaths", False):
         tty.warn(
-            f"Tarball for {spec} uses relative rpaths, which can cause library loading issues."
+            f"Tarball for {spec} uses relative rpaths, " "which can cause library loading issues."
         )

-    # In Spack 0.19 and older prefix_to_hash was the default and externals were not dropped, so
-    # prefixes were not unique.
+    # In the past prefix_to_hash was the default and externals were not dropped, so prefixes
+    # were not unique.
     if "hash_to_prefix" in buildinfo:
         hash_to_old_prefix = buildinfo["hash_to_prefix"]
     elif "prefix_to_hash" in buildinfo:
-        hash_to_old_prefix = {v: k for (k, v) in buildinfo["prefix_to_hash"].items()}
+        hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
     else:
-        raise NewLayoutException(
-            "Package tarball was created from an install prefix with a different directory layout "
-            "and an older buildcache create implementation. It cannot be relocated."
-        )
+        hash_to_old_prefix = dict()

-    prefix_to_prefix: Dict[str, str] = {}
+    if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
+        msg = "Package tarball was created from an install "
+        msg += "prefix with a different directory layout and an older "
+        msg += "buildcache create implementation. It cannot be relocated."
+        raise NewLayoutException(msg)

-    if "sbang_install_path" in buildinfo:
-        old_sbang_install_path = str(buildinfo["sbang_install_path"])
-        prefix_to_prefix[old_sbang_install_path] = spack.hooks.sbang.sbang_install_path()
+    # Spurious replacements (e.g. sbang) will cause issues with binaries
+    # For example, the new sbang can be longer than the old one.
+    # Hence 2 dictionaries are maintained here.
+    prefix_to_prefix_text = collections.OrderedDict()
+    prefix_to_prefix_bin = collections.OrderedDict()

-    # First match specific prefix paths. Possibly the *local* install prefix of some dependency is
-    # in an upstream, so we cannot assume the original spack store root can be mapped uniformly to
-    # the new spack store root.
+    if old_sbang_install_path:
+        install_path = spack.hooks.sbang.sbang_install_path()
+        prefix_to_prefix_text[old_sbang_install_path] = install_path

-    # If the spec is spliced, we need to handle the simultaneous mapping from the old install_tree
-    # to the new install_tree and from the build_spec to the spliced spec. Because foo.build_spec
-    # is foo for any non-spliced spec, we can simplify by checking for spliced-in nodes by checking
-    # for nodes not in the build_spec without any explicit check for whether the spec is spliced.
-    # An analog in this algorithm is any spec that shares a name or provides the same virtuals in
-    # the context of the relevant root spec. This ensures that the analog for a spec s is the spec
-    # that s replaced when we spliced.
+    # First match specific prefix paths. Possibly the *local* install prefix
+    # of some dependency is in an upstream, so we cannot assume the original
+    # spack store root can be mapped uniformly to the new spack store root.
+    #
+    # If the spec is spliced, we need to handle the simultaneous mapping
+    # from the old install_tree to the new install_tree and from the build_spec
+    # to the spliced spec.
+    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
+    # by checking for spliced-in nodes by checking for nodes not in the build_spec
+    # without any explicit check for whether the spec is spliced.
+    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
+    # in the context of the relevant root spec. This ensures that the analog for a spec s
+    # is the spec that s replaced when we spliced.
     relocation_specs = specs_to_relocate(spec)
     build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
     for s in relocation_specs:
@@ -2228,66 +2254,72 @@ def relocate_package(spec: spack.spec.Spec) -> None:
         lookup_dag_hash = analog.dag_hash()
         if lookup_dag_hash in hash_to_old_prefix:
             old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
-            prefix_to_prefix[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)

     # Only then add the generic fallback of install prefix -> install prefix.
-    prefix_to_prefix[old_layout_root] = str(spack.store.STORE.layout.root)
+    prefix_to_prefix_text[old_prefix] = new_prefix
+    prefix_to_prefix_bin[old_prefix] = new_prefix
+    prefix_to_prefix_text[old_layout_root] = new_layout_root
+    prefix_to_prefix_bin[old_layout_root] = new_layout_root

-    # Delete identity mappings from prefix_to_prefix
-    prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}
+    # This is vestigial code for the *old* location of sbang. Previously,
+    # sbang was a bash script, and it lived in the spack prefix. It is
+    # now a POSIX script that lives in the install prefix. Old packages
+    # will have the old sbang location in their shebangs.
+    orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
+    new_sbang = spack.hooks.sbang.sbang_shebang_line()
+    prefix_to_prefix_text[orig_sbang] = new_sbang

-    # If there's nothing to relocate, we're done.
-    if not prefix_to_prefix:
-        return
-
-    for old, new in prefix_to_prefix.items():
-        tty.debug(f"Relocating: {old} => {new}.")
+    tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))

     # Old archives may have hardlinks repeated.
-    dedupe_hardlinks_if_necessary(spec_prefix, buildinfo)
+    dedupe_hardlinks_if_necessary(workdir, buildinfo)

     # Text files containing the prefix text
-    textfiles = [os.path.join(spec_prefix, f) for f in buildinfo["relocate_textfiles"]]
-    binaries = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_binaries")]
-    links = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_links", [])]
+    text_names = [os.path.join(workdir, f) for f in buildinfo["relocate_textfiles"]]

-    platform = spack.platforms.by_name(spec.platform)
-    if "macho" in platform.binary_formats:
-        relocate.relocate_macho_binaries(binaries, prefix_to_prefix)
-    elif "elf" in platform.binary_formats:
-        relocate.relocate_elf_binaries(binaries, prefix_to_prefix)
+    # If we are not installing back to the same install tree do the relocation
+    if old_prefix != new_prefix:
+        files_to_relocate = [
+            os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
+        ]
+        # If the buildcache was not created with relativized rpaths
+        # do the relocation of path in binaries
+        platform = spack.platforms.by_name(spec.platform)
+        if "macho" in platform.binary_formats:
+            relocate.relocate_macho_binaries(files_to_relocate, prefix_to_prefix_bin)
+        elif "elf" in platform.binary_formats:
+            # The new ELF dynamic section relocation logic only handles absolute to
+            # absolute relocation.
+            relocate.relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)

-    relocate.relocate_links(links, prefix_to_prefix)
-    relocate.relocate_text(textfiles, prefix_to_prefix)
-    changed_files = relocate.relocate_text_bin(binaries, prefix_to_prefix)
+        # Relocate links to the new install prefix
+        links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
+        relocate.relocate_links(links, prefix_to_prefix_bin)

-    # Add ad-hoc signatures to patched macho files when on macOS.
-    if "macho" in platform.binary_formats and sys.platform == "darwin":
-        codesign = which("codesign")
-        if not codesign:
-            return
-        for binary in changed_files:
-            # preserve the original inode by running codesign on a copy
-            with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
-                codesign("-fs-", tmp_binary)
+        # For all buildcaches
+        # relocate the install prefixes in text files including dependencies
+        relocate.relocate_text(text_names, prefix_to_prefix_text)

-    install_manifest = os.path.join(
-        spec.prefix,
-        spack.store.STORE.layout.metadata_dir,
-        spack.store.STORE.layout.manifest_file_name,
-    )
-    if not os.path.exists(install_manifest):
-        spec_id = spec.format("{name}/{hash:7}")
-        tty.warn("No manifest file in tarball for spec %s" % spec_id)
+        # relocate the install prefixes in binary files including dependencies
+        changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)

-    # overwrite old metadata with new
-    if spec.spliced:
-        # rewrite spec on disk
-        spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
+        # Add ad-hoc signatures to patched macho files when on macOS.
+        if "macho" in platform.binary_formats and sys.platform == "darwin":
+            codesign = which("codesign")
+            if not codesign:
+                return
+            for binary in changed_files:
+                # preserve the original inode by running codesign on a copy
+                with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
+                    codesign("-fs-", tmp_binary)

-    # de-cache the install manifest
-    with contextlib.suppress(FileNotFoundError):
-        os.unlink(install_manifest)
+    # If we are installing back to the same location
+    # relocate the sbang location if the spack directory changed
+    else:
+        if old_spack_prefix != new_spack_prefix:
+            relocate.relocate_text(text_names, prefix_to_prefix_text)
@@ -2455,6 +2487,15 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
     except Exception as e:
         shutil.rmtree(spec.prefix, ignore_errors=True)
         raise e
+    else:
+        manifest_file = os.path.join(
+            spec.prefix,
+            spack.store.STORE.layout.metadata_dir,
+            spack.store.STORE.layout.manifest_file_name,
+        )
+        if not os.path.exists(manifest_file):
+            spec_id = spec.format("{name}/{hash:7}")
+            tty.warn("No manifest file in tarball for spec %s" % spec_id)
     finally:
         if tmpdir:
             shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2559,6 +2600,10 @@ def install_root_node(
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, force)
     spec.package.windows_establish_runtime_linkage()
+    if spec.spliced:  # overwrite old metadata with new
+        spack.store.STORE.layout.write_spec(
+            spec, spack.store.STORE.layout.spec_file_path(spec)
+        )
     spack.hooks.post_install(spec, False)
     spack.store.STORE.db.add(spec, allow_missing=allow_missing)
@@ -2596,14 +2641,11 @@ def try_direct_fetch(spec, mirrors=None):
     )
     try:
         _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
-        specfile_contents = codecs.getreader("utf-8")(fs).read()
         specfile_is_signed = True
-    except (web_util.SpackWebError, OSError) as e1:
+    except web_util.SpackWebError as e1:
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            specfile_contents = codecs.getreader("utf-8")(fs).read()
             specfile_is_signed = False
-        except (web_util.SpackWebError, OSError) as e2:
+        except web_util.SpackWebError as e2:
             tty.debug(
                 f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                 e1,
@@ -2613,6 +2655,7 @@ def try_direct_fetch(spec, mirrors=None):
                 f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
             )
             continue
+        specfile_contents = codecs.getreader("utf-8")(fs).read()

     # read the spec from the build cache file. All specs in build caches
     # are concrete (as they are built) so we need to mark this spec
@@ -2706,9 +2749,8 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

     try:
         _, _, json_file = web_util.read_from_url(keys_index)
-        json_index = sjson.load(json_file)
-    except (web_util.SpackWebError, OSError, ValueError) as url_err:
-        # TODO: avoid repeated request
+        json_index = sjson.load(codecs.getreader("utf-8")(json_file))
+    except web_util.SpackWebError as url_err:
         if web_util.url_exists(keys_index):
             tty.error(
                 f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -2955,14 +2997,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):

     def get_remote_hash(self):
         # Failure to fetch index.json.hash is not fatal
-        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
+        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-            remote_hash = response.read(64)
-        except OSError:
+        except (TimeoutError, urllib.error.URLError):
             return None

         # Validate the hash
+        remote_hash = response.read(64)
         if not re.match(rb"[a-f\d]{64}$", remote_hash):
             return None
         return remote_hash.decode("utf-8")
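In `get_remote_hash` above, a fetched index hash is only trusted if it is exactly 64 lowercase hex characters. A tiny standalone version of that check (the helper name is hypothetical):

```python
import re

def looks_like_sha256(blob: bytes) -> bool:
    # A SHA-256 digest in hex is exactly 64 chars drawn from [0-9a-f];
    # re.match anchors at the start and the trailing $ anchors the end.
    return re.match(rb"[a-f\d]{64}$", blob) is not None

print(looks_like_sha256(b"a" * 64))         # True
print(looks_like_sha256(b"not a hash"))     # False
print(looks_like_sha256(b"a" * 63 + b"G"))  # False: uppercase/garbage rejected
```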
@@ -2976,17 +3018,17 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

         # Otherwise, download index.json
-        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")

         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except OSError as e:
-            raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e
+        except (TimeoutError, urllib.error.URLError) as e:
+            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

         try:
             result = codecs.getreader("utf-8")(response).read()
-        except (ValueError, OSError) as e:
-            raise FetchIndexError(f"Remote index {url_index} is invalid") from e
+        except ValueError as e:
+            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e

         computed_hash = compute_hash(result)
@@ -3020,7 +3062,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):

     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
-        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
+        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
         headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}

         try:
@@ -3030,12 +3072,12 @@ def conditional_fetch(self) -> FetchIndexResult:
                 # Not modified; that means fresh.
                 return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
             raise FetchIndexError(f"Could not fetch index {url}", e) from e
-        except OSError as e:  # URLError, socket.timeout, etc.
+        except (TimeoutError, urllib.error.URLError) as e:
             raise FetchIndexError(f"Could not fetch index {url}", e) from e

         try:
             result = codecs.getreader("utf-8")(response).read()
-        except (ValueError, OSError) as e:
+        except ValueError as e:
             raise FetchIndexError(f"Remote index {url} is invalid", e) from e

         headers = response.headers
@@ -3067,11 +3109,11 @@ def conditional_fetch(self) -> FetchIndexResult:
                     headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                 )
             )
-        except OSError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e

         try:
-            manifest = json.load(response)
+            manifest = json.loads(response.read())
         except Exception as e:
            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
@@ -3086,16 +3128,14 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

         # Otherwise fetch the blob / index.json
-        try:
-            response = self.urlopen(
-                urllib.request.Request(
-                    url=self.ref.blob_url(index_digest),
-                    headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
-                )
+        response = self.urlopen(
+            urllib.request.Request(
+                url=self.ref.blob_url(index_digest),
+                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
             )
-            result = codecs.getreader("utf-8")(response).read()
-        except (OSError, ValueError) as e:
-            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
+        )
+
+        result = codecs.getreader("utf-8")(response).read()

         # Make sure the blob we download has the advertised hash
         if compute_hash(result) != index_digest.digest:
@@ -5,14 +5,12 @@
 import fnmatch
 import glob
 import importlib
 import os
+import os.path
 import re
 import sys
 import sysconfig
 import warnings
-from typing import Optional, Sequence, Union
-
-from typing_extensions import TypedDict
+from typing import Dict, Optional, Sequence, Union

 import archspec.cpu
@@ -20,17 +18,13 @@
 from llnl.util import tty

 import spack.platforms
 import spack.spec
 import spack.store
 import spack.util.environment
 import spack.util.executable

 from .config import spec_for_current_python


-class QueryInfo(TypedDict, total=False):
-    spec: spack.spec.Spec
-    command: spack.util.executable.Executable
+QueryInfo = Dict[str, "spack.spec.Spec"]
@@ -217,9 +211,7 @@ def _executables_in_store(
     ):
         spack.util.environment.path_put_first("PATH", [bin_dir])
         if query_info is not None:
-            query_info["command"] = spack.util.executable.which(
-                *executables, path=bin_dir, required=True
-            )
+            query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
             query_info["spec"] = concrete_spec
         return True
     return False
@@ -234,12 +226,13 @@ def _root_spec(spec_str: str) -> str:
     # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())

-    if platform == "darwin":
-        spec_str += " %apple-clang"
-    elif platform == "windows":
+    # FIXME (compiler as nodes): recover the compiler for source bootstrapping
+    # if platform == "darwin":
+    #     spec_str += " %apple-clang"
+    if platform == "windows":
         spec_str += " %msvc"
-    elif platform == "linux":
-        spec_str += " %gcc"
+    # elif platform == "linux":
+    #     spec_str += " %gcc"
     elif platform == "freebsd":
         spec_str += " %clang"
     spec_str += f" platform={platform}"
@@ -15,11 +15,13 @@

 import archspec.cpu

-import spack.compiler
-import spack.compilers
+import spack.compilers.config
+import spack.compilers.libraries
 import spack.config
 import spack.platforms
 import spack.spec
 import spack.traverse
 import spack.version

 from .config import spec_for_current_python
@@ -38,7 +40,7 @@ def __init__(self, configuration):

         self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)

-    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
+    def _valid_compiler_or_raise(self):
         if str(self.host_platform) == "linux":
             compiler_name = "gcc"
         elif str(self.host_platform) == "darwin":
@@ -46,17 +48,30 @@ def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
         elif str(self.host_platform) == "windows":
             compiler_name = "msvc"
         elif str(self.host_platform) == "freebsd":
-            compiler_name = "clang"
+            compiler_name = "llvm"
         else:
             raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
-        candidates = spack.compilers.compilers_for_spec(
-            compiler_name, arch_spec=self.host_architecture
-        )
+
+        candidates = [
+            x
+            for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
+            if x.name == compiler_name
+        ]
         if not candidates:
             raise RuntimeError(
                 f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
             )
-        candidates.sort(key=lambda x: x.spec.version, reverse=True)
+        candidates.sort(key=lambda x: x.version, reverse=True)
+        best = candidates[0]
+        # Get compilers for bootstrapping from the 'builtin' repository
+        best.namespace = "builtin"
+        # If the compiler does not support C++ 14, fail with a legible error message
+        try:
+            _ = best.package.standard_flag(language="cxx", standard="14")
+        except RuntimeError as e:
+            raise RuntimeError(
+                "cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
+            ) from e
         return candidates[0]
@@ -75,9 +90,6 @@ def _externals_from_yaml(
             if not s.satisfies(requirements[pkg_name]):
                 continue

-            if not s.intersects(f"%{self.host_compiler.spec}"):
-                continue
-
             if not s.intersects(f"arch={self.host_architecture}"):
                 continue
@@ -110,11 +122,14 @@ def concretize(self) -> "spack.spec.Spec":
         # Tweak it to conform to the host architecture
         for node in s.traverse():
             node.architecture.os = str(self.host_os)
-            node.compiler = self.host_compiler.spec
             node.architecture = self.host_architecture

             if node.name == "gcc-runtime":
-                node.versions = self.host_compiler.spec.versions
+                node.versions = self.host_compiler.versions
+
+        # Can't use re2c@3.1 with Python 3.6
+        if self.host_python.satisfies("@3.6"):
+            s["re2c"].versions.versions = [spack.version.from_string("=2.2")]

         for edge in spack.traverse.traverse_edges([s], cover="edges"):
             if edge.spec.name == "python":
@@ -126,6 +141,9 @@ def concretize(self) -> "spack.spec.Spec":
             if edge.spec.name == "cmake" and self.external_cmake:
                 edge.spec = self.external_cmake

+            if edge.spec.name == self.host_compiler.name:
+                edge.spec = self.host_compiler
+
             if "libc" in edge.virtuals:
                 edge.spec = self.host_libc
@@ -141,12 +159,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
         return self._external_spec(result)

     def libc_external_spec(self) -> "spack.spec.Spec":
-        result = self.host_compiler.default_libc
+        detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
+        result = detector.default_libc()
         return self._external_spec(result)

     def _external_spec(self, initial_spec) -> "spack.spec.Spec":
         initial_spec.namespace = "builtin"
-        initial_spec.compiler = self.host_compiler.spec
         initial_spec.architecture = self.host_architecture
         for flag_type in spack.spec.FlagMap.valid_compiler_flags():
             initial_spec.compiler_flags[flag_type] = []
@@ -4,13 +4,13 @@
 """Manage configuration swapping for bootstrapping purposes"""

 import contextlib
 import os
 import os.path
 import sys
 from typing import Any, Dict, Generator, MutableSequence, Sequence

 from llnl.util import tty

-import spack.compilers
+import spack.compilers.config
 import spack.config
 import spack.environment
 import spack.modules
@@ -142,8 +142,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:

 def _add_compilers_if_missing() -> None:
     arch = spack.spec.ArchSpec.frontend_arch()
-    if not spack.compilers.compilers_for_arch(arch):
-        spack.compilers.find_compilers()
+    if not spack.compilers.config.compilers_for_arch(arch):
+        spack.compilers.config.find_compilers()


 @contextlib.contextmanager
@@ -25,6 +25,7 @@
import functools
import json
import os
import os.path
import sys
import uuid
from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -33,10 +34,8 @@
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.concretize
import spack.config
import spack.detection
import spack.error
import spack.mirrors.mirror
import spack.platforms
import spack.spec
@@ -45,17 +44,10 @@
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version
from spack.installer import PackageInstaller

from ._common import (
    QueryInfo,
    _executables_in_store,
    _python_import,
    _root_spec,
    _try_import_from_store,
)
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python

@@ -97,12 +89,8 @@ def __init__(self, conf: ConfigDictionary) -> None:
        self.name = conf["name"]
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

        # Check for relative paths, and turn them into absolute paths
        # root is the metadata_dir
        maybe_url = conf["info"]["url"]
        if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
            maybe_url = os.path.join(self.metadata_dir, maybe_url)
        self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url
        # Promote (relative) paths to file urls
        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url

    @property
    def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -146,7 +134,7 @@ class BuildcacheBootstrapper(Bootstrapper):

    def __init__(self, conf) -> None:
        super().__init__(conf)
        self.last_search: Optional[QueryInfo] = None
        self.last_search: Optional[ConfigDictionary] = None
        self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

    @staticmethod
@@ -223,14 +211,14 @@ def _install_and_test(
            for _, pkg_hash, pkg_sha256 in item["binaries"]:
                self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

            info: QueryInfo = {}
            info: ConfigDictionary = {}
            if test_fn(query_spec=abstract_spec, query_info=info):
                self.last_search = info
                return True
        return False

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
        info: QueryInfo
        info: ConfigDictionary
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            return True
@@ -243,7 +231,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
        info: QueryInfo
        info: ConfigDictionary
        test_fn, info = functools.partial(_executables_in_store, executables), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            self.last_search = info
@@ -261,11 +249,11 @@ class SourceBootstrapper(Bootstrapper):

    def __init__(self, conf) -> None:
        super().__init__(conf)
        self.last_search: Optional[QueryInfo] = None
        self.last_search: Optional[ConfigDictionary] = None
        self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
        info: QueryInfo = {}
        info: ConfigDictionary = {}
        if _try_import_from_store(module, abstract_spec_str, query_info=info):
            self.last_search = info
            return True
@@ -282,17 +270,22 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
            bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
            concrete_spec = bootstrapper.concretize()
        else:
            abstract_spec = spack.spec.Spec(
            concrete_spec = spack.spec.Spec(
                abstract_spec_str + " ^" + spec_for_current_python()
            )
            concrete_spec = spack.concretize.concretize_one(abstract_spec)
            concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            PackageInstaller([concrete_spec.package], fail_fast=True).install()
            PackageInstaller(
                [concrete_spec.package],
                fail_fast=True,
                package_use_cache=False,
                dependencies_use_cache=False,
            ).install()

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
@@ -300,7 +293,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        return False

    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
        info: QueryInfo = {}
        info: ConfigDictionary = {}
        if _executables_in_store(executables, abstract_spec_str, query_info=info):
            self.last_search = info
            return True
@@ -311,7 +304,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
    # might reduce compilation time by a fair amount
    _add_externals_if_missing()

    concrete_spec = spack.concretize.concretize_one(abstract_spec_str)
    concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
    msg = "[BOOTSTRAP] Try installing '{0}' from sources"
    tty.debug(msg.format(abstract_spec_str))
    with spack.config.override(self.mirror_scope):
@@ -328,9 +321,10 @@ def create_bootstrapper(conf: ConfigDictionary):
    return _bootstrap_methods[btype](conf)


def source_is_enabled(conf: ConfigDictionary) -> bool:
    """Returns True if the source is enabled for bootstrapping"""
    return spack.config.get("bootstrap:trusted").get(conf["name"], False)
def source_is_enabled(conf: ConfigDictionary):
    """Raise ValueError if the source is not enabled for bootstrapping"""
    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
    return trusted.get(name, False)


def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
@@ -362,21 +356,20 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
    for current_config in bootstrapping_sources():
        if not source_is_enabled(current_config):
            continue

        with exception_handler.forward(current_config["name"], Exception):
            if create_bootstrapper(current_config).try_import(module, abstract_spec):
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_import(module, abstract_spec):
                return

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {module}"
    )
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '

    if not exception_handler:
        msg += ": no bootstrapping sources are enabled"
    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --backtrace ...` for more detailed errors"
    msg += exception_handler.grouped_message(with_tracebacks=tty.is_debug())
    raise ImportError(msg)


@@ -424,7 +417,6 @@ def ensure_executables_in_path_or_raise(
                current_bootstrapper.last_search["spec"],
                current_bootstrapper.last_search["command"],
            )
            assert cmd is not None, "expected an Executable"
            cmd.add_default_envmod(
                spack.user_environment.environment_modifications_for_specs(
                    concrete_spec, set_package_py_globals=False
@@ -432,17 +424,18 @@ def ensure_executables_in_path_or_raise(
                )
            )
            return cmd

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {executables_str}"
    )
    msg = f"cannot bootstrap any of the {executables_str} executables "
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '

    if not exception_handler:
        msg += ": no bootstrapping sources are enabled"
    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --backtrace ...` for more detailed errors"
        msg += "\nRun `spack --debug ...` for more detailed errors"
    raise RuntimeError(msg)
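
The loop in ensure_module_importable_or_raise tries each enabled bootstrapping source in turn and collects failures instead of aborting on the first one. A stripped-down sketch of that fallback pattern, with a hypothetical sources list and try_import API standing in for the real bootstrappers:

# Stripped-down sketch of the try-each-source-and-collect-errors pattern.
def bootstrap_module(module, sources):
    errors = {}
    for source in sources:
        try:
            if source.try_import(module):  # hypothetical bootstrapper API
                return
        except Exception as e:
            errors[source.name] = e  # keep going; report everything at the end
    detail = "\n".join(f"  {name}: {err}" for name, err in errors.items())
    raise ImportError(f'cannot bootstrap the "{module}" Python module\n{detail}')
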
File diff suppressed because one or more lines are too long (7 files)
@@ -63,6 +63,7 @@ def _missing(name: str, purpose: str, system_only: bool = True) -> str:

def _core_requirements() -> List[RequiredResponseType]:
    _core_system_exes = {
        "make": _missing("make", "required to build software from sources"),
        "patch": _missing("patch", "required to patch source code before building"),
        "tar": _missing("tar", "required to manage code archives"),
        "gzip": _missing("gzip", "required to compress/decompress code archives"),
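
The table above maps each core system executable to the message shown when it is missing. A small hedged sketch of how such a table can be checked with the standard library (the messages are illustrative, not Spack's exact wording):

import shutil

# Illustrative check of a table of required executables, in the spirit of the
# hunk above; the messages are examples only.
core_system_exes = {
    "make": "required to build software from sources",
    "patch": "required to patch source code before building",
    "tar": "required to manage code archives",
    "gzip": "required to compress/decompress code archives",
}

for exe, purpose in core_system_exes.items():
    if shutil.which(exe) is None:
        print(f"[FAIL] {exe}: {purpose}")
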
@@ -36,7 +36,6 @@
import multiprocessing
import os
import re
import stat
import sys
import traceback
import types
@@ -44,19 +43,7 @@
from enum import Flag, auto
from itertools import chain
from multiprocessing.connection import Connection
from typing import (
    Callable,
    Dict,
    List,
    Optional,
    Sequence,
    Set,
    TextIO,
    Tuple,
    Type,
    Union,
    overload,
)
from typing import Callable, Dict, List, Optional, Set, Tuple

import archspec.cpu

@@ -71,7 +58,7 @@
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers
import spack.compilers.libraries
import spack.config
import spack.deptypes as dt
import spack.error
@@ -85,7 +72,6 @@
import spack.store
import spack.subprocess_context
import spack.util.executable
import spack.util.libc
from spack import traverse
from spack.context import Context
from spack.error import InstallError, NoHeadersError, NoLibrariesError
@@ -93,6 +79,7 @@
from spack.util.environment import (
    SYSTEM_DIR_CASE_ENTRY,
    EnvironmentModifications,
    PrependPath,
    env_flag,
    filter_system_paths,
    get_path,
@@ -158,128 +145,48 @@ def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):


class MakeExecutable(Executable):
    """Special callable executable object for make so the user can specify parallelism options
    on a per-invocation basis.
    """Special callable executable object for make so the user can specify
    parallelism options on a per-invocation basis. Specifying
    'parallel' to the call will override whatever the package's
    global setting is, so you can either default to true or false and
    override particular calls. Specifying 'jobs_env' to a particular
    call will name an environment variable which will be set to the
    parallelism level (without affecting the normal invocation with
    -j).
    """

    def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
        super().__init__(name)
    def __init__(self, name, jobs, **kwargs):
        supports_jobserver = kwargs.pop("supports_jobserver", True)
        super().__init__(name, **kwargs)
        self.supports_jobserver = supports_jobserver
        self.jobs = jobs

    @overload
    def __call__(
        self,
        *args: str,
        parallel: bool = ...,
        jobs_env: Optional[str] = ...,
        jobs_env_supports_jobserver: bool = ...,
        fail_on_error: bool = ...,
        ignore_errors: Union[int, Sequence[int]] = ...,
        ignore_quotes: Optional[bool] = ...,
        timeout: Optional[int] = ...,
        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        input: Optional[TextIO] = ...,
        output: Union[Optional[TextIO], str] = ...,
        error: Union[Optional[TextIO], str] = ...,
        _dump_env: Optional[Dict[str, str]] = ...,
    ) -> None: ...

    @overload
    def __call__(
        self,
        *args: str,
        parallel: bool = ...,
        jobs_env: Optional[str] = ...,
        jobs_env_supports_jobserver: bool = ...,
        fail_on_error: bool = ...,
        ignore_errors: Union[int, Sequence[int]] = ...,
        ignore_quotes: Optional[bool] = ...,
        timeout: Optional[int] = ...,
        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        input: Optional[TextIO] = ...,
        output: Union[Type[str], Callable] = ...,
        error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
        _dump_env: Optional[Dict[str, str]] = ...,
    ) -> str: ...

    @overload
    def __call__(
        self,
        *args: str,
        parallel: bool = ...,
        jobs_env: Optional[str] = ...,
        jobs_env_supports_jobserver: bool = ...,
        fail_on_error: bool = ...,
        ignore_errors: Union[int, Sequence[int]] = ...,
        ignore_quotes: Optional[bool] = ...,
        timeout: Optional[int] = ...,
        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        input: Optional[TextIO] = ...,
        output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
        error: Union[Type[str], Callable] = ...,
        _dump_env: Optional[Dict[str, str]] = ...,
    ) -> str: ...

    def __call__(
        self,
        *args: str,
        parallel: bool = True,
        jobs_env: Optional[str] = None,
        jobs_env_supports_jobserver: bool = False,
        **kwargs,
    ) -> Optional[str]:
        """Runs this "make" executable in a subprocess.

        Args:
            parallel: if False, parallelism is disabled
            jobs_env: environment variable that will be set to the current level of parallelism
            jobs_env_supports_jobserver: whether the jobs env supports a job server

        For all the other **kwargs, refer to the base class.
    def __call__(self, *args, **kwargs):
        """parallel, and jobs_env from kwargs are swallowed and used here;
        remaining arguments are passed through to the superclass.
        """
        parallel = kwargs.pop("parallel", True)
        jobs_env = kwargs.pop("jobs_env", None)
        jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)

        jobs = get_effective_jobs(
            self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
        )
        if jobs is not None:
            args = (f"-j{jobs}",) + args
            args = ("-j{0}".format(jobs),) + args

        if jobs_env:
            # Caller wants us to set an environment variable to control the parallelism
            # Caller wants us to set an environment variable to
            # control the parallelism.
            jobs_env_jobs = get_effective_jobs(
                self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
            )
            if jobs_env_jobs is not None:
                extra_env = kwargs.setdefault("extra_env", {})
                extra_env.update({jobs_env: str(jobs_env_jobs)})
                kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}

        return super().__call__(*args, **kwargs)

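
MakeExecutable lets a package toggle parallelism per invocation. A hedged usage sketch, written against the keyword-only variant of the constructor shown on one side of the hunk above (the job count is arbitrary):

# Usage sketch for the keyword-only variant of MakeExecutable shown above.
make = MakeExecutable("make", jobs=16)

make()                              # runs `make -j16`
make("install", parallel=False)     # runs `make install` serially
make("test", jobs_env="TEST_JOBS")  # also exports TEST_JOBS=16 for the child process
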
class UndeclaredDependencyError(spack.error.SpackError):
    """Raised if a dependency is invoking an executable through a module global, without
    declaring a dependency on it.
    """


class DeprecatedExecutable:
    def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
        self.pkg = pkg
        self.exe = exe
        self.exe_pkg = exe_pkg

    def __call__(self, *args, **kwargs):
        raise UndeclaredDependencyError(
            f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
        )

    def add_default_env(self, key: str, value: str):
        self.__call__()

def clean_environment():
    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere. We apply it immediately
@@ -301,13 +208,11 @@ def clean_environment():
    env.unset("CPLUS_INCLUDE_PATH")
    env.unset("OBJC_INCLUDE_PATH")

    # prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
    env.set("CONFIG_SITE", os.devnull)
    env.unset("CMAKE_PREFIX_PATH")

    env.unset("PYTHONPATH")
    env.unset("R_HOME")
    env.unset("R_ENVIRON")

    env.unset("LUA_PATH")
    env.unset("LUA_CPATH")

@@ -390,62 +295,10 @@ def _add_werror_handling(keep_werror, env):
    env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))


def set_compiler_environment_variables(pkg, env):
def set_wrapper_environment_variables_for_flags(pkg, env):
    assert pkg.spec.concrete
    compiler = pkg.compiler
    spec = pkg.spec

    # Make sure the executables for this compiler exist
    compiler.verify_executables()

    # Set compiler variables used by CMake and autotools
    assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))

    # Populate an object with the list of environment modifications
    # and return it
    # TODO : add additional kwargs for better diagnostics, like requestor,
    # ttyout, ttyerr, etc.
    link_dir = spack.paths.build_env_path

    # Set SPACK compiler variables so that our wrapper knows what to
    # call. If there is no compiler configured then use a default
    # wrapper which will emit an error if it is used.
    if compiler.cc:
        env.set("SPACK_CC", compiler.cc)
        env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
    else:
        env.set("CC", os.path.join(link_dir, "cc"))
    if compiler.cxx:
        env.set("SPACK_CXX", compiler.cxx)
        env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
    else:
        env.set("CXX", os.path.join(link_dir, "c++"))
    if compiler.f77:
        env.set("SPACK_F77", compiler.f77)
        env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
    else:
        env.set("F77", os.path.join(link_dir, "f77"))
    if compiler.fc:
        env.set("SPACK_FC", compiler.fc)
        env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
    else:
        env.set("FC", os.path.join(link_dir, "fc"))

    # Set SPACK compiler rpath flags so that our wrapper knows what to use
    env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
    env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
    env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
    env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
    env.set("SPACK_LINKER_ARG", compiler.linker_arg)

    # Check whether we want to force RPATH or RUNPATH
    if spack.config.get("config:shared_linking:type") == "rpath":
        env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
        env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
    else:
        env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
        env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)

    if pkg.keep_werror is not None:
        keep_werror = pkg.keep_werror
    else:
@@ -453,10 +306,6 @@ def set_compiler_environment_variables(pkg, env):

    _add_werror_handling(keep_werror, env)

    # Set the target parameters that the compiler will add
    isa_arg = optimization_flags(compiler, spec.target)
    env.set("SPACK_TARGET_ARGS", isa_arg)

    # Trap spack-tracked compiler flags as appropriate.
    # env_flags are easy to accidentally override.
    inject_flags = {}
@@ -489,75 +338,23 @@ def set_compiler_environment_variables(pkg, env):
        # implicit variables
        env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
    pkg.flags_to_build_system_args(build_system_flags)

    env.set("SPACK_COMPILER_SPEC", str(spec.compiler))

    env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)

    compiler.setup_custom_environment(pkg, env)

    return env

def optimization_flags(compiler, target):
    if spack.compilers.is_mixed_toolchain(compiler):
        msg = (
            "microarchitecture specific optimizations are not "
            "supported yet on mixed compiler toolchains [check"
            f" {compiler.name}@{compiler.version} for further details]"
        )
        tty.debug(msg)
        return ""

    # Try to check if the current compiler comes with a version number or
    # has an unexpected suffix. If so, treat it as a compiler with a
    # custom spec.
    compiler_version = compiler.version
    version_number, suffix = archspec.cpu.version_components(compiler.version)
    if not version_number or suffix:
        try:
            compiler_version = compiler.real_version
        except spack.util.executable.ProcessError as e:
            # log this and just return compiler.version instead
            tty.debug(str(e))

    version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string)
    try:
        result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
        result = target.optimization_flags(compiler.name, version_number)
    except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
        result = ""

    return result

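
Both sides of the hunk ultimately delegate to archspec to translate a (compiler, version, target) triple into ISA flags. A hedged stand-alone example of that archspec call (the compiler name and version are illustrative):

import archspec.cpu

# Ask archspec which flags a compiler needs to target a microarchitecture.
target = archspec.cpu.TARGETS["haswell"]
try:
    flags = target.optimization_flags("gcc", "12.3.0")  # e.g. "-march=haswell -mtune=haswell"
except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
    flags = ""
print(flags)
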
class FilterDefaultDynamicLinkerSearchPaths:
    """Remove rpaths to directories that are default search paths of the dynamic linker."""

    def __init__(self, dynamic_linker: Optional[str]) -> None:
        # Identify directories by (inode, device) tuple, which handles symlinks too.
        self.default_path_identifiers: Set[Tuple[int, int]] = set()
        if not dynamic_linker:
            return
        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
            try:
                s = os.stat(path)
                if stat.S_ISDIR(s.st_mode):
                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
            except OSError:
                continue

    def is_dynamic_loader_default_path(self, p: str) -> bool:
        try:
            s = os.stat(p)
            return (s.st_ino, s.st_dev) in self.default_path_identifiers
        except OSError:
            return False

    def __call__(self, dirs: List[str]) -> List[str]:
        if not self.default_path_identifiers:
            return dirs
        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]

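
The class above identifies directories by their (st_ino, st_dev) pair rather than by path string, so two paths that reach the same directory through a symlink compare equal. A minimal stdlib demonstration of that identification technique (runs on a POSIX system):

import os
import tempfile

# Two paths that point at the same directory (one via a symlink) compare equal
# when identified by their (inode, device) pair, as the class above does.
with tempfile.TemporaryDirectory() as tmp:
    real = os.path.join(tmp, "lib")
    alias = os.path.join(tmp, "lib-alias")
    os.mkdir(real)
    os.symlink(real, alias)

    def ident(p):
        s = os.stat(p)  # os.stat follows the symlink
        return (s.st_ino, s.st_dev)

    assert ident(real) == ident(alias)
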
def set_wrapper_variables(pkg, env):
    """Set environment variables used by the Spack compiler wrapper (which have the prefix
    `SPACK_`) and also add the compiler wrappers to PATH.
@@ -566,39 +363,8 @@ def set_wrapper_variables(pkg, env):
    this function computes these options in a manner that is intended to match the DAG traversal
    order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
    is using topo order."""
    # Set environment variables if specified for
    # the given compiler
    compiler = pkg.compiler
    env.extend(spack.schema.environment.parse(compiler.environment))

    if compiler.extra_rpaths:
        extra_rpaths = ":".join(compiler.extra_rpaths)
        env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)

    # Add spack build environment path with compiler wrappers first in
    # the path. We add the compiler wrapper path, which includes default
    # wrappers (cc, c++, f77, f90), AND a subdirectory containing
    # compiler-specific symlinks. The latter ensures that builds that
    # are sensitive to the *name* of the compiler see the right name when
    # we're building with the wrappers.
    #
    # Conflicts on case-insensitive systems (like "CC" and "cc") are
    # handled by putting one in the <build_env_path>/case-insensitive
    # directory. Add that to the path too.
    env_paths = []
    compiler_specific = os.path.join(
        spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
    )
    for item in [spack.paths.build_env_path, compiler_specific]:
        env_paths.append(item)
        ci = os.path.join(item, "case-insensitive")
        if os.path.isdir(ci):
            env_paths.append(ci)

    tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
    for item in env_paths:
        env.prepend_path("PATH", item)
    env.set_path(SPACK_ENV_PATH, env_paths)
    # Set compiler flags injected from the spec
    set_wrapper_environment_variables_for_flags(pkg, env)

    # Working directory for the spack command itself, for debug logs.
    if spack.config.get("config:debug"):
@@ -664,22 +430,15 @@ def set_wrapper_variables(pkg, env):
            lib_path = os.path.join(pkg.prefix, libdir)
            rpath_dirs.insert(0, lib_path)

    filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
        pkg.compiler.default_dynamic_linker
    )

    # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
    # branch above). link_dirs should be filtered with entries from _parse_link_paths.
    link_dirs = list(dedupe(filter_system_paths(link_dirs)))
    include_dirs = list(dedupe(filter_system_paths(include_dirs)))
    rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
    rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)

    # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
    # just this filter.
    implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
    if implicit_rpaths:
        env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
    default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec)
    if default_dynamic_linker_filter:
        rpath_dirs = default_dynamic_linker_filter(rpath_dirs)

    # Spack managed directories include the stage, store and upstream stores. We extend this with
    # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
@@ -715,9 +474,10 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
    module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
    module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
    module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
    # TODO: make these build deps that can be installed if not found.
    module.make = MakeExecutable("make", jobs)
    module.gmake = MakeExecutable("gmake", jobs)
    module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
    # TODO: johnwparent: add package or builder support to define these build tools
    # for now there is no entrypoint for builders to define these on their
    # own
@@ -731,26 +491,6 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    # Don't use which for this; we want to find it in the current dir.
    module.configure = Executable("./configure")

    # Put spack compiler paths in module scope. (Some packages use it
    # in setup_run_environment etc, so don't put it context == build)
    link_dir = spack.paths.build_env_path
    pkg_compiler = None
    try:
        pkg_compiler = pkg.compiler
    except spack.compilers.NoCompilerForSpecError as e:
        tty.debug(f"cannot set 'spack_cc': {str(e)}")

    if pkg_compiler is not None:
        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
    else:
        module.spack_cc = None
        module.spack_cxx = None
        module.spack_f77 = None
        module.spack_fc = None

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
    module.prefix = pkg.prefix
@@ -916,7 +656,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
        context == Context.TEST and pkg.test_requires_compiler
    )
    if need_compiler:
        set_compiler_environment_variables(pkg, env_mods)
        set_wrapper_variables(pkg, env_mods)

    # Platform specific setup goes before package specific setup. This is for setting
@@ -928,6 +667,11 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    env_mods.extend(setup_context.get_env_modifications())
    tty.debug("setup_package: collected all modifications from dependencies")

    tty.debug("setup_package: adding compiler wrappers paths")
    for x in env_mods.group_by_name()["SPACK_ENV_PATH"]:
        assert isinstance(x, PrependPath), "unexpected setting used for SPACK_ENV_PATH"
        env_mods.prepend_path("PATH", x.value)

    if context == Context.TEST:
        env_mods.prepend_path("PATH", ".")
    elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
@@ -941,11 +685,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):

    # Load modules on an already clean environment, just before applying Spack's
    # own environment modifications. This ensures Spack controls CC/CXX/... variables.
    if need_compiler:
        tty.debug("setup_package: loading compiler modules")
        for mod in pkg.compiler.modules:
            load_module(mod)

    load_external_modules(pkg)

    # Make sure nothing's strange about the Spack environment.
@@ -6,9 +6,7 @@
import llnl.util.filesystem as fs

import spack.directives
import spack.spec
import spack.util.executable
import spack.util.prefix

from .autotools import AutotoolsBuilder, AutotoolsPackage

@@ -19,18 +17,19 @@ class AspellBuilder(AutotoolsBuilder):
    to the Aspell extensions.
    """

    def configure(
        self,
        pkg: "AspellDictPackage",  # type: ignore[override]
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ):
    def configure(self, pkg, spec, prefix):
        aspell = spec["aspell"].prefix.bin.aspell
        prezip = spec["aspell"].prefix.bin.prezip
        destdir = prefix

        sh = spack.util.executable.Executable("/bin/sh")
        sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")
        sh = spack.util.executable.which("sh")
        sh(
            "./configure",
            "--vars",
            "ASPELL={0}".format(aspell),
            "PREZIP={0}".format(prezip),
            "DESTDIR={0}".format(destdir),
        )


# Aspell dictionaries install their bits into their prefix.lib
@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import stat
import subprocess
from typing import Callable, List, Optional, Set, Tuple, Union
@@ -11,6 +12,7 @@

import spack.build_environment
import spack.builder
import spack.compilers.libraries
import spack.error
import spack.package_base
import spack.phase_callbacks
@@ -355,13 +357,6 @@ def _do_patch_libtool_configure(self) -> None:
        )
        # Support Libtool 2.4.2 and older:
        x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
        # Configure scripts generated with libtool < 2.5.4 have a faulty test for the
        # -single_module linker flag. A deprecation warning makes it think the default is
        # -multi_module, triggering it to use problematic linker flags (such as ld -r). The
        # linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
        # `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
        # 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
        x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)

    @spack.phase_callbacks.run_after("configure")
    def _do_patch_libtool(self) -> None:
@@ -398,33 +393,44 @@ def _do_patch_libtool(self) -> None:
            markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())

        # Replace empty linker flag prefixes:
        if self.pkg.compiler.name == "nag":
        if self.spec.satisfies("%nag"):
            # Nag is mixed with gcc and g++, which are recognized correctly.
            # Therefore, we change only Fortran values:
            nag_pkg = self.spec["fortran"].package
            for tag in ["fc", "f77"]:
                marker = markers[tag]
                x.filter(
                    regex='^wl=""$',
                    repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
                    start_at="# ### BEGIN {0}".format(marker),
                    stop_at="# ### END {0}".format(marker),
                    repl=f'wl="{nag_pkg.linker_arg}"',
                    start_at=f"# ### BEGIN {marker}",
                    stop_at=f"# ### END {marker}",
                )
        else:
            x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))
            compiler_spec = spack.compilers.libraries.compiler_spec(self.spec)
            if compiler_spec:
                x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))

        # Replace empty PIC flag values:
        for cc, marker in markers.items():
        for compiler, marker in markers.items():
            if compiler == "cc":
                language = "c"
            elif compiler == "cxx":
                language = "cxx"
            else:
                language = "fortran"

            if language not in self.spec:
                continue

            x.filter(
                regex='^pic_flag=""$',
                repl='pic_flag="{0}"'.format(
                    getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
                ),
                start_at="# ### BEGIN {0}".format(marker),
                stop_at="# ### END {0}".format(marker),
                repl=f'pic_flag="{self.spec[language].package.pic_flag}"',
                start_at=f"# ### BEGIN {marker}",
                stop_at=f"# ### END {marker}",
            )

        # Other compiler-specific patches:
        if self.pkg.compiler.name == "fj":
        if self.spec.satisfies("%fj"):
            x.filter(regex="-nostdlib", repl="", string=True)
            rehead = r"/\S*/"
            for o in [
@@ -437,7 +443,7 @@ def _do_patch_libtool(self) -> None:
                r"crtendS\.o",
            ]:
                x.filter(regex=(rehead + o), repl="")
        elif self.pkg.compiler.name == "nag":
        elif self.spec.satisfies("%nag"):
            for tag in ["fc", "f77"]:
                marker = markers[tag]
                start_at = "# ### BEGIN {0}".format(marker)
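
The x.filter(..., start_at=..., stop_at=...) calls above rewrite values only inside a marker-delimited section of the generated libtool script. A self-contained sketch of that bounded substitution, using plain re on a string rather than Spack's FileFilter (the sample text is not a real libtool script):

import re

# Replace empty wl="" assignments, but only between the FC BEGIN/END markers,
# mirroring the bounded filter calls above.
text = """wl=""
# ### BEGIN LIBTOOL TAG CONFIG: FC
wl=""
# ### END LIBTOOL TAG CONFIG: FC
"""

def filter_between(text, regex, repl, start_at, stop_at):
    active, out = False, []
    for line in text.splitlines(True):
        if line.startswith(start_at):
            active = True
        elif line.startswith(stop_at):
            active = False
        elif active:
            line = re.sub(regex, repl, line)
        out.append(line)
    return "".join(out)

patched = filter_between(
    text, r'^wl=""$', 'wl="-Wl,"',
    "# ### BEGIN LIBTOOL TAG CONFIG: FC", "# ### END LIBTOOL TAG CONFIG: FC",
)
# Only the wl="" inside the FC section becomes wl="-Wl,"; the first one is untouched.
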
@@ -533,7 +539,7 @@ def build_directory(self) -> str:
        return build_dir

    @spack.phase_callbacks.run_before("autoreconf")
    def _delete_configure_to_force_update(self) -> None:
    def delete_configure_to_force_update(self) -> None:
        if self.force_autoreconf:
            fs.force_remove(self.configure_abs_path)

@@ -546,7 +552,7 @@ def autoreconf_search_path_args(self) -> List[str]:
        return _autoreconf_search_path_args(self.spec)

    @spack.phase_callbacks.run_after("autoreconf")
    def _set_configure_or_die(self) -> None:
    def set_configure_or_die(self) -> None:
        """Ensure the presence of a "configure" script, or raise. If the "configure"
        is found, a module level attribute is set.

@@ -570,7 +576,10 @@ def configure_args(self) -> List[str]:
        return []

    def autoreconf(
        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Not needed usually, configure should be already there"""

@@ -599,7 +608,10 @@ def autoreconf(
        self.pkg.module.autoreconf(*autoreconf_args)

    def configure(
        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "configure", with the arguments specified by the builder and an
        appropriately set prefix.
@@ -612,7 +624,10 @@ def configure(
        pkg.module.configure(*options)

    def build(
        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "make" on the build targets specified by the builder."""
        # See https://autotools.io/automake/silent.html
@@ -622,7 +637,10 @@ def build(
        pkg.module.make(*params)

    def install(
        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
@@ -819,7 +837,7 @@ def installcheck(self) -> None:
        self.pkg._if_make_target_execute("installcheck")

    @spack.phase_callbacks.run_after("install")
    def _remove_libtool_archives(self) -> None:
    def remove_libtool_archives(self) -> None:
        """Remove all .la files in prefix sub-folders if the package sets
        ``install_libtool_archives`` to be False.
        """
@@ -10,8 +10,7 @@
import llnl.util.tty as tty

import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import depends_on

from .cmake import CMakeBuilder, CMakePackage

@@ -69,12 +68,7 @@ class CachedCMakeBuilder(CMakeBuilder):

    @property
    def cache_name(self):
        return "{0}-{1}-{2}@{3}.cmake".format(
            self.pkg.name,
            self.pkg.spec.architecture,
            self.pkg.spec.compiler.name,
            self.pkg.spec.compiler.version,
        )
        return f"{self.pkg.name}-{self.spec.architecture.platform}-{self.spec.dag_hash()}.cmake"

    @property
    def cache_path(self):
@@ -117,7 +111,9 @@ def initconfig_compiler_entries(self):
        # Fortran compiler is optional
        if "FC" in os.environ:
            spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
            system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
            system_fc_entry = cmake_cache_path(
                "CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
            )
        else:
            spack_fc_entry = "# No Fortran compiler defined in spec"
            system_fc_entry = "# No Fortran compiler defined in spec"
@@ -133,8 +129,8 @@ def initconfig_compiler_entries(self):
            " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
            " " + spack_fc_entry,
            "else()\n",
            " " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
            " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
            " " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc),
            " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx),
            " " + system_fc_entry,
            "endif()\n",
        ]
@@ -295,13 +291,6 @@ def initconfig_hardware_entries(self):
            entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
            entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

            if spec.satisfies("%gcc"):
                entries.append(
                    cmake_cache_string(
                        "CMAKE_HIP_FLAGS", f"--gcc-toolchain={self.pkg.compiler.prefix}"
                    )
                )

        return entries

    def std_initconfig_entries(self):
@@ -332,9 +321,7 @@ def initconfig_package_entries(self):
        """This method is to be overwritten by the package"""
        return []

    def initconfig(
        self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def initconfig(self, pkg, spec, prefix):
        cache_entries = (
            self.std_initconfig_entries()
            + self.initconfig_compiler_entries()
@@ -371,6 +358,10 @@ class CachedCMakePackage(CMakePackage):

    CMakeBuilder = CachedCMakeBuilder

    # These dependencies are assumed in the builder
    depends_on("c", type="build")
    depends_on("cxx", type="build")

    def flag_handler(self, name, flags):
        if name in ("cflags", "cxxflags", "cppflags", "fflags"):
            return None, None, None  # handled in the cmake cache
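
One side of the cache_name hunk derives the initconfig file name from the spec's DAG hash instead of the compiler name and version. A hedged illustration of the two naming schemes (all spec values below are made up):

# Illustrative values only; real ones come from a concrete Spack spec.
pkg_name, arch, platform = "hydrogen", "linux-ubuntu22.04-zen2", "linux"
compiler_name, compiler_version = "gcc", "12.3.0"
dag_hash = "abc123xyz"  # shortened stand-in for spec.dag_hash()

old_style = f"{pkg_name}-{arch}-{compiler_name}@{compiler_version}.cmake"
new_style = f"{pkg_name}-{platform}-{dag_hash}.cmake"
print(old_style)  # hydrogen-linux-ubuntu22.04-zen2-gcc@12.3.0.cmake
print(new_style)  # hydrogen-linux-abc123xyz.cmake
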
@@ -7,8 +7,6 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when

@@ -83,16 +81,12 @@ def check_args(self):
    def setup_build_environment(self, env):
        env.set("CARGO_HOME", self.stage.path)

    def build(
        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def build(self, pkg, spec, prefix):
        """Runs ``cargo install`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)

    def install(
        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg, spec, prefix):
        """Copy build files into package prefix."""
        with fs.working_dir(self.build_directory):
            fs.install_tree("out", prefix)
@@ -454,7 +454,10 @@ def cmake_args(self) -> List[str]:
        return []

    def cmake(
        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Runs ``cmake`` in the build directory"""

@@ -471,7 +474,10 @@ def cmake(
        pkg.module.cmake(*options)

    def build(
        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Make the build targets"""
        with fs.working_dir(self.build_directory):
@@ -482,7 +488,10 @@ def build(
            pkg.module.ninja(*self.build_targets)

    def install(
        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
@@ -6,12 +6,13 @@
import pathlib
import re
import sys
from typing import Dict, List, Sequence, Tuple, Union
from typing import Dict, List, Optional, Sequence, Tuple, Union

import llnl.util.tty as tty
from llnl.util.lang import classproperty
from llnl.util.lang import classproperty, memoized

import spack.compiler
import spack
import spack.compilers.error
import spack.package_base
import spack.util.executable

@@ -43,6 +44,9 @@ class CompilerPackage(spack.package_base.PackageBase):
    #: Static definition of languages supported by this class
    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]

    #: Relative path to compiler wrappers
    link_paths: Dict[str, str] = {}

    def __init__(self, spec: "spack.spec.Spec"):
        super().__init__(spec)
        msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -77,14 +81,14 @@ def executables(cls) -> Sequence[str]:
        ]

    @classmethod
    def determine_version(cls, exe: Path):
    def determine_version(cls, exe: Path) -> str:
        version_argument = cls.compiler_version_argument
        if isinstance(version_argument, str):
            version_argument = (version_argument,)

        for va in version_argument:
            try:
                output = spack.compiler.get_compiler_version_output(exe, va)
                output = compiler_output(exe, version_argument=va)
                match = re.search(cls.compiler_version_regex, output)
                if match:
                    return ".".join(match.groups())
@@ -95,10 +99,11 @@ def determine_version(cls, exe: Path):
                    f"[{__file__}] Cannot detect a valid version for the executable "
                    f"{str(exe)}, for package '{cls.name}': {e}"
                )
        return ""

    @classmethod
    def compiler_bindir(cls, prefix: Path) -> Path:
        """Overridable method for the location of the compiler bindir within the preifx"""
        """Overridable method for the location of the compiler bindir within the prefix"""
        return os.path.join(prefix, "bin")

    @classmethod
@@ -142,3 +147,109 @@ def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
        # path determination is separated so it can be reused in subclasses
        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
    #: Returns the argument needed to set the RPATH, or None if it does not exist
    rpath_arg: Optional[str] = "-Wl,-rpath,"
    #: Flag that needs to be used to pass an argument to the linker
    linker_arg: str = "-Wl,"
    #: Flag used to produce Position Independent Code
    pic_flag: str = "-fPIC"
    #: Flag used to get verbose output
    verbose_flags: str = "-v"
    #: Flag to activate OpenMP support
    openmp_flag: str = "-fopenmp"

    required_libs: List[str] = []

    def standard_flag(self, *, language: str, standard: str) -> str:
        """Returns the flag used to enforce a given standard for a language"""
        if language not in self.supported_languages:
            raise spack.compilers.error.UnsupportedCompilerFlag(
                f"{self.spec} does not provide the '{language}' language"
            )
        try:
            return self._standard_flag(language=language, standard=standard)
        except (KeyError, RuntimeError) as e:
            raise spack.compilers.error.UnsupportedCompilerFlag(
                f"{self.spec} does not provide the '{language}' standard {standard}"
            ) from e

    def _standard_flag(self, *, language: str, standard: str) -> str:
        raise NotImplementedError("Must be implemented by derived classes")

    def archspec_name(self) -> str:
        """Name that archspec uses to refer to this compiler"""
        return self.spec.name

    @property
    def cc(self) -> Optional[str]:
        assert self.spec.concrete, "cannot retrieve C compiler, spec is not concrete"
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("c", None)
        return self._cc_path()

    def _cc_path(self) -> Optional[str]:
        """Returns the path to the C compiler, if the package was installed by Spack"""
        return None

    @property
    def cxx(self) -> Optional[str]:
        assert self.spec.concrete, "cannot retrieve C++ compiler, spec is not concrete"
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("cxx", None)
        return self._cxx_path()

    def _cxx_path(self) -> Optional[str]:
        """Returns the path to the C++ compiler, if the package was installed by Spack"""
        return None

    @property
    def fortran(self):
        assert self.spec.concrete, "cannot retrieve Fortran compiler, spec is not concrete"
        if self.spec.external:
            return self.spec.extra_attributes["compilers"].get("fortran", None)
        return self._fortran_path()

    def _fortran_path(self) -> Optional[str]:
        """Returns the path to the Fortran compiler, if the package was installed by Spack"""
        return None

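
standard_flag() is a template method: subclasses only supply _standard_flag, and the base class converts KeyError/RuntimeError into UnsupportedCompilerFlag. A hedged, self-contained sketch of what a GCC-like flag table might look like (illustrative values, not Spack's actual gcc package):

# Illustrative flag table in the spirit of _standard_flag(); real compiler
# packages in Spack may use different tables and flag spellings.
def gcc_like_standard_flag(*, language: str, standard: str) -> str:
    flags = {
        "cxx": {"11": "-std=c++11", "14": "-std=c++14", "17": "-std=c++17"},
        "c": {"99": "-std=c99", "11": "-std=c11"},
    }
    # A missing key raises KeyError, which standard_flag() above would
    # convert into an UnsupportedCompilerFlag.
    return flags[language][standard]

assert gcc_like_standard_flag(language="cxx", standard="14") == "-std=c++14"
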
@memoized
def _compiler_output(
    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
    """Returns the output from the compiler invoked with the given version argument.

    Args:
        compiler_path: path of the compiler to be invoked
        version_argument: the argument used to extract version information
    """
    compiler = spack.util.executable.Executable(compiler_path)
    if not version_argument:
        return compiler(
            output=str, error=str, ignore_errors=ignore_errors, timeout=120, fail_on_error=True
        )
    return compiler(
        version_argument,
        output=str,
        error=str,
        ignore_errors=ignore_errors,
        timeout=120,
        fail_on_error=True,
    )


def compiler_output(
    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
    """Wrapper for _compiler_output()."""
    # This ensures that we memoize compiler output by *absolute path*,
    # not just executable name. If we don't do this, and the path changes
    # (e.g., during testing), we can get incorrect results.
    if not os.path.isabs(compiler_path):
        compiler_path = spack.util.executable.which_string(compiler_path, required=True)

    return _compiler_output(
        compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
    )
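
The wrapper above normalizes the executable name to an absolute path before hitting the memoized helper, so the cache key stays stable even if PATH changes. The same normalize-then-memoize pattern with stdlib pieces only (function names here are hypothetical):

import functools
import os
import shutil
import subprocess

@functools.lru_cache(maxsize=None)
def _version_output(abs_path: str, arg: str) -> str:
    # Cached on the absolute path, never on the bare executable name.
    return subprocess.run([abs_path, arg], capture_output=True, text=True).stdout

def version_output(exe: str, arg: str = "--version") -> str:
    abs_path = exe if os.path.isabs(exe) else shutil.which(exe)
    if abs_path is None:
        raise FileNotFoundError(exe)
    return _version_output(abs_path, arg)
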
@@ -7,8 +7,6 @@
import spack.directives
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix

from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests

@@ -50,8 +48,3 @@ class GenericBuilder(BuilderWithDefaults):

    # unconditionally perform any post-install phase tests
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

    def install(
        self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        raise NotImplementedError
@@ -7,9 +7,7 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.directives import build_system, extends
from spack.multimethod import when

from ._checks import BuilderWithDefaults, execute_install_time_tests

@@ -28,7 +26,9 @@ class GoPackage(spack.package_base.PackageBase):
    build_system("go")

    with when("build_system=go"):
        depends_on("go", type="build")
        # TODO: this seems like it should be depends_on, see
        # setup_dependent_build_environment in go for why I kept it like this
        extends("go@1.14:", type="build")


@spack.builder.builder("go")
@@ -71,7 +71,6 @@ class GoBuilder(BuilderWithDefaults):
    def setup_build_environment(self, env):
        env.set("GO111MODULE", "on")
        env.set("GOTOOLCHAIN", "local")
        env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))

    @property
    def build_directory(self):
@@ -82,31 +81,19 @@ def build_directory(self):
    def build_args(self):
        """Arguments for ``go build``."""
        # Pass ldflags -s (omit symbol table and debug info) and -w (omit DWARF) by default
        return [
            "-p",
            str(self.pkg.module.make_jobs),
            "-modcacherw",
            "-ldflags",
            "-s -w",
            "-o",
            f"{self.pkg.name}",
        ]
        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]

    @property
    def check_args(self):
        """Argument for ``go test`` during check phase"""
        return []

    def build(
        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def build(self, pkg, spec, prefix):
        """Runs ``go build`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.go("build", *self.build_args)

    def install(
        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg, spec, prefix):
        """Install built binaries into prefix bin."""
        with fs.working_dir(self.build_directory):
            fs.mkdirp(prefix.bin)
@@ -7,9 +7,7 @@

import spack.builder
import spack.package_base
import spack.spec
import spack.util.executable
import spack.util.prefix
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when

@@ -57,9 +55,7 @@ class LuaBuilder(spack.builder.Builder):
    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ()

    def unpack(
        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def unpack(self, pkg, spec, prefix):
        if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
            directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
            dirlines = directory.split("\n")
@@ -70,16 +66,15 @@ def unpack(
    def _generate_tree_line(name, prefix):
        return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

    def generate_luarocks_config(
        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def generate_luarocks_config(self, pkg, spec, prefix):
        spec = self.pkg.spec
        table_entries = []
        for d in spec.traverse(deptype=("build", "run")):
            if d.package.extends(self.pkg.extendee_spec):
                table_entries.append(self._generate_tree_line(d.name, d.prefix))

        with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
        path = self._luarocks_config_path()
        with open(path, "w", encoding="utf-8") as config:
            config.write(
                """
deps_mode="all"
@@ -90,26 +85,23 @@ def generate_luarocks_config(
                    "\n".join(table_entries)
                )
            )
        return path

    def preprocess(
        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def preprocess(self, pkg, spec, prefix):
        """Override this to preprocess source before building with luarocks"""
        pass

    def luarocks_args(self):
        return []

    def install(
        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
    def install(self, pkg, spec, prefix):
        rock = "."
        specs = find(".", "*.rockspec", recursive=False)
        if specs:
            rock = specs[0]
        rocks_args = self.luarocks_args()
        rocks_args.append(rock)
        pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)

    def _luarocks_config_path(self):
        return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
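
_generate_tree_line emits one Lua table entry per dependency tree; joined together they form the rocks_trees section of the generated luarocks config. A small self-contained illustration of the output (the paths are made up):

# Paths here are made up for illustration.
def generate_tree_line(name, prefix):
    return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

entries = [
    generate_tree_line("lua-luafilesystem", "/spack/opt/lua-luafilesystem-1.8.0"),
    generate_tree_line("lua-penlight", "/spack/opt/lua-penlight-1.13.1"),
]
print("\n".join(entries))
# { name = "lua-luafilesystem", root = "/spack/opt/lua-luafilesystem-1.8.0" };
# { name = "lua-penlight", root = "/spack/opt/lua-penlight-1.13.1" };
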
@@ -98,20 +98,29 @@ def build_directory(self) -> str:
        return self.pkg.stage.source_path

    def edit(
        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Edit the Makefile before calling make. The default is a no-op."""
        pass

    def build(
        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "make" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.build_targets)

    def install(
        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
Some files were not shown because too many files have changed in this diff