Compare commits
3 Commits: v1.0.0-alp ... docs/testi

| Author | SHA1 | Date |
|---|---|---|
| | 7a9ea92de0 | |
| | 508aa228a7 | |
| | 03c29ad13e | |
.github/workflows/build-containers.yml (vendored, 11 lines changed)
@@ -57,13 +57,7 @@ jobs:
       - name: Checkout
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683

-      - name: Determine latest release tag
-        id: latest
-        run: |
-          git fetch --quiet --tags
-          echo "tag=$(git tag --list --sort=-v:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)" | tee -a $GITHUB_OUTPUT
-
-      - uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96
+      - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
         with:
           images: |
@@ -77,7 +71,6 @@ jobs:
             type=semver,pattern={{major}}
             type=ref,event=branch
             type=ref,event=pr
-            type=raw,value=latest,enable=${{ github.ref == format('refs/tags/{0}', steps.latest.outputs.tag) }}

       - name: Generate the Dockerfile
         env:
@@ -120,7 +113,7 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355
+        uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
.github/workflows/ci.yaml (vendored, 26 lines changed)
@@ -83,17 +83,10 @@ jobs:

   all-prechecks:
     needs: [ prechecks ]
     if: ${{ always() }}
     runs-on: ubuntu-latest
     steps:
       - name: Success
-        run: |
-          if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
-            echo "Unit tests failed."
-            exit 1
-          else
-            exit 0
-          fi
+        run: "true"

   coverage:
     needs: [ unit-tests, prechecks ]
@@ -101,19 +94,8 @@ jobs:
     secrets: inherit

   all:
-    needs: [ unit-tests, coverage, bootstrap ]
-    if: ${{ always() }}
+    needs: [ coverage, bootstrap ]
     runs-on: ubuntu-latest
-    # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context
     steps:
-      - name: Status summary
-        run: |
-          if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
-            echo "Unit tests failed."
-            exit 1
-          elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
-            echo "Bootstrap tests failed."
-            exit 1
-          else
-            exit 0
-          fi
+      - name: Success
+        run: "true"
.github/workflows/coverage.yml (vendored, 3 lines changed)
@@ -29,7 +29,6 @@ jobs:
       - run: coverage xml

       - name: "Upload coverage report to CodeCov"
-        uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
+        uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
         with:
           verbose: true
-          fail_ci_if_error: true
@@ -3,5 +3,5 @@ clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.20241105
+types-six==1.16.21.20241009
 vermin==1.6.0
.github/workflows/unit_tests.yaml (vendored, 9 lines changed)
@@ -55,7 +55,7 @@ jobs:
           cmake bison libbison-dev kcov
       - name: Install Python packages
         run: |
-          pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov clingo
+          pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
           pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
       - name: Setup git configuration
         run: |
@@ -173,7 +173,8 @@ jobs:
           spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.6
          spack bootstrap status
-          spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
+          spack solve zlib
+          spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
       - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
         with:
           name: coverage-clingo-cffi
@@ -210,7 +211,7 @@
           . share/spack/setup-env.sh
           $(which spack) bootstrap disable spack-install
           $(which spack) solve zlib
-          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python')
+          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
           $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
       - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
         with:
@@ -241,7 +242,7 @@
         env:
           COVERAGE_FILE: coverage/.coverage-windows
         run: |
-          spack unit-test --verbose --cov --cov-config=pyproject.toml
+          spack unit-test -x --verbose --cov --cov-config=pyproject.toml
           ./share/spack/qa/validate_last_exit.ps1
       - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
         with:
@@ -70,7 +70,7 @@ Tutorial
 ----------------

 We maintain a
-[**hands-on tutorial**](https://spack-tutorial.readthedocs.io/).
+[**hands-on tutorial**](https://spack.readthedocs.io/en/latest/tutorial.html).
 It covers basic to advanced usage, packaging, developer features, and large HPC
 deployments. You can do all of the exercises on your own laptop using a
 Docker container.
@@ -39,8 +39,7 @@ concretizer:
   # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
   duplicates:
     # "none": allows a single node for any package in the DAG.
-    # "minimal": allows the duplication of 'build-tools' nodes only
-    #     (e.g. py-setuptools, cmake etc.)
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
     strategy: minimal
   # Option to specify compatibility between operating systems for reuse of compilers and packages
@@ -48,18 +47,3 @@ concretizer:
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
   # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
   os_compatible: {}
-
-  # Option to specify whether to support splicing. Splicing allows for
-  # the relinking of concrete package dependencies in order to better
-  # reuse already built packages with ABI compatible dependencies
-  splice:
-    explicit: []
-    automatic: false
-  # Maximum time, in seconds, allowed for the 'solve' phase. If set to 0, there is no time limit.
-  timeout: 0
-  # If set to true, exceeding the timeout will always result in a concretization error. If false,
-  # the best (suboptimal) model computed before the timeout is used.
-  #
-  # Setting this to false yields unreproducible results, so we advise to use that value only
-  # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
-  error_on_timeout: true
@@ -19,7 +19,7 @@ config:
   install_tree:
     root: $spack/opt/spack
     projections:
-      all: "{architecture.platform}/{architecture.target}/{name}-{version}-{hash}"
+      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
   # install_tree can include an optional padded length (int or boolean)
   # default is False (do not pad)
   # if padded_length is True, Spack will pad as close to the system max path
@@ -15,11 +15,12 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler:
+      - apple-clang
+      - clang
+      - gcc
     providers:
-      c: [apple-clang, llvm, gcc]
-      cxx: [apple-clang, llvm, gcc]
       elf: [libelf]
-      fortran: [gcc]
       fuse: [macfuse]
       gl: [apple-gl]
       glu: [apple-glu]
@@ -15,18 +15,19 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
       armci: [armcimpi]
       blas: [openblas, amdblis]
-      c: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
-      cxx: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
+      c: [gcc]
+      cxx: [gcc]
       D: [ldc]
       daal: [intel-oneapi-daal]
       elf: [elfutils]
       fftw-api: [fftw, amdfftw]
       flame: [libflame, amdlibflame]
-      fortran: [gcc, llvm]
+      fortran: [gcc]
       fortran-rt: [gcc-runtime, intel-oneapi-runtime]
       fuse: [libfuse]
       gl: [glx, osmesa]
@@ -75,8 +76,6 @@ packages:
     buildable: false
   cray-mvapich2:
     buildable: false
-  egl:
-    buildable: false
   fujitsu-mpi:
     buildable: false
   hpcx-mpi:
@@ -15,8 +15,8 @@
 # -------------------------------------------------------------------------
 packages:
   all:
+    compiler:
+      - msvc
     providers:
-      c : [msvc]
-      cxx: [msvc]
       mpi: [msmpi]
       gl: [wgl]
@@ -237,35 +237,3 @@ is optional -- by default, splices will be transitive.
 ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
 will warn the user in this case, but will not fail the
 concretization.
-
-.. _automatic_splicing:
-
-^^^^^^^^^^^^^^^^^^
-Automatic Splicing
-^^^^^^^^^^^^^^^^^^
-
-The Spack solver can be configured to do automatic splicing for
-ABI-compatible packages. Automatic splices are enabled in the concretizer
-config section
-
-.. code-block:: yaml
-
-   concretizer:
-     splice:
-       automatic: True
-
-Packages can include ABI-compatibility information using the
-``can_splice`` directive. See :ref:`the packaging
-guide<abi_compatibility>` for instructions on specifying ABI
-compatibility using the ``can_splice`` directive.
-
-.. note::
-
-   The ``can_splice`` directive is experimental and may be changed in
-   future versions.
-
-When automatic splicing is enabled, the concretizer will combine any
-number of ABI-compatible specs if possible to reuse installed packages
-and packages available from binary caches. The end result of these
-specs is equivalent to a series of transitive/intransitive splices,
-but the series may be non-obvious.
@@ -210,7 +210,7 @@ def setup(sphinx):
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
-    ("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
+    ("py:class", "spack.build_systems._checks.BaseBuilder"),
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
@@ -221,7 +221,6 @@ def setup(sphinx):
     ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
     ("py:class", "spack.traverse.EdgeAndDepth"),
     ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
-    ("py:class", "spack.compiler.CompilerCache"),
     # TypeVar that is not handled correctly
     ("py:class", "llnl.util.lang.T"),
 ]
@@ -1042,7 +1042,7 @@ file snippet we define a view named ``mpis``, rooted at
 ``/path/to/view`` in which all projections use the package name,
 version, and compiler name to determine the path for a given
 package. This view selects all packages that depend on MPI, and
-excludes those built with the GCC compiler at version 18.5.
+excludes those built with the PGI compiler at version 18.5.
 The root specs with their (transitive) link and run type dependencies
 will be put in the view due to the ``link: all`` option,
 and the files in the view will be symlinks to the spack install
@@ -1056,7 +1056,7 @@ directories.
      mpis:
        root: /path/to/view
        select: [^mpi]
-       exclude: ['%gcc@18.5']
+       exclude: ['%pgi@18.5']
        projections:
          all: '{name}/{version}-{compiler.name}'
        link: all
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
       .. code-block:: console

          apt update
-         apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
+         apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip

    .. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo

          dnf install epel-release
          dnf group install "Development Tools"
-         dnf install gcc-gfortran redhat-lsb-core python3 unzip
+         dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3

    .. tab-item:: macOS Brew

       .. code-block:: console

          brew update
-         brew install gcc git zip
+         brew install curl gcc git gnupg zip

 ------------
 Installation
@@ -283,6 +283,10 @@ compilers`` or ``spack compiler list``:
       intel@14.0.1  intel@13.0.1  intel@12.1.2  intel@10.1
    -- clang -------------------------------------------------------
       clang@3.4  clang@3.3  clang@3.2  clang@3.1
+   -- pgi ---------------------------------------------------------
+      pgi@14.3-0  pgi@13.2-0  pgi@12.1-0  pgi@10.9-0  pgi@8.0-1
+      pgi@13.10-0  pgi@13.1-1  pgi@11.10-0  pgi@10.2-0  pgi@7.1-3
+      pgi@13.6-0  pgi@12.8-0  pgi@11.1-0  pgi@9.0-4  pgi@7.0-6

 Any of these compilers can be used to build Spack packages. More on
 how this is done is in :ref:`sec-specs`.
@@ -802,6 +806,65 @@ flags to the ``icc`` command:
       spec: intel@15.0.24.4.9.3


+^^^
+PGI
+^^^
+
+PGI comes with two sets of compilers for C++ and Fortran,
+distinguishable by their names. "Old" compilers:
+
+.. code-block:: yaml
+
+   cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
+   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
+   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
+   fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90
+
+"New" compilers:
+
+.. code-block:: yaml
+
+   cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
+   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
+   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
+   fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
+
+Older installations of PGI contain just the old compilers, whereas
+newer installations contain both the old and the new. The new compilers
+are considered preferable, as some packages
+(``hdf``) will not build with the old compilers.
+
+When auto-detecting a PGI compiler, there are cases where Spack will
+find the old compilers, when you really want it to find the new
+compilers. It is best to check ``compilers.yaml``; if the old
+compilers are being used, change ``pgf77`` and ``pgf90`` to
+``pgfortran``.
+
+Other issues:
+
+* There are reports that some packages will not build with PGI,
+  including ``libpciaccess`` and ``openssl``. A workaround is to
+  build these packages with another compiler and then use them as
+  dependencies for PGI-built packages. For example:
+
+  .. code-block:: console
+
+     $ spack install openmpi%pgi ^libpciaccess%gcc
+
+
+* PGI requires a license to use; see :ref:`licensed-compilers` for more
+  information on installation.
+
+.. note::
+
+   It is believed the problem with HDF 4 is that everything is
+   compiled with the ``F77`` compiler, but at some point some Fortran
+   90 code slipped in there. So compilers that can handle both FORTRAN
+   77 and Fortran 90 (``gfortran``, ``pgfortran``, etc) are fine. But
+   compilers specific to one or the other (``pgf77``, ``pgf90``) won't
+   work.
+
+
 ^^^
 NAG
 ^^^
@@ -1326,7 +1389,6 @@ Required:
 * Microsoft Visual Studio
 * Python
 * Git
-* 7z

 Optional:
 * Intel Fortran (needed for some packages)
@@ -1392,13 +1454,6 @@ as the project providing Git support on Windows. This is additionally the recomm
 for installing Git on Windows, a link to which can be found above. Spack requires the
 utilities vendored by this project.

-"""
-7zip
-"""
-
-A tool for extracting ``.xz`` files is required for extracting source tarballs. The latest 7zip
-can be located at https://sourceforge.net/projects/sevenzip/.
-
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Step 2: Install and setup Spack
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
lib/spack/docs/in_the_news.rst (new file, 23 lines)
@@ -0,0 +1,23 @@
+.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+   Spack Project Developers. See the top-level COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _in-the-news:
+
+==================
+Spack In The News
+==================
+
+Since its inception, Spack has been featured in many blog posts and news stories. Here we collect a
+
+.. warning::
+   Disclaimer:
+   Spack is not responsible for and does not control the content of these external pages.
+   Spack makes no representations about their accuracy or applicability.
+
+----
+2024
+----
+
+- May 22, 2024: `Spack on Windows: A New Era in Cross-Platform Dependency Management <https://www.kitware.com/spack-on-windows-a-new-era-in-cross-platform-dependency-management/>`_ (Kitware)
@@ -97,6 +97,13 @@ or refer to the full manual below.
    build_systems
    developer_guide

+.. toctree::
+   :maxdepth: 2
+   :caption: Testimonials
+
+   in_the_news
+   user_reports
+
 .. toctree::
    :maxdepth: 2
    :caption: API Docs
@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
    This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
    large repositories that have separate portions that can be built independently.
    If paths provided are directories then all the subdirectories and associated files
-   will also be cloned.
+   will also be cloned.

 Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1367,8 +1367,8 @@ Submodules
    git-submodule``.

 Sparse-Checkout
-   You can supply ``git_sparse_paths`` at the package or version level to utilize git's
-   sparse-checkout feature. This will only clone the paths that are specified in the
+   You can supply ``git_sparse_paths`` at the package or version level to utilize git's
+   sparse-checkout feature. This will only clone the paths that are specified in the
    ``git_sparse_paths`` attribute for the package along with the files in the top level directory.
    This feature allows you to only clone what you need from a large repository.
    Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
@@ -1928,29 +1928,71 @@ to the empty list.
    String. A URL pointing to license setup instructions for the software.
    Defaults to the empty string.

-For example, let's take a look at the Arm Forge package.
+For example, let's take a look at the package for the PGI compilers.

 .. code-block:: python

    # Licensing
    license_required = True
-   license_comment = "#"
-   license_files = ["licences/Licence"]
-   license_vars = [
-       "ALLINEA_LICENSE_DIR",
-       "ALLINEA_LICENCE_DIR",
-       "ALLINEA_LICENSE_FILE",
-       "ALLINEA_LICENCE_FILE",
-   ]
-   license_url = "https://developer.arm.com/documentation/101169/latest/Use-Arm-Licence-Server"
+   license_comment = "#"
+   license_files = ["license.dat"]
+   license_vars = ["PGROUPD_LICENSE_FILE", "LM_LICENSE_FILE"]
+   license_url = "http://www.pgroup.com/doc/pgiinstall.pdf"

-Arm Forge requires a license. Its license manager uses the ``#`` symbol to denote a comment.
-It expects the license file to be named ``License`` and to be located in a ``licenses`` directory
-in the installation prefix.
-
-If you would like the installation file to be located elsewhere, simply set ``ALLINEA_LICENSE_DIR`` or
-one of the other license variables after installation. For further instructions on installation and
-licensing, see the URL provided.
+As you can see, PGI requires a license. Its license manager, FlexNet, uses
+the ``#`` symbol to denote a comment. It expects the license file to be
+named ``license.dat`` and to be located directly in the installation prefix.
+If you would like the installation file to be located elsewhere, simply set
+``PGROUPD_LICENSE_FILE`` or ``LM_LICENSE_FILE`` after installation. For
+further instructions on installation and licensing, see the URL provided.
+
+Let's walk through a sample PGI installation to see exactly what Spack is
+and isn't capable of. Since PGI does not provide a download URL, it must
+be downloaded manually. It can either be added to a mirror or located in
+the current directory when ``spack install pgi`` is run. See :ref:`mirrors`
+for instructions on setting up a mirror.
+
+After running ``spack install pgi``, the first thing that will happen is
+Spack will create a global license file located at
+``$SPACK_ROOT/etc/spack/licenses/pgi/license.dat``. It will then open up the
+file using :ref:`your favorite editor <controlling-the-editor>`. It will look like
+this:
+
+.. code-block:: sh
+
+   # A license is required to use pgi.
+   #
+   # The recommended solution is to store your license key in this global
+   # license file. After installation, the following symlink(s) will be
+   # added to point to this file (relative to the installation prefix):
+   #
+   #   license.dat
+   #
+   # Alternatively, use one of the following environment variable(s):
+   #
+   #   PGROUPD_LICENSE_FILE
+   #   LM_LICENSE_FILE
+   #
+   # If you choose to store your license in a non-standard location, you may
+   # set one of these variable(s) to the full pathname to the license file, or
+   # port@host if you store your license keys on a dedicated license server.
+   # You will likely want to set this variable in a module file so that it
+   # gets loaded every time someone tries to use pgi.
+   #
+   # For further information on how to acquire a license, please refer to:
+   #
+   #   http://www.pgroup.com/doc/pgiinstall.pdf
+   #
+   # You may enter your license below.
+
+You can add your license directly to this file, or tell FlexNet to use a
+license stored on a separate license server. Here is an example that
+points to a license server called licman1:
+
+.. code-block:: none
+
+   SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
+   USE_SERVER

 If your package requires the license to install, you can reference the
 location of this global license using ``self.global_license_file``.
@@ -2350,7 +2392,7 @@ by the ``--jobs`` option:
 .. code-block:: python
    :emphasize-lines: 7, 11
    :linenos:

    class Xios(Package):
       ...
       def install(self, spec, prefix):
@@ -2925,9 +2967,9 @@ make sense during the build phase may not be needed at runtime, and vice versa.
 it makes sense to let a dependency set the environment variables for its dependents. To allow all
 this, Spack provides four different methods that can be overridden in a package:

-1. :meth:`setup_build_environment <spack.builder.BaseBuilder.setup_build_environment>`
+1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
 2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
-3. :meth:`setup_dependent_build_environment <spack.builder.BaseBuilder.setup_dependent_build_environment>`
+3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
 4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`

 The Qt package, for instance, uses this call:
@@ -5378,7 +5420,7 @@ by build recipes. Examples of checking :ref:`variant settings <variants>` and
 determine whether it needs to also set up build dependencies (see
 :ref:`test-build-tests`).

-The ``MyPackage`` package below provides two basic test examples:
+The ``MyPackage`` package below provides two basic test examples:
 ``test_example`` and ``test_example2``. The first runs the installed
 ``example`` and ensures its output contains an expected string. The second
 runs ``example2`` without checking output so is only concerned with confirming
@@ -5695,7 +5737,7 @@ subdirectory of the installation prefix. They are automatically copied to
 the appropriate relative paths under the test stage directory prior to
 executing stand-alone tests.

-.. tip::
+.. tip::

    *Perform test-related conversions once when copying files.*

@@ -7071,46 +7113,6 @@ might write:
    CXXFLAGS += -I$DWARF_PREFIX/include
    CXXFLAGS += -L$DWARF_PREFIX/lib

-.. _abi_compatibility:
-
-----------------------------
-Specifying ABI Compatibility
-----------------------------
-
-Packages can include ABI-compatibility information using the
-``can_splice`` directive. For example, if ``Foo`` version 1.1 can
-always replace version 1.0, then the package could have:
-
-.. code-block:: python
-
-   can_splice("foo@1.0", when="@1.1")
-
-For virtual packages, packages can also specify ABI-compatibility with
-other packages providing the same virtual. For example, ``zlib-ng``
-could specify:
-
-.. code-block:: python
-
-   can_splice("zlib@1.3.1", when="@2.2+compat")
-
-Some packages have ABI-compatibility that is dependent on matching
-variant values, either for all variants or for some set of
-ABI-relevant variants. In those cases, it is not necessary to specify
-the full combinatorial explosion. The ``match_variants`` keyword can
-cover all single-value variants.
-
-.. code-block:: python
-
-   can_splice("foo@1.1", when="@1.2", match_variants=["bar"])  # any value for bar as long as they're the same
-   can_splice("foo@1.2", when="@1.3", match_variants="*")  # any variant values if all single-value variants match
-
-The concretizer will use ABI compatibility to determine automatic
-splices when :ref:`automatic splicing<automatic_splicing>` is enabled.
-
-.. note::
-
-   The ``can_splice`` directive is experimental, and may be replaced
-   by a higher-level interface in future versions of Spack.
-
 .. _package_class_structure:
@@ -1,12 +1,12 @@
 sphinx==8.1.3
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.6.1
-sphinx-rtd-theme==3.0.2
+sphinx-rtd-theme==3.0.1
 python-levenshtein==0.26.1
 docutils==0.21.2
 pygments==2.18.0
 urllib3==2.2.3
-pytest==8.3.4
+pytest==8.3.3
 isort==5.13.2
 black==24.10.0
 flake8==7.1.1
lib/spack/docs/user_reports.rst (new file, 28 lines)
@@ -0,0 +1,28 @@
+.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+   Spack Project Developers. See the top-level COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _user-reports:
+
+============
+User Reports
+============
+
+Spack has an active user community which deploys Spack in a multitude of contexts.
+Here we collect a growing list of reports by users on how they have applied Spack
+in their context.
+
+If you have a user report that you think merits inclusion, feel free to open a pull
+request and add it to this list.
+
+.. warning::
+   Disclaimer:
+   Spack is not responsible for and does not control the content of these external pages.
+   Spack makes no representations about their accuracy or applicability.
+
+----
+2024
+----
+
+- September 3, 2024: `Aidan Heerdegen: RRR: Reliability, Replicability, Reproducibility for climate models <https://www.youtube.com/watch?v=BVoVliqgx1U>`_ (ACCESS-NRI)
lib/spack/env/cc (vendored, 26 lines changed)
@@ -40,6 +40,11 @@ readonly params="\
 SPACK_ENV_PATH
 SPACK_DEBUG_LOG_DIR
 SPACK_DEBUG_LOG_ID
 SPACK_COMPILER_SPEC
+SPACK_CC_RPATH_ARG
+SPACK_CXX_RPATH_ARG
+SPACK_F77_RPATH_ARG
+SPACK_FC_RPATH_ARG
+SPACK_LINKER_ARG
 SPACK_SHORT_SPEC
 SPACK_SYSTEM_DIRS
@@ -218,7 +223,6 @@ for param in $params; do
     if eval "test -z \"\${${param}:-}\""; then
         die "Spack compiler must be run from Spack! Input '$param' is missing."
     fi
-    # FIXME (compiler as nodes) add checks on whether `SPACK_XX_RPATH` is set if `SPACK_XX` is set
 done

 # eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
@@ -342,9 +346,6 @@ case "$command" in
         ;;
     ld|ld.gold|ld.lld)
         mode=ld
-        if [ -z "$SPACK_CC_RPATH_ARG" ]; then
-            comp="CXX"
-        fi
         ;;
     *)
         die "Unknown compiler: $command"
@@ -402,7 +403,7 @@ dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
 linker_arg="${SPACK_LINKER_ARG}"

 # Set up rpath variable according to language.
-rpath="ERROR: RPATH ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
+rpath="ERROR: RPATH ARG WAS NOT SET"
 eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"

 # Dump the mode and exit if the command is dump-mode.
@@ -411,6 +412,13 @@ if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
     exit
 fi

+# If, say, SPACK_CC is set but SPACK_FC is not, we want to know. Compilers do not
+# *have* to set up Fortran executables, so we need to tell the user when a build is
+# about to attempt to use them unsuccessfully.
+if [ -z "$command" ]; then
+    die "Compiler '$SPACK_COMPILER_SPEC' does not have a $language compiler configured."
+fi
+
 #
 # Filter '.' and Spack environment directories out of PATH so that
 # this script doesn't just call itself
@@ -780,17 +788,15 @@ case "$mode" in
             C)
                 extend spack_flags_list SPACK_ALWAYS_CFLAGS
                 extend spack_flags_list SPACK_CFLAGS
-                preextend flags_list SPACK_TARGET_ARGS_CC
                 ;;
             CXX)
                 extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
                 extend spack_flags_list SPACK_CXXFLAGS
-                preextend flags_list SPACK_TARGET_ARGS_CXX
                 ;;
             F)
-                preextend flags_list SPACK_TARGET_ARGS_FORTRAN
                 ;;
         esac

+        # prepend target args
+        preextend flags_list SPACK_TARGET_ARGS
         ;;
 esac
@@ -20,20 +20,7 @@
 import tempfile
 from contextlib import contextmanager
 from itertools import accumulate
-from typing import (
-    Callable,
-    Deque,
-    Dict,
-    Generator,
-    Iterable,
-    List,
-    Match,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Union,
-)
+from typing import Callable, Deque, Dict, Iterable, List, Match, Optional, Set, Tuple, Union

 import llnl.util.symlink
 from llnl.util import tty
@@ -98,8 +85,6 @@
     "visit_directory_tree",
 ]

-Path = Union[str, pathlib.Path]
-
 if sys.version_info < (3, 7, 4):
     # monkeypatch shutil.copystat to fix PermissionError when copying read-only
     # files on Lustre when using Python < 3.7.4
@@ -1689,44 +1674,51 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2)


 def find(
-    root: Union[Path, Sequence[Path]],
-    files: Union[str, Sequence[str]],
+    root: Union[str, List[str]],
+    files: Union[str, List[str]],
     recursive: bool = True,
     max_depth: Optional[int] = None,
 ) -> List[str]:
-    """Finds all files matching the patterns from ``files`` starting from ``root``. This function
-    returns a deterministic result for the same input and directory structure when run multiple
-    times. Symlinked directories are followed, and unique directories are searched only once. Each
-    matching file is returned only once at lowest depth in case multiple paths exist due to
+    """Finds all non-directory files matching the filename patterns from ``files`` starting from
+    ``root``. This function returns a deterministic result for the same input and directory
+    structure when run multiple times. Symlinked directories are followed, and unique directories
+    are searched only once. Each matching file is returned only once at lowest depth in case
     multiple paths exist due to symlinked directories. The function has similarities to the Unix
     ``find`` utility.

+    Examples:
+
+    .. code-block:: console
+
+       $ find -L /usr -name python3 -type f
+
+    is roughly equivalent to
+
+    >>> find("/usr", "python3")
+
+    with the notable difference that this function only lists a single path to each file in case of
+    symlinked directories.
+
+    .. code-block:: console
+
+       $ find -L /usr/local/bin /usr/local/sbin -maxdepth 1 '(' -name python3 -o -name getcap \\
+           ')' -type f
+
+    is roughly equivalent to:
+
+    >>> find(["/usr/local/bin", "/usr/local/sbin"], ["python3", "getcap"], recursive=False)
+
     Accepts any glob characters accepted by fnmatch:

     ==========  ====================================
     Pattern     Meaning
     ==========  ====================================
-    ``*``       matches one or more characters
+    ``*``       matches everything
     ``?``       matches any single character
     ``[seq]``   matches any character in ``seq``
     ``[!seq]``  matches any character not in ``seq``
     ==========  ====================================

-    Examples:
-
-    >>> find("/usr", "*.txt", recursive=True, max_depth=2)
-
-    finds all files with the extension ``.txt`` in the directory ``/usr`` and subdirectories up to
-    depth 2.
-
-    >>> find(["/usr", "/var"], ["*.txt", "*.log"], recursive=True)
-
-    finds all files with the extension ``.txt`` or ``.log`` in the directories ``/usr`` and
-    ``/var`` at any depth.
-
-    >>> find("/usr", "GL/*.h", recursive=True)
-
-    finds all header files in a directory GL at any depth in the directory ``/usr``.
-
     Parameters:
         root: One or more root directories to start searching from
         files: One or more filename patterns to search for
@@ -1735,25 +1727,22 @@ def find(

     Returns a list of absolute, matching file paths.
     """
-    if isinstance(root, (str, pathlib.Path)):
+    if not isinstance(root, list):
         root = [root]
-    elif not isinstance(root, collections.abc.Sequence):
-        raise TypeError(f"'root' arg must be a path or a sequence of paths, not '{type(root)}']")

-    if isinstance(files, str):
+    if not isinstance(files, list):
         files = [files]
-    elif not isinstance(files, collections.abc.Sequence):
-        raise TypeError(f"'files' arg must be str or a sequence of str, not '{type(files)}']")

     # If recursive is false, max_depth can only be None or 0
     if max_depth and not recursive:
         raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")

-    tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
     if not recursive:
         max_depth = 0
     elif max_depth is None:
         max_depth = sys.maxsize

+    tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
     result = _find_max_depth(root, files, max_depth)
     tty.debug(f"Find complete: {root} {files}")
     return result
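Aside (editor's illustration, not part of the diff): the argument handling in ``find`` above reduces to a small normalization step, with ``recursive=False`` meaning "depth 0" and "no limit" meaning ``sys.maxsize``. A standalone sketch, with the function name invented:

```python
import sys
from typing import Optional


def normalize_depth(recursive: bool = True, max_depth: Optional[int] = None) -> int:
    # Mirrors the normalization in find(): a non-recursive search means depth 0,
    # and an unspecified limit becomes sys.maxsize as the BFS bound.
    if max_depth and not recursive:
        raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
    if not recursive:
        return 0
    return sys.maxsize if max_depth is None else max_depth


assert normalize_depth(recursive=False) == 0
assert normalize_depth(max_depth=2) == 2
assert normalize_depth() == sys.maxsize
```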
@@ -1764,47 +1753,21 @@ def _log_file_access_issue(e: OSError, path: str) -> None:
     tty.debug(f"find must skip {path}: {errno_name} {e}")


-def _file_id(s: os.stat_result) -> Tuple[int, int]:
+def _dir_id(s: os.stat_result) -> Tuple[int, int]:
     # Note: on windows, st_ino is the file index and st_dev is the volume serial number. See
     # https://github.com/python/cpython/blob/3.9/Python/fileutils.c
     return (s.st_ino, s.st_dev)


-def _dedupe_files(paths: List[str]) -> List[str]:
-    """Deduplicate files by inode and device, dropping files that cannot be accessed."""
-    unique_files: List[str] = []
-    # tuple of (inode, device) for each file without following symlinks
-    visited: Set[Tuple[int, int]] = set()
-    for path in paths:
-        try:
-            stat_info = os.lstat(path)
-        except OSError as e:
-            _log_file_access_issue(e, path)
-            continue
-        file_id = _file_id(stat_info)
-        if file_id not in visited:
-            unique_files.append(path)
-            visited.add(file_id)
-    return unique_files
-
-
-def _find_max_depth(
-    roots: Sequence[Path], globs: Sequence[str], max_depth: int = sys.maxsize
-) -> List[str]:
+def _find_max_depth(roots: List[str], globs: List[str], max_depth: int = sys.maxsize) -> List[str]:
     """See ``find`` for the public API."""
-    # We optimize for the common case of simple filename only patterns: a single, combined regex
-    # is used. For complex patterns that include path components, we use a slower glob call from
-    # every directory we visit within max_depth.
-    filename_only_patterns = {
-        f"pattern_{i}": os.path.normcase(x) for i, x in enumerate(globs) if "/" not in x
-    }
-    complex_patterns = {f"pattern_{i}": x for i, x in enumerate(globs) if "/" in x}
-    regex = re.compile(fnmatch_translate_multiple(filename_only_patterns))
-    # Ordered dictionary that keeps track of what pattern found which files
-    matched_paths: Dict[str, List[str]] = {f"pattern_{i}": [] for i, _ in enumerate(globs)}
+    # Apply normcase to file patterns and filenames to respect case insensitive filesystems
+    regex, groups = fnmatch_translate_multiple([os.path.normcase(x) for x in globs])
+    # Ordered dictionary that keeps track of the files found for each pattern
+    capture_group_to_paths: Dict[str, List[str]] = {group: [] for group in groups}
     # Ensure returned paths are always absolute
     roots = [os.path.abspath(r) for r in roots]
-    # Breadth-first search queue. Each element is a tuple of (depth, dir)
+    # Breadth-first search queue. Each element is a tuple of (depth, directory)
     dir_queue: Deque[Tuple[int, str]] = collections.deque()
     # Set of visited directories. Each element is a tuple of (inode, device)
     visited_dirs: Set[Tuple[int, int]] = set()
@@ -1815,76 +1778,58 @@ def _find_max_depth(
         except OSError as e:
             _log_file_access_issue(e, root)
             continue
-        dir_id = _file_id(stat_root)
+        dir_id = _dir_id(stat_root)
         if dir_id not in visited_dirs:
             dir_queue.appendleft((0, root))
             visited_dirs.add(dir_id)

     while dir_queue:
-        depth, curr_dir = dir_queue.pop()
+        depth, next_dir = dir_queue.pop()
         try:
-            dir_iter = os.scandir(curr_dir)
+            dir_iter = os.scandir(next_dir)
         except OSError as e:
-            _log_file_access_issue(e, curr_dir)
+            _log_file_access_issue(e, next_dir)
             continue

-        # Use glob.glob for complex patterns.
-        for pattern_name, pattern in complex_patterns.items():
-            matched_paths[pattern_name].extend(
-                path for path in glob.glob(os.path.join(curr_dir, pattern))
-            )
-
-        # List of subdirectories by path and (inode, device) tuple
-        subdirs: List[Tuple[str, Tuple[int, int]]] = []
-
         with dir_iter:
-            for dir_entry in dir_iter:
-
-                # Match filename only patterns
-                if filename_only_patterns:
-                    m = regex.match(os.path.normcase(dir_entry.name))
-                    if m:
-                        for pattern_name in filename_only_patterns:
-                            if m.group(pattern_name):
-                                matched_paths[pattern_name].append(dir_entry.path)
-                                break
-
-                # Collect subdirectories
-                if depth >= max_depth:
-                    continue
-
+            ordered_entries = sorted(dir_iter, key=lambda x: x.name)
+            for dir_entry in ordered_entries:
                 try:
-                    if not dir_entry.is_dir(follow_symlinks=True):
-                        continue
-                    if sys.platform == "win32":
-                        # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we have
-                        # to call os.stat
-                        stat_info = os.stat(dir_entry.path, follow_symlinks=True)
-                    else:
-                        stat_info = dir_entry.stat(follow_symlinks=True)
+                    it_is_a_dir = dir_entry.is_dir(follow_symlinks=True)
                 except OSError as e:
                     # Possible permission issue, or a symlink that cannot be resolved (ELOOP).
                     _log_file_access_issue(e, dir_entry.path)
                     continue

-                subdirs.append((dir_entry.path, _file_id(stat_info)))
+                if it_is_a_dir and depth < max_depth:
+                    try:
+                        # The stat should be performed in a try/except block. We repeat that here
+                        # vs. moving to the above block because we only want to call `stat` if we
+                        # haven't exceeded our max_depth
+                        if sys.platform == "win32":
+                            # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we
+                            # have to call os.stat
+                            stat_info = os.stat(dir_entry.path, follow_symlinks=True)
+                        else:
+                            stat_info = dir_entry.stat(follow_symlinks=True)
+                    except OSError as e:
+                        _log_file_access_issue(e, dir_entry.path)
+                        continue

-        # Enqueue subdirectories in a deterministic order
-        if subdirs:
-            subdirs.sort(key=lambda s: os.path.basename(s[0]))
-            for subdir, subdir_id in subdirs:
-                if subdir_id not in visited_dirs:
-                    dir_queue.appendleft((depth + 1, subdir))
-                    visited_dirs.add(subdir_id)
+                    dir_id = _dir_id(stat_info)
+                    if dir_id not in visited_dirs:
+                        dir_queue.appendleft((depth + 1, dir_entry.path))
+                        visited_dirs.add(dir_id)
+                else:
+                    m = regex.match(os.path.normcase(os.path.basename(dir_entry.path)))
+                    if not m:
+                        continue
+                    for group in capture_group_to_paths:
+                        if m.group(group):
+                            capture_group_to_paths[group].append(dir_entry.path)
+                            break

-    # Sort the matched paths for deterministic output
-    for paths in matched_paths.values():
-        paths.sort()
-    all_matching_paths = [path for paths in matched_paths.values() for path in paths]
-
-    # We only dedupe files if we have any complex patterns, since only they can match the same file
-    # multiple times
-    return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths
+    return [path for paths in capture_group_to_paths.values() for path in paths]


 # Utilities for libraries and headers
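The removed ``_dedupe_files`` helper in the hunks above identifies a file by its ``(inode, device)`` pair, so the same file reached through several matched paths is reported once. A self-contained sketch of that idea (paths in the comment are invented; POSIX ``lstat`` semantics assumed):

```python
import os
from typing import List, Set, Tuple


def dedupe_by_inode(paths: List[str]) -> List[str]:
    """Return paths with duplicates (same inode and device) removed,
    silently skipping paths that cannot be stat'ed."""
    unique: List[str] = []
    seen: Set[Tuple[int, int]] = set()
    for path in paths:
        try:
            st = os.lstat(path)  # do not follow symlinks
        except OSError:
            continue
        file_id = (st.st_ino, st.st_dev)  # identifies the underlying file
        if file_id not in seen:
            seen.add(file_id)
            unique.append(path)
    return unique


# A hardlinked or doubly-reachable file is reported once, e.g. (hypothetical):
# dedupe_by_inode(["/tmp/a", "/tmp/hardlink-to-a"]) -> ["/tmp/a"]
```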
@@ -2773,6 +2718,22 @@ def prefixes(path):
     return paths


+@system_path_filter
+def md5sum(file):
+    """Compute the MD5 sum of a file.
+
+    Args:
+        file (str): file to be checksummed
+
+    Returns:
+        MD5 sum of the file's content
+    """
+    md5 = hashlib.md5()
+    with open(file, "rb") as f:
+        md5.update(f.read())
+    return md5.digest()
+
+
 @system_path_filter
 def remove_directory_contents(dir):
     """Remove all contents of a directory."""
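The ``md5sum`` helper added above reads the whole file into memory before hashing. A chunked variant (editor's sketch, name invented) produces the identical digest while bounding memory use:

```python
import hashlib


def md5_digest(path: str, chunk_size: int = 1 << 20) -> bytes:
    # Stream the file in chunks so large files never need to fit in memory.
    md5 = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            md5.update(chunk)
    return md5.digest()


# Yields the same bytes as hashing the whole file at once, as md5sum() does.
```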
@@ -2823,25 +2784,6 @@ def temporary_dir(
         remove_directory_contents(tmp_dir)


-@contextmanager
-def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
-    """Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
-    for functions or external tools that do not support in-place editing. Notice that this function
-    is unsafe in that it works with paths instead of a file descriptors, but this is by design,
-    since we assume the call site will create a new inode at the same path."""
-    tmp_fd, tmp_path = tempfile.mkstemp(
-        dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
-    )
-    # windows cannot replace a file with open fds, so close since the call site needs to replace.
-    os.close(tmp_fd)
-    try:
-        shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
-        yield tmp_path
-        shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
-    finally:
-        os.unlink(tmp_path)
-
-
 def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
     """Create a small summary of the given file. Does not error
     when file does not exist.
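For context on the helper removed above: it hands the caller a temporary sibling copy and copies the bytes back afterwards, so the original inode and its hardlinks survive the edit. A hedged usage sketch; the context manager body is taken from the diff, while the ``rewrite`` caller and its path are hypothetical:

```python
import os
import shutil
import tempfile
from contextlib import contextmanager
from typing import Generator


@contextmanager
def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
    # Work on a sibling copy, then copy the contents back over the original
    # path, so the original inode (and its hardlinks) survive the edit.
    tmp_fd, tmp_path = tempfile.mkstemp(
        dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
    )
    os.close(tmp_fd)  # Windows cannot replace a file that has open descriptors
    try:
        shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
        yield tmp_path
        shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
    finally:
        os.unlink(tmp_path)


def rewrite(path: str) -> None:
    # Hypothetical caller: uppercase a text file "in place".
    with edit_in_place_through_temporary_file(path) as tmp:
        with open(tmp) as f:
            text = f.read()
        with open(tmp, "w") as f:
            f.write(text.upper())
```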
@@ -15,7 +15,7 @@
 import typing
 import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
+from typing import Callable, Iterable, List, Tuple, TypeVar

 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -73,7 +73,7 @@ def index_by(objects, *funcs):
     if isinstance(f, str):
         f = lambda x: getattr(x, funcs[0])
     elif isinstance(f, tuple):
-        f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])
+        f = lambda x: tuple(getattr(x, p) for p in funcs[0])

     result = {}
     for o in objects:
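Editor's illustration of the behavior the ``index_by`` hunk above touches: with a tuple of attribute names, objects are grouped under a tuple-valued key. The ``-`` side substitutes ``None`` for missing attributes; the ``+`` side lets ``getattr`` raise ``AttributeError``. A minimal sketch with invented objects and an illustrative helper name:

```python
from collections import namedtuple

Pkg = namedtuple("Pkg", ["name", "platform"])


def index_by_tuple(objects, attrs):
    # Group objects by a tuple of attribute values. The None default mirrors
    # the '-' side of the hunk; dropping it gives the stricter '+' behavior.
    result = {}
    for o in objects:
        key = tuple(getattr(o, p, None) for p in attrs)
        result.setdefault(key, []).append(o)
    return result


pkgs = [Pkg("zlib", "linux"), Pkg("zlib", "darwin"), Pkg("cmake", "linux")]
by_key = index_by_tuple(pkgs, ("name", "platform"))
assert by_key[("zlib", "linux")] == [Pkg("zlib", "linux")]
```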
@@ -867,11 +867,24 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
 PatternStr = typing.Pattern[str]


-def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
-    """Similar to ``fnmatch.translate``, but takes an ordered dictionary where keys are pattern
-    names, and values are filename patterns. The output is a regex that matches any of the
-    patterns in order, and named capture groups are used to identify which pattern matched."""
-    return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items())
+def fnmatch_translate_multiple(patterns: List[str]) -> Tuple[PatternStr, List[str]]:
+    """Same as fnmatch.translate, but creates a single regex of the form
+    ``(?P<pattern0>...)|(?P<pattern1>...)|...`` for each pattern in the iterable, where
+    ``patternN`` is a named capture group that matches the corresponding pattern translated by
+    ``fnmatch.translate``. This can be used to match multiple patterns in a single pass. No case
+    normalization is performed on the patterns.
+
+    Args:
+        patterns: list of fnmatch patterns
+
+    Returns:
+        Tuple of the combined regex and the list of named capture groups corresponding to each
+        pattern in the input list.
+    """
+    groups = [f"pattern{i}" for i in range(len(patterns))]
+    regexes = (fnmatch.translate(p) for p in patterns)
+    combined = re.compile("|".join(f"(?P<{g}>{r})" for g, r in zip(groups, regexes)))
+    return combined, groups


 @contextlib.contextmanager
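The ``+`` side of the hunk above combines every fnmatch pattern into a single alternation regex whose named capture groups reveal which pattern matched. A self-contained sketch of that behavior (sample patterns and filenames are invented):

```python
import fnmatch
import re
from typing import List, Pattern, Tuple


def fnmatch_translate_multiple(patterns: List[str]) -> Tuple[Pattern[str], List[str]]:
    # One alternation regex; each named group records which pattern matched.
    groups = [f"pattern{i}" for i in range(len(patterns))]
    regexes = (fnmatch.translate(p) for p in patterns)
    combined = re.compile("|".join(f"(?P<{g}>{r})" for g, r in zip(groups, regexes)))
    return combined, groups


regex, groups = fnmatch_translate_multiple(["*.txt", "python?"])
m = regex.match("notes.txt")
assert m is not None and m.group("pattern0")  # matched the "*.txt" pattern
m = regex.match("python3")
assert m is not None and m.group("pattern1")  # matched the "python?" pattern
```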
@@ -995,8 +1008,11 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
     def grouped_message(self, with_tracebacks: bool = True) -> str:
         """Print out an error message coalescing all the forwarded errors."""
         each_exception_message = [
-            "\n\t{0} raised {1}: {2}\n{3}".format(
-                context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
+            "{0} raised {1}: {2}{3}".format(
+                context,
+                exc.__class__.__name__,
+                exc,
+                "\n{0}".format("".join(tb)) if with_tracebacks else "",
             )
             for context, exc, tb in self.exceptions
         ]
@@ -11,7 +11,7 @@
 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.24.0.dev0"
+__version__ = "0.23.0.dev0"
 spack_version = __version__
@@ -55,7 +55,6 @@ def _search_duplicate_compilers(error_cls):

 import spack.builder
 import spack.config
-import spack.deptypes
 import spack.fetch_strategy
 import spack.patch
 import spack.repo
@@ -572,13 +571,8 @@ def _search_for_deprecated_package_methods(pkgs, error_cls):
 @package_properties
 def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
     """Ensure package names are lowercase and consistent"""
-    reserved_names = ("all",)
     badname_regex, errors = re.compile(r"[_A-Z]"), []
     for pkg_name in pkgs:
-        if pkg_name in reserved_names:
-            error_msg = f"The name '{pkg_name}' is reserved, and cannot be used for packages"
-            errors.append(error_cls(error_msg, []))
-
         if badname_regex.search(pkg_name):
             error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
             errors.append(error_cls(error_msg, []))
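The audit in the hunk above comes down to one regex applied to each package name. The same check as a runnable sketch (sample names invented):

```python
import re

badname_regex = re.compile(r"[_A-Z]")  # uppercase letters and underscores are rejected


def check_name(pkg_name: str) -> str:
    if badname_regex.search(pkg_name):
        return f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
    return "ok"


assert check_name("py-numpy") == "ok"
assert "should be lowercase" in check_name("Py_Numpy")
```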
@@ -694,19 +688,19 @@ def invalid_sha256_digest(fetcher):
             return h, True
         return None, False

-    error_msg = f"Package '{pkg_name}' does not use sha256 checksum"
+    error_msg = "Package '{}' does not use sha256 checksum".format(pkg_name)
     details = []
     for v, args in pkg.versions.items():
         fetcher = spack.fetch_strategy.for_package_version(pkg, v)
         digest, is_bad = invalid_sha256_digest(fetcher)
         if is_bad:
-            details.append(f"{pkg_name}@{v} uses {digest}")
+            details.append("{}@{} uses {}".format(pkg_name, v, digest))

     for _, resources in pkg.resources.items():
         for resource in resources:
             digest, is_bad = invalid_sha256_digest(resource.fetcher)
             if is_bad:
-                details.append(f"Resource in '{pkg_name}' uses {digest}")
+                details.append("Resource in '{}' uses {}".format(pkg_name, digest))
     if details:
         errors.append(error_cls(error_msg, details))
@@ -720,9 +714,9 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

-        # values are either ConditionalValue objects or the values themselves
+        # values are either Value objects (for conditional values) or the values themselves
         build_system_names = set(
-            v.value if isinstance(v, spack.variant.ConditionalValue) else v
+            v.value if isinstance(v, spack.variant.Value) else v
             for _, variant in pkg_cls.variant_definitions("build_system")
             for v in variant.values
         )
@@ -1015,14 +1009,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
             # depends_on('foo+bar ^fee+baz')
             #
             # but we'd like to have two dependencies listed instead.
-            nested_dependencies = dep.spec.edges_to_dependencies()
-            # Filter out pure build dependencies, like:
-            #
-            # depends_on('foo+bar%gcc')
-            #
-            nested_dependencies = [
-                x for x in nested_dependencies if x.depflag != spack.deptypes.BUILD
-            ]
+            nested_dependencies = dep.spec.dependencies()
             if nested_dependencies:
                 summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
                 ndir = len(nested_dependencies) + 1
@@ -40,7 +40,7 @@
 import spack.hash_types as ht
 import spack.hooks
 import spack.hooks.sbang
-import spack.mirrors.mirror
+import spack.mirror
 import spack.oci.image
 import spack.oci.oci
 import spack.oci.opener
@@ -87,8 +87,6 @@
 from spack.stage import Stage
 from spack.util.executable import which

-from .enums import InstallRecordStatus
-
 BUILD_CACHE_RELATIVE_PATH = "build_cache"
 BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"
@@ -254,7 +252,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):

         spec_list = [
             s
-            for s in db.query_local(installed=InstallRecordStatus.ANY)
+            for s in db.query_local(installed=any)
             if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
         ]
@@ -369,7 +367,7 @@ def update(self, with_cooldown=False):
         on disk under ``_index_cache_root``)."""
         self._init_local_index_cache()
         configured_mirror_urls = [
-            m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values()
+            m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
         ]
         items_to_remove = []
         spec_cache_clear_needed = False
@@ -765,14 +763,7 @@
     Return name of the tarball directory according to the convention
     <os>-<architecture>/<compiler>/<package>-<version>/
     """
-    if spec.original_spec_format() < 5:
-        compiler = spec.annotations.compiler_node_attribute
-        assert compiler is not None, "a compiler spec is expected"
-        return spec.format_path(
-            f"{spec.architecture}/{compiler.name}-{compiler.version}/{spec.name}-{spec.version}"
-        )
-
-    return spec.format_path(f"{spec.architecture.platform}/{spec.name}-{spec.version}")
+    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")


 def tarball_name(spec, ext):
@@ -780,17 +771,9 @@
     Return the name of the tarfile according to the convention
     <os>-<architecture>-<package>-<dag_hash><ext>
     """
-    if spec.original_spec_format() < 5:
-        compiler = spec.annotations.compiler_node_attribute
-        assert compiler is not None, "a compiler spec is expected"
-        spec_formatted = (
-            f"{spec.architecture}-{compiler.name}-{compiler.version}-{spec.name}"
-            f"-{spec.version}-{spec.dag_hash()}"
-        )
-    else:
-        spec_formatted = (
-            f"{spec.architecture.platform}-{spec.name}-{spec.version}-{spec.dag_hash()}"
-        )
+    spec_formatted = spec.format_path(
+        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
+    )
     return f"{spec_formatted}{ext}"
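A hedged sketch of the naming conventions described in the two docstrings above. Spack's real ``format_path`` performs spec-aware substitution; this plain-string version only illustrates the layout, and every field value below is invented:

```python
def tarball_directory_name(architecture: str, compiler: str, name: str, version: str) -> str:
    # <os>-<architecture>/<compiler>/<package>-<version>/
    return f"{architecture}/{compiler}/{name}-{version}"


def tarball_name(architecture: str, compiler: str, name: str, version: str, dag_hash: str) -> str:
    # <os>-<architecture>-<compiler>-<package>-<version>-<dag_hash>.spack
    return f"{architecture}-{compiler}-{name}-{version}-{dag_hash}.spack"


print(tarball_directory_name("linux-ubuntu22.04-x86_64", "gcc-12.3.0", "zlib", "1.3.1"))
print(tarball_name("linux-ubuntu22.04-x86_64", "gcc-12.3.0", "zlib", "1.3.1", "abcdef123456"))
```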
@@ -1191,7 +1174,7 @@ def _url_upload_tarball_and_specfile(


class Uploader:
    def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
    def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
        self.mirror = mirror
        self.force = force
        self.update_index = update_index

@@ -1199,9 +1182,6 @@ def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index):
        self.tmpdir: str
        self.executor: concurrent.futures.Executor

        # Verify if the mirror meets the requirements to push
        self.mirror.ensure_mirror_usable("push")

    def __enter__(self):
        self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
        self._executor = spack.util.parallel.make_concurrent_executor()

@@ -1239,7 +1219,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class OCIUploader(Uploader):
    def __init__(
        self,
        mirror: spack.mirrors.mirror.Mirror,
        mirror: spack.mirror.Mirror,
        force: bool,
        update_index: bool,
        base_image: Optional[str],

@@ -1288,7 +1268,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class URLUploader(Uploader):
    def __init__(
        self,
        mirror: spack.mirrors.mirror.Mirror,
        mirror: spack.mirror.Mirror,
        force: bool,
        update_index: bool,
        signing_key: Optional[str],

@@ -1312,7 +1292,7 @@ def push(


def make_uploader(
    mirror: spack.mirrors.mirror.Mirror,
    mirror: spack.mirror.Mirror,
    force: bool = False,
    update_index: bool = False,
    signing_key: Optional[str] = None,

@@ -1968,9 +1948,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None):
        "signature_verified": "true-if-binary-pkg-was-already-verified"
    }
    """
    configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = (
        spack.mirrors.mirror.MirrorCollection(binary=True).values()
    )
    configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
        binary=True
    ).values()
    if not configured_mirrors:
        tty.die("Please add a spack mirror to allow download of pre-compiled packages.")

@@ -1995,7 +1975,7 @@ def fetch_url_to_mirror(url):
        for mirror in configured_mirrors:
            if mirror.fetch_url == url:
                return mirror
        return spack.mirrors.mirror.Mirror(url)
        return spack.mirror.Mirror(url)

    mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]

@@ -2349,9 +2329,7 @@ def is_backup_file(file):
    if not codesign:
        return
    for binary in changed_files:
        # preserve the original inode by running codesign on a copy
        with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
            codesign("-fs-", tmp_binary)
        codesign("-fs-", binary)

    # If we are installing back to the same location
    # relocate the sbang location if the spack directory changed
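
The edit-in-place helper used above matters because codesign rewrites files, and a rewrite through rename would give hardlinked binaries a new inode. A toy stand-in illustrating the idea (this is an assumption sketch, not the real llnl.util.filesystem implementation):

    import contextlib
    import os
    import shutil
    import tempfile

    @contextlib.contextmanager
    def edit_in_place_through_temporary_file(path):
        """Yield a temporary copy of `path`; write the result back over the
        original file's contents so the inode of `path` is preserved."""
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
        os.close(fd)
        try:
            shutil.copy2(path, tmp)
            yield tmp
            # Write back into the original file object instead of renaming,
            # which would allocate a new inode.
            with open(tmp, "rb") as src, open(path, "wb") as dst:
                shutil.copyfileobj(src, dst)
        finally:
            os.unlink(tmp)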
@@ -2665,7 +2643,7 @@ def try_direct_fetch(spec, mirrors=None):
    specfile_is_signed = False
    found_specs = []

    binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
    binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()

    for mirror in binary_mirrors:
        buildcache_fetch_url_json = url_util.join(

@@ -2726,7 +2704,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
    if spec is None:
        return []

    if not spack.mirrors.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
        tty.debug("No Spack mirrors are currently configured")
        return {}

@@ -2765,7 +2743,7 @@ def clear_spec_cache():

def get_keys(install=False, trust=False, force=False, mirrors=None):
    """Get pgp public keys available on mirror with suffix .pub"""
    mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True)
    mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)

    if not mirror_collection:
        tty.die("Please add a spack mirror to allow " + "download of build caches.")

@@ -2820,7 +2798,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):


def _url_push_keys(
    *mirrors: Union[spack.mirrors.mirror.Mirror, str],
    *mirrors: Union[spack.mirror.Mirror, str],
    keys: List[str],
    tmpdir: str,
    update_index: bool = False,

@@ -2887,7 +2865,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):

    """
    rebuilds = {}
    for mirror in spack.mirrors.mirror.MirrorCollection(mirrors, binary=True).values():
    for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
        tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))

        rebuild_list = []

@@ -2931,7 +2909,7 @@ def _download_buildcache_entry(mirror_root, descriptions):


def download_buildcache_entry(file_descriptions, mirror_url=None):
    if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True):
    if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
        tty.die(
            "Please provide or add a spack mirror to allow " + "download of buildcache entries."
        )

@@ -2940,7 +2918,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
        mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
        return _download_buildcache_entry(mirror_root, file_descriptions)

    for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values():
    for mirror in spack.mirror.MirrorCollection(binary=True).values():
        mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)

        if _download_buildcache_entry(mirror_root, file_descriptions):
@@ -227,13 +227,12 @@ def _root_spec(spec_str: str) -> str:
    # Add a compiler and platform requirement to the root spec.
    platform = str(spack.platforms.host())

    # FIXME (compiler as nodes): recover the compiler for source bootstrapping
    # if platform == "darwin":
    #     spec_str += " %apple-clang"
    if platform == "windows":
    if platform == "darwin":
        spec_str += " %apple-clang"
    elif platform == "windows":
        spec_str += " %msvc"
    # elif platform == "linux":
    #     spec_str += " %gcc"
    elif platform == "linux":
        spec_str += " %gcc"
    elif platform == "freebsd":
        spec_str += " %clang"
    spec_str += f" platform={platform}"
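
With the platform-to-compiler mapping restored on the second side of the hunk, the root spec gains both a compiler and a platform constraint. For a hypothetical package the results would be:

    # Assuming _root_spec as defined on the second side of the hunk above:
    # _root_spec("clingo-bootstrap")
    #   linux   -> "clingo-bootstrap %gcc platform=linux"
    #   darwin  -> "clingo-bootstrap %apple-clang platform=darwin"
    #   windows -> "clingo-bootstrap %msvc platform=windows"
    #   freebsd -> "clingo-bootstrap %clang platform=freebsd"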
@@ -16,9 +16,8 @@

import archspec.cpu

import spack.compilers.config
import spack.compilers.libraries
import spack.config
import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse

@@ -40,7 +39,7 @@ def __init__(self, configuration):

        self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)

    def _valid_compiler_or_raise(self):
    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
        if str(self.host_platform) == "linux":
            compiler_name = "gcc"
        elif str(self.host_platform) == "darwin":

@@ -48,30 +47,17 @@ def _valid_compiler_or_raise(self):
        elif str(self.host_platform) == "windows":
            compiler_name = "msvc"
        elif str(self.host_platform) == "freebsd":
            compiler_name = "llvm"
            compiler_name = "clang"
        else:
            raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")

        candidates = [
            x
            for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
            if x.name == compiler_name
        ]
        candidates = spack.compilers.compilers_for_spec(
            compiler_name, arch_spec=self.host_architecture
        )
        if not candidates:
            raise RuntimeError(
                f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
            )
        candidates.sort(key=lambda x: x.version, reverse=True)
        best = candidates[0]
        # Get compilers for bootstrapping from the 'builtin' repository
        best.namespace = "builtin"
        # If the compiler does not support C++ 14, fail with a legible error message
        try:
            _ = best.package.standard_flag(language="cxx", standard="14")
        except RuntimeError as e:
            raise RuntimeError(
                "cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
            ) from e
        candidates.sort(key=lambda x: x.spec.version, reverse=True)
        return candidates[0]

    def _externals_from_yaml(

@@ -90,6 +76,9 @@ def _externals_from_yaml(
            if not s.satisfies(requirements[pkg_name]):
                continue

            if not s.intersects(f"%{self.host_compiler.spec}"):
                continue

            if not s.intersects(f"arch={self.host_architecture}"):
                continue

@@ -122,10 +111,11 @@ def concretize(self) -> "spack.spec.Spec":
        # Tweak it to conform to the host architecture
        for node in s.traverse():
            node.architecture.os = str(self.host_os)
            node.compiler = self.host_compiler.spec
            node.architecture = self.host_architecture

            if node.name == "gcc-runtime":
                node.versions = self.host_compiler.versions
                node.versions = self.host_compiler.spec.versions

        for edge in spack.traverse.traverse_edges([s], cover="edges"):
            if edge.spec.name == "python":

@@ -137,9 +127,6 @@ def concretize(self) -> "spack.spec.Spec":
            if edge.spec.name == "cmake" and self.external_cmake:
                edge.spec = self.external_cmake

            if edge.spec.name == self.host_compiler.name:
                edge.spec = self.host_compiler

            if "libc" in edge.virtuals:
                edge.spec = self.host_libc

@@ -155,12 +142,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
        return self._external_spec(result)

    def libc_external_spec(self) -> "spack.spec.Spec":
        detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
        result = detector.default_libc()
        result = self.host_compiler.default_libc
        return self._external_spec(result)

    def _external_spec(self, initial_spec) -> "spack.spec.Spec":
        initial_spec.namespace = "builtin"
        initial_spec.compiler = self.host_compiler.spec
        initial_spec.architecture = self.host_architecture
        for flag_type in spack.spec.FlagMap.valid_compiler_flags():
            initial_spec.compiler_flags[flag_type] = []
@@ -11,7 +11,7 @@

from llnl.util import tty

import spack.compilers.config
import spack.compilers
import spack.config
import spack.environment
import spack.modules

@@ -143,8 +143,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:

def _add_compilers_if_missing() -> None:
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.config.compilers_for_arch(arch):
        spack.compilers.config.find_compilers()
    if not spack.compilers.compilers_for_arch(arch):
        spack.compilers.find_compilers()


@contextlib.contextmanager
@@ -37,7 +37,7 @@
import spack.binary_distribution
import spack.config
import spack.detection
import spack.mirrors.mirror
import spack.mirror
import spack.platforms
import spack.spec
import spack.store

@@ -91,7 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

        # Promote (relative) paths to file urls
        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
        self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url

    @property
    def mirror_scope(self) -> spack.config.InternalConfigScope:

@@ -281,12 +281,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            PackageInstaller(
                [concrete_spec.package],
                fail_fast=True,
                package_use_cache=False,
                dependencies_use_cache=False,
            ).install()
            PackageInstaller([concrete_spec.package], fail_fast=True).install()

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
@@ -322,10 +317,11 @@ def create_bootstrapper(conf: ConfigDictionary):
    return _bootstrap_methods[btype](conf)


def source_is_enabled(conf: ConfigDictionary):
def source_is_enabled_or_raise(conf: ConfigDictionary):
    """Raise ValueError if the source is not enabled for bootstrapping"""
    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
    return trusted.get(name, False)
    if not trusted.get(name, False):
        raise ValueError("source is not trusted")


def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):

@@ -355,10 +351,8 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        if not source_is_enabled(current_config):
            continue

        with exception_handler.forward(current_config["name"], Exception):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_import(module, abstract_spec):
                return

@@ -370,7 +364,11 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    msg += exception_handler.grouped_message(with_tracebacks=tty.is_debug())
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --debug ...` for more detailed errors"
    raise ImportError(msg)


@@ -408,9 +406,8 @@ def ensure_executables_in_path_or_raise(
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        if not source_is_enabled(current_config):
            continue
        with exception_handler.forward(current_config["name"], Exception):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
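
Renaming `source_is_enabled` to `source_is_enabled_or_raise` lets a disabled source be recorded by the same `GroupedExceptionHandler` that collects real bootstrap failures, instead of being silently skipped. A self-contained sketch of that control flow, with the handler reduced to a toy stand-in:

    class GroupedExceptionHandler:
        """Toy stand-in: collect (name, exception) pairs instead of raising."""

        def __init__(self):
            self.errors = []

        def forward(self, name, exc_type):
            handler = self

            class _Ctx:
                def __enter__(self):
                    return None

                def __exit__(self, etype, exc, tb):
                    if exc is not None and isinstance(exc, exc_type):
                        handler.errors.append((name, exc))
                        return True  # swallow, keep trying other sources
                    return False

            return _Ctx()

    sources = [{"name": "github-actions", "trusted": False}]
    handler = GroupedExceptionHandler()
    for conf in sources:
        with handler.forward(conf["name"], Exception):
            if not conf["trusted"]:
                raise ValueError("source is not trusted")
            # ... try the bootstrapper here ...
    assert handler.errors[0][0] == "github-actions"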
File diff suppressed because one or more lines are too long (7 files)
@@ -37,6 +37,7 @@
import multiprocessing
import os
import re
import stat
import sys
import traceback
import types

@@ -55,11 +56,12 @@
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize

import spack.build_systems._checks
import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers.libraries
import spack.compilers
import spack.config
import spack.deptypes as dt
import spack.error

@@ -73,6 +75,7 @@
import spack.store
import spack.subprocess_context
import spack.util.executable
import spack.util.libc
from spack import traverse
from spack.context import Context
from spack.error import InstallError, NoHeadersError, NoLibrariesError

@@ -80,7 +83,6 @@
from spack.util.environment import (
    SYSTEM_DIR_CASE_ENTRY,
    EnvironmentModifications,
    PrependPath,
    env_flag,
    filter_system_paths,
    get_path,
@@ -296,10 +298,62 @@ def _add_werror_handling(keep_werror, env):
    env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))


def set_wrapper_environment_variables_for_flags(pkg, env):
def set_compiler_environment_variables(pkg, env):
    assert pkg.spec.concrete
    compiler = pkg.compiler
    spec = pkg.spec

    # Make sure the executables for this compiler exist
    compiler.verify_executables()

    # Set compiler variables used by CMake and autotools
    assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))

    # Populate an object with the list of environment modifications
    # and return it
    # TODO : add additional kwargs for better diagnostics, like requestor,
    # ttyout, ttyerr, etc.
    link_dir = spack.paths.build_env_path

    # Set SPACK compiler variables so that our wrapper knows what to
    # call. If there is no compiler configured then use a default
    # wrapper which will emit an error if it is used.
    if compiler.cc:
        env.set("SPACK_CC", compiler.cc)
        env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
    else:
        env.set("CC", os.path.join(link_dir, "cc"))
    if compiler.cxx:
        env.set("SPACK_CXX", compiler.cxx)
        env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
    else:
        env.set("CXX", os.path.join(link_dir, "c++"))
    if compiler.f77:
        env.set("SPACK_F77", compiler.f77)
        env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
    else:
        env.set("F77", os.path.join(link_dir, "f77"))
    if compiler.fc:
        env.set("SPACK_FC", compiler.fc)
        env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
    else:
        env.set("FC", os.path.join(link_dir, "fc"))

    # Set SPACK compiler rpath flags so that our wrapper knows what to use
    env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
    env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
    env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
    env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
    env.set("SPACK_LINKER_ARG", compiler.linker_arg)

    # Check whether we want to force RPATH or RUNPATH
    if spack.config.get("config:shared_linking:type") == "rpath":
        env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
        env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
    else:
        env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
        env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)

    if pkg.keep_werror is not None:
        keep_werror = pkg.keep_werror
    else:

@@ -307,6 +361,10 @@ def set_wrapper_environment_variables_for_flags(pkg, env):

    _add_werror_handling(keep_werror, env)

    # Set the target parameters that the compiler will add
    isa_arg = optimization_flags(compiler, spec.target)
    env.set("SPACK_TARGET_ARGS", isa_arg)

    # Trap spack-tracked compiler flags as appropriate.
    # env_flags are easy to accidentally override.
    inject_flags = {}
@@ -340,27 +398,74 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
        env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
    pkg.flags_to_build_system_args(build_system_flags)

    env.set("SPACK_COMPILER_SPEC", str(spec.compiler))

    env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)

    # FIXME (compiler as nodes): recover this one in the correct packages
    # compiler.setup_custom_environment(pkg, env)
    compiler.setup_custom_environment(pkg, env)

    return env


def optimization_flags(compiler, target):
    if spack.compilers.is_mixed_toolchain(compiler):
        msg = (
            "microarchitecture specific optimizations are not "
            "supported yet on mixed compiler toolchains [check"
            f" {compiler.name}@{compiler.version} for further details]"
        )
        tty.debug(msg)
        return ""

    # Try to check if the current compiler comes with a version number or
    # has an unexpected suffix. If so, treat it as a compiler with a
    # custom spec.
    version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string)
    compiler_version = compiler.version
    version_number, suffix = archspec.cpu.version_components(compiler.version)
    if not version_number or suffix:
        try:
            compiler_version = compiler.real_version
        except spack.util.executable.ProcessError as e:
            # log this and just return compiler.version instead
            tty.debug(str(e))

    try:
        result = target.optimization_flags(compiler.name, version_number)
        result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
    except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
        result = ""

    return result
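
Both sides of the `optimization_flags` hunk ultimately delegate to archspec for the target-specific flags. A standalone sketch of that lookup (the compiler name and version are illustrative):

    import archspec.cpu

    target = archspec.cpu.host()  # a Microarchitecture, e.g. "skylake"
    try:
        # e.g. "-march=skylake -mtune=skylake" for a known compiler/version pair
        flags = target.optimization_flags("gcc", "12.3.0")
    except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
        flags = ""  # unknown pairs degrade to no target flags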
class FilterDefaultDynamicLinkerSearchPaths:
    """Remove rpaths to directories that are default search paths of the dynamic linker."""

    def __init__(self, dynamic_linker: Optional[str]) -> None:
        # Identify directories by (inode, device) tuple, which handles symlinks too.
        self.default_path_identifiers: Set[Tuple[int, int]] = set()
        if not dynamic_linker:
            return
        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
            try:
                s = os.stat(path)
                if stat.S_ISDIR(s.st_mode):
                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
            except OSError:
                continue

    def is_dynamic_loader_default_path(self, p: str) -> bool:
        try:
            s = os.stat(p)
            return (s.st_ino, s.st_dev) in self.default_path_identifiers
        except OSError:
            return False

    def __call__(self, dirs: List[str]) -> List[str]:
        if not self.default_path_identifiers:
            return dirs
        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
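
The `(st_ino, st_dev)` bookkeeping above is what makes the filter robust to symlinks: two paths that resolve to the same directory object compare equal. The same idea in isolation:

    import os

    def same_directory(a: str, b: str) -> bool:
        """True if two paths resolve to the same directory object,
        regardless of symlinks in either path."""
        try:
            sa, sb = os.stat(a), os.stat(b)
        except OSError:
            return False
        return (sa.st_ino, sa.st_dev) == (sb.st_ino, sb.st_dev)

    # On a merged-usr Linux system /lib is a symlink to /usr/lib, so:
    # same_directory("/lib", "/usr/lib") -> True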
def set_wrapper_variables(pkg, env):
    """Set environment variables used by the Spack compiler wrapper (which have the prefix
    `SPACK_`) and also add the compiler wrappers to PATH.

@@ -369,8 +474,39 @@ def set_wrapper_variables(pkg, env):
    this function computes these options in a manner that is intended to match the DAG traversal
    order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
    is using topo order."""
    # Set compiler flags injected from the spec
    set_wrapper_environment_variables_for_flags(pkg, env)
    # Set environment variables if specified for
    # the given compiler
    compiler = pkg.compiler
    env.extend(spack.schema.environment.parse(compiler.environment))

    if compiler.extra_rpaths:
        extra_rpaths = ":".join(compiler.extra_rpaths)
        env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)

    # Add spack build environment path with compiler wrappers first in
    # the path. We add the compiler wrapper path, which includes default
    # wrappers (cc, c++, f77, f90), AND a subdirectory containing
    # compiler-specific symlinks. The latter ensures that builds that
    # are sensitive to the *name* of the compiler see the right name when
    # we're building with the wrappers.
    #
    # Conflicts on case-insensitive systems (like "CC" and "cc") are
    # handled by putting one in the <build_env_path>/case-insensitive
    # directory. Add that to the path too.
    env_paths = []
    compiler_specific = os.path.join(
        spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
    )
    for item in [spack.paths.build_env_path, compiler_specific]:
        env_paths.append(item)
        ci = os.path.join(item, "case-insensitive")
        if os.path.isdir(ci):
            env_paths.append(ci)

    tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
    for item in env_paths:
        env.prepend_path("PATH", item)
    env.set_path(SPACK_ENV_PATH, env_paths)

    # Working directory for the spack command itself, for debug logs.
    if spack.config.get("config:debug"):

@@ -436,15 +572,22 @@ def set_wrapper_variables(pkg, env):
        lib_path = os.path.join(pkg.prefix, libdir)
        rpath_dirs.insert(0, lib_path)

    filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
        pkg.compiler.default_dynamic_linker
    )

    # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
    # branch above). link_dirs should be filtered with entries from _parse_link_paths.
    link_dirs = list(dedupe(filter_system_paths(link_dirs)))
    include_dirs = list(dedupe(filter_system_paths(include_dirs)))
    rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
    rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)

    default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec)
    if default_dynamic_linker_filter:
        rpath_dirs = default_dynamic_linker_filter(rpath_dirs)
    # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
    # just this filter.
    implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
    if implicit_rpaths:
        env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))

    # Spack managed directories include the stage, store and upstream stores. We extend this with
    # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
@@ -500,19 +643,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    # Put spack compiler paths in module scope. (Some packages use it
    # in setup_run_environment etc, so don't put it context == build)
    link_dir = spack.paths.build_env_path
    pkg_compiler = None
    try:
        pkg_compiler = pkg.compiler
    except spack.compilers.NoCompilerForSpecError as e:
        tty.debug(f"cannot set 'spack_cc': {str(e)}")

    # FIXME (compiler as nodes): make this more general, and not tied to three languages
    # Maybe add a callback?
    global_names = {
        "c": ("spack_cc",),
        "cxx": ("spack_cxx",),
        "fortran": ("spack_fc", "spack_f77"),
    }
    for language in ("c", "cxx", "fortran"):
        spec = pkg.spec.dependencies(virtuals=[language])
        value = None if not spec else os.path.join(link_dir, spec[0].package.link_paths[language])
        for name in global_names[language]:
            setattr(module, name, value)
    if pkg_compiler is not None:
        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
    else:
        module.spack_cc = None
        module.spack_cxx = None
        module.spack_f77 = None
        module.spack_fc = None

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
@@ -679,6 +825,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
        context == Context.TEST and pkg.test_requires_compiler
    )
    if need_compiler:
        set_compiler_environment_variables(pkg, env_mods)
        set_wrapper_variables(pkg, env_mods)

    # Platform specific setup goes before package specific setup. This is for setting

@@ -690,11 +837,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    env_mods.extend(setup_context.get_env_modifications())
    tty.debug("setup_package: collected all modifications from dependencies")

    tty.debug("setup_package: adding compiler wrappers paths")
    for x in env_mods.group_by_name()["SPACK_ENV_PATH"]:
        assert isinstance(x, PrependPath), "unexpected setting used for SPACK_ENV_PATH"
        env_mods.prepend_path("PATH", x.value)

    if context == Context.TEST:
        env_mods.prepend_path("PATH", ".")
    elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):

@@ -708,6 +850,11 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):

    # Load modules on an already clean environment, just before applying Spack's
    # own environment modifications. This ensures Spack controls CC/CXX/... variables.
    if need_compiler:
        tty.debug("setup_package: loading compiler modules")
        for mod in pkg.compiler.modules:
            load_module(mod)

    load_external_modules(pkg)

    # Make sure nothing's strange about the Spack environment.

@@ -736,9 +883,6 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
        elif context == Context.RUN:
            self.root_depflag = dt.RUN | dt.LINK

    def accept(self, item):
        return True

    def neighbors(self, item):
        spec = item.edge.spec
        if spec.dag_hash() in self.root_hashes:
@@ -776,19 +920,19 @@ def effective_deptypes(
    a flag specifying in what way they do so. The list is ordered topologically
    from root to leaf, meaning that environment modifications should be applied
    in reverse so that dependents override dependencies, not the other way around."""
    topo_sorted_edges = traverse.traverse_topo_edges_generator(
        traverse.with_artificial_edges(specs),
        visitor=EnvironmentVisitor(*specs, context=context),
        key=traverse.by_dag_hash,
    visitor = traverse.TopoVisitor(
        EnvironmentVisitor(*specs, context=context),
        key=lambda x: x.dag_hash(),
        root=True,
        all_edges=True,
    )
    traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)

    # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
    use_modes = defaultdict(lambda: UseMode(0))
    nodes_with_type = []

    for edge in topo_sorted_edges:
    for edge in visitor.edges:
        parent, child, depflag = edge.parent, edge.spec, edge.depflag

        # Mark the starting point
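
The docstring's rule, apply modifications in reverse so dependents override dependencies, is easy to invert by accident. A toy illustration with invented names:

    # Topological order from root to leaf: root overrides its dependencies.
    topo = [("root", {"PKG_CONFIG_PATH": "/root/lib/pkgconfig"}),
            ("dep", {"PKG_CONFIG_PATH": "/dep/lib/pkgconfig"})]

    env = {}
    for _, mods in reversed(topo):  # leaf first, root last
        env.update(mods)

    assert env["PKG_CONFIG_PATH"] == "/root/lib/pkgconfig"  # dependent wins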
@@ -1231,7 +1375,7 @@ def exitcode_msg(p):
    return child_result


CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)


def get_package_context(traceback, context=3):

@@ -1280,20 +1424,27 @@ def make_stack(tb, stack=None):
    # We found obj, the Package implementation we care about.
    # Point out the location in the install method where we failed.
    filename = inspect.getfile(frame.f_code)
    lines = [f"{filename}:{frame.f_lineno}, in {frame.f_code.co_name}:"]
    lineno = frame.f_lineno
    if os.path.basename(filename) == "package.py":
        # subtract 1 because we inject a magic import at the top of package files.
        # TODO: get rid of the magic import.
        lineno -= 1

    lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]

    # Build a message showing context in the install method.
    sourcelines, start = inspect.getsourcelines(frame)

    # Calculate lineno of the error relative to the start of the function.
    fun_lineno = frame.f_lineno - start
    fun_lineno = lineno - start
    start_ctx = max(0, fun_lineno - context)
    sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]

    for i, line in enumerate(sourcelines):
        is_error = start_ctx + i == fun_lineno
        mark = ">> " if is_error else "   "
        # Add start to get lineno relative to start of file, not function.
        marked = f"  {'>> ' if is_error else '   '}{start + start_ctx + i:-6d}{line.rstrip()}"
        marked = "  {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip())
        if is_error:
            marked = colorize("@R{%s}" % cescape(marked))
        lines.append(marked)
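
The context-window arithmetic in the hunk above is compact; spelled out as a standalone helper it amounts to this:

    import inspect

    def print_context_window(frame, context=3):
        """Print `context` lines around frame.f_lineno, marking the error line."""
        sourcelines, start = inspect.getsourcelines(frame)
        fun_lineno = frame.f_lineno - start        # error line, relative to function start
        start_ctx = max(0, fun_lineno - context)   # clamp at the top of the function
        window = sourcelines[start_ctx : fun_lineno + context + 1]
        for i, line in enumerate(window):
            mark = ">> " if start_ctx + i == fun_lineno else "   "
            # Add `start` back to report file-relative line numbers.
            print(f"  {mark}{start + start_ctx + i:-6d}{line.rstrip()}")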
@@ -9,7 +9,6 @@

import spack.builder
import spack.error
import spack.phase_callbacks
import spack.relocate
import spack.spec
import spack.store

@@ -64,7 +63,7 @@ def apply_macos_rpath_fixups(builder: spack.builder.Builder):


def ensure_build_dependencies_or_raise(
    spec: spack.spec.Spec, dependencies: List[str], error_msg: str
    spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
):
    """Ensure that some build dependencies are present in the concrete spec.

@@ -72,7 +71,7 @@ def ensure_build_dependencies_or_raise(

    Args:
        spec: concrete spec to be checked.
        dependencies: list of package names of required build dependencies
        dependencies: list of abstract specs to be satisfied
        error_msg: brief error message to be prepended to a longer description

    Raises:

@@ -128,8 +127,8 @@ def execute_install_time_tests(builder: spack.builder.Builder):
    builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)


class BuilderWithDefaults(spack.builder.Builder):
    """Base class for all specific builders with common callbacks registered."""
class BaseBuilder(spack.builder.Builder):
    """Base class for builders to register common checks"""

    # Check that self.prefix is there after installation
    spack.phase_callbacks.run_after("install")(sanity_check_prefix)
    spack.builder.run_after("install")(sanity_check_prefix)
@@ -6,19 +6,15 @@
import os.path
import stat
import subprocess
from typing import Callable, List, Optional, Set, Tuple, Union
from typing import List

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.build_environment
import spack.builder
import spack.compilers.libraries
import spack.error
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.operating_systems.mac_os import macos_version

@@ -26,7 +22,7 @@
from spack.version import Version

from ._checks import (
    BuilderWithDefaults,
    BaseBuilder,
    apply_macos_rpath_fixups,
    ensure_build_dependencies_or_raise,
    execute_build_time_tests,

@@ -73,14 +69,14 @@ def flags_to_build_system_args(self, flags):
    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
    def enable_or_disable(self, *args, **kwargs):
        return spack.builder.create(self).enable_or_disable(*args, **kwargs)
        return self.builder.enable_or_disable(*args, **kwargs)

    def with_or_without(self, *args, **kwargs):
        return spack.builder.create(self).with_or_without(*args, **kwargs)
        return self.builder.with_or_without(*args, **kwargs)


@spack.builder.builder("autotools")
class AutotoolsBuilder(BuilderWithDefaults):
class AutotoolsBuilder(BaseBuilder):
    """The autotools builder encodes the default way of installing software built
    with autotools. It has four phases that can be overridden, if need be:
@@ -161,7 +157,7 @@ class AutotoolsBuilder(BuilderWithDefaults):
    install_libtool_archives = False

    @property
    def patch_config_files(self) -> bool:
    def patch_config_files(self):
        """Whether to update old ``config.guess`` and ``config.sub`` files
        distributed with the tarball.

@@ -181,7 +177,7 @@ def patch_config_files(self) -> bool:
        )

    @property
    def _removed_la_files_log(self) -> str:
    def _removed_la_files_log(self):
        """File containing the list of removed libtool archives"""
        build_dir = self.build_directory
        if not os.path.isabs(self.build_directory):

@@ -189,15 +185,15 @@ def _removed_la_files_log(self) -> str:
        return os.path.join(build_dir, "removed_la_files.txt")

    @property
    def archive_files(self) -> List[str]:
    def archive_files(self):
        """Files to archive for packages based on autotools"""
        files = [os.path.join(self.build_directory, "config.log")]
        if not self.install_libtool_archives:
            files.append(self._removed_la_files_log)
        return files

    @spack.phase_callbacks.run_after("autoreconf")
    def _do_patch_config_files(self) -> None:
    @spack.builder.run_after("autoreconf")
    def _do_patch_config_files(self):
        """Some packages ship with older config.guess/config.sub files and need to
        have these updated when installed on a newer architecture.

@@ -298,7 +294,7 @@ def runs_ok(script_abs_path):
        and set the prefix to the directory containing the `config.guess` and
        `config.sub` files.
        """
        raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.pkg.name))
        raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))

        # Copy the good files over the bad ones
        for abs_path in to_be_patched:

@@ -308,8 +304,8 @@ def runs_ok(script_abs_path):
            fs.copy(substitutes[name], abs_path)
            os.chmod(abs_path, mode)

    @spack.phase_callbacks.run_before("configure")
    def _patch_usr_bin_file(self) -> None:
    @spack.builder.run_before("configure")
    def _patch_usr_bin_file(self):
        """On NixOS file is not available in /usr/bin/file. Patch configure
        scripts to use file from path."""

@@ -320,8 +316,8 @@ def _patch_usr_bin_file(self) -> None:
        with fs.keep_modification_time(*x.filenames):
            x.filter(regex="/usr/bin/file", repl="file", string=True)

    @spack.phase_callbacks.run_before("configure")
    def _set_autotools_environment_variables(self) -> None:
    @spack.builder.run_before("configure")
    def _set_autotools_environment_variables(self):
        """Many autotools builds use a version of mknod.m4 that fails when
        running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.

@@ -334,8 +330,8 @@ def _set_autotools_environment_variables(self) -> None:
        """
        os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"

    @spack.phase_callbacks.run_before("configure")
    def _do_patch_libtool_configure(self) -> None:
    @spack.builder.run_before("configure")
    def _do_patch_libtool_configure(self):
        """Patch bugs that propagate from libtool macros into "configure" and
        further into "libtool". Note that patches that can be fixed by patching
        "libtool" directly should be implemented in the _do_patch_libtool method

@@ -362,8 +358,8 @@ def _do_patch_libtool_configure(self) -> None:
        # Support Libtool 2.4.2 and older:
        x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')

    @spack.phase_callbacks.run_after("configure")
    def _do_patch_libtool(self) -> None:
    @spack.builder.run_after("configure")
    def _do_patch_libtool(self):
        """If configure generates a "libtool" script that does not correctly
        detect the compiler (and patch_libtool is set), patch in the correct
        values for libtool variables.
@@ -397,44 +393,33 @@ def _do_patch_libtool(self) -> None:
            markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())

        # Replace empty linker flag prefixes:
        if self.spec.satisfies("%nag"):
        if self.pkg.compiler.name == "nag":
            # Nag is mixed with gcc and g++, which are recognized correctly.
            # Therefore, we change only Fortran values:
            nag_pkg = self.spec["fortran"].package
            for tag in ["fc", "f77"]:
                marker = markers[tag]
                x.filter(
                    regex='^wl=""$',
                    repl=f'wl="{nag_pkg.linker_arg}"',
                    start_at=f"# ### BEGIN {marker}",
                    stop_at=f"# ### END {marker}",
                    repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
                    start_at="# ### BEGIN {0}".format(marker),
                    stop_at="# ### END {0}".format(marker),
                )
        else:
            compiler_spec = spack.compilers.libraries.compiler_spec(self.spec)
            if compiler_spec:
                x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))
            x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))

        # Replace empty PIC flag values:
        for compiler, marker in markers.items():
            if compiler == "cc":
                language = "c"
            elif compiler == "cxx":
                language = "cxx"
            else:
                language = "fortran"

            if language not in self.spec:
                continue

        for cc, marker in markers.items():
            x.filter(
                regex='^pic_flag=""$',
                repl=f'pic_flag="{self.spec[language].package.pic_flag}"',
                start_at=f"# ### BEGIN {marker}",
                stop_at=f"# ### END {marker}",
                repl='pic_flag="{0}"'.format(
                    getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
                ),
                start_at="# ### BEGIN {0}".format(marker),
                stop_at="# ### END {0}".format(marker),
            )

        # Other compiler-specific patches:
        if self.spec.satisfies("%fj"):
        if self.pkg.compiler.name == "fj":
            x.filter(regex="-nostdlib", repl="", string=True)
            rehead = r"/\S*/"
            for o in [

@@ -447,7 +432,7 @@ def _do_patch_libtool(self) -> None:
                r"crtendS\.o",
            ]:
                x.filter(regex=(rehead + o), repl="")
        elif self.spec.satisfies("%nag"):
        elif self.pkg.compiler.name == "nag":
            for tag in ["fc", "f77"]:
                marker = markers[tag]
                start_at = "# ### BEGIN {0}".format(marker)
|
||||
)
|
||||
|
||||
@property
|
||||
def configure_directory(self) -> str:
|
||||
def configure_directory(self):
|
||||
"""Return the directory where 'configure' resides."""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
@property
|
||||
def configure_abs_path(self) -> str:
|
||||
def configure_abs_path(self):
|
||||
# Absolute path to configure
|
||||
configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
|
||||
return configure_abs_path
|
||||
|
||||
@property
|
||||
def build_directory(self) -> str:
|
||||
def build_directory(self):
|
||||
"""Override to provide another place to build the package"""
|
||||
return self.configure_directory
|
||||
|
||||
@spack.phase_callbacks.run_before("autoreconf")
|
||||
def delete_configure_to_force_update(self) -> None:
|
||||
@spack.builder.run_before("autoreconf")
|
||||
def delete_configure_to_force_update(self):
|
||||
if self.force_autoreconf:
|
||||
fs.force_remove(self.configure_abs_path)
|
||||
|
||||
@property
|
||||
def autoreconf_search_path_args(self) -> List[str]:
|
||||
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
|
||||
of build deps, skips the default path of automake, move external include
|
||||
flags to the back, since they might pull in unrelated m4 files shadowing
|
||||
spack dependencies."""
|
||||
return _autoreconf_search_path_args(self.spec)
|
||||
|
||||
@spack.phase_callbacks.run_after("autoreconf")
|
||||
def set_configure_or_die(self) -> None:
|
||||
"""Ensure the presence of a "configure" script, or raise. If the "configure"
|
||||
is found, a module level attribute is set.
|
||||
|
||||
Raises:
|
||||
RuntimeError: if the "configure" script is not found
|
||||
"""
|
||||
# Check if the "configure" script is there. If not raise a RuntimeError.
|
||||
if not os.path.exists(self.configure_abs_path):
|
||||
msg = "configure script not found in {0}"
|
||||
raise RuntimeError(msg.format(self.configure_directory))
|
||||
|
||||
# Monkey-patch the configure script in the corresponding module
|
||||
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
|
||||
globals_for_pkg.configure = Executable(self.configure_abs_path)
|
||||
globals_for_pkg.propagate_changes_to_mro()
|
||||
|
||||
def configure_args(self) -> List[str]:
|
||||
"""Return the list of all the arguments that must be passed to configure,
|
||||
except ``--prefix`` which will be pre-pended to the list.
|
||||
"""
|
||||
return []
|
||||
|
||||
def autoreconf(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
def autoreconf(self, pkg, spec, prefix):
|
||||
"""Not needed usually, configure should be already there"""
|
||||
|
||||
# If configure exists nothing needs to be done
|
||||
@@ -606,12 +554,39 @@ def autoreconf(
        autoreconf_args += self.autoreconf_extra_args
        self.pkg.module.autoreconf(*autoreconf_args)

    def configure(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    @property
    def autoreconf_search_path_args(self):
        """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
        of build deps, skips the default path of automake, move external include
        flags to the back, since they might pull in unrelated m4 files shadowing
        spack dependencies."""
        return _autoreconf_search_path_args(self.spec)

    @spack.builder.run_after("autoreconf")
    def set_configure_or_die(self):
        """Ensure the presence of a "configure" script, or raise. If the "configure"
        is found, a module level attribute is set.

        Raises:
            RuntimeError: if the "configure" script is not found
        """
        # Check if the "configure" script is there. If not raise a RuntimeError.
        if not os.path.exists(self.configure_abs_path):
            msg = "configure script not found in {0}"
            raise RuntimeError(msg.format(self.configure_directory))

        # Monkey-patch the configure script in the corresponding module
        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
        globals_for_pkg.configure = Executable(self.configure_abs_path)
        globals_for_pkg.propagate_changes_to_mro()

    def configure_args(self):
        """Return the list of all the arguments that must be passed to configure,
        except ``--prefix`` which will be pre-pended to the list.
        """
        return []

    def configure(self, pkg, spec, prefix):
        """Run "configure", with the arguments specified by the builder and an
        appropriately set prefix.
        """

@@ -622,12 +597,7 @@ def configure(
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.configure(*options)

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def build(self, pkg, spec, prefix):
        """Run "make" on the build targets specified by the builder."""
        # See https://autotools.io/automake/silent.html
        params = ["V=1"]

@@ -635,49 +605,41 @@ def build(
        with fs.working_dir(self.build_directory):
            pkg.module.make(*params)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def install(self, pkg, spec, prefix):
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

    def check(self) -> None:
    def check(self):
        """Run "make" on the ``test`` and ``check`` targets, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("test")
            self.pkg._if_make_target_execute("check")

    def _activate_or_not(
        self,
        name: str,
        activation_word: str,
        deactivation_word: str,
        activation_value: Optional[Union[Callable, str]] = None,
        variant=None,
    ) -> List[str]:
        self, name, activation_word, deactivation_word, activation_value=None, variant=None
    ):
        """This function contains the current implementation details of
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.

        Args:
            name: name of the option that is being activated or not
            activation_word: the default activation word ('with' in the case of
                ``with_or_without``)
            deactivation_word: the default deactivation word ('without' in the case of
                ``with_or_without``)
            activation_value: callable that accepts a single value. This value is either one of the
                allowed values for a multi-valued variant or the name of a bool-valued variant.
            name (str): name of the option that is being activated or not
            activation_word (str): the default activation word ('with' in the
                case of ``with_or_without``)
            deactivation_word (str): the default deactivation word ('without'
                in the case of ``with_or_without``)
            activation_value (typing.Callable): callable that accepts a single
                value. This value is either one of the allowed values for a
                multi-valued variant or the name of a bool-valued variant.
                Returns the parameter to be used when the value is activated.

                The special value "prefix" can also be assigned and will return
                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.
            variant: name of the variant that is being processed (if different from option name)
            variant (str): name of the variant that is being processed
                (if different from option name)

        Examples:
@@ -685,19 +647,19 @@ def _activate_or_not(
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant("foo", values=("x", "y"), description=")
|
||||
variant("bar", default=True, description=")
|
||||
variant("ba_z", default=True, description=")
|
||||
variant('foo', values=('x', 'y'), description='')
|
||||
variant('bar', default=True, description='')
|
||||
variant('ba_z', default=True, description='')
|
||||
|
||||
calling this function like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
_activate_or_not(
|
||||
"foo", "with", "without", activation_value="prefix"
|
||||
'foo', 'with', 'without', activation_value='prefix'
|
||||
)
|
||||
_activate_or_not("bar", "with", "without")
|
||||
_activate_or_not("ba-z", "with", "without", variant="ba_z")
|
||||
_activate_or_not('bar', 'with', 'without')
|
||||
_activate_or_not('ba-z', 'with', 'without', variant='ba_z')
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
@@ -717,8 +679,8 @@ def _activate_or_not(
|
||||
Raises:
|
||||
KeyError: if name is not among known variants
|
||||
"""
|
||||
spec: spack.spec.Spec = self.pkg.spec
|
||||
args: List[str] = []
|
||||
spec = self.pkg.spec
|
||||
args = []
|
||||
|
||||
if activation_value == "prefix":
|
||||
activation_value = lambda x: spec[x].prefix
|
||||
@@ -736,7 +698,7 @@ def _activate_or_not(
|
||||
# Create a list of pairs. Each pair includes a configuration
|
||||
# option and whether or not that option is activated
|
||||
vdef = self.pkg.get_variant(variant)
|
||||
if set(vdef.values) == set((True, False)): # type: ignore
|
||||
if set(vdef.values) == set((True, False)):
|
||||
# BoolValuedVariant carry information about a single option.
|
||||
# Nonetheless, for uniformity of treatment we'll package them
|
||||
# in an iterable of one element.
|
||||
@@ -747,12 +709,14 @@ def _activate_or_not(
|
||||
# package's build system. It excludes values which have special
|
||||
# meanings and do not correspond to features (e.g. "none")
|
||||
feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
|
||||
options = [(v, f"{variant}={v}" in spec) for v in feature_values] # type: ignore
|
||||
options = [(value, f"{variant}={value}" in spec) for value in feature_values]
|
||||
|
||||
# For each allowed value in the list of values
|
||||
for option_value, activated in options:
|
||||
# Search for an override in the package for this value
|
||||
override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
|
||||
override_name = "{0}_or_{1}_{2}".format(
|
||||
activation_word, deactivation_word, option_value
|
||||
)
|
||||
line_generator = getattr(self, override_name, None) or getattr(
|
||||
self.pkg, override_name, None
|
||||
)
|
||||
@@ -761,24 +725,19 @@ def _activate_or_not(
|
||||
|
||||
def _default_generator(is_activated):
|
||||
if is_activated:
|
||||
line = f"--{activation_word}-{option_value}"
|
||||
line = "--{0}-{1}".format(activation_word, option_value)
|
||||
if activation_value is not None and activation_value(
|
||||
option_value
|
||||
): # NOQA=ignore=E501
|
||||
line = f"{line}={activation_value(option_value)}"
|
||||
line += "={0}".format(activation_value(option_value))
|
||||
return line
|
||||
return f"--{deactivation_word}-{option_value}"
|
||||
return "--{0}-{1}".format(deactivation_word, option_value)
|
||||
|
||||
line_generator = _default_generator
|
||||
args.append(line_generator(activated))
|
||||
return args
|
||||
|
||||
def with_or_without(
|
||||
self,
|
||||
name: str,
|
||||
activation_value: Optional[Union[Callable, str]] = None,
|
||||
variant: Optional[str] = None,
|
||||
) -> List[str]:
|
||||
def with_or_without(self, name, activation_value=None, variant=None):
|
||||
"""Inspects a variant and returns the arguments that activate
|
||||
or deactivate the selected feature(s) for the configure options.
|
||||
|
||||
@@ -793,11 +752,12 @@ def with_or_without(
|
||||
``variant=value`` is in the spec.
|
||||
|
||||
Args:
|
||||
name: name of a valid multi-valued variant
|
||||
activation_value: callable that accepts a single value and returns the parameter to be
|
||||
used leading to an entry of the type ``--with-{name}={parameter}``.
|
||||
name (str): name of a valid multi-valued variant
|
||||
activation_value (typing.Callable): callable that accepts a single
|
||||
value and returns the parameter to be used leading to an entry
|
||||
of the type ``--with-{name}={parameter}``.
|
||||
|
||||
The special value "prefix" can also be assigned and will return
|
||||
The special value 'prefix' can also be assigned and will return
|
||||
``spec[name].prefix`` as activation parameter.
|
||||
|
||||
Returns:
|
||||
@@ -805,22 +765,18 @@ def with_or_without(
|
||||
"""
|
||||
return self._activate_or_not(name, "with", "without", activation_value, variant)
    def enable_or_disable(
        self,
        name: str,
        activation_value: Optional[Union[Callable, str]] = None,
        variant: Optional[str] = None,
    ) -> List[str]:
    def enable_or_disable(self, name, activation_value=None, variant=None):
        """Same as
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
        but substitute ``with`` with ``enable`` and ``without`` with ``disable``.

        Args:
            name: name of a valid multi-valued variant
            activation_value: if present accepts a single value and returns the parameter to be
                used leading to an entry of the type ``--enable-{name}={parameter}``
            name (str): name of a valid multi-valued variant
            activation_value (typing.Callable): if present accepts a single value
                and returns the parameter to be used leading to an entry of the
                type ``--enable-{name}={parameter}``

                The special value "prefix" can also be assigned and will return
                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.

        Returns:
@@ -828,15 +784,15 @@ def enable_or_disable(
        """
        return self._activate_or_not(name, "enable", "disable", activation_value, variant)
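``enable_or_disable`` composes the same way; a hedged sketch showing the ``"prefix"`` activation value alongside it (the variant names here are hypothetical):

.. code-block:: python

    def configure_args(self):
        # --enable-gui / --disable-gui depending on the spec
        args = self.enable_or_disable("gui")
        # --with-libxml2=<libxml2 install prefix> when the variant is active
        args += self.with_or_without("libxml2", activation_value="prefix")
        return args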
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

    def installcheck(self) -> None:
    def installcheck(self):
        """Run "make" on the ``installcheck`` target, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("installcheck")

    @spack.phase_callbacks.run_after("install")
    def remove_libtool_archives(self) -> None:
    @spack.builder.run_after("install")
    def remove_libtool_archives(self):
        """Remove all .la files in prefix sub-folders if the package sets
        ``install_libtool_archives`` to be False.
        """
@@ -858,13 +814,12 @@ def setup_build_environment(self, env):
            env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)


def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
    dirs_seen: Set[Tuple[int, int]] = set()
    flags_spack: List[str] = []
    flags_external: List[str] = []
def _autoreconf_search_path_args(spec):
    dirs_seen = set()
    flags_spack, flags_external = [], []

    # We don't want to add an include flag for automake's default search path.
    for automake in spec.dependencies(name="automake", deptype="build"):
@@ -10,8 +10,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.phase_callbacks
from spack.directives import depends_on
import spack.builder

from .cmake import CMakeBuilder, CMakePackage

@@ -69,7 +68,12 @@ class CachedCMakeBuilder(CMakeBuilder):

    @property
    def cache_name(self):
        return f"{self.pkg.name}-{self.spec.architecture.platform}-{self.spec.dag_hash()}.cmake"
        return "{0}-{1}-{2}@{3}.cmake".format(
            self.pkg.name,
            self.pkg.spec.architecture,
            self.pkg.spec.compiler.name,
            self.pkg.spec.compiler.version,
        )

    @property
    def cache_path(self):
@@ -112,9 +116,7 @@ def initconfig_compiler_entries(self):
        # Fortran compiler is optional
        if "FC" in os.environ:
            spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
            system_fc_entry = cmake_cache_path(
                "CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
            )
            system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
        else:
            spack_fc_entry = "# No Fortran compiler defined in spec"
            system_fc_entry = "# No Fortran compiler defined in spec"
@@ -130,8 +132,8 @@ def initconfig_compiler_entries(self):
            " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
            " " + spack_fc_entry,
            "else()\n",
            " " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc),
            " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx),
            " " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
            " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
            " " + system_fc_entry,
            "endif()\n",
        ]
@@ -190,10 +192,7 @@ def initconfig_mpi_entries(self):

        entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc))
        entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx))

        # not all MPIs have Fortran wrappers
        if hasattr(spec["mpi"], "mpifc"):
            entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
        entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))

        # Check for slurm
        using_slurm = False
@@ -333,7 +332,7 @@ def std_cmake_args(self):
        args.extend(["-C", self.cache_path])
        return args

    @spack.phase_callbacks.run_after("install")
    @spack.builder.run_after("install")
    def install_cmake_cache(self):
        fs.mkdirp(self.pkg.spec.prefix.share.cmake)
        fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
@@ -350,10 +349,6 @@ class CachedCMakePackage(CMakePackage):

    CMakeBuilder = CachedCMakeBuilder

    # These dependencies are assumed in the builder
    depends_on("c", type="build")
    depends_on("cxx", type="build")

    def flag_handler(self, name, flags):
        if name in ("cflags", "cxxflags", "cppflags", "fflags"):
            return None, None, None  # handled in the cmake cache
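For context, a Spack ``flag_handler`` returns a triple that routes compiler flags to the compiler wrappers, to environment variables, or to build-system arguments; returning three ``None`` values, as above, drops the flags from all three channels because the cached-CMake machinery injects them through the generated cache file instead. A sketch of the convention (the default branch shown is an assumption, not taken from this diff):

.. code-block:: python

    def flag_handler(self, name, flags):
        if name in ("cflags", "cxxflags", "cppflags", "fflags"):
            return None, None, None  # handled in the cmake cache
        return flags, None, None     # default: inject via the compiler wrappers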
@@ -7,11 +7,10 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on
from spack.multimethod import when

from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests


class CargoPackage(spack.package_base.PackageBase):
@@ -28,7 +27,7 @@ class CargoPackage(spack.package_base.PackageBase):


@spack.builder.builder("cargo")
class CargoBuilder(BuilderWithDefaults):
class CargoBuilder(BaseBuilder):
    """The Cargo builder encodes the most common way of building software with
    a Rust Cargo.toml file. It has two phases that can be overridden, if need be:

@@ -78,7 +77,7 @@ def install(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory):
            fs.install_tree("out", prefix)

    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

    def check(self):
        """Run "cargo test"."""
@@ -9,7 +9,7 @@
import re
import sys
from itertools import chain
from typing import Any, List, Optional, Tuple
from typing import List, Optional, Set, Tuple

import llnl.util.filesystem as fs
from llnl.util.lang import stable_partition
@@ -18,15 +18,11 @@
import spack.deptypes as dt
import spack.error
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack import traverse
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from spack.util.environment import filter_system_paths

from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests

# Regex to extract the primary generator from the CMake generator
# string.
@@ -52,9 +48,9 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
    python_executable = pkg.spec["python"].command.path
    args.extend(
        [
            define("PYTHON_EXECUTABLE", python_executable),
            define("Python_EXECUTABLE", python_executable),
            define("Python3_EXECUTABLE", python_executable),
            CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
            CMakeBuilder.define("Python_EXECUTABLE", python_executable),
            CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
        ]
    )

@@ -89,7 +85,7 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
        ipo = False

    if cmake.satisfies("@3.9:"):
        args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
        args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

    # Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
    # find_package may search there. This is not what we want.
@@ -97,36 +93,30 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
    # Do not populate CMake User Package Registry
    if cmake.satisfies("@3.15:"):
        # see https://cmake.org/cmake/help/latest/policy/CMP0090.html
        args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
    elif cmake.satisfies("@3.1:"):
        # see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
        args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
        args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))

    # Do not use CMake User/System Package Registry
    # https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
    if cmake.satisfies("@3.16:"):
        args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
        args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
    elif cmake.satisfies("@3.1:3.15"):
        args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
        args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))

    # Export a compilation database if supported.
    if _supports_compilation_databases(pkg):
        args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
        args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

    # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
    # https://cmake.org/cmake/help/latest/policy/CMP0042.html
    if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
        args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))

    # Disable find package's config mode for versions of Boost that
    # didn't provide it. See https://github.com/spack/spack/issues/20169
    # and https://cmake.org/cmake/help/latest/module/FindBoost.html
    if pkg.spec.satisfies("^boost@:1.69.0"):
        args.append(define("Boost_NO_BOOST_CMAKE", True))
        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))


def generator(*names: str, default: Optional[str] = None) -> None:
def generator(*names: str, default: Optional[str] = None):
    """The build system generator to use.

    See ``cmake --help`` for a list of valid generators.
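The hunk ends inside this docstring; for orientation, packages use the directive roughly like this (a minimal sketch with a hypothetical package):

.. code-block:: python

    class Hypothetical(CMakePackage):
        # Allow both generators, defaulting to Ninja
        generator("ninja", "make", default="ninja")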
@@ -167,18 +157,15 @@ def _values(x):
def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
    """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
    attribute of direct build/test and transitive link dependencies."""
    edges = traverse.traverse_topo_edges_generator(
        traverse.with_artificial_edges([pkg.spec]),
        visitor=traverse.MixedDepthVisitor(
            direct=dt.BUILD | dt.TEST, transitive=dt.LINK, key=traverse.by_dag_hash
        ),
        key=traverse.by_dag_hash,
        root=False,
        all_edges=False,  # cover all nodes, not all edges
    )
    ordered_specs = [edge.spec for edge in edges]
    # Add direct build/test deps
    selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
    # Add transitive link deps
    selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
    # Separate out externals so they do not shadow Spack prefixes
    externals, spack_built = stable_partition((s for s in ordered_specs), lambda x: x.external)
    externals, spack_built = stable_partition(
        (s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
        lambda x: x.external,
    )

    return filter_system_paths(
        path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
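The ordering contract matters here: ``stable_partition`` must keep the topological order within each half so that externals can be appended after Spack-built prefixes without reshuffling either group. A minimal, runnable re-implementation illustrating that guarantee (not the ``llnl.util.lang`` original; the spec names are hypothetical):

.. code-block:: python

    from itertools import chain

    def stable_partition(iterable, predicate):
        """Split into (matching, non-matching), preserving relative order."""
        yes, no = [], []
        for item in iterable:
            (yes if predicate(item) else no).append(item)
        return yes, no

    externals, spack_built = stable_partition(
        ["zlib", "external-mpi", "hdf5"], lambda name: name.startswith("external-")
    )
    print(list(chain(spack_built, externals)))  # ['zlib', 'hdf5', 'external-mpi']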
@@ -276,15 +263,15 @@ def flags_to_build_system_args(self, flags):

    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
    def define(self, cmake_var: str, value: Any) -> str:
        return define(cmake_var, value)
    def define(self, *args, **kwargs):
        return self.builder.define(*args, **kwargs)

    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
        return define_from_variant(self, cmake_var, variant)
    def define_from_variant(self, *args, **kwargs):
        return self.builder.define_from_variant(*args, **kwargs)


@spack.builder.builder("cmake")
class CMakeBuilder(BuilderWithDefaults):
class CMakeBuilder(BaseBuilder):
    """The cmake builder encodes the default way of building software with CMake. It
    has three phases that can be overridden:

@@ -334,15 +321,15 @@ class CMakeBuilder(BuilderWithDefaults):
    build_time_test_callbacks = ["check"]

    @property
    def archive_files(self) -> List[str]:
    def archive_files(self):
        """Files to archive for packages based on CMake"""
        files = [os.path.join(self.build_directory, "CMakeCache.txt")]
        if _supports_compilation_databases(self.pkg):
        if _supports_compilation_databases(self):
            files.append(os.path.join(self.build_directory, "compile_commands.json"))
        return files

    @property
    def root_cmakelists_dir(self) -> str:
    def root_cmakelists_dir(self):
        """The relative path to the directory containing CMakeLists.txt

        This path is relative to the root of the extracted tarball,
@@ -351,17 +338,16 @@ def root_cmakelists_dir(self) -> str:
        return self.pkg.stage.source_path

    @property
    def generator(self) -> str:
    def generator(self):
        if self.spec.satisfies("generator=make"):
            return "Unix Makefiles"
        if self.spec.satisfies("generator=ninja"):
            return "Ninja"
        raise ValueError(
            f'{self.spec.format()} has an unsupported value for the "generator" variant'
        )
        msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
        raise ValueError(msg)

    @property
    def std_cmake_args(self) -> List[str]:
    def std_cmake_args(self):
        """Standard cmake arguments provided as a property for
        convenience of package writers
        """
@@ -370,9 +356,7 @@ def std_cmake_args(self) -> List[str]:
        return args

    @staticmethod
    def std_args(
        pkg: spack.package_base.PackageBase, generator: Optional[str] = None
    ) -> List[str]:
    def std_args(pkg, generator=None):
        """Computes the standard cmake arguments for a generic package"""
        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
        generator = generator or default_generator
@@ -389,6 +373,7 @@ def std_args(
        except KeyError:
            build_type = "RelWithDebInfo"

        define = CMakeBuilder.define
        args = [
            "-G",
            generator,
@@ -420,31 +405,152 @@ def std_args(
        return args

    @staticmethod
    def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
        return define_cuda_architectures(pkg)
    def define_cuda_architectures(pkg):
        """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.

        ``cuda_arch`` is a variant composed of a list of target CUDA architectures and
        it is declared in the cuda package.

        This method is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.

        """
        cmake_flag = str()
        if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
            cmake_flag = CMakeBuilder.define(
                "CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
            )

        return cmake_flag

    @staticmethod
    def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
        return define_hip_architectures(pkg)
    def define_hip_architectures(pkg):
        """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.

        ``amdgpu_target`` is a variant composed of a list of the target HIP
        architectures and it is declared in the rocm package.

        This method is a no-op for cmake<3.18 and when the ``amdgpu_target`` variant is
        not set.

        """
        cmake_flag = str()
        if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
            cmake_flag = CMakeBuilder.define(
                "CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
            )

        return cmake_flag

    @staticmethod
    def define(cmake_var: str, value: Any) -> str:
        return define(cmake_var, value)
    def define(cmake_var, value):
        """Return a CMake command line argument that defines a variable.

    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
        return define_from_variant(self.pkg, cmake_var, variant)
        The resulting argument will convert boolean values to OFF/ON
        and lists/tuples to CMake semicolon-separated string lists. All other
        values will be interpreted as strings.

        Examples:

            .. code-block:: python

                [define('BUILD_SHARED_LIBS', True),
                 define('CMAKE_CXX_STANDARD', 14),
                 define('swr', ['avx', 'avx2'])]

            will generate the following configuration options:

            .. code-block:: console

                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2"]

        """
        # Map Python types onto CMake cache types: bools become BOOL ON/OFF,
        # non-string sequences become ";"-joined STRING lists
        if isinstance(value, bool):
            kind = "BOOL"
            value = "ON" if value else "OFF"
        else:
            kind = "STRING"
            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
                value = ";".join(str(v) for v in value)
            else:
                value = str(value)

        return "".join(["-D", cmake_var, ":", kind, "=", value])

    def define_from_variant(self, cmake_var, variant=None):
        """Return a CMake command line argument from the given variant's value.

        The optional ``variant`` argument defaults to the lower-case transform
        of ``cmake_var``.

        This utility function is similar to
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.

        Examples:

            Given a package with:

            .. code-block:: python

                variant('cxxstd', default='11', values=('11', '14'),
                        multi=False, description='')
                variant('shared', default=True, description='')
                variant('swr', values=any_combination_of('avx', 'avx2'),
                        description='')

            calling this function like:

            .. code-block:: python

                [self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
                 self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
                 self.define_from_variant('SWR')]

            will generate the following configuration options:

            .. code-block:: console

                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2"]

            for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``

        Note: if the provided variant is conditional, and the condition is not met,
        this function returns an empty string. CMake discards empty strings
        provided on the command line.
        """

        if variant is None:
            variant = cmake_var.lower()

        if not self.pkg.has_variant(variant):
            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

        if variant not in self.pkg.spec.variants:
            return ""

        value = self.pkg.spec.variants[variant].value
        if isinstance(value, (tuple, list)):
            # Sort multi-valued variants for reproducibility
            value = sorted(value)

        return self.define(cmake_var, value)

    @property
    def build_dirname(self) -> str:
    def build_dirname(self):
        """Directory name to use when building the package."""
        return f"spack-build-{self.pkg.spec.dag_hash(7)}"
        return "spack-build-%s" % self.pkg.spec.dag_hash(7)

    @property
    def build_directory(self) -> str:
    def build_directory(self):
        """Full-path to the directory to use when building the package."""
        return os.path.join(self.pkg.stage.path, self.build_dirname)

    def cmake_args(self) -> List[str]:
    def cmake_args(self):
        """List of all the arguments that must be passed to cmake, except:

        * CMAKE_INSTALL_PREFIX
@@ -454,12 +560,7 @@ def cmake_args(self) -> List[str]:
        """
        return []

    def cmake(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def cmake(self, pkg, spec, prefix):
        """Runs ``cmake`` in the build directory"""

        # skip cmake phase if it is an incremental develop build
@@ -474,12 +575,7 @@ def cmake(
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.cmake(*options)

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def build(self, pkg, spec, prefix):
        """Make the build targets"""
        with fs.working_dir(self.build_directory):
            if self.generator == "Unix Makefiles":
@@ -488,12 +584,7 @@ def build(
                self.build_targets.append("-v")
            pkg.module.ninja(*self.build_targets)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def install(self, pkg, spec, prefix):
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
            if self.generator == "Unix Makefiles":
@@ -501,9 +592,9 @@ def install(
            elif self.generator == "Ninja":
                pkg.module.ninja(*self.install_targets)

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

    def check(self) -> None:
    def check(self):
        """Search the CMake-generated files for the targets ``test`` and ``check``,
        and runs them if found.
        """
@@ -514,133 +605,3 @@ def check(self) -> None:
        elif self.generator == "Ninja":
            self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
            self.pkg._if_ninja_target_execute("check")
def define(cmake_var: str, value: Any) -> str:
    """Return a CMake command line argument that defines a variable.

    The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
    semicolon-separated string lists. All other values will be interpreted as strings.

    Examples:

        .. code-block:: python

            [define("BUILD_SHARED_LIBS", True),
             define("CMAKE_CXX_STANDARD", 14),
             define("swr", ["avx", "avx2"])]

        will generate the following configuration options:

        .. code-block:: console

            ["-DBUILD_SHARED_LIBS:BOOL=ON",
             "-DCMAKE_CXX_STANDARD:STRING=14",
             "-DSWR:STRING=avx;avx2"]

    """
    # Map Python types onto CMake cache types: bools become BOOL ON/OFF,
    # non-string sequences become ";"-joined STRING lists
    if isinstance(value, bool):
        kind = "BOOL"
        value = "ON" if value else "OFF"
    else:
        kind = "STRING"
        if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
            value = ";".join(str(v) for v in value)
        else:
            value = str(value)

    return "".join(["-D", cmake_var, ":", kind, "=", value])


def define_from_variant(
    pkg: spack.package_base.PackageBase, cmake_var: str, variant: Optional[str] = None
) -> str:
    """Return a CMake command line argument from the given variant's value.

    The optional ``variant`` argument defaults to the lower-case transform
    of ``cmake_var``.

    Examples:

        Given a package with:

        .. code-block:: python

            variant("cxxstd", default="11", values=("11", "14"),
                    multi=False, description="")
            variant("shared", default=True, description="")
            variant("swr", values=any_combination_of("avx", "avx2"),
                    description="")

        calling this function like:

        .. code-block:: python

            [
                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
                self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
                self.define_from_variant("SWR"),
            ]

        will generate the following configuration options:

        .. code-block:: console

            [
                "-DBUILD_SHARED_LIBS:BOOL=ON",
                "-DCMAKE_CXX_STANDARD:STRING=14",
                "-DSWR:STRING=avx;avx2",
            ]

        for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``

    Note: if the provided variant is conditional, and the condition is not met, this function
    returns an empty string. CMake discards empty strings provided on the command line.
    """
    if variant is None:
        variant = cmake_var.lower()

    if not pkg.has_variant(variant):
        raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))

    if variant not in pkg.spec.variants:
        return ""

    value = pkg.spec.variants[variant].value
    if isinstance(value, (tuple, list)):
        # Sort multi-valued variants for reproducibility
        value = sorted(value)

    return define(cmake_var, value)


def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
    """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.

    ``amdgpu_target`` is a variant composed of a list of the target HIP
    architectures and it is declared in the rocm package.

    This method is a no-op for cmake<3.18 and when the ``amdgpu_target`` variant is
    not set.

    """
    if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
        return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)

    return ""


def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
    """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.

    ``cuda_arch`` is a variant composed of a list of target CUDA architectures and
    it is declared in the cuda package.

    This method is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.

    """
    if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
        return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
    return ""
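Since ``define`` is fully specified above, the architecture helpers reduce to a single flag; assuming a hypothetical spec with ``cuda_arch=70,80`` and ``amdgpu_target=gfx90a``, they would produce:

.. code-block:: python

    print(define("CMAKE_CUDA_ARCHITECTURES", ["70", "80"]))
    # -DCMAKE_CUDA_ARCHITECTURES:STRING=70;80
    print(define("CMAKE_HIP_ARCHITECTURES", ["gfx90a"]))
    # -DCMAKE_HIP_ARCHITECTURES:STRING=gfx90a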
@@ -5,22 +5,15 @@
import itertools
import os
import pathlib
import platform
import re
import sys
from typing import Dict, List, Optional, Sequence, Tuple, Union

import archspec.cpu
from typing import Dict, List, Sequence, Tuple, Union

import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
from llnl.util.lang import classproperty

import spack
import spack.compilers.error
import spack.compilers.libraries
import spack.config
import spack.compiler
import spack.package_base
import spack.paths
import spack.util.executable

# Local "type" for type hints
@@ -51,9 +44,6 @@ class CompilerPackage(spack.package_base.PackageBase):
    #: Static definition of languages supported by this class
    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]

    #: Relative path to compiler wrappers
    link_paths: Dict[str, str] = {}

    def __init__(self, spec: "spack.spec.Spec"):
        super().__init__(spec)
        msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -88,14 +78,14 @@ def executables(cls) -> Sequence[str]:
        ]

    @classmethod
    def determine_version(cls, exe: Path) -> str:
    def determine_version(cls, exe: Path):
        version_argument = cls.compiler_version_argument
        if isinstance(version_argument, str):
            version_argument = (version_argument,)

        for va in version_argument:
            try:
                output = compiler_output(exe, version_argument=va)
                output = spack.compiler.get_compiler_version_output(exe, va)
                match = re.search(cls.compiler_version_regex, output)
                if match:
                    return ".".join(match.groups())
@@ -106,7 +96,6 @@ def determine_version(cls, exe: Path) -> str:
                    f"[{__file__}] Cannot detect a valid version for the executable "
                    f"{str(exe)}, for package '{cls.name}': {e}"
                )
        return ""
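A subclass wires this up by providing the version argument and a regex whose groups are joined with dots; a hedged sketch of a hypothetical compiler package (the class name, argument, and regex are all illustrative, not from this diff):

.. code-block:: python

    class MyCompiler(CompilerPackage):
        compiler_version_argument = "--version"
        # "mycc (toolchain) 12.3.0" -> groups ("12", "3", "0") -> "12.3.0"
        compiler_version_regex = r"mycc \(toolchain\) (\d+)\.(\d+)\.(\d+)"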
    @classmethod
    def compiler_bindir(cls, prefix: Path) -> Path:
@@ -154,184 +143,3 @@ def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
        # path determination is separated so it can be reused in subclasses
        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}

    #: Returns the argument needed to set the RPATH, or None if it does not exist
    rpath_arg: Optional[str] = "-Wl,-rpath,"
    #: Flag that needs to be used to pass an argument to the linker
    linker_arg: str = "-Wl,"
    #: Flag used to produce Position Independent Code
    pic_flag: str = "-fPIC"
    #: Flag used to get verbose output
    verbose_flags: str = "-v"
    #: Flag to activate OpenMP support
    openmp_flag: str = "-fopenmp"

    def standard_flag(self, *, language: str, standard: str) -> str:
        """Returns the flag used to enforce a given standard for a language"""
        if language not in self.supported_languages:
            raise spack.compilers.error.UnsupportedCompilerFlag(
                f"{self.spec} does not provide the '{language}' language"
            )
        try:
            return self._standard_flag(language=language, standard=standard)
        except (KeyError, RuntimeError) as e:
            raise spack.compilers.error.UnsupportedCompilerFlag(
                f"{self.spec} does not provide the '{language}' standard {standard}"
            ) from e

    def _standard_flag(self, *, language: str, standard: str) -> str:
        raise NotImplementedError("Must be implemented by derived classes")
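Derived compiler packages override ``_standard_flag``; a hedged sketch of a GCC-like mapping (the table below is illustrative, not taken from this diff). A missing entry raises ``KeyError``, which ``standard_flag`` above converts into ``UnsupportedCompilerFlag``:

.. code-block:: python

    def _standard_flag(self, *, language: str, standard: str) -> str:
        flags = {
            ("c", "11"): "-std=c11",
            ("cxx", "14"): "-std=c++14",
            ("cxx", "17"): "-std=c++17",
        }
        return flags[(language, standard)]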
    @property
    def disable_new_dtags(self) -> str:
        if platform.system() == "Darwin":
            return ""
        return "--disable-new-dtags"

    @property
    def enable_new_dtags(self) -> str:
        if platform.system() == "Darwin":
            return ""
        return "--enable-new-dtags"

    def setup_dependent_build_environment(self, env, dependent_spec):
        # FIXME (compiler as nodes): check if this is good enough or should be made more general

        # The package is not used as a compiler, so skip this setup
        if not any(
            lang in dependent_spec and dependent_spec[lang].name == self.spec.name
            for lang in ("c", "cxx", "fortran")
        ):
            return

        # Populate an object with the list of environment modifications and return it
        link_dir = pathlib.Path(spack.paths.build_env_path)
        env_paths = []

        for language, attr_name, wrapper_var_name, spack_var_name in [
            ("c", "cc", "CC", "SPACK_CC"),
            ("cxx", "cxx", "CXX", "SPACK_CXX"),
            ("fortran", "fortran", "F77", "SPACK_F77"),
            ("fortran", "fortran", "FC", "SPACK_FC"),
        ]:
            if language not in dependent_spec or dependent_spec[language].name != self.spec.name:
                continue

            if not hasattr(self, attr_name):
                continue

            compiler = getattr(self, attr_name)
            env.set(spack_var_name, compiler)

            if language not in self.link_paths:
                continue

            wrapper_path = link_dir / self.link_paths.get(language)
            env.set(wrapper_var_name, str(wrapper_path))
            env.set(f"SPACK_{wrapper_var_name}_RPATH_ARG", self.rpath_arg)

            uarch = dependent_spec.architecture.target
            version_number, _ = archspec.cpu.version_components(
                self.spec.version.dotted_numeric_string
            )
            try:
                isa_arg = uarch.optimization_flags(self.archspec_name(), version_number)
            except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
                isa_arg = ""

            if isa_arg:
                env.set(f"SPACK_TARGET_ARGS_{attr_name.upper()}", isa_arg)

            # Add spack build environment path with compiler wrappers first in
            # the path. We add the compiler wrapper path, which includes default
            # wrappers (cc, c++, f77, f90), AND a subdirectory containing
            # compiler-specific symlinks. The latter ensures that builds that
            # are sensitive to the *name* of the compiler see the right name when
            # we're building with the wrappers.
            #
            # Conflicts on case-insensitive systems (like "CC" and "cc") are
            # handled by putting one in the <build_env_path>/case-insensitive
            # directory. Add that to the path too.
            compiler_specific = os.path.join(
                spack.paths.build_env_path, os.path.dirname(self.link_paths[language])
            )
            for item in [spack.paths.build_env_path, compiler_specific]:
                env_paths.append(item)
                ci = os.path.join(item, "case-insensitive")
                if os.path.isdir(ci):
                    env_paths.append(ci)

        # FIXME (compiler as nodes): make these paths language specific
        env.set("SPACK_LINKER_ARG", self.linker_arg)

        paths = _implicit_rpaths(pkg=self)
        if paths:
            env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(paths))

        # Check whether we want to force RPATH or RUNPATH
        if spack.config.CONFIG.get("config:shared_linking:type") == "rpath":
            env.set("SPACK_DTAGS_TO_STRIP", self.enable_new_dtags)
            env.set("SPACK_DTAGS_TO_ADD", self.disable_new_dtags)
        else:
            env.set("SPACK_DTAGS_TO_STRIP", self.disable_new_dtags)
            env.set("SPACK_DTAGS_TO_ADD", self.enable_new_dtags)

        spec = self.spec
        if spec.extra_attributes:
            extra_rpaths = spec.extra_attributes.get("extra_rpaths")
            if extra_rpaths:
                extra_rpaths = ":".join(extra_rpaths)
                env.append_path("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)

        for item in env_paths:
            env.prepend_path("SPACK_ENV_PATH", item)

    def archspec_name(self) -> str:
        """Name that archspec uses to refer to this compiler"""
        return self.spec.name


def _implicit_rpaths(pkg: spack.package_base.PackageBase) -> List[str]:
    detector = spack.compilers.libraries.CompilerPropertyDetector(pkg.spec)
    paths = detector.implicit_rpaths()
    return paths


@memoized
def _compiler_output(
    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
    """Returns the output from the compiler invoked with the given version argument.

    Args:
        compiler_path: path of the compiler to be invoked
        version_argument: the argument used to extract version information
    """
    compiler = spack.util.executable.Executable(compiler_path)
    compiler_invocation_args = {
        "output": str,
        "error": str,
        "ignore_errors": ignore_errors,
        "timeout": 120,
        "fail_on_error": True,
    }
    if version_argument:
        output = compiler(version_argument, **compiler_invocation_args)
    else:
        output = compiler(**compiler_invocation_args)
    return output


def compiler_output(
    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
    """Wrapper for _compiler_output()."""
    # This ensures that we memoize compiler output by *absolute path*,
    # not just executable name. If we don't do this, and the path changes
    # (e.g., during testing), we can get incorrect results.
    if not os.path.isabs(compiler_path):
        compiler_path = spack.util.executable.which_string(compiler_path, required=True)

    return _compiler_output(
        compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
    )
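The practical effect of normalizing before memoizing: a relative name and its resolved absolute path share one cache entry, so test harnesses that shuffle ``PATH`` cannot poison the cache. A hypothetical usage (assumes a compiler named ``gcc`` on ``PATH`` that resolves to ``/usr/bin/gcc``):

.. code-block:: python

    out1 = compiler_output("gcc", version_argument="--version")
    out2 = compiler_output("/usr/bin/gcc", version_argument="--version")
    # both calls consult the same @memoized entry, keyed on the absolute path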
@@ -180,6 +180,13 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%gcc@7:", when="+cuda ^cuda@:9.1 target=x86_64:")
    conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=x86_64:")
    conflicts("%gcc@9:", when="+cuda ^cuda@:10.2.89 target=x86_64:")
    conflicts("%pgi@:14.8", when="+cuda ^cuda@:7.0.27 target=x86_64:")
    conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
    conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
    conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
    conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
    conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
    conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
    conflicts("%clang@:3.7,4:", when="+cuda ^cuda@8.0:9.0 target=x86_64:")
    conflicts("%clang@:3.7,4.1:", when="+cuda ^cuda@9.1 target=x86_64:")
@@ -205,6 +212,9 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
    conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=ppc64le:")
    conflicts("%gcc@9:", when="+cuda ^cuda@:10.1.243 target=ppc64le:")
    # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
    conflicts("%pgi", when="+cuda ^cuda@:8 target=ppc64le:")
    conflicts("%pgi@:16", when="+cuda ^cuda@:9.1.185 target=ppc64le:")
    conflicts("%pgi@:17", when="+cuda ^cuda@:10 target=ppc64le:")
    conflicts("%clang@4:", when="+cuda ^cuda@:9.0.176 target=ppc64le:")
    conflicts("%clang@5:", when="+cuda ^cuda@:9.1 target=ppc64le:")
    conflicts("%clang@6:", when="+cuda ^cuda@:9.2 target=ppc64le:")
@@ -7,9 +7,8 @@
import spack.builder
import spack.directives
import spack.package_base
import spack.phase_callbacks

from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests


class Package(spack.package_base.PackageBase):
@@ -27,7 +26,7 @@ class Package(spack.package_base.PackageBase):


@spack.builder.builder("generic")
class GenericBuilder(BuilderWithDefaults):
class GenericBuilder(BaseBuilder):
    """A builder for a generic build system that requires packagers
    to implement an "install" phase.
    """
@@ -45,7 +44,7 @@ class GenericBuilder(BuilderWithDefaults):
    install_time_test_callbacks = []

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

    # unconditionally perform any post-install phase tests
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)
@@ -7,11 +7,10 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends
from spack.multimethod import when

from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests


class GoPackage(spack.package_base.PackageBase):
@@ -33,7 +32,7 @@ class GoPackage(spack.package_base.PackageBase):


@spack.builder.builder("go")
class GoBuilder(BuilderWithDefaults):
class GoBuilder(BaseBuilder):
    """The Go builder encodes the most common way of building software with
    a golang go.mod file. It has two phases that can be overridden, if need be:

@@ -100,7 +99,7 @@ def install(self, pkg, spec, prefix):
        fs.mkdirp(prefix.bin)
        fs.install(pkg.name, prefix.bin)

    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

    def check(self):
        """Run ``go test .`` in the source directory"""
@@ -22,8 +22,8 @@
    install,
)

import spack.builder
import spack.error
import spack.phase_callbacks
from spack.build_environment import dso_suffix
from spack.error import InstallError
from spack.util.environment import EnvironmentModifications
@@ -1163,7 +1163,7 @@ def _determine_license_type(self):
        debug_print(license_type)
        return license_type

    @spack.phase_callbacks.run_before("install")
    @spack.builder.run_before("install")
    def configure(self):
        """Generates the silent.cfg file to pass to installer.sh.

@@ -1250,7 +1250,7 @@ def install(self, spec, prefix):
        for f in glob.glob("%s/intel*log" % tmpdir):
            install(f, dst)

    @spack.phase_callbacks.run_after("install")
    @spack.builder.run_after("install")
    def validate_install(self):
        # Sometimes the installer exits with an error but doesn't pass a
        # non-zero exit code to spack. Check for the existence of a 'bin'
@@ -1258,7 +1258,7 @@ def validate_install(self):
        if not os.path.exists(self.prefix.bin):
            raise InstallError("The installer has failed to install anything.")

    @spack.phase_callbacks.run_after("install")
    @spack.builder.run_after("install")
    def configure_rpath(self):
        if "+rpath" not in self.spec:
            return
@@ -1276,7 +1276,7 @@ def configure_rpath(self):
        with open(compiler_cfg, "w") as fh:
            fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))

    @spack.phase_callbacks.run_after("install")
    @spack.builder.run_after("install")
    def configure_auto_dispatch(self):
        if self._has_compilers:
            if "auto_dispatch=none" in self.spec:
@@ -1300,7 +1300,7 @@ def configure_auto_dispatch(self):
        with open(compiler_cfg, "a") as fh:
            fh.write("-ax{0}\n".format(",".join(ad)))

    @spack.phase_callbacks.run_after("install")
    @spack.builder.run_after("install")
    def filter_compiler_wrappers(self):
        if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
            bin_dir = self.component_bin_dir("mpi")
@@ -1308,7 +1308,7 @@ def filter_compiler_wrappers(self):
                f = os.path.join(bin_dir, f)
                filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)

    @spack.phase_callbacks.run_after("install")
    @spack.builder.run_after("install")
    def uninstall_ism(self):
        # The "Intel(R) Software Improvement Program" [ahem] gets installed,
        # apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@@ -1340,7 +1340,7 @@ def base_lib_dir(self):
        debug_print(d)
        return d

    @spack.phase_callbacks.run_after("install")
    @spack.builder.run_after("install")
    def modify_LLVMgold_rpath(self):
        """Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.
@@ -8,14 +8,11 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when

from ._checks import (
    BuilderWithDefaults,
    BaseBuilder,
    apply_macos_rpath_fixups,
    execute_build_time_tests,
    execute_install_time_tests,
@@ -39,7 +36,7 @@ class MakefilePackage(spack.package_base.PackageBase):


@spack.builder.builder("makefile")
class MakefileBuilder(BuilderWithDefaults):
class MakefileBuilder(BaseBuilder):
    """The Makefile builder encodes the most common way of building software with
    Makefiles. It has three phases that can be overridden, if need be:

@@ -94,50 +91,35 @@ class MakefileBuilder(BuilderWithDefaults):
    install_time_test_callbacks = ["installcheck"]

    @property
    def build_directory(self) -> str:
    def build_directory(self):
        """Return the directory containing the main Makefile."""
        return self.pkg.stage.source_path

    def edit(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def edit(self, pkg, spec, prefix):
        """Edit the Makefile before calling make. The default is a no-op."""
        pass

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def build(self, pkg, spec, prefix):
        """Run "make" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.build_targets)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def install(self, pkg, spec, prefix):
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

    def check(self) -> None:
    def check(self):
        """Run "make" on the ``test`` and ``check`` targets, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("test")
            self.pkg._if_make_target_execute("check")

    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

    def installcheck(self) -> None:
    def installcheck(self):
        """Searches the Makefile for an ``installcheck`` target
        and runs it if found.
        """
@@ -145,4 +127,4 @@ def installcheck(self) -> None:
            self.pkg._if_make_target_execute("installcheck")

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
@@ -10,7 +10,7 @@
from spack.multimethod import when
from spack.util.executable import which

from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder


class MavenPackage(spack.package_base.PackageBase):
@@ -34,7 +34,7 @@ class MavenPackage(spack.package_base.PackageBase):


@spack.builder.builder("maven")
class MavenBuilder(BuilderWithDefaults):
class MavenBuilder(BaseBuilder):
    """The Maven builder encodes the default way to build software with Maven.
    It has two phases that can be overridden, if need be:
@@ -9,13 +9,10 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when

from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests


class MesonPackage(spack.package_base.PackageBase):
@@ -65,7 +62,7 @@ def flags_to_build_system_args(self, flags):


@spack.builder.builder("meson")
class MesonBuilder(BuilderWithDefaults):
class MesonBuilder(BaseBuilder):
    """The Meson builder encodes the default way to build software with Meson.
    The builder has three phases that can be overridden, if need be:

@@ -115,7 +112,7 @@ def archive_files(self):
        return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]

    @property
    def root_mesonlists_dir(self) -> str:
    def root_mesonlists_dir(self):
        """Relative path to the directory containing meson.build

        This path is relative to the root of the extracted tarball,
@@ -124,7 +121,7 @@ def root_mesonlists_dir(self) -> str:
        return self.pkg.stage.source_path

    @property
    def std_meson_args(self) -> List[str]:
    def std_meson_args(self):
        """Standard meson arguments provided as a property for convenience
        of package writers.
        """
@@ -135,7 +132,7 @@ def std_meson_args(self) -> List[str]:
        return std_meson_args

    @staticmethod
    def std_args(pkg) -> List[str]:
    def std_args(pkg):
        """Standard meson arguments for a generic package."""
        try:
            build_type = pkg.spec.variants["buildtype"].value
@@ -175,7 +172,7 @@ def build_directory(self):
        """Directory to use when building the package."""
        return os.path.join(self.pkg.stage.path, self.build_dirname)

    def meson_args(self) -> List[str]:
    def meson_args(self):
        """List of arguments that must be passed to meson, except:

        * ``--prefix``
@@ -188,12 +185,7 @@ def meson_args(self) -> List[str]:
        """
        return []

    def meson(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def meson(self, pkg, spec, prefix):
        """Run ``meson`` in the build directory"""
        options = []
        if self.spec["meson"].satisfies("@0.64:"):
@@ -204,31 +196,21 @@ def meson(
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.meson(*options)

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def build(self, pkg, spec, prefix):
        """Make the build targets"""
        options = ["-v"]
        options += self.build_targets
        with fs.working_dir(self.build_directory):
            pkg.module.ninja(*options)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def install(self, pkg, spec, prefix):
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
            pkg.module.ninja(*self.install_targets)

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

    def check(self) -> None:
    def check(self):
        """Search Meson-generated files for the target ``test`` and run it if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_ninja_target_execute("test")
@@ -10,7 +10,7 @@
import spack.package_base
from spack.directives import build_system, conflicts

from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder


class MSBuildPackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class MSBuildPackage(spack.package_base.PackageBase):


@spack.builder.builder("msbuild")
class MSBuildBuilder(BuilderWithDefaults):
class MSBuildBuilder(BaseBuilder):
    """The MSBuild builder encodes the most common way of building software with
    Microsoft's MSBuild tool. It has two phases that can be overridden, if need be:

@@ -75,7 +75,7 @@ def toolchain_version(self):
        Override this method to select a specific version of the toolchain or change
        selection heuristics.
        Default is whatever version of msvc has been selected by concretization"""
        return "v" + self.spec["msvc"].package.platform_toolset_ver
        return "v" + self.pkg.compiler.platform_toolset_ver

    @property
    def std_msbuild_args(self):
@@ -10,7 +10,7 @@
import spack.package_base
from spack.directives import build_system, conflicts

from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder


class NMakePackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class NMakePackage(spack.package_base.PackageBase):


@spack.builder.builder("nmake")
class NMakeBuilder(BuilderWithDefaults):
class NMakeBuilder(BaseBuilder):
    """The NMake builder encodes the most common way of building software with
    Microsoft's NMake tool. It has two phases that can be overridden, if need be:
@@ -7,7 +7,7 @@
from spack.directives import build_system, extends
from spack.multimethod import when

from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder


class OctavePackage(spack.package_base.PackageBase):
@@ -29,7 +29,7 @@ class OctavePackage(spack.package_base.PackageBase):


@spack.builder.builder("octave")
class OctaveBuilder(BuilderWithDefaults):
class OctaveBuilder(BaseBuilder):
    """The octave builder provides the following phases that can be overridden:

    1. :py:meth:`~.OctaveBuilder.install`
@@ -140,7 +140,7 @@ def setup_run_environment(self, env):
            $ source {prefix}/{component}/{version}/env/vars.sh
        """
        # Only if environment modifications are desired (default is +envmods)
        if "+envmods" in self.spec:
        if "~envmods" not in self.spec:
            env.extend(
                EnvironmentModifications.from_sourcing_file(
                    self.component_prefix.env.join("vars.sh"), *self.env_script_args
@@ -255,7 +255,7 @@ def libs(self):
        return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)


class IntelOneApiLibraryPackageWithSdk(IntelOneApiLibraryPackage):
class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
    """Base class for Intel oneAPI library packages with SDK components.

    Contains some convenient default implementations for libraries

@@ -10,12 +10,11 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends
from spack.install_test import SkipTest, test_part
from spack.util.executable import Executable

from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests


class PerlPackage(spack.package_base.PackageBase):
@@ -85,7 +84,7 @@ def test_use(self):


@spack.builder.builder("perl")
class PerlBuilder(BuilderWithDefaults):
class PerlBuilder(BaseBuilder):
    """The perl builder provides four phases that can be overridden, if required:

    1. :py:meth:`~.PerlBuilder.configure`
@@ -164,7 +163,7 @@ def configure(self, pkg, spec, prefix):
    # Build.PL may be too long causing the build to fail. Patching the shebang
    # does not happen until after install so set '/usr/bin/env perl' here in
    # the Build script.
    @spack.phase_callbacks.run_after("configure")
    @spack.builder.run_after("configure")
    def fix_shebang(self):
        if self.build_method == "Build.PL":
            pattern = "#!{0}".format(self.spec["perl"].command.path)
@@ -176,7 +175,7 @@ def build(self, pkg, spec, prefix):
        self.build_executable()

    # Ensure that tests run after build (if requested):
    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

    def check(self):
        """Runs built-in tests of a Perl package."""

@@ -24,7 +24,6 @@
import spack.detection
import spack.multimethod
import spack.package_base
import spack.phase_callbacks
import spack.platforms
import spack.repo
import spack.spec
@@ -35,7 +34,7 @@
from spack.spec import Spec
from spack.util.prefix import Prefix

from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests


def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
@@ -277,6 +276,10 @@ def update_external_dependencies(self, extendee_spec=None):
        if not python.architecture.target:
            python.architecture.target = archspec.cpu.host().family.name

        # Ensure compiler information is present
        if not python.compiler:
            python.compiler = self.spec.compiler

        python.external_path = self.spec.external_path
        python._mark_concrete()
        self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
@@ -371,7 +374,7 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
        return None

    @property
    def python_spec(self) -> Spec:
    def python_spec(self):
        """Get python-venv if it exists or python otherwise."""
        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
        return python
@@ -422,7 +425,7 @@ def libs(self) -> LibraryList:


@spack.builder.builder("python_pip")
class PythonPipBuilder(BuilderWithDefaults):
class PythonPipBuilder(BaseBuilder):
    phases = ("install",)

    #: Names associated with package methods in the old build-system format
@@ -540,4 +543,4 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
        with fs.working_dir(self.build_directory):
            pip(*args)

    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

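The ``python_spec`` property above leans on Python's short-circuiting ``or``: the ``python-venv`` dependency list wins when it is non-empty, and the ``python`` list is only consulted otherwise. A self-contained sketch of that idiom (the dictionary stand-in is hypothetical, not Spack's API):

def pick_python(deps_by_name):
    # deps_by_name stands in for self.spec.dependencies(name) lookups
    python, *_ = deps_by_name.get("python-venv") or deps_by_name.get("python")
    return python


assert pick_python({"python-venv": ["venv-dep"], "python": ["py-dep"]}) == "venv-dep"
assert pick_python({"python-venv": [], "python": ["py-dep"]}) == "py-dep"
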
@@ -6,10 +6,9 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on

from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests


class QMakePackage(spack.package_base.PackageBase):
@@ -31,7 +30,7 @@ class QMakePackage(spack.package_base.PackageBase):


@spack.builder.builder("qmake")
class QMakeBuilder(BuilderWithDefaults):
class QMakeBuilder(BaseBuilder):
    """The qmake builder provides three phases that can be overridden:

    1. :py:meth:`~.QMakeBuilder.qmake`
@@ -82,4 +81,4 @@ def check(self):
        with working_dir(self.build_directory):
            self.pkg._if_make_target_execute("check")

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

@@ -8,7 +8,7 @@
import spack.package_base
from spack.directives import build_system, extends, maintainers

from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder


class RubyPackage(spack.package_base.PackageBase):
@@ -28,7 +28,7 @@ class RubyPackage(spack.package_base.PackageBase):


@spack.builder.builder("ruby")
class RubyBuilder(BuilderWithDefaults):
class RubyBuilder(BaseBuilder):
    """The Ruby builder provides two phases that can be overridden if required:

    #. :py:meth:`~.RubyBuilder.build`

@@ -4,10 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on

from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests


class SConsPackage(spack.package_base.PackageBase):
@@ -29,7 +28,7 @@ class SConsPackage(spack.package_base.PackageBase):


@spack.builder.builder("scons")
class SConsBuilder(BuilderWithDefaults):
class SConsBuilder(BaseBuilder):
    """The Scons builder provides the following phases that can be overridden:

    1. :py:meth:`~.SConsBuilder.build`
@@ -80,4 +79,4 @@ def build_test(self):
        """
        pass

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

@@ -11,12 +11,11 @@
import spack.builder
import spack.install_test
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when
from spack.util.executable import Executable

from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests


class SIPPackage(spack.package_base.PackageBase):
@@ -104,7 +103,7 @@ def test_imports(self):


@spack.builder.builder("sip")
class SIPBuilder(BuilderWithDefaults):
class SIPBuilder(BaseBuilder):
    """The SIP builder provides the following phases that can be overridden:

    * configure
@@ -171,4 +170,4 @@ def install_args(self):
        """Arguments to pass to install."""
        return []

    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

@@ -6,10 +6,9 @@

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on

from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests


class WafPackage(spack.package_base.PackageBase):
@@ -31,7 +30,7 @@ class WafPackage(spack.package_base.PackageBase):


@spack.builder.builder("waf")
class WafBuilder(BuilderWithDefaults):
class WafBuilder(BaseBuilder):
    """The WAF builder provides the following phases that can be overridden:

    * configure
@@ -137,7 +136,7 @@ def build_test(self):
        """
        pass

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

    def install_test(self):
        """Run unit tests after install.
@@ -147,4 +146,4 @@ def install_test(self):
        """
        pass

    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

@@ -6,30 +6,44 @@
import collections.abc
import copy
import functools
from typing import Dict, List, Optional, Tuple, Type
from typing import List, Optional, Tuple

from llnl.util import lang

import spack.error
import spack.multimethod
import spack.package_base
import spack.phase_callbacks
import spack.repo
import spack.spec
import spack.util.environment

#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS: Dict[str, Type["Builder"]] = {}
BUILDER_CLS = {}

#: An object of this kind is a shared global state used to collect callbacks during
#: class definition time, and is flushed when the class object is created at the end
#: of the class definition
#:
#: Args:
#:     attribute_name (str): name of the attribute that will be attached to the builder
#:     callbacks (list): container used to temporarily aggregate the callbacks
CallbackTemporaryStage = collections.namedtuple(
    "CallbackTemporaryStage", ["attribute_name", "callbacks"]
)

#: Shared global state to aggregate "@run_before" callbacks
_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
#: Shared global state to aggregate "@run_after" callbacks
_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])

#: Map id(pkg) to a builder, to avoid creating multiple
#: builders for the same package object.
_BUILDERS: Dict[int, "Builder"] = {}
_BUILDERS = {}


def builder(build_system_name: str):
def builder(build_system_name):
    """Class decorator used to register the default builder
    for a given build-system.

    Args:
        build_system_name: name of the build-system
        build_system_name (str): name of the build-system
    """

    def _decorator(cls):
@@ -40,9 +54,13 @@ def _decorator(cls):
    return _decorator


def create(pkg: spack.package_base.PackageBase) -> "Builder":
    """Given a package object with an associated concrete spec, return the builder object that can
    install it."""
def create(pkg):
    """Given a package object with an associated concrete spec,
    return the builder object that can install it.

    Args:
        pkg (spack.package_base.PackageBase): package for which we want the builder
    """
    if id(pkg) not in _BUILDERS:
        _BUILDERS[id(pkg)] = _create(pkg)
    return _BUILDERS[id(pkg)]
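A short usage sketch for the memoizing ``create`` above (``builder_for`` is a hypothetical helper; it assumes a concrete spec, whose ``package`` attribute is available after concretization):

import spack.builder


def builder_for(spec):
    pkg = spec.package  # package object associated with a concrete spec
    first = spack.builder.create(pkg)
    second = spack.builder.create(pkg)
    assert first is second  # _BUILDERS caches one builder per id(pkg)
    return first
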
@@ -57,7 +75,7 @@ def __call__(self, spec, prefix):
        return self.phase_fn(self.builder.pkg, spec, prefix)


def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
def get_builder_class(pkg, name: str) -> Optional[type]:
    """Return the builder class if a package module defines it."""
    cls = getattr(pkg.module, name, None)
    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
@@ -65,7 +83,7 @@ def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
    return None


def _create(pkg: spack.package_base.PackageBase) -> "Builder":
def _create(pkg):
    """Return a new builder object for the package object being passed as argument.

    The function inspects the build-system used by the package object and tries to:
@@ -85,7 +103,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
    to look for build-related methods in the ``*Package``.

    Args:
        pkg: package object for which we need a builder
        pkg (spack.package_base.PackageBase): package object for which we need a builder
    """
    package_buildsystem = buildsystem_name(pkg)
    default_builder_cls = BUILDER_CLS[package_buildsystem]
@@ -150,8 +168,8 @@ def __forward(self, *args, **kwargs):
    # with the same name is defined in the Package, it will override this definition
    # (when _ForwardToBaseBuilder is initialized)
    for method_name in (
        base_cls.phases  # type: ignore
        + base_cls.legacy_methods  # type: ignore
        base_cls.phases
        + base_cls.legacy_methods
        + getattr(base_cls, "legacy_long_methods", tuple())
        + ("setup_build_environment", "setup_dependent_build_environment")
    ):
@@ -163,14 +181,14 @@ def __forward(self):

        return __forward

    for attribute_name in base_cls.legacy_attributes:  # type: ignore
    for attribute_name in base_cls.legacy_attributes:
        setattr(
            _ForwardToBaseBuilder,
            attribute_name,
            property(forward_property_to_getattr(attribute_name)),
        )

    class Adapter(base_cls, metaclass=_PackageAdapterMeta):  # type: ignore
    class Adapter(base_cls, metaclass=_PackageAdapterMeta):
        def __init__(self, pkg):
            # Deal with custom phases in packages here
            if hasattr(pkg, "phases"):
@@ -195,18 +213,99 @@ def setup_dependent_build_environment(self, env, dependent_spec):
    return Adapter(pkg)


def buildsystem_name(pkg: spack.package_base.PackageBase) -> str:
def buildsystem_name(pkg):
    """Given a package object with an associated concrete spec,
    return the name of its build system."""
    return the name of its build system.

    Args:
        pkg (spack.package_base.PackageBase): package for which we want
            the build system name
    """
    try:
        return pkg.spec.variants["build_system"].value
    except KeyError:
        # We are reading an old spec without the build_system variant
        return pkg.legacy_buildsystem  # type: ignore
        return pkg.legacy_buildsystem


class PhaseCallbacksMeta(type):
    """Permits registering arbitrary functions during class definition and running them
    later, before or after a given install phase.

    Each method decorated with ``run_before`` or ``run_after`` gets temporarily
    stored in a global shared state when a class being defined is parsed by the Python
    interpreter. At class definition time that temporary storage gets flushed and a list
    of callbacks is attached to the class being defined.
    """

    def __new__(mcs, name, bases, attr_dict):
        for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
            staged_callbacks = temporary_stage.callbacks

            # Here we have an adapter from an old-style package. This means there is no
            # hierarchy of builders, and every callback that had to be combined between
            # *Package and *Builder has been combined already by _PackageAdapterMeta
            if name == "Adapter":
                continue

            # If we are here we have callbacks. To get a complete list, we accumulate all the
            # callbacks from base classes, we deduplicate them, then prepend what we have
            # registered here.
            #
            # The order should be:
            # 1. Callbacks are registered in order within the same class
            # 2. Callbacks defined in derived classes precede those defined in base
            #    classes
            callbacks_from_base = []
            for base in bases:
                current_callbacks = getattr(base, temporary_stage.attribute_name, None)
                if not current_callbacks:
                    continue
                callbacks_from_base.extend(current_callbacks)
            callbacks_from_base = list(lang.dedupe(callbacks_from_base))
            # Set the callbacks in this class and flush the temporary stage
            attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
            del temporary_stage.callbacks[:]

        return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)

    @staticmethod
    def run_after(phase, when=None):
        """Decorator to register a function for running after a given phase.

        Args:
            phase (str): phase after which the function must run.
            when (str): condition under which the function is run (if None, it is always run).
        """

        def _decorator(fn):
            key = (phase, when)
            item = (key, fn)
            _RUN_AFTER.callbacks.append(item)
            return fn

        return _decorator

    @staticmethod
    def run_before(phase, when=None):
        """Decorator to register a function for running before a given phase.

        Args:
            phase (str): phase before which the function must run.
            when (str): condition under which the function is run (if None, it is always run).
        """

        def _decorator(fn):
            key = (phase, when)
            item = (key, fn)
            _RUN_BEFORE.callbacks.append(item)
            return fn

        return _decorator
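The two decorators above only stage callbacks; the metaclass later attaches the staged list to the class being defined. A stripped-down, self-contained sketch of that staging step (all names here are illustrative, not Spack's):

_staged = []


def run_after(phase, when=None):
    # Record ((phase, when), fn) in module-level state; a metaclass would
    # normally flush this list onto the class under definition.
    def _decorator(fn):
        _staged.append(((phase, when), fn))
        return fn

    return _decorator


@run_after("install")
def fixup():
    pass


assert _staged == [(("install", None), fixup)]
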
class BuilderMeta(
    spack.phase_callbacks.PhaseCallbacksMeta,
    PhaseCallbacksMeta,
    spack.multimethod.MultiMethodMeta,
    type(collections.abc.Sequence),  # type: ignore
):
@@ -301,12 +400,8 @@ def __new__(mcs, name, bases, attr_dict):
        )

        combine_callbacks = _PackageAdapterMeta.combine_callbacks
        attr_dict[spack.phase_callbacks._RUN_BEFORE.attribute_name] = combine_callbacks(
            spack.phase_callbacks._RUN_BEFORE.attribute_name
        )
        attr_dict[spack.phase_callbacks._RUN_AFTER.attribute_name] = combine_callbacks(
            spack.phase_callbacks._RUN_AFTER.attribute_name
        )
        attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
        attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)

        return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)

@@ -326,8 +421,8 @@ def __init__(self, name, builder):
        self.name = name
        self.builder = builder
        self.phase_fn = self._select_phase_fn()
        self.run_before = self._make_callbacks(spack.phase_callbacks._RUN_BEFORE.attribute_name)
        self.run_after = self._make_callbacks(spack.phase_callbacks._RUN_AFTER.attribute_name)
        self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
        self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)

    def _make_callbacks(self, callbacks_attribute):
        result = []
@@ -388,103 +483,15 @@ def copy(self):
        return copy.deepcopy(self)


class BaseBuilder(metaclass=BuilderMeta):
    """An interface for builders, without any phases defined. This class is exposed in the package
    API, so that packagers can create a single class to define ``setup_build_environment`` and
    ``@run_before`` and ``@run_after`` callbacks that can be shared among different builders.
class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
    """A builder is a class that, given a package object (i.e. associated with
    concrete spec), knows how to install it.

    Example:
    The builder behaves like a sequence, and when iterated over returns the
    "phases" of the installation in the correct order.

    .. code-block:: python

        class AnyBuilder(BaseBuilder):
            @run_after("install")
            def fixup_install(self):
                # do something after the package is installed
                pass

            def setup_build_environment(self, env):
                env.set("MY_ENV_VAR", "my_value")

        class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
            pass

        class AutotoolsBuilder(autotools.AutotoolsBuilder, AnyBuilder):
            pass
    """

    def __init__(self, pkg: spack.package_base.PackageBase) -> None:
        self.pkg = pkg

    @property
    def spec(self) -> spack.spec.Spec:
        return self.pkg.spec

    @property
    def stage(self):
        return self.pkg.stage

    @property
    def prefix(self):
        return self.pkg.prefix

    def setup_build_environment(
        self, env: spack.util.environment.EnvironmentModifications
    ) -> None:
        """Sets up the build environment for a package.

        This method will be called before the current package prefix exists in
        Spack's store.

        Args:
            env: environment modifications to be applied when the package is built. Package authors
                can call methods on it to alter the build environment.
        """
        if not hasattr(super(), "setup_build_environment"):
            return
        super().setup_build_environment(env)  # type: ignore

    def setup_dependent_build_environment(
        self, env: spack.util.environment.EnvironmentModifications, dependent_spec: spack.spec.Spec
    ) -> None:
        """Sets up the build environment of a package that depends on this one.

        This is similar to ``setup_build_environment``, but it is used to modify the build
        environment of a package that *depends* on this one.

        This gives packages the ability to set environment variables for the build of the
        dependent, which can be useful to provide search hints for headers or libraries if they are
        not in standard locations.

        This method will be called before the dependent package prefix exists in Spack's store.

        Args:
            env: environment modifications to be applied when the dependent package is built.
                Package authors can call methods on it to alter the build environment.

            dependent_spec: the spec of the dependent package about to be built. This allows the
                extendee (self) to query the dependent's state. Note that *this* package's spec is
                available as ``self.spec``
        """
        if not hasattr(super(), "setup_dependent_build_environment"):
            return
        super().setup_dependent_build_environment(env, dependent_spec)  # type: ignore

    def __repr__(self):
        fmt = "{name}{/hash:7}"
        return f"{self.__class__.__name__}({self.spec.format(fmt)})"

    def __str__(self):
        fmt = "{name}{/hash:7}"
        return f'"{self.__class__.__name__}" builder for "{self.spec.format(fmt)}"'


class Builder(BaseBuilder, collections.abc.Sequence):
    """A builder is a class that, given a package object (i.e. associated with concrete spec),
    knows how to install it.

    The builder behaves like a sequence, and when iterated over returns the "phases" of the
    installation in the correct order.
    Args:
        pkg (spack.package_base.PackageBase): package object to be built
    """

    #: Sequence of phases. Must be defined in derived classes
@@ -499,22 +506,95 @@ class Builder(BaseBuilder, collections.abc.Sequence):
    build_time_test_callbacks: List[str]
    install_time_test_callbacks: List[str]

    #: List of glob expressions. Each expression must either be absolute or relative to the package
    #: source path. Matching artifacts found at the end of the build process will be copied in the
    #: same directory tree as _spack_build_logfile and _spack_build_envfile.
    @property
    def archive_files(self) -> List[str]:
        return []
    #: List of glob expressions. Each expression must either be
    #: absolute or relative to the package source path.
    #: Matching artifacts found at the end of the build process will be
    #: copied in the same directory tree as _spack_build_logfile and
    #: _spack_build_envfile.
    archive_files: List[str] = []

    def __init__(self, pkg: spack.package_base.PackageBase) -> None:
        super().__init__(pkg)
    def __init__(self, pkg):
        self.pkg = pkg
        self.callbacks = {}
        for phase in self.phases:
            self.callbacks[phase] = InstallationPhase(phase, self)

    @property
    def spec(self):
        return self.pkg.spec

    @property
    def stage(self):
        return self.pkg.stage

    @property
    def prefix(self):
        return self.pkg.prefix

    def setup_build_environment(self, env):
        """Sets up the build environment for a package.

        This method will be called before the current package prefix exists in
        Spack's store.

        Args:
            env (spack.util.environment.EnvironmentModifications): environment
                modifications to be applied when the package is built. Package authors
                can call methods on it to alter the build environment.
        """
        if not hasattr(super(), "setup_build_environment"):
            return
        super().setup_build_environment(env)

    def setup_dependent_build_environment(self, env, dependent_spec):
        """Sets up the build environment of packages that depend on this one.

        This is similar to ``setup_build_environment``, but it is used to
        modify the build environments of packages that *depend* on this one.

        This gives packages like Python and others that follow the extension
        model a way to implement common environment or compile-time settings
        for dependencies.

        This method will be called before the dependent package prefix exists
        in Spack's store.

        Examples:
            1. Installing python modules generally requires ``PYTHONPATH``
               to point to the ``lib/pythonX.Y/site-packages`` directory in the
               module's install prefix. This method could be used to set that
               variable.

        Args:
            env (spack.util.environment.EnvironmentModifications): environment
                modifications to be applied when the dependent package is built.
                Package authors can call methods on it to alter the build environment.

            dependent_spec (spack.spec.Spec): the spec of the dependent package
                about to be built. This allows the extendee (self) to query
                the dependent's state. Note that *this* package's spec is
                available as ``self.spec``
        """
        if not hasattr(super(), "setup_dependent_build_environment"):
            return
        super().setup_dependent_build_environment(env, dependent_spec)

    def __getitem__(self, idx):
        key = self.phases[idx]
        return self.callbacks[key]

    def __len__(self):
        return len(self.phases)

    def __repr__(self):
        msg = "{0}({1})"
        return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))

    def __str__(self):
        msg = '"{0}" builder for "{1}"'
        return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))


# Export these names as standalone to be used in packages
run_after = PhaseCallbacksMeta.run_after
run_before = PhaseCallbacksMeta.run_before

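Since ``Builder`` implements ``__getitem__``/``__len__`` over its ``phases``, iterating one yields ``InstallationPhase`` objects that are callable with ``(spec, prefix)``, as the ``__call__`` hunk earlier shows. A sketch of the protocol (illustrative only; the real installer drives phases with extra bookkeeping):

import spack.builder


def run_all_phases(pkg):
    builder = spack.builder.create(pkg)
    for phase in builder:  # one InstallationPhase per entry in builder.phases
        phase(pkg.spec, pkg.prefix)
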
@@ -32,13 +32,11 @@

import spack
import spack.binary_distribution as bindist
import spack.builder
import spack.concretize
import spack.config as cfg
import spack.error
import spack.main
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.mirror
import spack.paths
import spack.repo
import spack.spec
@@ -205,7 +203,7 @@ def _print_staging_summary(spec_labels, stages, rebuild_decisions):
    if not stages:
        return

    mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
    mirrors = spack.mirror.MirrorCollection(binary=True)
    tty.msg("Checked the following mirrors for binaries:")
    for m in mirrors.values():
        tty.msg(f"  {m.fetch_url}")
@@ -798,7 +796,7 @@ def ensure_expected_target_path(path):
        path = path.replace("\\", "/")
        return path

    pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    buildcache_destination = None
    if "buildcache-destination" not in pipeline_mirrors:
        raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")
@@ -1324,7 +1322,7 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
    """
    tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
    signing_key = bindist.select_signing_key() if sign_binaries else None
    mirror = spack.mirrors.mirror.Mirror.from_url(mirror_url)
    mirror = spack.mirror.Mirror.from_url(mirror_url)
    try:
        with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
            uploader.push_or_raise([spec])
@@ -1344,7 +1342,7 @@ def remove_other_mirrors(mirrors_to_keep, scope=None):
            mirrors_to_remove.append(name)

    for mirror_name in mirrors_to_remove:
        spack.mirrors.utils.remove(mirror_name, scope)
        spack.mirror.remove(mirror_name, scope)


def copy_files_to_artifacts(src, artifacts_dir):
@@ -1389,11 +1387,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->

    stage_dir = job_pkg.stage.path
    tty.debug(f"stage dir: {stage_dir}")
    for file in [
        job_pkg.log_path,
        job_pkg.env_mods_path,
        *spack.builder.create(job_pkg).archive_files,
    ]:
    for file in [job_pkg.log_path, job_pkg.env_mods_path, *job_pkg.builder.archive_files]:
        copy_files_to_artifacts(file, job_log_dir)


@@ -2138,7 +2132,7 @@ def build_name(self):
        Returns: (str) current spec's CDash build name."""
        spec = self.current_spec
        if spec:
            build_name = f"{spec.name}@{spec.version} \
            build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
            tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
            return build_name

@@ -4,13 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import difflib
import importlib
import os
import re
import sys
from collections import Counter
from typing import List, Optional, Union
from typing import List, Union

import llnl.string
import llnl.util.tty as tty
@@ -26,7 +24,6 @@
import spack.extensions
import spack.parser
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.traverse as traverse
@@ -34,8 +31,6 @@
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml

from ..enums import InstallRecordStatus

# cmd has a submodule called "list" so preserve the python list module
python_list = list

@@ -126,8 +121,6 @@ def get_module(cmd_name):
        tty.debug("Imported {0} from built-in commands".format(pname))
    except ImportError:
        module = spack.extensions.get_module(cmd_name)
        if not module:
            raise CommandNotFoundError(cmd_name)

    attr_setdefault(module, SETUP_PARSER, lambda *args: None)  # null-op
    attr_setdefault(module, DESCRIPTION, "")
@@ -196,43 +189,6 @@ def _concretize_spec_pairs(to_concretize, tests=False):
    rules from config."""
    unify = spack.config.get("concretizer:unify", False)

    # Special case for concretizing a single spec
    if len(to_concretize) == 1:
        abstract, concrete = to_concretize[0]
        return [concrete or abstract.concretized()]

    # Special case if every spec is either concrete or has an abstract hash
    if all(
        concrete or abstract.concrete or abstract.abstract_hash
        for abstract, concrete in to_concretize
    ):
        # Get all the concrete specs
        ret = [
            concrete or (abstract if abstract.concrete else abstract.lookup_hash())
            for abstract, concrete in to_concretize
        ]

        # If unify: true, check that specs don't conflict
        # Since all concrete, "when_possible" is not relevant
        if unify is True:  # True, "when_possible", False are possible values
            runtimes = spack.repo.PATH.packages_with_tags("runtime")
            specs_per_name = Counter(
                spec.name
                for spec in traverse.traverse_nodes(
                    ret, deptype=("link", "run"), key=traverse.by_dag_hash
                )
                if spec.name not in runtimes  # runtimes are allowed multiple times
            )

            conflicts = sorted(name for name, count in specs_per_name.items() if count > 1)
            if conflicts:
                raise spack.error.SpecError(
                    "Specs conflict and `concretizer:unify` is configured true.",
                    f" specs depend on multiple versions of {', '.join(conflicts)}",
                )
        return ret

    # Standard case
    concretize_method = spack.concretize.concretize_separately  # unify: false
    if unify is True:
        concretize_method = spack.concretize.concretize_together
@@ -272,48 +228,39 @@ def matching_specs_from_env(specs):
    return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]


def disambiguate_spec(
    spec: spack.spec.Spec,
    env: Optional[ev.Environment],
    local: bool = False,
    installed: Union[bool, InstallRecordStatus] = True,
    first: bool = False,
) -> spack.spec.Spec:
def disambiguate_spec(spec, env, local=False, installed=True, first=False):
    """Given a spec, figure out which installed package it refers to.

    Args:
        spec: a spec to disambiguate
        env: a spack environment, if one is active, or None if no environment is active
        local: do not search chained spack instances
        installed: install status argument passed to database query.
        first: returns the first matching spec, even if more than one match is found
    Arguments:
        spec (spack.spec.Spec): a spec to disambiguate
        env (spack.environment.Environment): a spack environment,
            if one is active, or None if no environment is active
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
    """
    hashes = env.all_hashes() if env else None
    return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)


def disambiguate_spec_from_hashes(
    spec: spack.spec.Spec,
    hashes: List[str],
    local: bool = False,
    installed: Union[bool, InstallRecordStatus] = True,
    first: bool = False,
) -> spack.spec.Spec:
def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, first=False):
    """Given a spec and a list of hashes, get concrete spec the spec refers to.

    Arguments:
        spec: a spec to disambiguate
        hashes: a set of hashes of specs among which to disambiguate
        local: if True, do not search chained spack instances
        installed: install status argument passed to database query.
        first: returns the first matching spec, even if more than one match is found
        spec (spack.spec.Spec): a spec to disambiguate
        hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
    """
    if local:
        matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
    else:
        matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
    if not matching_specs:
        tty.die(f"Spec '{spec}' matches no installed packages.")
        tty.die("Spec '%s' matches no installed packages." % spec)

    elif first:
        return matching_specs[0]
@@ -372,13 +319,8 @@ def iter_groups(specs, indent, all_headers):
    index = index_by(specs, ("architecture", "compiler"))
    ispace = indent * " "

    def _key(item):
        if item is None:
            return ""
        return str(item)

    # Traverse the index and print out each package
    for i, (architecture, compiler) in enumerate(sorted(index, key=_key)):
    for i, (architecture, compiler) in enumerate(sorted(index)):
        if i > 0:
            print()

@@ -436,7 +378,6 @@ def display_specs(specs, args=None, **kwargs):

    """

    # FIXME (compiler as nodes): remove the "show full compiler" arguments, and its use
    def get_arg(name, default=None):
        """Prefer kwargs, then args, then default."""
        if name in kwargs:
@@ -451,6 +392,7 @@ def get_arg(name, default=None):
    hashes = get_arg("long", False)
    namespaces = get_arg("namespaces", False)
    flags = get_arg("show_flags", False)
    full_compiler = get_arg("show_full_compiler", False)
    variants = get_arg("variants", False)
    groups = get_arg("groups", True)
    all_headers = get_arg("all_headers", False)
@@ -472,7 +414,10 @@ def get_arg(name, default=None):
    if format_string is None:
        nfmt = "{fullname}" if namespaces else "{name}"
        ffmt = ""
        if flags:
        if full_compiler or flags:
            ffmt += "{%compiler.name}"
            if full_compiler:
                ffmt += "{@compiler.version}"
            ffmt += " {compiler_flags}"
        vfmt = "{variants}" if variants else ""
        format_string = nfmt + "{@version}" + ffmt + vfmt
@@ -600,18 +545,6 @@ def __init__(self, name):
        super().__init__("{0} is not a permissible Spack command name.".format(name))


class MultipleSpecsMatch(Exception):
    """Raised when multiple specs match a constraint, in a context where
    this is not allowed.
    """


class NoSpecMatches(Exception):
    """Raised when no spec matches a constraint, in a context where
    this is not allowed.
    """


########################################
# argparse types for argument validation
########################################
@@ -696,24 +629,3 @@ def find_environment(args):
def first_line(docstring):
    """Return the first line of the docstring."""
    return docstring.split("\n")[0]


class CommandNotFoundError(spack.error.SpackError):
    """Exception class thrown when a requested command is not recognized as
    such.
    """

    def __init__(self, cmd_name):
        msg = (
            f"{cmd_name} is not a recognized Spack command or extension command; "
            "check with `spack commands`."
        )
        long_msg = None

        similar = difflib.get_close_matches(cmd_name, all_commands())

        if 1 <= len(similar) <= 5:
            long_msg = "\nDid you mean one of the following commands?\n  "
            long_msg += "\n  ".join(similar)

        super().__init__(msg, long_msg)

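A usage sketch for ``disambiguate_spec`` as typed above (``resolve_one`` is a hypothetical helper): resolve a user-supplied spec string against the database, returning the first match instead of dying when several installs match:

import spack.cmd
import spack.environment as ev
import spack.spec


def resolve_one(spec_str):
    spec = spack.spec.Spec(spec_str)
    # first=True returns matching_specs[0] rather than erroring on ambiguity
    return spack.cmd.disambiguate_spec(spec, ev.active_environment(), first=True)
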
@@ -16,7 +16,7 @@
import spack.bootstrap.config
import spack.bootstrap.core
import spack.config
import spack.mirrors.utils
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path
@@ -400,7 +400,7 @@ def _mirror(args):
    llnl.util.tty.set_msg_enabled(False)
    spec = spack.spec.Spec(spec_str).concretized()
    for node in spec.traverse():
        spack.mirrors.utils.create(mirror_dir, [node])
        spack.mirror.create(mirror_dir, [node])
    llnl.util.tty.set_msg_enabled(True)

    if args.binary_packages:

@@ -21,7 +21,7 @@
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.mirrors.mirror
import spack.mirror
import spack.oci.oci
import spack.spec
import spack.stage
@@ -34,8 +34,6 @@
from spack.cmd.common import arguments
from spack.spec import Spec, save_dependency_specfiles

from ..enums import InstallRecordStatus

description = "create, download and install binary packages"
section = "packaging"
level = "long"
@@ -310,10 +308,7 @@ def setup_parser(subparser: argparse.ArgumentParser):

def _matching_specs(specs: List[Spec]) -> List[Spec]:
    """Disambiguate specs and return a list of matching specs"""
    return [
        spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=InstallRecordStatus.ANY)
        for s in specs
    ]
    return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]


def _format_spec(spec: Spec) -> str:
@@ -392,7 +387,7 @@ def push_fn(args):
        roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

    mirror = args.mirror
    assert isinstance(mirror, spack.mirrors.mirror.Mirror)
    assert isinstance(mirror, spack.mirror.Mirror)

    push_url = mirror.push_url

@@ -750,7 +745,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
            copy_buildcache_file(copy_file["src"], dest)


def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
    # Special case OCI images for now.
    try:
        image_ref = spack.oci.oci.image_from_mirror(mirror)

@@ -20,7 +20,7 @@
import spack.config as cfg
import spack.environment as ev
import spack.hash_types as ht
import spack.mirrors.mirror
import spack.mirror
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
@@ -240,7 +240,7 @@ def ci_reindex(args):
    ci_mirrors = yaml_root["mirrors"]
    mirror_urls = [url for url in ci_mirrors.values()]
    remote_mirror_url = mirror_urls[0]
    mirror = spack.mirrors.mirror.Mirror(remote_mirror_url)
    mirror = spack.mirror.Mirror(remote_mirror_url)

    buildcache.update_index(mirror, update_keys=True)

@@ -328,7 +328,7 @@ def ci_rebuild(args):

    full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False

    pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    buildcache_destination = None
    if "buildcache-destination" not in pipeline_mirrors:
        tty.die("spack ci rebuild requires a mirror named 'buildcache-destination'")

@@ -14,8 +14,7 @@
import spack.config
import spack.deptypes as dt
import spack.environment as ev
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.mirror
import spack.reporters
import spack.spec
import spack.store
@@ -582,51 +581,23 @@ def add_concretizer_args(subparser):


def add_connection_args(subparser, add_help):
    def add_argument_string_or_variable(parser, arg: str, *, deprecate_str: bool = True, **kwargs):
        group = parser.add_mutually_exclusive_group()
        group.add_argument(arg, **kwargs)
        # Update help string
        if "help" in kwargs:
            kwargs["help"] = "environment variable containing " + kwargs["help"]
        group.add_argument(arg + "-variable", **kwargs)

    s3_connection_parser = subparser.add_argument_group("S3 Connection")

    add_argument_string_or_variable(
        s3_connection_parser,
        "--s3-access-key-id",
        help="ID string to use to connect to this S3 mirror",
    subparser.add_argument(
        "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
    )
    add_argument_string_or_variable(
        s3_connection_parser,
        "--s3-access-key-secret",
        help="secret string to use to connect to this S3 mirror",
    subparser.add_argument(
        "--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
    )
    add_argument_string_or_variable(
        s3_connection_parser,
        "--s3-access-token",
        help="access token to use to connect to this S3 mirror",
    subparser.add_argument(
        "--s3-access-token", help="access token to use to connect to this S3 mirror"
    )
    s3_connection_parser.add_argument(
    subparser.add_argument(
        "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
    )
    s3_connection_parser.add_argument(
    subparser.add_argument(
        "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
    )

    oci_connection_parser = subparser.add_argument_group("OCI Connection")

    add_argument_string_or_variable(
        oci_connection_parser,
        "--oci-username",
        deprecate_str=False,
        help="username to use to connect to this OCI mirror",
    )
    add_argument_string_or_variable(
        oci_connection_parser,
        "--oci-password",
        help="password to use to connect to this OCI mirror",
    )
    subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
    subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")


def use_buildcache(cli_arg_value):
@@ -690,31 +661,31 @@ def mirror_name_or_url(m):

    # If there's a \ or / in the name, it's interpreted as a path or url.
    if "/" in m or "\\" in m or m in (".", ".."):
        return spack.mirrors.mirror.Mirror(m)
        return spack.mirror.Mirror(m)

    # Otherwise, the named mirror is required to exist.
    try:
        return spack.mirrors.utils.require_mirror_name(m)
        return spack.mirror.require_mirror_name(m)
    except ValueError as e:
        raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e


def mirror_url(url):
    try:
        return spack.mirrors.mirror.Mirror.from_url(url)
        return spack.mirror.Mirror.from_url(url)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e)) from e


def mirror_directory(path):
    try:
        return spack.mirrors.mirror.Mirror.from_local_path(path)
        return spack.mirror.Mirror.from_local_path(path)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e)) from e


def mirror_name(name):
    try:
        return spack.mirrors.utils.require_mirror_name(name)
        return spack.mirror.require_mirror_name(name)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e)) from e

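For one option, ``add_argument_string_or_variable`` above expands to a mutually exclusive pair like the following plain-argparse sketch, where the ``*-variable`` form names an environment variable instead of passing the secret on the command line:

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("--s3-access-key-id", help="ID string to use to connect to this S3 mirror")
group.add_argument(
    "--s3-access-key-id-variable",
    help="environment variable containing ID string to use to connect to this S3 mirror",
)

# Either flag parses on its own; passing both together is rejected.
args = parser.parse_args(["--s3-access-key-id-variable", "MY_S3_KEY_ID"])
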
@@ -5,14 +5,13 @@
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import index_by
|
||||
from llnl.util.tty.colify import colify
|
||||
from llnl.util.tty.color import colorize
|
||||
|
||||
import spack.compilers.config
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.spec
|
||||
from spack.cmd.common import arguments
|
||||
@@ -36,13 +35,13 @@ def setup_parser(subparser):
|
||||
"--mixed-toolchain",
|
||||
action="store_true",
|
||||
default=sys.platform == "darwin",
|
||||
help="(DEPRECATED) Allow mixed toolchains (for example: clang, clang++, gfortran)",
|
||||
help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
|
||||
)
|
||||
mixed_toolchain_group.add_argument(
|
||||
"--no-mixed-toolchain",
|
||||
action="store_false",
|
||||
dest="mixed_toolchain",
|
||||
help="(DEPRECATED) Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
|
||||
help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
|
||||
)
|
||||
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
|
||||
find_parser.add_argument(
|
||||
@@ -81,97 +80,77 @@ def compiler_find(args):
|
||||
"""Search either $PATH or a list of paths OR MODULES for compilers and
|
||||
add them to Spack's configuration.
|
||||
"""
|
||||
if args.mixed_toolchain:
|
||||
warnings.warn(
|
||||
"The '--mixed-toolchain' option has been deprecated in Spack v0.23, and currently "
|
||||
"has no effect. The option will be removed in Spack v0.25"
|
||||
)
|
||||
|
||||
paths = args.add_paths or None
|
||||
new_compilers = spack.compilers.config.find_compilers(
|
||||
path_hints=paths, scope=args.scope, max_workers=args.jobs
|
||||
new_compilers = spack.compilers.find_compilers(
|
||||
path_hints=paths,
|
||||
scope=args.scope,
|
||||
mixed_toolchain=args.mixed_toolchain,
|
||||
max_workers=args.jobs,
|
||||
)
|
||||
if new_compilers:
|
||||
n = len(new_compilers)
|
||||
s = "s" if n > 1 else ""
|
||||
filename = spack.config.CONFIG.get_config_filename(args.scope, "packages")
|
||||
filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
|
||||
tty.msg(f"Added {n:d} new compiler{s} to {filename}")
|
||||
compiler_strs = sorted(f"{spec.name}@{spec.versions}" for spec in new_compilers)
|
||||
compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
|
||||
colify(reversed(compiler_strs), indent=4)
|
||||
else:
|
||||
tty.msg("Found no new compilers")
|
||||
tty.msg("Compilers are defined in the following files:")
|
||||
colify(spack.compilers.config.compiler_config_files(), indent=4)
|
||||
colify(spack.compilers.compiler_config_files(), indent=4)
|
||||
|
||||
|
||||
def compiler_remove(args):
|
||||
remover = spack.compilers.config.CompilerRemover(spack.config.CONFIG)
|
||||
candidates = remover.mark_compilers(match=args.compiler_spec, scope=args.scope)
|
||||
if not candidates:
|
||||
tty.die(f"No compiler matches '{args.compiler_spec}'")
|
||||
compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
|
||||
candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
|
||||
|
||||
compiler_strs = reversed(sorted(f"{spec.name}@{spec.versions}" for spec in candidates))
|
||||
if not candidate_compilers:
|
||||
tty.die("No compilers match spec %s" % compiler_spec)
|
||||
|
||||
if not args.all and len(candidates) > 1:
|
||||
tty.error(f"multiple compilers match the spec '{args.compiler_spec}':")
|
||||
print()
|
||||
colify(compiler_strs, indent=4)
|
||||
print()
|
||||
print(
|
||||
"Either use a stricter spec to select only one, or use `spack compiler remove -a`"
|
||||
" to remove all of them."
|
||||
)
|
||||
if not args.all and len(candidate_compilers) > 1:
|
||||
tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
|
||||
colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
|
||||
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
|
||||
sys.exit(1)
|
        remover.flush()
        tty.msg("The following compilers have been removed:")
        print()
        colify(compiler_strs, indent=4)
        print()
    for current_compiler in candidate_compilers:
        spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
        tty.msg(f"{current_compiler.spec.display_str} has been removed")


def compiler_info(args):
    """Print info about all compilers matching a spec."""
    query = spack.spec.Spec(args.compiler_spec)
    all_compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False)

    compilers = [x for x in all_compilers if x.satisfies(query)]
    cspec = spack.spec.CompilerSpec(args.compiler_spec)
    compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)

    if not compilers:
        tty.die(f"No compilers match spec {query.cformat()}")
        tty.die("No compilers match spec %s" % cspec)
    else:
        for c in compilers:
            print(f"{c.cformat()}:")
            print(f"  prefix: {c.external_path}")
            extra_attributes = getattr(c, "extra_attributes", {})
            if "compilers" in extra_attributes:
                print("  compilers:")
                for language, exe in extra_attributes.get("compilers", {}).items():
                    print(f"    {language}: {exe}")
            if "flags" in extra_attributes:
                print("  flags:")
                for flag, flag_value in extra_attributes["flags"].items():
                    print(f"    {flag} = {flag_value}")
            # FIXME (compiler as nodes): recover this printing
            # if "environment" in extra_attributes:
            #     if len(c.environment.get("set", {})) != 0:
            #         print("\tenvironment:")
            #         print("\t  set:")
            #         for key, value in c.environment["set"].items():
            #             print("\t    %s = %s" % (key, value))
            if "extra_rpaths" in extra_attributes:
                print("  extra rpaths:")
                for extra_rpath in extra_attributes["extra_rpaths"]:
                    print(f"    {extra_rpath}")
            if getattr(c, "external_modules", []):
                print("  modules: ")
                for module in c.external_modules:
                    print(f"    {module}")
            print()
            print(c.spec.display_str + ":")
            print("\tpaths:")
            for cpath in ["cc", "cxx", "f77", "fc"]:
                print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
            if c.flags:
                print("\tflags:")
                for flag, flag_value in c.flags.items():
                    print("\t\t%s = %s" % (flag, flag_value))
            if len(c.environment) != 0:
                if len(c.environment.get("set", {})) != 0:
                    print("\tenvironment:")
                    print("\t  set:")
                    for key, value in c.environment["set"].items():
                        print("\t    %s = %s" % (key, value))
            if c.extra_rpaths:
                print("\tExtra rpaths:")
                for extra_rpath in c.extra_rpaths:
                    print("\t\t%s" % extra_rpath)
            print("\tmodules = %s" % c.modules)
            print("\toperating system = %s" % c.operating_system)


def compiler_list(args):
    compilers = spack.compilers.config.all_compilers(scope=args.scope, init_config=False)
    compilers = spack.compilers.all_compilers(scope=args.scope, init_config=False)

    # If there are no compilers in any scope, and we're outputting to a tty, give a
    # hint to the user.
@@ -184,7 +163,7 @@ def compiler_list(args):
        tty.msg(msg)
        return

    index = index_by(compilers, spack.compilers.config.name_os_target)
    index = index_by(compilers, lambda c: (c.spec.name, c.operating_system, c.target))

    tty.msg("Available compilers")

@@ -203,10 +182,10 @@ def compiler_list(args):
        name, os, target = key
        os_str = os
        if target:
            os_str += f"-{target}"
        cname = f"{spack.spec.COMPILER_COLOR}{{{name}}} {os_str}"
            os_str += "-%s" % target
        cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
        tty.hline(colorize(cname), char="-")
        colify(reversed(sorted(c.format("{name}@{version}") for c in compilers)))
        colify(reversed(sorted(c.spec.display_str for c in compilers)))
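Both versions of this hunk group the available compilers by a (name, OS, target) key before colifying them. A minimal sketch of that `index_by` grouping pattern, using plain tuples as stand-ins for Spack's compiler objects (the one-key-function signature is an assumption; Spack's own `index_by` is more general):

from collections import defaultdict

def index_by(items, key_fn):
    # Group items into {key: [items...]}, as the hunk above does for compilers.
    index = defaultdict(list)
    for item in items:
        index[key_fn(item)].append(item)
    return dict(index)

compilers = [("gcc", "ubuntu22.04", "x86_64"), ("clang", "ubuntu22.04", "x86_64")]
index = index_by(compilers, lambda c: (c[0], c[1], c[2]))
# -> one header per (name, os, target) group, then a colified list of its members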

def compiler(parser, args):

@@ -518,6 +518,8 @@ def config_prefer_upstream(args):
    for spec in pref_specs:
        # Collect all the upstream compilers and versions for this package.
        pkg = pkgs.get(spec.name, {"version": []})
        all = pkgs.get("all", {"compiler": []})
        pkgs["all"] = all
        pkgs[spec.name] = pkg

        # We have no existing variant if this is our first added version.
@@ -527,6 +529,10 @@ def config_prefer_upstream(args):
        if version not in pkg["version"]:
            pkg["version"].append(version)

        compiler = str(spec.compiler)
        if compiler not in all["compiler"]:
            all["compiler"].append(compiler)

        # Get and list all the variants that differ from the default.
        variants = []
        for var_name, variant in spec.variants.items():

@@ -23,10 +23,9 @@
import spack.installer
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
from spack.error import SpackError

from ..enums import InstallRecordStatus

description = "replace one package with another via symlinks"
section = "admin"
level = "long"
@@ -96,12 +95,8 @@ def deprecate(parser, args):
    if len(specs) != 2:
        raise SpackError("spack deprecate requires exactly two specs")

    deprecate = spack.cmd.disambiguate_spec(
        specs[0],
        env,
        local=True,
        installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
    )
    install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
    deprecate = spack.cmd.disambiguate_spec(specs[0], env, local=True, installed=install_query)

    if args.install:
        deprecator = specs[1].concretized()

@@ -10,12 +10,11 @@
import sys
import tempfile
from pathlib import Path
from typing import List, Optional, Set
from typing import List, Optional

import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.symlink import islink, symlink
from llnl.util.tty.colify import colify
from llnl.util.tty.color import cescape, colorize

@@ -51,8 +50,6 @@
    "update",
    "revert",
    "depfile",
    "track",
    "untrack",
]

@@ -449,193 +446,6 @@ def env_deactivate(args):
    sys.stdout.write(cmds)


#
# env track
#
def env_track_setup_parser(subparser):
    """track an environment from a directory in Spack"""
    subparser.add_argument("-n", "--name", help="custom environment name")
    subparser.add_argument("dir", help="path to environment")
    arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_track(args):
    src_path = os.path.abspath(args.dir)
    if not ev.is_env_dir(src_path):
        tty.die("Cannot track environment. Path doesn't contain an environment")

    if args.name:
        name = args.name
    else:
        name = os.path.basename(src_path)

    try:
        dst_path = ev.environment_dir_from_name(name, exists_ok=False)
    except ev.SpackEnvironmentError:
        tty.die(
            f"An environment named {name} already exists. Set a name with:"
            "\n\n"
            f"    spack env track --name NAME {src_path}\n"
        )

    symlink(src_path, dst_path)

    tty.msg(f"Tracking environment in {src_path}")
    tty.msg(
        "You can now activate this environment with the following command:\n\n"
        f"    spack env activate {name}\n"
    )


#
# env remove & untrack helpers
#
def filter_managed_env_names(env_names: Set[str]) -> Set[str]:
    tracked_env_names = {e for e in env_names if islink(ev.environment_dir_from_name(e))}
    managed_env_names = env_names - set(tracked_env_names)

    num_managed_envs = len(managed_env_names)
    managed_envs_str = " ".join(managed_env_names)
    if num_managed_envs >= 2:
        tty.error(
            f"The following are not tracked environments. "
            "To remove them completely run,"
            "\n\n"
            f"    spack env rm {managed_envs_str}\n"
        )

    elif num_managed_envs > 0:
        tty.error(
            f"'{managed_envs_str}' is not a tracked env. "
            "To remove it completely run,"
            "\n\n"
            f"    spack env rm {managed_envs_str}\n"
        )

    return tracked_env_names


def get_valid_envs(env_names: Set[str]) -> Set[ev.Environment]:
    valid_envs = set()
    for env_name in env_names:
        try:
            env = ev.read(env_name)
            valid_envs.add(env)

        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
            pass

    return valid_envs


def _env_untrack_or_remove(
    env_names: List[str], remove: bool = False, force: bool = False, yes_to_all: bool = False
):
    all_env_names = set(ev.all_environment_names())
    known_env_names = set(env_names).intersection(all_env_names)
    unknown_env_names = set(env_names) - known_env_names

    # print error for unknown environments
    for env_name in unknown_env_names:
        tty.error(f"Environment '{env_name}' does not exist")

    # if only unlinking is allowed, remove all environments
    # which do not point internally at symlinks
    if not remove:
        env_names_to_remove = filter_managed_env_names(known_env_names)
    else:
        env_names_to_remove = known_env_names

    # initialize all environments with valid spack.yaml configs
    all_valid_envs = get_valid_envs(all_env_names)

    # build a task list of environments and bad env names to remove
    envs_to_remove = [e for e in all_valid_envs if e.name in env_names_to_remove]
    bad_env_names_to_remove = env_names_to_remove - {e.name for e in envs_to_remove}
    for remove_env in envs_to_remove:
        for env in all_valid_envs:
            # don't check if an environment is included in itself
            if env.name == remove_env.name:
                continue

            # check if an environment is included in another
            if remove_env.path in env.included_concrete_envs:
                msg = f"Environment '{remove_env.name}' is used by environment '{env.name}'"
                if force:
                    tty.warn(msg)
                else:
                    tty.error(msg)
                    envs_to_remove.remove(remove_env)

    # ask the user if they really want to remove the known environments;
    # force should do the same as yes to all here, following the semantics of rm
    if not (yes_to_all or force) and (envs_to_remove or bad_env_names_to_remove):
        environments = string.plural(len(env_names_to_remove), "environment", show_n=False)
        envs = string.comma_and(list(env_names_to_remove))
        answer = tty.get_yes_or_no(
            f"Really {'remove' if remove else 'untrack'} {environments} {envs}?", default=False
        )
        if not answer:
            tty.die("Will not remove any environments")

    # keep track of the environments we remove for later printing the exit code
    removed_env_names = []
    for env in envs_to_remove:
        name = env.name
        if not force and env.active:
            tty.error(
                f"Environment '{name}' can't be "
                f"{'removed' if remove else 'untracked'} while activated."
            )
            continue
        # Get path to check if environment is a tracked / symlinked environment
        if islink(env.path):
            real_env_path = os.path.realpath(env.path)
            os.unlink(env.path)
            tty.msg(
                f"Successfully untracked environment '{name}', "
                "but it can still be found at:\n\n"
                f"    {real_env_path}\n"
            )
        else:
            env.destroy()
            tty.msg(f"Successfully removed environment '{name}'")

        removed_env_names.append(env.name)

    for bad_env_name in bad_env_names_to_remove:
        shutil.rmtree(
            spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
        )
        tty.msg(f"Successfully removed environment '{bad_env_name}'")
        removed_env_names.append(env.name)

    # Following the design of Linux rm, we should exit with a status of 1
    # anytime we cannot delete every environment the user asks for.
    # However, we should still process all the environments we know about
    # and delete them instead of failing on the first unknown environment.
    if len(removed_env_names) < len(known_env_names):
        sys.exit(1)


#
# env untrack
#
def env_untrack_setup_parser(subparser):
    """untrack an environment from a directory in Spack"""
    subparser.add_argument("env", nargs="+", help="tracked environment name")
    subparser.add_argument(
        "-f", "--force", action="store_true", help="force unlink even when environment is active"
    )
    arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_untrack(args):
    _env_untrack_or_remove(
        env_names=args.env, force=args.force, yes_to_all=args.yes_to_all, remove=False
    )

#
# env remove
#
@@ -661,9 +471,54 @@ def env_remove_setup_parser(subparser):

def env_remove(args):
    """remove existing environment(s)"""
    _env_untrack_or_remove(
        env_names=args.rm_env, remove=True, force=args.force, yes_to_all=args.yes_to_all
    )
    remove_envs = []
    valid_envs = []
    bad_envs = []

    for env_name in ev.all_environment_names():
        try:
            env = ev.read(env_name)
            valid_envs.append(env)

            if env_name in args.rm_env:
                remove_envs.append(env)
        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
            if env_name in args.rm_env:
                bad_envs.append(env_name)

    # Check if remove_env is included from another env before trying to remove
    for env in valid_envs:
        for remove_env in remove_envs:
            # don't check if an environment is included in itself
            if env.name == remove_env.name:
                continue

            if remove_env.path in env.included_concrete_envs:
                msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
                if args.force:
                    tty.warn(msg)
                else:
                    tty.die(msg)

    if not args.yes_to_all:
        environments = string.plural(len(args.rm_env), "environment", show_n=False)
        envs = string.comma_and(args.rm_env)
        answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
        if not answer:
            tty.die("Will not remove any environments")

    for env in remove_envs:
        name = env.name
        if env.active:
            tty.die(f"Environment {name} can't be removed while activated.")
        env.destroy()
        tty.msg(f"Successfully removed environment '{name}'")

    for bad_env_name in bad_envs:
        shutil.rmtree(
            spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
        )
        tty.msg(f"Successfully removed environment '{bad_env_name}'")


#

@@ -17,8 +17,7 @@
import spack.spec
import spack.store
from spack.cmd.common import arguments

from ..enums import InstallRecordStatus
from spack.database import InstallStatuses

description = "list and search installed packages"
section = "basic"
@@ -99,7 +98,7 @@ def setup_parser(subparser):
        "--show-full-compiler",
        action="store_true",
        dest="show_full_compiler",
        help="(DEPRECATED) show full compiler specs. Currently it's a no-op",
        help="show full compiler specs",
    )
    implicit_explicit = subparser.add_mutually_exclusive_group()
    implicit_explicit.add_argument(
@@ -138,22 +137,21 @@ def setup_parser(subparser):
    subparser.add_argument(
        "--loaded", action="store_true", help="show only packages loaded in the user environment"
    )
    only_missing_or_deprecated = subparser.add_mutually_exclusive_group()
    only_missing_or_deprecated.add_argument(
    subparser.add_argument(
        "-M",
        "--only-missing",
        action="store_true",
        dest="only_missing",
        help="show only missing dependencies",
    )
    only_missing_or_deprecated.add_argument(
        "--only-deprecated", action="store_true", help="show only deprecated packages"
    )
    subparser.add_argument(
        "--deprecated",
        action="store_true",
        help="show deprecated packages as well as installed specs",
    )
    subparser.add_argument(
        "--only-deprecated", action="store_true", help="show only deprecated packages"
    )
    subparser.add_argument(
        "--install-tree",
        action="store",
@@ -167,23 +165,14 @@ def setup_parser(subparser):


def query_arguments(args):
    if args.only_missing and (args.deprecated or args.missing):
        raise RuntimeError("cannot use --only-missing with --deprecated, or --missing")

    if args.only_deprecated and (args.deprecated or args.missing):
        raise RuntimeError("cannot use --only-deprecated with --deprecated, or --missing")

    installed = InstallRecordStatus.INSTALLED
    if args.only_missing:
        installed = InstallRecordStatus.MISSING
    elif args.only_deprecated:
        installed = InstallRecordStatus.DEPRECATED

    if args.missing:
        installed |= InstallRecordStatus.MISSING

    if args.deprecated:
        installed |= InstallRecordStatus.DEPRECATED
    # Set up query arguments.
    installed = []
    if not (args.only_missing or args.only_deprecated):
        installed.append(InstallStatuses.INSTALLED)
    if (args.deprecated or args.only_deprecated) and not args.only_missing:
        installed.append(InstallStatuses.DEPRECATED)
    if (args.missing or args.only_missing) and not args.only_deprecated:
        installed.append(InstallStatuses.MISSING)
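The newer `query_arguments` body composes the installed-status query with bitwise flags instead of a list of statuses. A minimal sketch of that pattern with Python's `enum.Flag` (illustrative only; the exact shape of Spack's `InstallRecordStatus` enum is an assumption):

import enum

class InstallRecordStatus(enum.Flag):
    INSTALLED = enum.auto()
    DEPRECATED = enum.auto()
    MISSING = enum.auto()

installed = InstallRecordStatus.INSTALLED
installed |= InstallRecordStatus.MISSING  # --missing widens the query
assert InstallRecordStatus.MISSING in installed  # membership is a bitwise test
assert InstallRecordStatus.DEPRECATED not in installed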

    predicate_fn = None
    if args.unknown:
@@ -233,9 +222,11 @@ def decorator(spec, fmt):
def display_env(env, args, decorator, results):
    """Display extra find output when running in an environment.

    In an environment, `spack find` outputs a preliminary section
    showing the root specs of the environment (this is in addition
    to the section listing out specs matching the query parameters).
    Find in an environment outputs 2 or 3 sections:

    1. Root specs
    2. Concretized roots (if asked for with -c)
    3. Installed specs

    """
    tty.msg("In environment %s" % env.name)
@@ -279,6 +270,7 @@ def root_decorator(spec, string):
        # these enforce details in the root specs to show what the user asked for
        namespaces=True,
        show_flags=True,
        show_full_compiler=True,
        decorator=root_decorator,
        variants=True,
    )
@@ -301,61 +293,12 @@ def root_decorator(spec, string):
            decorator=lambda s, f: color.colorize("@*{%s}" % f),
            namespace=True,
            show_flags=True,
            show_full_compiler=True,
            variants=True,
        )
    print()


def _find_query(args, env):
    q_args = query_arguments(args)
    concretized_but_not_installed = list()
    if env:
        all_env_specs = env.all_specs()
        if args.constraint:
            init_specs = cmd.parse_specs(args.constraint)
            env_specs = env.all_matching_specs(*init_specs)
        else:
            env_specs = all_env_specs

        spec_hashes = set(x.dag_hash() for x in env_specs)
        specs_meeting_q_args = set(spack.store.STORE.db.query(hashes=spec_hashes, **q_args))

        results = list()
        with spack.store.STORE.db.read_transaction():
            for spec in env_specs:
                if not spec.installed:
                    concretized_but_not_installed.append(spec)
                if spec in specs_meeting_q_args:
                    results.append(spec)
    else:
        results = args.specs(**q_args)

    # use groups by default except with format.
    if args.groups is None:
        args.groups = not args.format

    # Exit early with an error code if no package matches the constraint
    if concretized_but_not_installed and args.show_concretized:
        pass
    elif results:
        pass
    elif args.constraint:
        raise cmd.NoSpecMatches()

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]
        concretized_but_not_installed = [
            x for x in concretized_but_not_installed if x.name in packages_with_tags
        ]

    if args.loaded:
        results = cmd.filter_loaded_specs(results)

    return results, concretized_but_not_installed


def find(parser, args):
    env = ev.active_environment()

@@ -364,12 +307,34 @@ def find(parser, args):
    if not env and args.show_concretized:
        tty.die("-c / --show-concretized requires an active environment")

    try:
        results, concretized_but_not_installed = _find_query(args, env)
    except cmd.NoSpecMatches:
        # Note: this uses args.constraint vs. args.constraint_specs because
        # the latter only exists if you call args.specs()
        tty.die(f"No package matches the query: {' '.join(args.constraint)}")
    if env:
        if args.constraint:
            init_specs = spack.cmd.parse_specs(args.constraint)
            results = env.all_matching_specs(*init_specs)
        else:
            results = env.all_specs()
    else:
        q_args = query_arguments(args)
        results = args.specs(**q_args)

    decorator = make_env_decorator(env) if env else lambda s, f: f

    # use groups by default except with format.
    if args.groups is None:
        args.groups = not args.format

    # Exit early with an error code if no package matches the constraint
    if not results and args.constraint:
        constraint_str = " ".join(str(s) for s in args.constraint_specs)
        tty.die(f"No package matches the query: {constraint_str}")

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]

    if args.loaded:
        results = spack.cmd.filter_loaded_specs(results)

    if args.install_status or args.show_concretized:
        status_fn = spack.spec.Spec.install_status
@@ -380,16 +345,14 @@ def find(parser, args):
    if args.json:
        cmd.display_specs_as_json(results, deps=args.deps)
    else:
        decorator = make_env_decorator(env) if env else lambda s, f: f

        if not args.format:
            if env:
                display_env(env, args, decorator, results)

        if not args.only_roots:
            display_results = list(results)
            if args.show_concretized:
                display_results += concretized_but_not_installed
            display_results = results
            if not args.show_concretized:
                display_results = list(x for x in results if x.installed)
            cmd.display_specs(
                display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
            )
@@ -407,9 +370,13 @@ def find(parser, args):
        concretized_suffix += " (show with `spack find -c`)"

    pkg_type = "loaded" if args.loaded else "installed"
    cmd.print_how_many_pkgs(results, pkg_type, suffix=installed_suffix)
    spack.cmd.print_how_many_pkgs(
        list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
    )

    if env:
        cmd.print_how_many_pkgs(
            concretized_but_not_installed, "concretized", suffix=concretized_suffix
        spack.cmd.print_how_many_pkgs(
            list(x for x in results if not x.installed),
            "concretized",
            suffix=concretized_suffix,
        )

@@ -8,7 +8,7 @@
import tempfile

import spack.binary_distribution
import spack.mirrors.mirror
import spack.mirror
import spack.paths
import spack.stage
import spack.util.gpg
@@ -217,11 +217,11 @@ def gpg_publish(args):
    mirror = None
    if args.directory:
        url = spack.util.url.path_to_file_url(args.directory)
        mirror = spack.mirrors.mirror.Mirror(url, url)
        mirror = spack.mirror.Mirror(url, url)
    elif args.mirror_name:
        mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
        mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
    elif args.mirror_url:
        mirror = spack.mirrors.mirror.Mirror(args.mirror_url, args.mirror_url)
        mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        spack.binary_distribution._url_push_keys(

@@ -78,8 +78,8 @@
    boxlib @B{dim=2}            boxlib built for 2 dimensions
    libdwarf @g{%intel} ^libelf@g{%gcc}
        libdwarf, built with intel compiler, linked to libelf built with gcc
    mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
        mvapich2, built with gcc compiler, with support for multiple fabrics
    mvapich2 @g{%pgi} @B{fabrics=psm,mrail,sock}
        mvapich2, built with pgi compiler, with support for multiple fabrics
"""

@@ -11,7 +11,6 @@
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify

import spack.builder
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test
@@ -203,13 +202,11 @@ def print_namespace(pkg, args):
def print_phases(pkg, args):
    """output installation phases"""

    builder = spack.builder.create(pkg)

    if hasattr(builder, "phases") and builder.phases:
    if hasattr(pkg.builder, "phases") and pkg.builder.phases:
        color.cprint("")
        color.cprint(section_title("Installation Phases:"))
        phase_str = ""
        for phase in builder.phases:
        for phase in pkg.builder.phases:
            phase_str += "    {0}".format(phase)
        color.cprint(phase_str)


@@ -10,8 +10,7 @@
import spack.cmd
import spack.store
from spack.cmd.common import arguments

from ..enums import InstallRecordStatus
from spack.database import InstallStatuses

description = "mark packages as explicitly or implicitly installed"
section = "admin"
@@ -68,7 +67,8 @@ def find_matching_specs(specs, allow_multiple_matches=False):
    has_errors = False

    for spec in specs:
        matching = spack.store.STORE.db.query_local(spec, installed=InstallRecordStatus.INSTALLED)
        install_query = [InstallStatuses.INSTALLED]
        matching = spack.store.STORE.db.query_local(spec, installed=install_query)
        # For each spec provided, make sure it refers to only one package.
        # Fail and ask user to be unambiguous if it doesn't
        if not allow_multiple_matches and len(matching) > 1:

@@ -14,8 +14,7 @@
import spack.concretize
import spack.config
import spack.environment as ev
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.mirror
import spack.repo
import spack.spec
import spack.util.web as web_util
@@ -232,133 +231,31 @@ def setup_parser(subparser):
    )


def _configure_access_pair(
    args, id_tok, id_variable_tok, secret_tok, secret_variable_tok, default=None
):
    """Configure the access_pair options"""

    # Check if any of the arguments are set to update this access_pair.
    # If none are set, then skip computing the new access pair
    args_id = getattr(args, id_tok)
    args_id_variable = getattr(args, id_variable_tok)
    args_secret = getattr(args, secret_tok)
    args_secret_variable = getattr(args, secret_variable_tok)
    if not any([args_id, args_id_variable, args_secret, args_secret_variable]):
        return None

    def _default_value(id_):
        if isinstance(default, list):
            return default[0] if id_ == "id" else default[1]
        elif isinstance(default, dict):
            return default.get(id_)
        else:
            return None

    def _default_variable(id_):
        if isinstance(default, dict):
            return default.get(id_ + "_variable")
        else:
            return None

    id_ = None
    id_variable = None
    secret = None
    secret_variable = None

    # Get the value (or default value) only if the inverse argument is not set
    if not args_id_variable:
        id_ = getattr(args, id_tok) or _default_value("id")
    if not args_id:
        id_variable = getattr(args, id_variable_tok) or _default_variable("id")
    if not args_secret_variable:
        secret = getattr(args, secret_tok) or _default_value("secret")
    if not args_secret:
        secret_variable = getattr(args, secret_variable_tok) or _default_variable("secret")

    if (id_ or id_variable) and (secret or secret_variable):
        if secret:
            if not id_:
                raise SpackError("Cannot add mirror with a variable id and text secret")

            return [id_, secret]
        else:
            return dict(
                [
                    (("id", id_) if id_ else ("id_variable", id_variable)),
                    ("secret_variable", secret_variable),
                ]
            )
    else:
        if id_ or id_variable or secret or secret_variable is not None:
            id_arg_tok = id_tok.replace("_", "-")
            secret_arg_tok = secret_tok.replace("_", "-")
            tty.warn(
                "Expected both parts of the access pair to be specified. "
                f"(i.e. --{id_arg_tok} and --{secret_arg_tok})"
            )

        return None


def mirror_add(args):
    """add a mirror to Spack"""
    if (
        args.s3_access_key_id
        or args.s3_access_key_secret
        or args.s3_access_token
        or args.s3_access_key_id_variable
        or args.s3_access_key_secret_variable
        or args.s3_access_token_variable
        or args.s3_profile
        or args.s3_endpoint_url
        or args.type
        or args.oci_username
        or args.oci_password
        or args.oci_username_variable
        or args.oci_password_variable
        or args.autopush
        or args.signed is not None
    ):
        connection = {"url": args.url}
        # S3 Connection
        if args.s3_access_key_secret:
            tty.warn(
                "Configuring mirror secrets as plain text with --s3-access-key-secret is "
                "deprecated. Use --s3-access-key-secret-variable instead"
            )
        if args.oci_password:
            tty.warn(
                "Configuring mirror secrets as plain text with --oci-password is deprecated. "
                "Use --oci-password-variable instead"
            )
        access_pair = _configure_access_pair(
            args,
            "s3_access_key_id",
            "s3_access_key_id_variable",
            "s3_access_key_secret",
            "s3_access_key_secret_variable",
        )
        if access_pair:
            connection["access_pair"] = access_pair

        if args.s3_access_key_id and args.s3_access_key_secret:
            connection["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
        if args.s3_access_token:
            connection["access_token"] = args.s3_access_token
        elif args.s3_access_token_variable:
            connection["access_token_variable"] = args.s3_access_token_variable

        if args.s3_profile:
            connection["profile"] = args.s3_profile

        if args.s3_endpoint_url:
            connection["endpoint_url"] = args.s3_endpoint_url

        # OCI Connection
        access_pair = _configure_access_pair(
            args, "oci_username", "oci_username_variable", "oci_password", "oci_password_variable"
        )
        if access_pair:
            connection["access_pair"] = access_pair

        if args.oci_username and args.oci_password:
            connection["access_pair"] = [args.oci_username, args.oci_password]
        if args.type:
            connection["binary"] = "binary" in args.type
            connection["source"] = "source" in args.type
@@ -366,15 +263,15 @@ def mirror_add(args):
            connection["autopush"] = args.autopush
        if args.signed is not None:
            connection["signed"] = args.signed
        mirror = spack.mirrors.mirror.Mirror(connection, name=args.name)
        mirror = spack.mirror.Mirror(connection, name=args.name)
    else:
        mirror = spack.mirrors.mirror.Mirror(args.url, name=args.name)
    spack.mirrors.utils.add(mirror, args.scope)
        mirror = spack.mirror.Mirror(args.url, name=args.name)
    spack.mirror.add(mirror, args.scope)


def mirror_remove(args):
    """remove a mirror by name"""
    spack.mirrors.utils.remove(args.name, args.scope)
    spack.mirror.remove(args.name, args.scope)


def _configure_mirror(args):
@@ -383,40 +280,21 @@ def _configure_mirror(args):
    if args.name not in mirrors:
        tty.die(f"No mirror found with name {args.name}.")

    entry = spack.mirrors.mirror.Mirror(mirrors[args.name], args.name)
    entry = spack.mirror.Mirror(mirrors[args.name], args.name)
    direction = "fetch" if args.fetch else "push" if args.push else None
    changes = {}
    if args.url:
        changes["url"] = args.url

    default_access_pair = entry._get_value("access_pair", direction or "fetch")
    # TODO: Init access_pair args with the fetch/push/base values in the current mirror state
    access_pair = _configure_access_pair(
        args,
        "s3_access_key_id",
        "s3_access_key_id_variable",
        "s3_access_key_secret",
        "s3_access_key_secret_variable",
        default=default_access_pair,
    )
    if access_pair:
        changes["access_pair"] = access_pair
    if args.s3_access_key_id and args.s3_access_key_secret:
        changes["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
    if args.s3_access_token:
        changes["access_token"] = args.s3_access_token
    if args.s3_profile:
        changes["profile"] = args.s3_profile
    if args.s3_endpoint_url:
        changes["endpoint_url"] = args.s3_endpoint_url
    access_pair = _configure_access_pair(
        args,
        "oci_username",
        "oci_username_variable",
        "oci_password",
        "oci_password_variable",
        default=default_access_pair,
    )
    if access_pair:
        changes["access_pair"] = access_pair
    if args.oci_username and args.oci_password:
        changes["access_pair"] = [args.oci_username, args.oci_password]
    if getattr(args, "signed", None) is not None:
        changes["signed"] = args.signed
    if getattr(args, "autopush", None) is not None:
@@ -450,7 +328,7 @@ def mirror_set_url(args):
def mirror_list(args):
    """print out available mirrors to the console"""

    mirrors = spack.mirrors.mirror.MirrorCollection(scope=args.scope)
    mirrors = spack.mirror.MirrorCollection(scope=args.scope)
    if not mirrors:
        tty.msg("No mirrors configured.")
        return
@@ -490,9 +368,9 @@ def concrete_specs_from_user(args):

def extend_with_additional_versions(specs, num_versions):
    if num_versions == "all":
        mirror_specs = spack.mirrors.utils.get_all_versions(specs)
        mirror_specs = spack.mirror.get_all_versions(specs)
    else:
        mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
        mirror_specs = spack.mirror.get_matching_versions(specs, num_versions=num_versions)
    mirror_specs = [x.concretized() for x in mirror_specs]
    return mirror_specs

@@ -571,7 +449,7 @@ def concrete_specs_from_environment():

def all_specs_with_all_versions():
    specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
    mirror_specs = spack.mirrors.utils.get_all_versions(specs)
    mirror_specs = spack.mirror.get_all_versions(specs)
    mirror_specs.sort(key=lambda s: (s.name, s.version))
    return mirror_specs

@@ -660,21 +538,19 @@ def _specs_and_action(args):


def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
    mirror_cache, mirror_stats = spack.mirrors.utils.mirror_cache_and_stats(
    mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
        path, skip_unstable_versions=skip_unstable_versions
    )
    for candidate in mirror_specs:
        pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
        pkg_obj = pkg_cls(spack.spec.Spec(candidate))
        mirror_stats.next_spec(pkg_obj.spec)
        spack.mirrors.utils.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
        spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
    process_mirror_stats(*mirror_stats.stats())


def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
    present, mirrored, error = spack.mirrors.utils.create(
        path, mirror_specs, skip_unstable_versions
    )
    present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
    tty.msg("Summary for mirror in {}".format(path))
    process_mirror_stats(present, mirrored, error)

@@ -684,7 +560,7 @@ def mirror_destroy(args):
    mirror_url = None

    if args.mirror_name:
        result = spack.mirrors.mirror.MirrorCollection().lookup(args.mirror_name)
        result = spack.mirror.MirrorCollection().lookup(args.mirror_name)
        mirror_url = result.push_url
    elif args.mirror_url:
        mirror_url = args.mirror_url

@@ -19,7 +19,6 @@
import spack.modules
import spack.modules.common
import spack.repo
from spack.cmd import MultipleSpecsMatch, NoSpecMatches
from spack.cmd.common import arguments

description = "manipulate module files"
@@ -92,6 +91,18 @@ def add_loads_arguments(subparser):
    arguments.add_common_arguments(subparser, ["recurse_dependencies"])


class MultipleSpecsMatch(Exception):
    """Raised when multiple specs match a constraint, in a context where
    this is not allowed.
    """


class NoSpecMatches(Exception):
    """Raised when no spec matches a constraint, in a context where
    this is not allowed.
    """


def one_spec_or_raise(specs):
    """Ensures exactly one spec has been selected, or raises the appropriate
    exception.

@@ -8,7 +8,6 @@
import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules
import spack.modules.lmod


@@ -7,7 +7,6 @@
import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules
import spack.modules.tcl


@@ -82,6 +82,14 @@ def spec(parser, args):
    if args.namespaces:
        fmt = "{namespace}." + fmt

    tree_kwargs = {
        "cover": args.cover,
        "format": fmt,
        "hashlen": None if args.very_long else 7,
        "show_types": args.types,
        "status_fn": install_status_fn if args.install_status else None,
    }

    # use a read transaction if we are getting install status for every
    # spec in the DAG. This avoids repeatedly querying the DB.
    tree_context = lang.nullcontext
@@ -91,35 +99,46 @@ def spec(parser, args):
    env = ev.active_environment()

    if args.specs:
        concrete_specs = spack.cmd.parse_specs(args.specs, concretize=True)
        input_specs = spack.cmd.parse_specs(args.specs)
        concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
        specs = list(zip(input_specs, concretized_specs))
    elif env:
        env.concretize()
        concrete_specs = env.concrete_roots()
        specs = env.concretized_specs()

        if not args.format:
            # environments are printed together in a combined tree() invocation,
            # except when using --yaml or --json, which we print spec by spec below.
            tree_kwargs["key"] = spack.traverse.by_dag_hash
            tree_kwargs["hashes"] = args.long or args.very_long
            print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
            return
    else:
        tty.die("spack spec requires at least one spec or an active environment")

    # With --yaml, --json, or --format, just print the raw specs to output
    if args.format:
        for spec in concrete_specs:
    for input, output in specs:
        # With --yaml or --json, just print the raw specs to output
        if args.format:
            if args.format == "yaml":
                # use write because to_yaml already has a newline.
                sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
                sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
            elif args.format == "json":
                print(spec.to_json(hash=ht.dag_hash))
                print(output.to_json(hash=ht.dag_hash))
            else:
                print(spec.format(args.format))
        return
                print(output.format(args.format))
            continue

    with tree_context():
        print(
            spack.spec.tree(
                concrete_specs,
                cover=args.cover,
                format=fmt,
                hashlen=None if args.very_long else 7,
                show_types=args.types,
                status_fn=install_status_fn if args.install_status else None,
                hashes=args.long or args.very_long,
                key=spack.traverse.by_dag_hash,
            )
        )
        with tree_context():
            # Only show the headers for input specs that are not concrete to avoid
            # repeated output. This happens because parse_specs outputs concrete
            # specs for `/hash` inputs.
            if not input.concrete:
                tree_kwargs["hashes"] = False  # Always False for input spec
                print("Input spec")
                print("--------------------------------")
                print(input.tree(**tree_kwargs))
                print("Concretized")
                print("--------------------------------")

            tree_kwargs["hashes"] = args.long or args.very_long
            print(output.tree(**tree_kwargs))

@@ -3,21 +3,18 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import ast
import os
import re
import sys
from itertools import zip_longest
from typing import Dict, List, Optional

import llnl.util.tty as tty
import llnl.util.tty.color as color
from llnl.util.filesystem import working_dir

import spack.paths
import spack.repo
import spack.util.git
from spack.util.executable import Executable, which
from spack.util.executable import which

description = "runs source code style checks on spack"
section = "developer"
@@ -39,7 +36,10 @@ def grouper(iterable, n, fillvalue=None):
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a method to ensure the tool is
#: bootstrapped or present in the environment.
tool_names = ["import", "isort", "black", "flake8", "mypy"]
tool_names = ["isort", "black", "flake8", "mypy"]

#: tools we run in spack style
tools = {}

#: warnings to ignore in mypy
mypy_ignores = [
@@ -61,28 +61,14 @@ def is_package(f):

#: decorator for adding tools to the list
class tool:
    def __init__(self, name: str, required: bool = False, external: bool = True) -> None:
    def __init__(self, name, required=False):
        self.name = name
        self.external = external
        self.required = required

    def __call__(self, fun):
        self.fun = fun
        tools[self.name] = self
        tools[self.name] = (fun, self.required)
        return fun

    @property
    def installed(self) -> bool:
        return bool(which(self.name)) if self.external else True

    @property
    def executable(self) -> Optional[Executable]:
        return which(self.name) if self.external else None


#: tools we run in spack style
tools: Dict[str, tool] = {}
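The reworked `tool` decorator registers the whole instance (function plus metadata) instead of a `(function, required)` tuple, so callers can ask each tool whether it is installed. A compact, self-contained sketch of this registering-decorator pattern, with a stubbed `which` (the stub is an assumption for illustration):

from typing import Callable, Dict, Optional

registry: Dict[str, "tool"] = {}

def which(name: str) -> Optional[str]:
    return None  # stub: pretend no external executables are found

class tool:
    def __init__(self, name: str, required: bool = False, external: bool = True) -> None:
        self.name, self.required, self.external = name, required, external

    def __call__(self, fun: Callable) -> Callable:
        self.fun = fun
        registry[self.name] = self  # register the instance, not a tuple
        return fun

    @property
    def installed(self) -> bool:
        return bool(which(self.name)) if self.external else True

@tool("import", external=False)
def run_import_check(cmd, file_list, args):
    return 0

assert registry["import"].installed  # non-external tools count as installed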


def changed_files(base="develop", untracked=True, all_files=False, root=None):
    """Get list of changed files in the Spack repository.
@@ -190,22 +176,22 @@ def setup_parser(subparser):
        "-t",
        "--tool",
        action="append",
        help="specify which tools to run (default: %s)" % ", ".join(tool_names),
        help="specify which tools to run (default: %s)" % ",".join(tool_names),
    )
    tool_group.add_argument(
        "-s",
        "--skip",
        metavar="TOOL",
        action="append",
        help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
        help="specify tools to skip (choose from %s)" % ",".join(tool_names),
    )

    subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")


def cwd_relative(path, root, initial_working_dir):
def cwd_relative(path, args):
    """Translate prefix-relative path to current working directory-relative."""
    return os.path.relpath(os.path.join(root, path), initial_working_dir)
    return os.path.relpath(os.path.join(args.root, path), args.initial_working_dir)


def rewrite_and_print_output(
@@ -215,10 +201,7 @@ def rewrite_and_print_output(

    # print results relative to current working directory
    def translate(match):
        return replacement.format(
            cwd_relative(match.group(1), args.root, args.initial_working_dir),
            *list(match.groups()[1:]),
        )
        return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))

    for line in output.split("\n"):
        if not line:
@@ -237,7 +220,7 @@ def print_style_header(file_list, args, tools_to_run):
    # translate modified paths to cwd_relative if needed
    paths = [filename.strip() for filename in file_list]
    if not args.root_relative:
        paths = [cwd_relative(filename, args.root, args.initial_working_dir) for filename in paths]
        paths = [cwd_relative(filename, args) for filename in paths]

    tty.msg("Modified files", *paths)
    sys.stdout.flush()
@@ -323,6 +306,8 @@ def process_files(file_list, is_args):
        rewrite_and_print_output(output, args, pat, replacement)

packages_isort_args = (
    "--rm",
    "spack",
    "--rm",
    "spack.pkgkit",
    "--rm",
@@ -367,137 +352,17 @@ def run_black(black_cmd, file_list, args):
    return returncode


def _module_part(root: str, expr: str):
    parts = expr.split(".")
    # spack.pkg is for repositories, don't try to resolve it here.
    if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
        return None
    while parts:
        f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
        f2 = os.path.join(root, "lib", "spack", *parts, "__init__.py")

        if (
            os.path.exists(f1)
            # ensure case sensitive match
            and f"{parts[-1]}.py" in os.listdir(os.path.dirname(f1))
            or os.path.exists(f2)
        ):
            return ".".join(parts)
        parts.pop()
    return None


def _run_import_check(
    file_list: List[str],
    *,
    fix: bool,
    root_relative: bool,
    root=spack.paths.prefix,
    working_dir=spack.paths.prefix,
    out=sys.stdout,
):
    if sys.version_info < (3, 9):
        print("import check requires Python 3.9 or later")
        return 0

    is_use = re.compile(r"(?<!from )(?<!import )(?:llnl|spack)\.[a-zA-Z0-9_\.]+")

    # redundant imports followed by a `# comment` are ignored, because there can be a legitimate
    # reason to import a module: execute module scope init code, or to deal with circular imports.
    is_abs_import = re.compile(r"^import ((?:llnl|spack)\.[a-zA-Z0-9_\.]+)$", re.MULTILINE)

    exit_code = 0

    for file in file_list:
        to_add = set()
        to_remove = []

        pretty_path = file if root_relative else cwd_relative(file, root, working_dir)

        try:
            with open(file, "r") as f:
                contents = f.read()
            contents = open(file, "r").read()
            parsed = ast.parse(contents)
        except Exception:
            exit_code = 1
            print(f"{pretty_path}: could not parse", file=out)
            continue

        for m in is_abs_import.finditer(contents):
            if contents.count(m.group(1)) == 1:
                to_remove.append(m.group(0))
                exit_code = 1
                print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)

        # Clear all strings to avoid matching comments/strings etc.
        for node in ast.walk(parsed):
            if isinstance(node, ast.Constant) and isinstance(node.value, str):
                node.value = ""

        filtered_contents = ast.unparse(parsed)  # novermin
        for m in is_use.finditer(filtered_contents):
            module = _module_part(root, m.group(0))
            if not module or module in to_add:
                continue
            if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
                continue
            to_add.add(module)
            exit_code = 1
            print(f"{pretty_path}: missing import: {module} ({m.group(0)})", file=out)

        if not fix or not to_add and not to_remove:
            continue

        with open(file, "r") as f:
            lines = f.readlines()

        if to_add:
            # insert missing imports before the first import, delegate ordering to isort
            for node in parsed.body:
                if isinstance(node, (ast.Import, ast.ImportFrom)):
                    first_line = node.lineno
                    break
            else:
                print(f"{pretty_path}: could not fix", file=out)
                continue
            lines.insert(first_line, "\n".join(f"import {x}" for x in to_add) + "\n")

        new_contents = "".join(lines)

        # remove redundant imports
        for statement in to_remove:
            new_contents = new_contents.replace(f"{statement}\n", "")

        with open(file, "w") as f:
            f.write(new_contents)

    return exit_code
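`_run_import_check` hinges on two regexes: `is_use` finds qualified uses of `spack.`/`llnl.` names, and `is_abs_import` finds top-level absolute imports so that single-occurrence ones can be flagged as redundant. A small demonstration of the redundancy test on a synthetic module body:

import re

is_abs_import = re.compile(r"^import ((?:llnl|spack)\.[a-zA-Z0-9_\.]+)$", re.MULTILINE)

contents = "import spack.config\nimport os\n\nprint(os.sep)\n"
m = is_abs_import.search(contents)
assert m and m.group(1) == "spack.config"
# the only occurrence is the import line itself, so the check reports it as redundant
assert contents.count(m.group(1)) == 1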


@tool("import", external=False)
def run_import_check(import_check_cmd, file_list, args):
    exit_code = _run_import_check(
        file_list,
        fix=args.fix,
        root_relative=args.root_relative,
        root=args.root,
        working_dir=args.initial_working_dir,
    )
    print_tool_result("import", exit_code)
    return exit_code


def validate_toolset(arg_value):
    """Validate --tool and --skip arguments (sets of optionally comma-separated tools)."""
    tools = set(",".join(arg_value).split(","))  # allow args like 'isort,flake8'
    for tool in tools:
        if tool not in tool_names:
            tty.die("Invalid tool: '%s'" % tool, "Choose from: %s" % ", ".join(tool_names))
            tty.die("Invaild tool: '%s'" % tool, "Choose from: %s" % ", ".join(tool_names))
    return tools


def missing_tools(tools_to_run: List[str]) -> List[str]:
    return [t for t in tools_to_run if not tools[t].installed]
def missing_tools(tools_to_run):
    return [t for t in tools_to_run if which(t) is None]


def _bootstrap_dev_dependencies():
@@ -552,9 +417,9 @@ def prefix_relative(path):

    print_style_header(file_list, args, tools_to_run)
    for tool_name in tools_to_run:
        tool = tools[tool_name]
        run_function, required = tools[tool_name]
        print_tool_header(tool_name)
        return_code |= tool.fun(tool.executable, file_list, args)
        return_code |= run_function(which(tool_name), file_list, args)

    if return_code == 0:
        tty.msg(color.colorize("@*{spack style checks were clean}"))

@@ -24,7 +24,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.23"
tutorial_branch = "releases/v0.22"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")


@@ -17,8 +17,7 @@
import spack.store
import spack.traverse as traverse
from spack.cmd.common import arguments

from ..enums import InstallRecordStatus
from spack.database import InstallStatuses

description = "remove installed packages"
section = "build"
@@ -100,14 +99,12 @@ def find_matching_specs(
    hashes = env.all_hashes() if env else None

    # List of specs that match expressions given via command line
    specs_from_cli: List[spack.spec.Spec] = []
    specs_from_cli: List["spack.spec.Spec"] = []
    has_errors = False
    for spec in specs:
        install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
        matching = spack.store.STORE.db.query_local(
            spec,
            hashes=hashes,
            installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
            origin=origin,
            spec, hashes=hashes, installed=install_query, origin=origin
        )
        # For each spec provided, make sure it refers to only one package.
        # Fail and ask user to be unambiguous if it doesn't

@@ -217,7 +217,7 @@ def unit_test(parser, args, unknown_args):
    # Ensure clingo is available before switching to the
    # mock configuration used by unit tests
    with spack.bootstrap.ensure_bootstrap_configuration():
        spack.bootstrap.ensure_clingo_importable_or_raise()
        spack.bootstrap.ensure_core_dependencies()
        if pytest is None:
            spack.bootstrap.ensure_environment_dependencies()
            import pytest

734 lib/spack/spack/compiler.py (new file)
@@ -0,0 +1,734 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import contextlib
import itertools
import os
import platform
import re
import shutil
import sys
import tempfile
from typing import List, Optional, Sequence

import llnl.path
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs

import spack.error
import spack.schema.environment
import spack.spec
import spack.util.executable
import spack.util.libc
import spack.util.module_cmd
import spack.version
from spack.util.environment import filter_system_paths

__all__ = ["Compiler"]

PATH_INSTANCE_VARS = ["cc", "cxx", "f77", "fc"]
FLAG_INSTANCE_VARS = ["cflags", "cppflags", "cxxflags", "fflags"]


@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
    """Invokes the compiler at a given path passing a single
    version argument and returns the output.

    Args:
        compiler_path (path): path of the compiler to be invoked
        version_arg (str): the argument used to extract version information
    """
    compiler = spack.util.executable.Executable(compiler_path)
    compiler_invocation_args = {
        "output": str,
        "error": str,
        "ignore_errors": ignore_errors,
        "timeout": 120,
        "fail_on_error": True,
    }
    if version_arg:
        output = compiler(version_arg, **compiler_invocation_args)
    else:
        output = compiler(**compiler_invocation_args)
    return output


def get_compiler_version_output(compiler_path, *args, **kwargs):
    """Wrapper for _get_compiler_version_output()."""
    # This ensures that we memoize compiler output by *absolute path*,
    # not just executable name. If we don't do this, and the path changes
    # (e.g., during testing), we can get incorrect results.
    if not os.path.isabs(compiler_path):
        compiler_path = spack.util.executable.which_string(compiler_path, required=True)

    return _get_compiler_version_output(compiler_path, *args, **kwargs)
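The wrapper above normalizes to an absolute path before hitting the memoized helper, because a cache keyed on a bare executable name would return stale output when PATH changes. A toy illustration of the same idea, using `functools.lru_cache` as a stand-in memoizer (an assumption; Spack uses its own `llnl.util.lang.memoized`):

import functools
import os

@functools.lru_cache(maxsize=None)
def _version_output(compiler_path: str) -> str:
    # stand-in for invoking the compiler; cached per exact argument
    return f"version output for {compiler_path}"

def version_output(compiler_path: str) -> str:
    # two different gcc binaries must not share a cache entry
    return _version_output(os.path.abspath(compiler_path))

print(version_output("/opt/gcc-12/bin/gcc"))
print(version_output("/usr/bin/gcc"))  # distinct key, so no stale cache hit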


def tokenize_flags(flags_values, propagate=False):
    """Given a compiler flag specification as a string, this returns a list
    where the entries are the flags. For compiler options which set values
    using the syntax "-flag value", this function groups flags and their
    values together. Any token not preceded by a "-" is considered the
    value of a prior flag."""
    tokens = flags_values.split()
    if not tokens:
        return []
    flag = tokens[0]
    flags_with_propagation = []
    for token in tokens[1:]:
        if not token.startswith("-"):
            flag += " " + token
        else:
            flags_with_propagation.append((flag, propagate))
            flag = token
    flags_with_propagation.append((flag, propagate))
    return flags_with_propagation
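A usage sketch of `tokenize_flags` as defined above, showing how a token without a leading dash is folded into the preceding flag:

>>> tokenize_flags("-O2 -Xlinker -rpath /opt/lib -g")
[('-O2', False), ('-Xlinker', False), ('-rpath /opt/lib', False), ('-g', False)]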


#: regex for parsing linker lines
_LINKER_LINE = re.compile(r"^( *|.*[/\\])" r"(link|ld|([^/\\]+-)?ld|collect2)" r"[^/\\]*( |$)")

#: components of linker lines to ignore
_LINKER_LINE_IGNORE = re.compile(r"(collect2 version|^[A-Za-z0-9_]+=|/ldfe )")

#: regex to match linker search paths
_LINK_DIR_ARG = re.compile(r"^-L(.:)?(?P<dir>[/\\].*)")

#: regex to match linker library path arguments
_LIBPATH_ARG = re.compile(r"^[-/](LIBPATH|libpath):(?P<dir>.*)")


def _parse_link_paths(string):
    """Parse implicit link paths from compiler debug output.

    This gives the compiler runtime library paths that we need to add to
    the RPATH of generated binaries and libraries. It allows us to
    ensure, e.g., that codes load the right libstdc++ for their compiler.
    """
    lib_search_paths = False
    raw_link_dirs = []
    for line in string.splitlines():
        if lib_search_paths:
            if line.startswith("\t"):
                raw_link_dirs.append(line[1:])
                continue
            else:
                lib_search_paths = False
        elif line.startswith("Library search paths:"):
            lib_search_paths = True

        if not _LINKER_LINE.match(line):
            continue
        if _LINKER_LINE_IGNORE.match(line):
            continue
        tty.debug(f"implicit link dirs: link line: {line}")

        next_arg = False
        for arg in line.split():
            if arg in ("-L", "-Y"):
                next_arg = True
                continue

            if next_arg:
                raw_link_dirs.append(arg)
                next_arg = False
                continue

            link_dir_arg = _LINK_DIR_ARG.match(arg)
            if link_dir_arg:
                link_dir = link_dir_arg.group("dir")
                raw_link_dirs.append(link_dir)

            link_dir_arg = _LIBPATH_ARG.match(arg)
            if link_dir_arg:
                link_dir = link_dir_arg.group("dir")
                raw_link_dirs.append(link_dir)

    implicit_link_dirs = list()
    visited = set()
    for link_dir in raw_link_dirs:
        normalized_path = os.path.abspath(link_dir)
        if normalized_path not in visited:
            implicit_link_dirs.append(normalized_path)
            visited.add(normalized_path)

    tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
    return implicit_link_dirs
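
# Illustrative sketch (hypothetical compiler output, not from the module):
# given verbose output containing a link line such as
#
#   /usr/libexec/ld -L /opt/gcc/lib64 -L/opt/gcc/lib crt1.o main.o
#
# _parse_link_paths() returns the absolute, deduplicated search
# directories, here ['/opt/gcc/lib64', '/opt/gcc/lib'].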


@llnl.path.system_path_filter
def _parse_non_system_link_dirs(string: str) -> List[str]:
    """Parses link paths out of compiler debug output.

    Args:
        string: compiler debug output as a string

    Returns:
        Implicit link paths parsed from the compiler output
    """
    link_dirs = _parse_link_paths(string)

    # Remove directories that do not exist. Some versions of the Cray compiler
    # report nonexistent directories
    link_dirs = [d for d in link_dirs if os.path.isdir(d)]

    # Return set of directories containing needed compiler libs, minus
    # system paths. Note that 'filter_system_paths' only checks for an
    # exact match, while 'in_system_subdirectory' checks if a path contains
    # a system directory as a subdirectory
    link_dirs = filter_system_paths(link_dirs)
    return list(p for p in link_dirs if not in_system_subdirectory(p))


def in_system_subdirectory(path):
    system_dirs = [
        "/lib/",
        "/lib64/",
        "/usr/lib/",
        "/usr/lib64/",
        "/usr/local/lib/",
        "/usr/local/lib64/",
    ]
    return any(path_contains_subdirectory(path, x) for x in system_dirs)
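
# Illustrative example: '/usr/lib/x86_64-linux-gnu' lies under '/usr/lib/',
# so in_system_subdirectory() returns True for it, while a compiler-private
# directory such as '/opt/gcc/lib64' returns False.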


class Compiler:
    """This class encapsulates a Spack "compiler", which includes C,
    C++, and Fortran compilers. Subclasses should implement
    support for specific compilers, their possible names, arguments,
    and how to identify the particular type of compiler."""

    # Optional prefix regexes for searching for this type of compiler.
    # Prefixes are sometimes used for toolchains
    prefixes: List[str] = []

    # Optional suffix regexes for searching for this type of compiler.
    # Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
    # version suffix for gcc.
    suffixes = [r"-.*"]

    #: Compiler argument that produces version information
    version_argument = "-dumpversion"

    #: Return values to ignore when invoking the compiler to get its version
    ignore_version_errors: Sequence[int] = ()

    #: Regex used to extract version from compiler's output
    version_regex = "(.*)"

    # These libraries are anticipated to be required by all executables built
    # by any compiler
    _all_compiler_rpath_libraries = ["libc", "libc++", "libstdc++"]

    #: Platform matcher for Platform objects supported by compiler
    is_supported_on_platform = lambda x: True

    # Default flags used by a compiler to set an rpath
    @property
    def cc_rpath_arg(self):
        return "-Wl,-rpath,"

    @property
    def cxx_rpath_arg(self):
        return "-Wl,-rpath,"

    @property
    def f77_rpath_arg(self):
        return "-Wl,-rpath,"

    @property
    def fc_rpath_arg(self):
        return "-Wl,-rpath,"

    @property
    def linker_arg(self):
        """Flag that needs to be used to pass an argument to the linker."""
        return "-Wl,"

    @property
    def disable_new_dtags(self):
        if platform.system() == "Darwin":
            return ""
        return "--disable-new-dtags"

    @property
    def enable_new_dtags(self):
        if platform.system() == "Darwin":
            return ""
        return "--enable-new-dtags"

    @property
    def debug_flags(self):
        return ["-g"]

    @property
    def opt_flags(self):
        return ["-O", "-O0", "-O1", "-O2", "-O3"]

    def __init__(
        self,
        cspec,
        operating_system,
        target,
        paths,
        modules: Optional[List[str]] = None,
        alias=None,
        environment=None,
        extra_rpaths=None,
        enable_implicit_rpaths=None,
        **kwargs,
    ):
        self.spec = cspec
        self.operating_system = str(operating_system)
        self.target = target
        self.modules = modules or []
        self.alias = alias
        self.environment = environment or {}
        self.extra_rpaths = extra_rpaths or []
        self.enable_implicit_rpaths = enable_implicit_rpaths

        self.cc = paths[0]
        self.cxx = paths[1]
        self.f77 = None
        self.fc = None
        if len(paths) > 2:
            self.f77 = paths[2]
            if len(paths) == 3:
                self.fc = self.f77
            else:
                self.fc = paths[3]

        # Unfortunately have to make sure these params are accepted
        # in the same order they are returned by sorted(flags)
        # in compilers/__init__.py
        self.flags = spack.spec.FlagMap(self.spec)
        for flag in self.flags.valid_compiler_flags():
            value = kwargs.get(flag, None)
            if value is not None:
                values_with_propagation = tokenize_flags(value, False)
                for value, propagation in values_with_propagation:
                    self.flags.add_flag(flag, value, propagation)

        # caching value for compiler reported version
        # used for version checks for API, e.g. C++11 flag
        self._real_version = None

    def __eq__(self, other):
        return (
            self.cc == other.cc
            and self.cxx == other.cxx
            and self.fc == other.fc
            and self.f77 == other.f77
            and self.spec == other.spec
            and self.operating_system == other.operating_system
            and self.target == other.target
            and self.flags == other.flags
            and self.modules == other.modules
            and self.environment == other.environment
            and self.extra_rpaths == other.extra_rpaths
            and self.enable_implicit_rpaths == other.enable_implicit_rpaths
        )

    def __hash__(self):
        return hash(
            (
                self.cc,
                self.cxx,
                self.fc,
                self.f77,
                self.spec,
                self.operating_system,
                self.target,
                str(self.flags),
                str(self.modules),
                str(self.environment),
                str(self.extra_rpaths),
                self.enable_implicit_rpaths,
            )
        )

    def verify_executables(self):
        """Raise an error if any of the compiler executables is not valid.

        This method confirms that for all of the compilers (cc, cxx, f77, fc)
        that have paths, those paths exist and are executable by the current
        user.
        Raises a CompilerAccessError if any of the non-null paths for the
        compiler are not accessible.
        """

        def accessible_exe(exe):
            # compilers may contain executable names (on Cray or user edited)
            if not os.path.isabs(exe):
                exe = spack.util.executable.which_string(exe)
                if not exe:
                    return False
            return os.path.isfile(exe) and os.access(exe, os.X_OK)

        # setup environment before verifying in case we have executable names
        # instead of absolute paths
        with self.compiler_environment():
            missing = [
                cmp
                for cmp in (self.cc, self.cxx, self.f77, self.fc)
                if cmp and not accessible_exe(cmp)
            ]
            if missing:
                raise CompilerAccessError(self, missing)

    @property
    def version(self):
        return self.spec.version

    @property
    def real_version(self):
        """Executable reported compiler version used for API-determinations

        E.g. C++11 flag checks.
        """
        if not self._real_version:
            try:
                real_version = spack.version.Version(self.get_real_version())
                if real_version == spack.version.Version("unknown"):
                    return self.version
                self._real_version = real_version
            except spack.util.executable.ProcessError:
                self._real_version = self.version
        return self._real_version

    def implicit_rpaths(self) -> List[str]:
        if self.enable_implicit_rpaths is False:
            return []

        output = self.compiler_verbose_output

        if not output:
            return []

        link_dirs = _parse_non_system_link_dirs(output)

        all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
        return list(paths_containing_libs(link_dirs, all_required_libs))

    @property
    def default_dynamic_linker(self) -> Optional[str]:
        """Determine default dynamic linker from compiler link line"""
        output = self.compiler_verbose_output

        if not output:
            return None

        return spack.util.libc.parse_dynamic_linker(output)

    @property
    def default_libc(self) -> Optional["spack.spec.Spec"]:
        """Determine libc targeted by the compiler from link line"""
        dynamic_linker = self.default_dynamic_linker

        if not dynamic_linker:
            return None

        return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)

    @property
    def required_libs(self):
        """For executables created with this compiler, the compiler libraries
        that would be generally required to run it.
        """
        # By default every compiler returns the empty list
        return []

    @property
    def compiler_verbose_output(self) -> Optional[str]:
        """Verbose output from compiling a dummy C source file. Output is cached."""
        if not hasattr(self, "_compile_c_source_output"):
            self._compile_c_source_output = self._compile_dummy_c_source()
        return self._compile_c_source_output

    def _compile_dummy_c_source(self) -> Optional[str]:
        cc = self.cc if self.cc else self.cxx
        if not cc or not self.verbose_flag:
            return None

        try:
            tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
            fout = os.path.join(tmpdir, "output")
            fin = os.path.join(tmpdir, "main.c")

            with open(fin, "w") as csource:
                csource.write(
                    "int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
                )
            cc_exe = spack.util.executable.Executable(cc)
            for flag_type in ["cflags" if cc == self.cc else "cxxflags", "cppflags", "ldflags"]:
                cc_exe.add_default_arg(*self.flags.get(flag_type, []))

            with self.compiler_environment():
                return cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
        except spack.util.executable.ProcessError as pe:
            tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
            return None
        finally:
            shutil.rmtree(tmpdir, ignore_errors=True)

    @property
    def verbose_flag(self) -> Optional[str]:
        """
        This property should be overridden in the compiler subclass if a
        verbose flag is available.

        If it is not overridden, it is assumed to not be supported.
        """

    # This property should be overridden in the compiler subclass if
    # OpenMP is supported by that compiler
    @property
    def openmp_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        raise UnsupportedCompilerFlag(self, "OpenMP", "openmp_flag")

    # This property should be overridden in the compiler subclass if
    # C++98 is not the default standard for that compiler
    @property
    def cxx98_flag(self):
        return ""

    # This property should be overridden in the compiler subclass if
    # C++11 is supported by that compiler
    @property
    def cxx11_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        raise UnsupportedCompilerFlag(self, "the C++11 standard", "cxx11_flag")

    # This property should be overridden in the compiler subclass if
    # C++14 is supported by that compiler
    @property
    def cxx14_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        raise UnsupportedCompilerFlag(self, "the C++14 standard", "cxx14_flag")

    # This property should be overridden in the compiler subclass if
    # C++17 is supported by that compiler
    @property
    def cxx17_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        raise UnsupportedCompilerFlag(self, "the C++17 standard", "cxx17_flag")

    # This property should be overridden in the compiler subclass if
    # C99 is supported by that compiler
    @property
    def c99_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag")

    # This property should be overridden in the compiler subclass if
    # C11 is supported by that compiler
    @property
    def c11_flag(self):
        # If it is not overridden, assume it is not supported and warn the user
        raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag")

    @property
    def cc_pic_flag(self):
        """Returns the flag used by the C compiler to produce
        Position Independent Code (PIC)."""
        return "-fPIC"

    @property
    def cxx_pic_flag(self):
        """Returns the flag used by the C++ compiler to produce
        Position Independent Code (PIC)."""
        return "-fPIC"

    @property
    def f77_pic_flag(self):
        """Returns the flag used by the F77 compiler to produce
        Position Independent Code (PIC)."""
        return "-fPIC"

    @property
    def fc_pic_flag(self):
        """Returns the flag used by the FC compiler to produce
        Position Independent Code (PIC)."""
        return "-fPIC"

    # Note: This is not a class method. The class methods are used to detect
    # compilers on PATH based systems, and do not set up the run environment of
    # the compiler. This method can be called on `module` based systems as well
    def get_real_version(self):
        """Query the compiler for its version.

        This is the "real" compiler version, regardless of what is in the
        compilers.yaml file, which the user can change to name their compiler.

        Use the runtime environment of the compiler (modules and environment
        modifications) to enable the compiler to run properly on any platform.
        """
        cc = spack.util.executable.Executable(self.cc)
        with self.compiler_environment():
            output = cc(
                self.version_argument,
                output=str,
                error=str,
                ignore_errors=tuple(self.ignore_version_errors),
            )
            return self.extract_version_from_output(output)

    @property
    def prefix(self):
        """Query the compiler for its install prefix. This is the install
        path as reported by the compiler. Note that paths for cc, cxx, etc
        are not enough to find the install prefix of the compiler, since
        they can be symlinks, wrappers, or filenames instead of absolute paths."""
        raise NotImplementedError("prefix is not implemented for this compiler")

    #
    # Compiler classes have methods for querying the version of
    # specific compiler executables. This is used when discovering compilers.
    #
    # Compiler *instances* are just data objects, and can only be
    # constructed from an actual set of executables.
    #
    @classmethod
    def default_version(cls, cc):
        """Override just this to override all compiler version functions."""
        output = get_compiler_version_output(
            cc, cls.version_argument, tuple(cls.ignore_version_errors)
        )
        return cls.extract_version_from_output(output)

    @classmethod
    @llnl.util.lang.memoized
    def extract_version_from_output(cls, output):
        """Extracts the version from compiler's output."""
        match = re.search(cls.version_regex, output)
        return match.group(1) if match else "unknown"
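
    # Illustrative example: a subclass that sets, say,
    #
    #   version_regex = r"clang version ([^ )]+)"
    #
    # would extract "17.0.6" from "clang version 17.0.6 (https://...)" here;
    # output that does not match falls back to the string "unknown".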

    @classmethod
    def cc_version(cls, cc):
        return cls.default_version(cc)

    @classmethod
    def search_regexps(cls, language):
        # Compile all the regular expressions used for files beforehand.
        # This searches for any combination of <prefix><name><suffix>
        # defined for the compiler
        compiler_names = getattr(cls, "{0}_names".format(language))
        prefixes = [""] + cls.prefixes
        suffixes = [""]
        if sys.platform == "win32":
            ext = r"\.(?:exe|bat)"
            cls_suf = [suf + ext for suf in cls.suffixes]
            ext_suf = [ext]
            suffixes = suffixes + cls.suffixes + cls_suf + ext_suf
        else:
            suffixes = suffixes + cls.suffixes
        regexp_fmt = r"^({0}){1}({2})$"
        return [
            re.compile(regexp_fmt.format(prefix, re.escape(name), suffix))
            for prefix, name, suffix in itertools.product(prefixes, compiler_names, suffixes)
        ]
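
    # Illustrative example: for a subclass with cc_names = ["gcc"] and the
    # default suffixes, search_regexps("cc") yields patterns equivalent to
    # ^()gcc()$ and ^()gcc(-.*)$, matching names like "gcc" and "gcc-12".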

    def setup_custom_environment(self, pkg, env):
        """Set any environment variables necessary to use the compiler."""
        pass

    def __repr__(self):
        """Return a string representation of the compiler toolchain."""
        return self.__str__()

    def __str__(self):
        """Return a string representation of the compiler toolchain."""
        return "%s(%s)" % (
            self.name,
            "\n ".join(
                (
                    str(s)
                    for s in (
                        self.cc,
                        self.cxx,
                        self.f77,
                        self.fc,
                        self.modules,
                        str(self.operating_system),
                    )
                )
            ),
        )

    @contextlib.contextmanager
    def compiler_environment(self):
        # Avoid modifying os.environ if possible.
        if not self.modules and not self.environment:
            yield
            return

        # store environment to replace later
        backup_env = os.environ.copy()

        try:
            # load modules and set env variables
            for module in self.modules:
                spack.util.module_cmd.load_module(module)

            # apply other compiler environment changes
            spack.schema.environment.parse(self.environment).apply_modifications()

            yield
        finally:
            # Restore environment regardless of whether inner code succeeded
            os.environ.clear()
            os.environ.update(backup_env)
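
    # Usage sketch (illustrative): compiler invocations are wrapped in this
    # context manager so module loads and environment edits never leak, e.g.
    #
    #   with compiler.compiler_environment():
    #       output = cc(compiler.version_argument, output=str)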

    def to_dict(self):
        flags_dict = {fname: " ".join(fvals) for fname, fvals in self.flags.items()}
        flags_dict.update(
            {attr: getattr(self, attr, None) for attr in FLAG_INSTANCE_VARS if hasattr(self, attr)}
        )
        result = {
            "spec": str(self.spec),
            "paths": {attr: getattr(self, attr, None) for attr in PATH_INSTANCE_VARS},
            "flags": flags_dict,
            "operating_system": str(self.operating_system),
            "target": str(self.target),
            "modules": self.modules or [],
            "environment": self.environment or {},
            "extra_rpaths": self.extra_rpaths or [],
        }

        if self.enable_implicit_rpaths is not None:
            result["implicit_rpaths"] = self.enable_implicit_rpaths

        if self.alias:
            result["alias"] = self.alias

        return result
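
    # Illustrative example: the returned dict mirrors one compilers.yaml
    # entry, roughly
    #
    #   {"spec": "gcc@=12.3.0",
    #    "paths": {"cc": "/usr/bin/gcc", "cxx": "/usr/bin/g++", ...},
    #    "flags": {}, "operating_system": "ubuntu22.04", "target": "x86_64",
    #    "modules": [], "environment": {}, "extra_rpaths": []}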


class CompilerAccessError(spack.error.SpackError):
    def __init__(self, compiler, paths):
        msg = "Compiler '%s' has executables that are missing" % compiler.spec
        msg += " or are not executable: %s" % paths
        super().__init__(msg)


class InvalidCompilerError(spack.error.SpackError):
    def __init__(self):
        super().__init__("Compiler has no executables.")


class UnsupportedCompilerFlag(spack.error.SpackError):
    def __init__(self, compiler, feature, flag_name, ver_string=None):
        super().__init__(
            "{0} ({1}) does not support {2} (as compiler.{3}).".format(
                compiler.name, ver_string if ver_string else compiler.version, feature, flag_name
            ),
            "If you think it should, please edit the compiler.{0} subclass to".format(
                compiler.name
            )
            + " implement the {0} property and submit a pull request or issue.".format(flag_name),
        )
@@ -2,3 +2,836 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This module contains functions related to finding compilers on the
system and configuring Spack to use multiple compilers.
"""
import importlib
import os
import re
import sys
import warnings
from typing import Dict, List, Optional

import archspec.cpu

import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty

import spack.compiler
import spack.config
import spack.error
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
from spack.operating_systems import windows_os
from spack.util.environment import get_path
from spack.util.naming import mod_to_class

_other_instance_vars = [
    "modules",
    "operating_system",
    "environment",
    "implicit_rpaths",
    "extra_rpaths",
]

# TODO: Caches at module level make it difficult to mock configurations in
# TODO: unit tests. It might be worth reworking their implementation.
#: cache of compilers constructed from config data, keyed by config entry id.
_compiler_cache: Dict[str, "spack.compiler.Compiler"] = {}

_compiler_to_pkg = {
    "clang": "llvm+clang",
    "oneapi": "intel-oneapi-compilers",
    "rocmcc": "llvm-amdgpu",
    "intel@2020:": "intel-oneapi-compilers-classic",
    "arm": "acfl",
}

# TODO: generating this from the previous dict causes docs errors
package_name_to_compiler_name = {
    "llvm": "clang",
    "intel-oneapi-compilers": "oneapi",
    "llvm-amdgpu": "rocmcc",
    "intel-oneapi-compilers-classic": "intel",
    "acfl": "arm",
}


#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"


def pkg_spec_for_compiler(cspec):
    """Return the spec of the package that provides the compiler."""
    for spec, package in _compiler_to_pkg.items():
        if cspec.satisfies(spec):
            spec_str = "%s@%s" % (package, cspec.versions)
            break
    else:
        spec_str = str(cspec)
    return spack.spec.parse_with_version_concrete(spec_str)
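
# Illustrative example: a CompilerSpec such as "clang@16" satisfies the
# "clang" key above, so this returns a spec for the "llvm+clang" package at
# the same version; names with no mapping (e.g. "gcc@12.3.0") pass through
# unchanged.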


def _auto_compiler_spec(function):
    def converter(cspec_like, *args, **kwargs):
        if not isinstance(cspec_like, spack.spec.CompilerSpec):
            cspec_like = spack.spec.CompilerSpec(cspec_like)
        return function(cspec_like, *args, **kwargs)

    return converter
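
# Usage sketch (illustrative): the decorator lets callers pass plain
# strings, e.g. supported("gcc") below behaves the same as
# supported(spack.spec.CompilerSpec("gcc")).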


def _to_dict(compiler):
    """Return a dict version of compiler suitable to insert in YAML."""
    return {"compiler": compiler.to_dict()}


def get_compiler_config(
    configuration: "spack.config.Configuration",
    *,
    scope: Optional[str] = None,
    init_config: bool = False,
) -> List[Dict]:
    """Return the compiler configuration for the specified architecture."""
    config = configuration.get("compilers", scope=scope) or []
    if config or not init_config:
        return config

    merged_config = configuration.get("compilers")
    if merged_config:
        # Config is empty for this scope
        # Do not init config because there is a non-empty scope
        return config

    find_compilers(scope=scope)
    config = configuration.get("compilers", scope=scope)
    return config


def get_compiler_config_from_packages(
    configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
    """Return the compiler configuration from packages.yaml"""
    packages_yaml = configuration.get("packages", scope=scope)
    return CompilerConfigFactory.from_packages_yaml(packages_yaml)


def compiler_config_files():
    config_files = list()
    config = spack.config.CONFIG
    for scope in config.writable_scopes:
        name = scope.name
        compiler_config = config.get("compilers", scope=name)
        if compiler_config:
            config_files.append(config.get_config_filename(name, "compilers"))
        compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
        if compiler_config_from_packages:
            config_files.append(config.get_config_filename(name, "packages"))
    return config_files


def add_compilers_to_config(compilers, scope=None):
    """Add compilers to the config for the specified architecture.

    Arguments:
        compilers: a list of Compiler objects.
        scope: configuration scope to modify.
    """
    compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
    for compiler in compilers:
        if not compiler.cc:
            tty.debug(f"{compiler.spec} does not have a C compiler")
        if not compiler.cxx:
            tty.debug(f"{compiler.spec} does not have a C++ compiler")
        if not compiler.f77:
            tty.debug(f"{compiler.spec} does not have a Fortran77 compiler")
        if not compiler.fc:
            tty.debug(f"{compiler.spec} does not have a Fortran compiler")
        compiler_config.append(_to_dict(compiler))
    spack.config.set("compilers", compiler_config, scope=scope)


@_auto_compiler_spec
def remove_compiler_from_config(compiler_spec, scope=None):
    """Remove compilers from configuration by spec.

    If scope is None, all the scopes are searched for removal.

    Arguments:
        compiler_spec: compiler to be removed
        scope: configuration scope to modify
    """
    candidate_scopes = [scope]
    if scope is None:
        candidate_scopes = spack.config.CONFIG.scopes.keys()

    removal_happened = False
    for current_scope in candidate_scopes:
        removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)

    msg = "`spack compiler remove` will not remove compilers defined in packages.yaml"
    msg += "\nTo remove these compilers, either edit the config or use `spack external remove`"
    tty.debug(msg)
    return removal_happened


def _remove_compiler_from_scope(compiler_spec, scope):
    """Removes a compiler from a specific configuration scope.

    Args:
        compiler_spec: compiler to be removed
        scope: configuration scope under consideration

    Returns:
        True if one or more compiler entries were actually removed, False otherwise
    """
    assert scope is not None, "a specific scope is needed when calling this function"
    compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
    filtered_compiler_config = [
        compiler_entry
        for compiler_entry in compiler_config
        if not spack.spec.parse_with_version_concrete(
            compiler_entry["compiler"]["spec"], compiler=True
        ).satisfies(compiler_spec)
    ]

    if len(filtered_compiler_config) == len(compiler_config):
        return False

    # We need to preserve the YAML type for comments, hence we are copying the
    # items in the list that was just retrieved
    compiler_config[:] = filtered_compiler_config
    spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
    return True


def all_compilers_config(
    configuration: "spack.config.Configuration",
    *,
    scope: Optional[str] = None,
    init_config: bool = True,
) -> List["spack.compiler.Compiler"]:
    """Return a set of specs for all the compiler versions currently
    available to build with. These are instances of CompilerSpec.
    """
    from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
    if from_packages_yaml:
        init_config = False
    from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)

    result = from_compilers_yaml + from_packages_yaml
    # Dedupe entries by the compiler they represent
    # If the entry is invalid, treat it as unique for deduplication
    key = lambda c: _compiler_from_config_entry(c["compiler"]) or id(c)
    return list(llnl.util.lang.dedupe(result, key=key))


def all_compiler_specs(scope=None, init_config=True):
    # Return compiler specs from the merged config.
    return [
        spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
        for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
    ]


def find_compilers(
    path_hints: Optional[List[str]] = None,
    *,
    scope: Optional[str] = None,
    mixed_toolchain: bool = False,
    max_workers: Optional[int] = None,
) -> List["spack.compiler.Compiler"]:
    """Searches for compilers in the paths given as argument. If any new compiler is found, the
    configuration is updated, and the list of new compiler objects is returned.

    Args:
        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
            environment variable will be used if the value is None
        scope: configuration scope to modify
        mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
            a certain language
        max_workers: number of processes used to search for compilers
    """
    import spack.detection

    known_compilers = set(all_compilers(init_config=False))

    if path_hints is None:
        path_hints = get_path("PATH")
    default_paths = fs.search_paths_for_executables(*path_hints)
    if sys.platform == "win32":
        default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
    compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)

    detected_packages = spack.detection.by_path(
        compiler_pkgs, path_hints=default_paths, max_workers=max_workers
    )

    valid_compilers = {}
    for name, detected in detected_packages.items():
        compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x)]
        if not compilers:
            continue
        valid_compilers[name] = compilers

    def _has_fortran_compilers(x):
        if "compilers" not in x.extra_attributes:
            return False

        return "fortran" in x.extra_attributes["compilers"]

    if mixed_toolchain:
        gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
        if gccs:
            best_gcc = sorted(
                gccs, key=lambda x: spack.spec.parse_with_version_concrete(x).version
            )[-1]
            gfortran = best_gcc.extra_attributes["compilers"]["fortran"]
            for name in ("llvm", "apple-clang"):
                if name not in valid_compilers:
                    continue
                candidates = valid_compilers[name]
                for candidate in candidates:
                    if _has_fortran_compilers(candidate):
                        continue
                    candidate.extra_attributes["compilers"]["fortran"] = gfortran

    new_compilers = []
    for name, detected in valid_compilers.items():
        for config in CompilerConfigFactory.from_specs(detected):
            c = _compiler_from_config_entry(config["compiler"])
            if c in known_compilers:
                continue
            new_compilers.append(c)

    add_compilers_to_config(new_compilers, scope=scope)
    return new_compilers


def select_new_compilers(compilers, scope=None):
    """Given a list of compilers, remove those that are already defined in
    the configuration.
    """
    compilers_not_in_config = []
    for c in compilers:
        arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
        same_specs = compilers_for_spec(
            c.spec, arch_spec=arch_spec, scope=scope, init_config=False
        )
        if not same_specs:
            compilers_not_in_config.append(c)

    return compilers_not_in_config


def supported_compilers() -> List[str]:
    """Return a set of names of compilers supported by Spack.

    See available_compilers() to get a list of all the available
    versions of supported compilers.
    """
    # Hack to be able to call the compiler `apple-clang` while still
    # using a valid python name for the module
    return sorted(all_compiler_names())


def supported_compilers_for_host_platform() -> List[str]:
    """Return a set of compiler class objects supported by Spack
    that are also supported by the current host platform
    """
    host_plat = spack.platforms.real_host()
    return supported_compilers_for_platform(host_plat)


def supported_compilers_for_platform(platform: "spack.platforms.Platform") -> List[str]:
    """Return a set of compiler class objects supported by Spack
    that are also supported by the provided platform

    Args:
        platform (str): string representation of platform
            for which compiler compatibility should be determined
    """
    return [
        name
        for name in supported_compilers()
        if class_for_compiler_name(name).is_supported_on_platform(platform)
    ]


def all_compiler_names() -> List[str]:
    def replace_apple_clang(name):
        return name if name != "apple_clang" else "apple-clang"

    return [replace_apple_clang(name) for name in all_compiler_module_names()]


@llnl.util.lang.memoized
def all_compiler_module_names() -> List[str]:
    return list(llnl.util.lang.list_modules(spack.paths.compilers_path))


@_auto_compiler_spec
def supported(compiler_spec):
    """Test if a particular compiler is supported."""
    return compiler_spec.name in supported_compilers()


@_auto_compiler_spec
def find(compiler_spec, scope=None, init_config=True):
    """Return specs of available compilers that match the supplied
    compiler spec. Return an empty list if nothing found."""
    return [c for c in all_compiler_specs(scope, init_config) if c.satisfies(compiler_spec)]


@_auto_compiler_spec
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
    """Return specs of available compilers that match the supplied
    compiler spec. Return an empty list if nothing found."""
    return [
        c.spec
        for c in compilers_for_spec(
            compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
        )
    ]


def all_compilers(scope=None, init_config=True):
    return all_compilers_from(
        configuration=spack.config.CONFIG, scope=scope, init_config=init_config
    )


def all_compilers_from(configuration, scope=None, init_config=True):
    compilers = []
    for items in all_compilers_config(
        configuration=configuration, scope=scope, init_config=init_config
    ):
        items = items["compiler"]
        compiler = _compiler_from_config_entry(items)  # can be None in error case
        if compiler:
            compilers.append(compiler)
    return compilers


@_auto_compiler_spec
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
    """This gets all compilers that satisfy the supplied CompilerSpec.
    Returns an empty list if none are found.
    """
    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
    matches = set(find(compiler_spec, scope, init_config))
    compilers = []
    for cspec in matches:
        compilers.extend(get_compilers(config, cspec, arch_spec))
    return compilers


def compilers_for_arch(arch_spec, scope=None):
    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
    return list(get_compilers(config, arch_spec=arch_spec))


def compiler_specs_for_arch(arch_spec, scope=None):
    return [c.spec for c in compilers_for_arch(arch_spec, scope)]


class CacheReference:
    """This acts as a hashable reference to any object (regardless of whether
    the object itself is hashable) and also prevents the object from being
    garbage-collected (so if two CacheReference objects are equal, they
    will refer to the same object, since it will not have been gc'ed since
    the creation of the first CacheReference).
    """

    def __init__(self, val):
        self.val = val
        self.id = id(val)

    def __hash__(self):
        return self.id

    def __eq__(self, other):
        return isinstance(other, CacheReference) and self.id == other.id
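
# Illustrative example: dicts are unhashable, but CacheReference keys them
# by identity, e.g.
#
#   entry = {"spec": "gcc@12"}
#   ref = CacheReference(entry)
#   CacheReference(entry) == ref        # True: same underlying object
#   CacheReference(dict(entry)) == ref  # False: an equal but distinct copy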


def compiler_from_dict(items):
    cspec = spack.spec.parse_with_version_concrete(items["spec"], compiler=True)
    os = items.get("operating_system", None)
    target = items.get("target", None)

    if not (
        "paths" in items and all(n in items["paths"] for n in spack.compiler.PATH_INSTANCE_VARS)
    ):
        raise InvalidCompilerConfigurationError(cspec)

    cls = class_for_compiler_name(cspec.name)

    compiler_paths = []
    for c in spack.compiler.PATH_INSTANCE_VARS:
        compiler_path = items["paths"][c]
        if compiler_path != "None":
            compiler_paths.append(compiler_path)
        else:
            compiler_paths.append(None)

    mods = items.get("modules")
    if mods == "None":
        mods = []

    alias = items.get("alias", None)
    compiler_flags = items.get("flags", {})
    environment = items.get("environment", {})
    extra_rpaths = items.get("extra_rpaths", [])
    implicit_rpaths = items.get("implicit_rpaths", None)

    # Starting with c22a145, 'implicit_rpaths' was a list. Now it is a
    # boolean which can be set by the user to disable all automatic
    # RPATH insertion of compiler libraries
    if implicit_rpaths is not None and not isinstance(implicit_rpaths, bool):
        implicit_rpaths = None

    return cls(
        cspec,
        os,
        target,
        compiler_paths,
        mods,
        alias,
        environment,
        extra_rpaths,
        enable_implicit_rpaths=implicit_rpaths,
        **compiler_flags,
    )


def _compiler_from_config_entry(items):
    """Note this is intended for internal use only. To avoid re-parsing
    the same config dictionary this keeps track of its location in
    memory. If you provide the same dictionary twice it will return
    the same Compiler object (regardless of whether the dictionary
    entries have changed).
    """
    config_id = CacheReference(items)
    compiler = _compiler_cache.get(config_id, None)

    if compiler is None:
        try:
            compiler = compiler_from_dict(items)
        except UnknownCompilerError as e:
            warnings.warn(e.message)
        _compiler_cache[config_id] = compiler

    return compiler


def get_compilers(config, cspec=None, arch_spec=None):
    compilers = []

    for items in config:
        items = items["compiler"]

        # We might use equality here.
        if cspec and not spack.spec.parse_with_version_concrete(
            items["spec"], compiler=True
        ).satisfies(cspec):
            continue

        # If an arch spec is given, confirm that this compiler
        # is for the given operating system
        os = items.get("operating_system", None)
        if arch_spec and os != arch_spec.os:
            continue

        # If an arch spec is given, confirm that this compiler
        # is for the given target. If the target is 'any', match
        # any given arch spec. If the compiler has no assigned
        # target this is an old compiler config file, skip this logic.
        target = items.get("target", None)

        try:
            current_target = archspec.cpu.TARGETS[str(arch_spec.target)]
            family = str(current_target.family)
        except KeyError:
            # TODO: Check if this exception handling makes sense, or if we
            # TODO: need to change / refactor tests
            family = str(arch_spec.target)
        except AttributeError:
            assert arch_spec is None

        if arch_spec and target and (target != family and target != "any"):
            # If the family of the target is the family we are seeking,
            # there's an error in the underlying configuration
            if archspec.cpu.TARGETS[target].family == family:
                msg = (
                    'the "target" field in compilers.yaml accepts only '
                    'target families [replace "{0}" with "{1}"'
                    ' in "{2}" specification]'
                )
                msg = msg.format(str(target), family, items.get("spec", "??"))
                raise ValueError(msg)
            continue

        compiler = _compiler_from_config_entry(items)
        if compiler:
            compilers.append(compiler)

    return compilers


@_auto_compiler_spec
def compiler_for_spec(compiler_spec, arch_spec):
    """Get the compiler that satisfies compiler_spec. compiler_spec must
    be concrete."""
    assert compiler_spec.concrete
    assert arch_spec.concrete

    compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec)
    if len(compilers) < 1:
        raise NoCompilerForSpecError(compiler_spec, arch_spec.os)
    if len(compilers) > 1:
        msg = "Multiple definitions of compiler %s " % compiler_spec
        msg += "for architecture %s:\n %s" % (arch_spec, compilers)
        tty.debug(msg)
    return compilers[0]


@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class."""
    if not supported(compiler_name):
        raise UnknownCompilerError(compiler_name)

    # Hack to be able to call the compiler `apple-clang` while still
    # using a valid python name for the module
    submodule_name = compiler_name
    if compiler_name == "apple-clang":
        submodule_name = compiler_name.replace("-", "_")

    module_name = ".".join(["spack", "compilers", submodule_name])
    module_obj = importlib.import_module(module_name)
    cls = getattr(module_obj, mod_to_class(compiler_name))

    # make a note of the name in the module so we can get to it easily.
    cls.name = compiler_name

    return cls
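
# Illustrative example: class_for_compiler_name("apple-clang") imports the
# spack.compilers.apple_clang module and returns its AppleClang class, with
# cls.name set back to the user-facing "apple-clang".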


def all_compiler_types():
    return [class_for_compiler_name(c) for c in supported_compilers()]


def is_mixed_toolchain(compiler):
    """Returns True if the current compiler is a mixed toolchain,
    False otherwise.

    Args:
        compiler (spack.compiler.Compiler): a valid compiler object
    """
    import spack.detection.path

    executables = [
        os.path.basename(compiler.cc or ""),
        os.path.basename(compiler.cxx or ""),
        os.path.basename(compiler.f77 or ""),
        os.path.basename(compiler.fc or ""),
    ]

    toolchains = set()
    finder = spack.detection.path.ExecutablesFinder()

    for pkg_name in spack.repo.PATH.packages_with_tags(COMPILER_TAG):
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        patterns = finder.search_patterns(pkg=pkg_cls)
        if not patterns:
            continue
        joined_pattern = re.compile(r"|".join(patterns))

        if any(joined_pattern.search(exe) for exe in executables):
            tty.debug(f"[TOOLCHAIN] MATCH {pkg_name}")
            toolchains.add(pkg_name)

    if len(toolchains) > 1:
        if (
            toolchains == {"llvm", "apple-clang", "aocc"}
            # Msvc toolchain uses Intel ifx
            or toolchains == {"msvc", "intel-oneapi-compilers"}
        ):
            return False
        tty.debug("[TOOLCHAINS] {0}".format(toolchains))
        return True

    return False


_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"


class CompilerConfigFactory:
    """Class aggregating all ways of constructing a list of compiler config entries."""

    @staticmethod
    def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
        result = []
        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
        for s in specs:
            if s.name not in compiler_package_names:
                continue

            candidate = CompilerConfigFactory.from_external_spec(s)
            if candidate is None:
                continue

            result.append(candidate)
        return result

    @staticmethod
    def from_packages_yaml(packages_yaml) -> List[dict]:
        compiler_specs = []
        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
        for name, entry in packages_yaml.items():
            if name not in compiler_package_names:
                continue

            externals_config = entry.get("externals", None)
            if not externals_config:
                continue

            current_specs = []
            for current_external in externals_config:
                compiler = CompilerConfigFactory._spec_from_external_config(current_external)
                if compiler:
                    current_specs.append(compiler)
            compiler_specs.extend(current_specs)

        return CompilerConfigFactory.from_specs(compiler_specs)

    @staticmethod
    def _spec_from_external_config(config):
        # Allow `@x.y.z` instead of `@=x.y.z`
        err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
        # If extra_attributes is not there I might not want to use this entry as a compiler,
        # therefore just leave a debug message, but don't be loud with a warning.
        if _EXTRA_ATTRIBUTES_KEY not in config:
            tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
            return None
        extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
        result = spack.spec.Spec(
            str(spack.spec.parse_with_version_concrete(config["spec"])),
            external_modules=config.get("modules"),
        )
        result.extra_attributes = extra_attributes
        return result

    @staticmethod
    def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
        spec = spack.spec.parse_with_version_concrete(spec)
        extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
        if extra_attributes is None:
            return None

        paths = CompilerConfigFactory._extract_compiler_paths(spec)
        if paths is None:
            return None

        compiler_spec = spack.spec.CompilerSpec(
            package_name_to_compiler_name.get(spec.name, spec.name), spec.version
        )

        operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)

        compiler_entry = {
            "compiler": {
                "spec": str(compiler_spec),
                "paths": paths,
                "flags": extra_attributes.get("flags", {}),
                "operating_system": str(operating_system),
                "target": str(target.family),
                "modules": getattr(spec, "external_modules", []),
                "environment": extra_attributes.get("environment", {}),
                "extra_rpaths": extra_attributes.get("extra_rpaths", []),
                "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
            }
        }
        return compiler_entry

    @staticmethod
    def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
        err_header = f"The external spec '{spec}' cannot be used as a compiler"
        extra_attributes = spec.extra_attributes
        # If I have 'extra_attributes' warn if 'compilers' is missing,
        # or we don't have a C compiler
        if _COMPILERS_KEY not in extra_attributes:
            warnings.warn(
                f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
            )
            return None
        attribute_compilers = extra_attributes[_COMPILERS_KEY]

        if _C_KEY not in attribute_compilers:
            warnings.warn(
                f"{err_header}: missing the C compiler path under "
                f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
            )
            return None
        c_compiler = attribute_compilers[_C_KEY]

        # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
        if _CXX_KEY not in attribute_compilers:
            tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")

        if _FORTRAN_KEY not in attribute_compilers:
            tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")

        # compilers format has cc/fc/f77, externals format has "c/fortran"
        return {
            "cc": c_compiler,
            "cxx": attribute_compilers.get(_CXX_KEY, None),
            "fc": attribute_compilers.get(_FORTRAN_KEY, None),
            "f77": attribute_compilers.get(_FORTRAN_KEY, None),
        }

    @staticmethod
    def _extract_os_and_target(spec: "spack.spec.Spec"):
        if not spec.architecture:
            host_platform = spack.platforms.host()
            operating_system = host_platform.operating_system("default_os")
            target = host_platform.target("default_target")
        else:
            target = spec.architecture.target
            if not target:
                target = spack.platforms.host().target("default_target")

            operating_system = spec.os
            if not operating_system:
                host_platform = spack.platforms.host()
                operating_system = host_platform.operating_system("default_os")
        return operating_system, target


class InvalidCompilerConfigurationError(spack.error.SpackError):
    def __init__(self, compiler_spec):
        super().__init__(
            f'Invalid configuration for [compiler "{compiler_spec}"]: ',
            f"Compiler configuration must contain entries for "
            f"all compilers: {spack.compiler.PATH_INSTANCE_VARS}",
        )


class UnknownCompilerError(spack.error.SpackError):
    def __init__(self, compiler_name):
        super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))


class NoCompilerForSpecError(spack.error.SpackError):
    def __init__(self, compiler_spec, target):
        super().__init__(
            "No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
        )
@@ -1,212 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import enum
import typing
from typing import Dict, List

from llnl.util import lang

from .libraries import CompilerPropertyDetector

if typing.TYPE_CHECKING:
    import spack.spec


class Languages(enum.Enum):
    C = "c"
    CXX = "cxx"
    FORTRAN = "fortran"


class CompilerAdaptor:
    def __init__(
        self, compiled_spec: "spack.spec.Spec", compilers: Dict[Languages, "spack.spec.Spec"]
    ) -> None:
        if not compilers:
            raise AttributeError(f"{compiled_spec} has no 'compiler' attribute")

        self.compilers = compilers
        self.compiled_spec = compiled_spec

    def _lang_exists_or_raise(self, name: str, *, lang: Languages) -> None:
        if lang not in self.compilers:
            raise AttributeError(
                f"'{self.compiled_spec}' has no {lang.value} compiler, so the "
                f"'{name}' property cannot be retrieved"
            )

    def _maybe_return_attribute(self, name: str, *, lang: Languages) -> str:
        self._lang_exists_or_raise(name, lang=lang)
        return getattr(self.compilers[lang].package, name)

    @property
    def cc_rpath_arg(self) -> str:
        self._lang_exists_or_raise("cc_rpath_arg", lang=Languages.C)
        return self.compilers[Languages.C].package.rpath_arg

    @property
    def cxx_rpath_arg(self) -> str:
        self._lang_exists_or_raise("cxx_rpath_arg", lang=Languages.CXX)
        return self.compilers[Languages.CXX].package.rpath_arg

    @property
    def fc_rpath_arg(self) -> str:
        self._lang_exists_or_raise("fc_rpath_arg", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.rpath_arg

    @property
    def f77_rpath_arg(self) -> str:
        self._lang_exists_or_raise("f77_rpath_arg", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.rpath_arg

    @property
    def linker_arg(self) -> str:
        return self._maybe_return_attribute("linker_arg", lang=Languages.C)

    @property
    def name(self):
        return next(iter(self.compilers.values())).name

    @property
    def version(self):
        return next(iter(self.compilers.values())).version

    def implicit_rpaths(self) -> List[str]:
        result, seen = [], set()
        for compiler in self.compilers.values():
            if compiler in seen:
                continue
            seen.add(compiler)
            result.extend(CompilerPropertyDetector(compiler).implicit_rpaths())
        return result

    @property
    def openmp_flag(self) -> str:
        return next(iter(self.compilers.values())).package.openmp_flag

    @property
    def cxx98_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="98"
        )

    @property
    def cxx11_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="11"
        )

    @property
    def cxx14_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="14"
        )

    @property
    def cxx17_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="17"
        )

    @property
    def cxx20_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="20"
        )

    @property
    def cxx23_flag(self) -> str:
        return self.compilers[Languages.CXX].package.standard_flag(
            language=Languages.CXX.value, standard="23"
        )

    @property
    def c99_flag(self) -> str:
        return self.compilers[Languages.C].package.standard_flag(
            language=Languages.C.value, standard="99"
        )

    @property
    def c11_flag(self) -> str:
        return self.compilers[Languages.C].package.standard_flag(
            language=Languages.C.value, standard="11"
        )

    @property
    def c17_flag(self) -> str:
        return self.compilers[Languages.C].package.standard_flag(
            language=Languages.C.value, standard="17"
        )

    @property
    def c23_flag(self) -> str:
        return self.compilers[Languages.C].package.standard_flag(
            language=Languages.C.value, standard="23"
        )

    @property
    def cc_pic_flag(self) -> str:
        self._lang_exists_or_raise("cc_pic_flag", lang=Languages.C)
        return self.compilers[Languages.C].package.pic_flag

    @property
    def cxx_pic_flag(self) -> str:
        self._lang_exists_or_raise("cxx_pic_flag", lang=Languages.CXX)
        return self.compilers[Languages.CXX].package.pic_flag

    @property
    def fc_pic_flag(self) -> str:
        self._lang_exists_or_raise("fc_pic_flag", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.pic_flag

    @property
    def f77_pic_flag(self) -> str:
        self._lang_exists_or_raise("f77_pic_flag", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.pic_flag

    @property
    def prefix(self) -> str:
        return next(iter(self.compilers.values())).prefix

    @property
    def extra_rpaths(self) -> List[str]:
        compiler = next(iter(self.compilers.values()))
        return getattr(compiler, "extra_attributes", {}).get("extra_rpaths", [])

    @property
    def cc(self):
        return self._maybe_return_attribute("cc", lang=Languages.C)

    @property
    def cxx(self):
        return self._maybe_return_attribute("cxx", lang=Languages.CXX)

    @property
    def fc(self):
        self._lang_exists_or_raise("fc", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.fortran

    @property
    def f77(self):
        self._lang_exists_or_raise("f77", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN].package.fortran


class DeprecatedCompiler(lang.DeprecatedProperty):
    def __init__(self) -> None:
        super().__init__(name="compiler")

    def factory(self, instance, owner) -> CompilerAdaptor:
        spec = instance.spec
        if not spec.concrete:
            raise ValueError("Can only get a compiler for a concrete package.")

        compilers = {}
        for language in Languages:
            deps = spec.dependencies(virtuals=[language.value])
            if deps:
                compilers[language] = deps[0]

        return CompilerAdaptor(instance, compilers)
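The adaptor above answers every legacy `self.compiler.X` query by dispatching to whichever dependency node provides the relevant language. A minimal, self-contained sketch of that dispatch pattern (toy classes and names for illustration only, not Spack's actual API):

import enum
from typing import Dict


class Languages(enum.Enum):
    C = "c"
    CXX = "cxx"
    FORTRAN = "fortran"


class ToyAdaptor:
    """Dispatches per-language queries to whichever node provides that language."""

    def __init__(self, compilers: Dict[Languages, str]) -> None:
        self.compilers = compilers

    def _lang_exists_or_raise(self, name: str, lang: Languages) -> None:
        if lang not in self.compilers:
            raise AttributeError(f"'{name}' requires a {lang.value} compiler")

    @property
    def fc(self) -> str:
        self._lang_exists_or_raise("fc", lang=Languages.FORTRAN)
        return self.compilers[Languages.FORTRAN]


adaptor = ToyAdaptor({Languages.C: "gcc", Languages.CXX: "g++"})
print(adaptor.compilers[Languages.C])  # -> gcc
# adaptor.fc would raise AttributeError: no fortran provider was attached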
120 lib/spack/spack/compilers/aocc.py Normal file
@@ -0,0 +1,120 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re

import llnl.util.lang

from spack.compiler import Compiler
from spack.version import ver


class Aocc(Compiler):
    version_argument = "--version"

    @property
    def debug_flags(self):
        return [
            "-gcodeview",
            "-gdwarf-2",
            "-gdwarf-3",
            "-gdwarf-4",
            "-gdwarf-5",
            "-gline-tables-only",
            "-gmodules",
            "-g",
        ]

    @property
    def opt_flags(self):
        return ["-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os", "-Oz", "-Og", "-O", "-O4"]

    @property
    def link_paths(self):
        link_paths = {
            "cc": os.path.join("aocc", "clang"),
            "cxx": os.path.join("aocc", "clang++"),
            "f77": os.path.join("aocc", "flang"),
            "fc": os.path.join("aocc", "flang"),
        }

        return link_paths

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def openmp_flag(self):
        return "-fopenmp"

    @property
    def cxx11_flag(self):
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        return "-std=c++14"

    @property
    def cxx17_flag(self):
        return "-std=c++17"

    @property
    def c99_flag(self):
        return "-std=c99"

    @property
    def c11_flag(self):
        return "-std=c11"

    @property
    def cc_pic_flag(self):
        return "-fPIC"

    @property
    def cxx_pic_flag(self):
        return "-fPIC"

    @property
    def f77_pic_flag(self):
        return "-fPIC"

    @property
    def fc_pic_flag(self):
        return "-fPIC"

    required_libs = ["libclang"]

    @classmethod
    @llnl.util.lang.memoized
    def extract_version_from_output(cls, output):
        match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
        if match:
            return ".".join(match.groups())
        return "unknown"

    @property
    def stdcxx_libs(self):
        return ("-lstdc++",)

    @property
    def cflags(self):
        return self._handle_default_flag_addtions()

    @property
    def cxxflags(self):
        return self._handle_default_flag_addtions()

    @property
    def fflags(self):
        return self._handle_default_flag_addtions()

    def _handle_default_flag_addtions(self):
        # This is a known issue for AOCC 3.0 see:
        # https://developer.amd.com/wp-content/resources/AOCC-3.0-Install-Guide.pdf
        if self.real_version.satisfies(ver("3.0.0")):
            return "-Wno-unused-command-line-argument " "-mllvm -eliminate-similar-expr=false"
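As a quick sanity check of the AOCC version regex above, here it is run standalone against an illustrative `--version` banner (the sample string is made up for demonstration, not captured compiler output):

import re

sample = "AMD clang version 12.0.0 (CLANG: AOCC_3.1.0-Build#126 2021_06_07)"
match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", sample)
print(".".join(match.groups()) if match else "unknown")  # -> 3.1.0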
116 lib/spack/spack/compilers/apple_clang.py Normal file
@@ -0,0 +1,116 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re

import llnl.util.lang

import spack.compiler
import spack.compilers.clang
from spack.version import Version


class AppleClang(spack.compilers.clang.Clang):
    openmp_flag = "-Xpreprocessor -fopenmp"

    @classmethod
    @llnl.util.lang.memoized
    def extract_version_from_output(cls, output):
        ver = "unknown"
        match = re.search(
            # Apple's LLVM compiler has its own versions, so suffix them.
            r"^Apple (?:LLVM|clang) version ([^ )]+)",
            output,
            # Multi-line, since 'Apple clang' may not be on the first line
            # in particular, when run as gcc, it seems to output
            # "Configured with: --prefix=..." as the first line
            re.M,
        )
        if match:
            ver = match.group(match.lastindex)
        return ver

    # C++ flags based on CMake Modules/Compiler/AppleClang-CXX.cmake

    @property
    def cxx11_flag(self):
        # Spack's AppleClang detection only valid from Xcode >= 4.6
        if self.real_version < Version("4.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0"
            )
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        if self.real_version < Version("5.1"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1"
            )
        elif self.real_version < Version("6.1"):
            return "-std=c++1y"

        return "-std=c++14"

    @property
    def cxx17_flag(self):
        if self.real_version < Version("6.1"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1"
            )
        elif self.real_version < Version("10.0"):
            return "-std=c++1z"
        return "-std=c++17"

    @property
    def cxx20_flag(self):
        if self.real_version < Version("10.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++20 standard", "cxx20_flag", "Xcode < 10.0"
            )
        elif self.real_version < Version("13.0"):
            return "-std=c++2a"
        return "-std=c++20"

    @property
    def cxx23_flag(self):
        if self.real_version < Version("13.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++23 standard", "cxx23_flag", "Xcode < 13.0"
            )
        return "-std=c++2b"

    # C flags based on CMake Modules/Compiler/AppleClang-C.cmake

    @property
    def c99_flag(self):
        if self.real_version < Version("4.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C99 standard", "c99_flag", "< 4.0"
            )
        return "-std=c99"

    @property
    def c11_flag(self):
        if self.real_version < Version("4.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C11 standard", "c11_flag", "< 4.0"
            )
        return "-std=c11"

    @property
    def c17_flag(self):
        if self.real_version < Version("11.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C17 standard", "c17_flag", "< 11.0"
            )
        return "-std=c17"

    @property
    def c23_flag(self):
        if self.real_version < Version("11.0.3"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C23 standard", "c23_flag", "< 11.0.3"
            )
        return "-std=c2x"
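The multi-line `re.M` flag above matters because the "Apple clang" banner is not always on the first line of output. A standalone check against an illustrative banner (sample text invented for demonstration):

import re

sample = """Configured with: --prefix=/Applications/Xcode.app/Contents/Developer/usr
Apple clang version 15.0.0 (clang-1500.0.40.1)
Target: arm64-apple-darwin23.0.0"""
match = re.search(r"^Apple (?:LLVM|clang) version ([^ )]+)", sample, re.M)
print(match.group(1) if match else "unknown")  # -> 15.0.0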
80 lib/spack/spack/compilers/arm.py Normal file
@@ -0,0 +1,80 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import spack.compiler


class Arm(spack.compiler.Compiler):
    # Named wrapper links within lib/spack/env
    link_paths = {
        "cc": os.path.join("arm", "armclang"),
        "cxx": os.path.join("arm", "armclang++"),
        "f77": os.path.join("arm", "armflang"),
        "fc": os.path.join("arm", "armflang"),
    }

    # The ``--version`` option seems to be the most consistent one for
    # arm compilers. Output looks like this:
    #
    # $ arm<c/f>lang --version
    # Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)
    # Target: aarch64--linux-gnu
    # Thread model: posix
    # InstalledDir:
    # /opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin
    version_argument = "--version"
    version_regex = r"Arm C\/C\+\+\/Fortran Compiler version ([\d\.]+) "

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def opt_flags(self):
        return ["-O", "-O0", "-O1", "-O2", "-O3", "-Ofast"]

    @property
    def openmp_flag(self):
        return "-fopenmp"

    @property
    def cxx11_flag(self):
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        return "-std=c++14"

    @property
    def cxx17_flag(self):
        return "-std=c++1z"

    @property
    def c99_flag(self):
        return "-std=c99"

    @property
    def c11_flag(self):
        return "-std=c11"

    @property
    def cc_pic_flag(self):
        return "-fPIC"

    @property
    def cxx_pic_flag(self):
        return "-fPIC"

    @property
    def f77_pic_flag(self):
        return "-fPIC"

    @property
    def fc_pic_flag(self):
        return "-fPIC"

    required_libs = ["libclang", "libflang"]
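The `version_regex` above can be checked directly against the sample banner quoted in the class comment:

import re

banner = "Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)"
match = re.search(r"Arm C\/C\+\+\/Fortran Compiler version ([\d\.]+) ", banner)
print(match.group(1) if match else None)  # -> 19.0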
128 lib/spack/spack/compilers/cce.py Normal file
@@ -0,0 +1,128 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

from spack.compiler import Compiler, UnsupportedCompilerFlag
from spack.version import Version


class Cce(Compiler):
    """Cray compiler environment compiler."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # For old cray compilers on module based systems we replace
        # ``version_argument`` with the old value. Cannot be a property
        # as the new value is used in classmethods for path-based detection
        if not self.is_clang_based:
            self.version_argument = "-V"

    # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
    suffixes = [r"-mp-\d\.\d"]

    @property
    def link_paths(self):
        if any("PrgEnv-cray" in m for m in self.modules):
            # Old module-based interface to cray compilers
            return {
                "cc": os.path.join("cce", "cc"),
                "cxx": os.path.join("case-insensitive", "CC"),
                "f77": os.path.join("cce", "ftn"),
                "fc": os.path.join("cce", "ftn"),
            }

        return {
            "cc": os.path.join("cce", "craycc"),
            "cxx": os.path.join("cce", "case-insensitive", "crayCC"),
            "f77": os.path.join("cce", "crayftn"),
            "fc": os.path.join("cce", "crayftn"),
        }

    @property
    def is_clang_based(self):
        version = self._real_version or self.version
        return version >= Version("9.0") and "classic" not in str(version)

    version_argument = "--version"
    version_regex = r"[Cc]ray (?:clang|C :|C\+\+ :|Fortran :) [Vv]ersion.*?(\d+(\.\d+)+)"

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def debug_flags(self):
        return ["-g", "-G0", "-G1", "-G2", "-Gfast"]

    @property
    def openmp_flag(self):
        if self.is_clang_based:
            return "-fopenmp"
        return "-h omp"

    @property
    def cxx11_flag(self):
        if self.is_clang_based:
            return "-std=c++11"
        return "-h std=c++11"

    @property
    def cxx14_flag(self):
        if self.is_clang_based:
            return "-std=c++14"
        return "-h std=c++14"

    @property
    def cxx17_flag(self):
        if self.is_clang_based:
            return "-std=c++17"

    @property
    def c99_flag(self):
        if self.is_clang_based:
            return "-std=c99"
        elif self.real_version >= Version("8.4"):
            return "-h std=c99,noconform,gnu"
        elif self.real_version >= Version("8.1"):
            return "-h c99,noconform,gnu"
        raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag", "< 8.1")

    @property
    def c11_flag(self):
        if self.is_clang_based:
            return "-std=c11"
        elif self.real_version >= Version("8.5"):
            return "-h std=c11,noconform,gnu"
        raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 8.5")

    @property
    def cc_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

    @property
    def cxx_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

    @property
    def f77_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

    @property
    def fc_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

    @property
    def stdcxx_libs(self):
        # Cray compiler wrappers link to the standard C++ library
        # without additional flags.
        return ()
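The CCE `version_regex` has to cover both the clang-based and the classic frontends; a standalone check against two illustrative banners (both invented for demonstration):

import re

regex = r"[Cc]ray (?:clang|C :|C\+\+ :|Fortran :) [Vv]ersion.*?(\d+(\.\d+)+)"
for banner in (
    "Cray clang version 14.0.0 (abcdef0)",  # clang-based frontend
    "Cray C : Version 8.7.9",  # classic frontend
):
    match = re.search(regex, banner)
    print(match.group(1) if match else None)  # -> 14.0.0, then 8.7.9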
194 lib/spack/spack/compilers/clang.py Normal file
@@ -0,0 +1,194 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re

import llnl.util.lang

from spack.compiler import Compiler, UnsupportedCompilerFlag
from spack.version import Version

#: compiler symlink mappings for mixed f77 compilers
f77_mapping = [
    ("gfortran", os.path.join("clang", "gfortran")),
    ("xlf_r", os.path.join("xl_r", "xlf_r")),
    ("xlf", os.path.join("xl", "xlf")),
    ("pgfortran", os.path.join("pgi", "pgfortran")),
    ("ifort", os.path.join("intel", "ifort")),
]

#: compiler symlink mappings for mixed f90/fc compilers
fc_mapping = [
    ("gfortran", os.path.join("clang", "gfortran")),
    ("xlf90_r", os.path.join("xl_r", "xlf90_r")),
    ("xlf90", os.path.join("xl", "xlf90")),
    ("pgfortran", os.path.join("pgi", "pgfortran")),
    ("ifort", os.path.join("intel", "ifort")),
]


class Clang(Compiler):
    version_argument = "--version"

    @property
    def debug_flags(self):
        return [
            "-gcodeview",
            "-gdwarf-2",
            "-gdwarf-3",
            "-gdwarf-4",
            "-gdwarf-5",
            "-gline-tables-only",
            "-gmodules",
            "-g",
        ]

    @property
    def opt_flags(self):
        return ["-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os", "-Oz", "-Og", "-O", "-O4"]

    # Clang has support for using different fortran compilers with the
    # clang executable.
    @property
    def link_paths(self):
        # clang links are always the same
        link_paths = {
            "cc": os.path.join("clang", "clang"),
            "cxx": os.path.join("clang", "clang++"),
        }

        # fortran links need to look at the actual compiler names from
        # compilers.yaml to figure out which named symlink to use
        for compiler_name, link_path in f77_mapping:
            if self.f77 and compiler_name in self.f77:
                link_paths["f77"] = link_path
                break
        else:
            link_paths["f77"] = os.path.join("clang", "flang")

        for compiler_name, link_path in fc_mapping:
            if self.fc and compiler_name in self.fc:
                link_paths["fc"] = link_path
                break
        else:
            link_paths["fc"] = os.path.join("clang", "flang")

        return link_paths

    @property
    def verbose_flag(self):
        return "-v"

    openmp_flag = "-fopenmp"

    # C++ flags based on CMake Modules/Compiler/Clang.cmake

    @property
    def cxx11_flag(self):
        if self.real_version < Version("3.3"):
            raise UnsupportedCompilerFlag(self, "the C++11 standard", "cxx11_flag", "< 3.3")
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        if self.real_version < Version("3.4"):
            raise UnsupportedCompilerFlag(self, "the C++14 standard", "cxx14_flag", "< 3.4")
        elif self.real_version < Version("3.5"):
            return "-std=c++1y"

        return "-std=c++14"

    @property
    def cxx17_flag(self):
        if self.real_version < Version("3.5"):
            raise UnsupportedCompilerFlag(self, "the C++17 standard", "cxx17_flag", "< 3.5")
        elif self.real_version < Version("5.0"):
            return "-std=c++1z"

        return "-std=c++17"

    @property
    def cxx20_flag(self):
        if self.real_version < Version("5.0"):
            raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
        elif self.real_version < Version("11.0"):
            return "-std=c++2a"
        else:
            return "-std=c++20"

    @property
    def cxx23_flag(self):
        if self.real_version < Version("12.0"):
            raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
        elif self.real_version < Version("17.0"):
            return "-std=c++2b"
        else:
            return "-std=c++23"

    @property
    def c99_flag(self):
        return "-std=c99"

    @property
    def c11_flag(self):
        if self.real_version < Version("3.0"):
            raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 3.0")
        if self.real_version < Version("3.1"):
            return "-std=c1x"
        return "-std=c11"

    @property
    def c17_flag(self):
        if self.real_version < Version("6.0"):
            raise UnsupportedCompilerFlag(self, "the C17 standard", "c17_flag", "< 6.0")
        return "-std=c17"

    @property
    def c23_flag(self):
        if self.real_version < Version("9.0"):
            raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
        elif self.real_version < Version("18.0"):
            return "-std=c2x"
        else:
            return "-std=c23"

    @property
    def cc_pic_flag(self):
        return "-fPIC"

    @property
    def cxx_pic_flag(self):
        return "-fPIC"

    @property
    def f77_pic_flag(self):
        return "-fPIC"

    @property
    def fc_pic_flag(self):
        return "-fPIC"

    required_libs = ["libclang"]

    @classmethod
    @llnl.util.lang.memoized
    def extract_version_from_output(cls, output):
        ver = "unknown"
        if ("Apple" in output) or ("AMD" in output):
            return ver

        match = re.search(
            # Normal clang compiler versions are left as-is
            r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
            # Don't include hyphenated patch numbers in the version
            # (see https://github.com/spack/spack/pull/14365 for details)
            r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
            r"(?:clang|flang-new) version ([^ )\n]+)",
            output,
        )
        if match:
            ver = match.group(match.lastindex)
        return ver
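The three-alternative regex above exists mostly to strip "-svn" and hyphenated vendor suffixes from the version, per the PR linked in the comment. A standalone check against two illustrative banners (sample strings, not captured output):

import re

regex = (
    r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
    r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
    r"(?:clang|flang-new) version ([^ )\n]+)"
)
for banner in ("clang version 17.0.6", "clang version 16.0.6-1ubuntu1"):
    match = re.search(regex, banner)
    print(match.group(match.lastindex))  # -> 17.0.6, then 16.0.6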
@@ -1,427 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains functions related to finding compilers on the system,
and configuring Spack to use multiple compilers.
"""
import os
import re
import sys
import warnings
from typing import Any, Dict, List, Optional, Tuple

import archspec.cpu

import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty

import spack.config
import spack.detection
import spack.error
import spack.platforms
import spack.repo
import spack.spec
from spack.operating_systems import windows_os
from spack.util.environment import get_path

package_name_to_compiler_name = {
    "llvm": "clang",
    "intel-oneapi-compilers": "oneapi",
    "llvm-amdgpu": "rocmcc",
    "intel-oneapi-compilers-classic": "intel",
    "acfl": "arm",
}


#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"


def compiler_config_files():
    config_files = []
    configuration = spack.config.CONFIG
    for scope in configuration.writable_scopes:
        name = scope.name

        from_packages_yaml = CompilerFactory.from_packages_yaml(configuration, scope=name)
        if from_packages_yaml:
            config_files.append(configuration.get_config_filename(name, "packages"))

        compiler_config = configuration.get("compilers", scope=name)
        if compiler_config:
            config_files.append(configuration.get_config_filename(name, "compilers"))

    return config_files


def add_compiler_to_config(new_compilers, *, scope=None) -> None:
    """Add a Compiler object to the configuration, at the required scope."""
    # FIXME (compiler as nodes): still needed to read Cray manifest
    by_name: Dict[str, List["spack.spec.Spec"]] = {}
    for x in new_compilers:
        by_name.setdefault(x.name, []).append(x)

    spack.detection.update_configuration(by_name, buildable=True, scope=scope)


def find_compilers(
    path_hints: Optional[List[str]] = None,
    *,
    scope: Optional[str] = None,
    max_workers: Optional[int] = None,
) -> List["spack.spec.Spec"]:
    """Searches for compilers in the paths given as argument. If any new compiler is found, the
    configuration is updated, and the list of new compiler objects is returned.

    Args:
        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
            environment variable will be used if the value is None
        scope: configuration scope to modify
        max_workers: number of processes used to search for compilers
    """
    if path_hints is None:
        path_hints = get_path("PATH")
    default_paths = fs.search_paths_for_executables(*path_hints)
    if sys.platform == "win32":
        default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
    compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)

    detected_packages = spack.detection.by_path(
        compiler_pkgs, path_hints=default_paths, max_workers=max_workers
    )

    new_compilers = spack.detection.update_configuration(
        detected_packages, buildable=True, scope=scope
    )
    return new_compilers


def select_new_compilers(
    candidates: List["spack.spec.Spec"], *, scope: Optional[str] = None
) -> List["spack.spec.Spec"]:
    """Given a list of compilers, remove those that are already defined in
    the configuration.
    """
    compilers_in_config = all_compilers_from(configuration=spack.config.CONFIG, scope=scope)
    return [c for c in candidates if c not in compilers_in_config]


def supported_compilers() -> List[str]:
    """Returns all the currently supported compiler packages"""
    return sorted(spack.repo.PATH.packages_with_tags(COMPILER_TAG))


def all_compilers(
    scope: Optional[str] = None, init_config: bool = True
) -> List["spack.spec.Spec"]:
    """Returns all the compilers from the current global configuration.

    Args:
        scope: configuration scope from which to extract the compilers. If None, the merged
            configuration is used.
        init_config: if True, search for compilers if none is found in configuration.
    """
    compilers = all_compilers_from(configuration=spack.config.CONFIG, scope=scope)

    if not compilers and init_config:
        find_compilers(scope=scope)
        compilers = all_compilers_from(configuration=spack.config.CONFIG, scope=scope)

    return compilers


def all_compilers_from(
    configuration: "spack.config.ConfigurationType", scope: Optional[str] = None
) -> List["spack.spec.Spec"]:
    """Returns all the compilers from the given configuration.

    Args:
        configuration: configuration to be queried
        scope: configuration scope from which to extract the compilers. If None, the merged
            configuration is used.
    """
    compilers = CompilerFactory.from_packages_yaml(configuration, scope=scope)

    if os.environ.get("SPACK_EXPERIMENTAL_DEPRECATE_COMPILERS_YAML") != "1":
        legacy_compilers = CompilerFactory.from_compilers_yaml(configuration, scope=scope)
        if legacy_compilers:
            # FIXME (compiler as nodes): write how to update the file. Maybe an ad-hoc command
            warnings.warn(
                "Some compilers are still defined in 'compilers.yaml', which has been deprecated "
                "in v0.23. Those configuration files will be ignored from Spack v0.25.\n"
            )
            for legacy in legacy_compilers:
                if not any(c.satisfies(f"{legacy.name}@{legacy.versions}") for c in compilers):
                    compilers.append(legacy)

    return compilers


class CompilerRemover:
    """Removes compilers from configuration."""

    def __init__(self, configuration: "spack.config.ConfigurationType") -> None:
        self.configuration = configuration
        self.marked_packages_yaml: List[Tuple[str, Any]] = []
        self.marked_compilers_yaml: List[Tuple[str, Any]] = []

    def mark_compilers(
        self, *, match: str, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Marks compilers to be removed in configuration, and returns a corresponding list
        of specs.

        Args:
            match: constraint that the compiler must match to be removed.
            scope: scope where to remove the compiler. If None, all writable scopes are checked.
        """
        self.marked_packages_yaml = []
        self.marked_compilers_yaml = []
        candidate_scopes = [scope]
        if scope is None:
            candidate_scopes = [x.name for x in self.configuration.writable_scopes]

        all_removals = self._mark_in_packages_yaml(match, candidate_scopes)
        all_removals.extend(self._mark_in_compilers_yaml(match, candidate_scopes))

        return all_removals

    def _mark_in_packages_yaml(self, match, candidate_scopes):
        compiler_package_names = supported_compilers()
        all_removals = []
        for current_scope in candidate_scopes:
            packages_yaml = self.configuration.get("packages", scope=current_scope)
            if not packages_yaml:
                continue

            removed_from_scope = []
            for name, entry in packages_yaml.items():
                if name not in compiler_package_names:
                    continue

                externals_config = entry.get("externals", None)
                if not externals_config:
                    continue

                def _partition_match(external_yaml):
                    s = CompilerFactory.from_external_yaml(external_yaml)
                    return not s.satisfies(match)

                to_keep, to_remove = llnl.util.lang.stable_partition(
                    externals_config, _partition_match
                )
                if not to_remove:
                    continue

                removed_from_scope.extend(to_remove)
                entry["externals"] = to_keep

            if not removed_from_scope:
                continue

            self.marked_packages_yaml.append((current_scope, packages_yaml))
            all_removals.extend(
                [CompilerFactory.from_external_yaml(x) for x in removed_from_scope]
            )
        return all_removals

    def _mark_in_compilers_yaml(self, match, candidate_scopes):
        if os.environ.get("SPACK_EXPERIMENTAL_DEPRECATE_COMPILERS_YAML") == "1":
            return []

        all_removals = []
        for current_scope in candidate_scopes:
            compilers_yaml = self.configuration.get("compilers", scope=current_scope)
            if not compilers_yaml:
                continue

            def _partition_match(entry):
                external_specs = CompilerFactory.from_legacy_yaml(entry["compiler"])
                return not any(x.satisfies(match) for x in external_specs)

            to_keep, to_remove = llnl.util.lang.stable_partition(compilers_yaml, _partition_match)
            if not to_remove:
                continue

            compilers_yaml[:] = to_keep
            self.marked_compilers_yaml.append((current_scope, compilers_yaml))
            for entry in to_remove:
                all_removals.extend(CompilerFactory.from_legacy_yaml(entry["compiler"]))

        return all_removals

    def flush(self):
        """Removes from configuration the specs that have been marked by the previous call
        of ``mark_compilers``.
        """
        for scope, packages_yaml in self.marked_packages_yaml:
            self.configuration.set("packages", packages_yaml, scope=scope)

        for scope, compilers_yaml in self.marked_compilers_yaml:
            self.configuration.set("compilers", compilers_yaml, scope=scope)


def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
    """This gets all compilers that satisfy the supplied CompilerSpec.
    Returns an empty list if none are found.
    """
    # FIXME (compiler as nodes): to be removed, or reimplemented
    raise NotImplementedError("still to be implemented")


def compilers_for_arch(
    arch_spec: "spack.spec.ArchSpec", *, scope: Optional[str] = None
) -> List["spack.spec.Spec"]:
    """Returns the compilers that can be used on the input architecture"""
    compilers = all_compilers_from(spack.config.CONFIG, scope=scope)
    query = f"platform={arch_spec.platform} target=:{arch_spec.target}"
    return [x for x in compilers if x.satisfies(query)]


def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class."""
    # FIXME (compiler as nodes): to be removed, or reimplemented
    raise NotImplementedError("still to be implemented")


_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"


def name_os_target(spec: "spack.spec.Spec") -> Tuple[str, str, str]:
    if not spec.architecture:
        host_platform = spack.platforms.host()
        operating_system = host_platform.operating_system("default_os")
        target = host_platform.target("default_target")
    else:
        target = spec.architecture.target
        if not target:
            target = spack.platforms.host().target("default_target")

        operating_system = spec.os
        if not operating_system:
            host_platform = spack.platforms.host()
            operating_system = host_platform.operating_system("default_os")

    return spec.name, str(operating_system), str(target)


class CompilerFactory:
    """Class aggregating all ways of constructing a list of compiler specs from config entries."""

    _PACKAGES_YAML_CACHE: Dict[str, Optional["spack.spec.Spec"]] = {}
    _COMPILERS_YAML_CACHE: Dict[str, List["spack.spec.Spec"]] = {}
    _GENERIC_TARGET = None

    @staticmethod
    def from_packages_yaml(
        configuration: "spack.config.ConfigurationType", *, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Returns the compiler specs defined in the "packages" section of the configuration"""
        compilers = []
        compiler_package_names = supported_compilers()
        packages_yaml = configuration.get("packages", scope=scope)
        for name, entry in packages_yaml.items():
            if name not in compiler_package_names:
                continue

            externals_config = entry.get("externals", None)
            if not externals_config:
                continue

            compiler_specs = []
            for current_external in externals_config:
                key = str(current_external)
                if key not in CompilerFactory._PACKAGES_YAML_CACHE:
                    CompilerFactory._PACKAGES_YAML_CACHE[key] = CompilerFactory.from_external_yaml(
                        current_external
                    )

                compiler = CompilerFactory._PACKAGES_YAML_CACHE[key]
                if compiler:
                    compiler_specs.append(compiler)

            compilers.extend(compiler_specs)
        return compilers

    @staticmethod
    def from_external_yaml(config: Dict[str, Any]) -> Optional["spack.spec.Spec"]:
        """Returns a compiler spec from an external definition from packages.yaml."""
        # Allow `@x.y.z` instead of `@=x.y.z`
        err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
        # If extra_attributes is not there I might not want to use this entry as a compiler,
        # therefore just leave a debug message, but don't be loud with a warning.
        if _EXTRA_ATTRIBUTES_KEY not in config:
            tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
            return None
        extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
        result = spack.spec.Spec(
            str(spack.spec.parse_with_version_concrete(config["spec"])),
            external_path=config.get("prefix"),
            external_modules=config.get("modules"),
        )
        result.extra_attributes = extra_attributes
        CompilerFactory._finalize_external_concretization(result)
        return result

    @staticmethod
    def _finalize_external_concretization(abstract_spec):
        if CompilerFactory._GENERIC_TARGET is None:
            CompilerFactory._GENERIC_TARGET = archspec.cpu.host().family

        if abstract_spec.architecture:
            abstract_spec.architecture.complete_with_defaults()
        else:
            abstract_spec.constrain(spack.spec.Spec.default_arch())
        abstract_spec.architecture.target = CompilerFactory._GENERIC_TARGET
        abstract_spec._finalize_concretization()

    @staticmethod
    def from_legacy_yaml(compiler_dict: Dict[str, Any]) -> List["spack.spec.Spec"]:
        """Returns a list of external specs, corresponding to a compiler entry
        from compilers.yaml.
        """
        from spack.detection.path import ExecutablesFinder

        # FIXME (compiler as nodes): should we look at targets too?
        result = []
        candidate_paths = [x for x in compiler_dict["paths"].values() if x is not None]
        finder = ExecutablesFinder()

        for pkg_name in spack.repo.PATH.packages_with_tags("compiler"):
            pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
            pattern = re.compile(r"|".join(finder.search_patterns(pkg=pkg_cls)))
            filtered_paths = [x for x in candidate_paths if pattern.search(os.path.basename(x))]
            detected = finder.detect_specs(pkg=pkg_cls, paths=filtered_paths)
            result.extend(detected)

        for item in result:
            CompilerFactory._finalize_external_concretization(item)

        return result

    @staticmethod
    def from_compilers_yaml(
        configuration: "spack.config.ConfigurationType", *, scope: Optional[str] = None
    ) -> List["spack.spec.Spec"]:
        """Returns the compiler specs defined in the "compilers" section of the configuration"""
        result: List["spack.spec.Spec"] = []
        for item in configuration.get("compilers", scope=scope):
            key = str(item)
            if key not in CompilerFactory._COMPILERS_YAML_CACHE:
                CompilerFactory._COMPILERS_YAML_CACHE[key] = CompilerFactory.from_legacy_yaml(
                    item["compiler"]
                )

            result.extend(CompilerFactory._COMPILERS_YAML_CACHE[key])
        return result


class UnknownCompilerError(spack.error.SpackError):
    def __init__(self, compiler_name):
        super().__init__(f"Spack doesn't support the requested compiler: {compiler_name}")
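Both `_mark_in_*` helpers lean on `llnl.util.lang.stable_partition`, which is assumed here to split a sequence by a predicate while preserving order. A minimal re-implementation, so the marking logic can be read without Spack installed:

from typing import Callable, Iterable, List, Tuple, TypeVar

T = TypeVar("T")


def stable_partition(seq: Iterable[T], pred: Callable[[T], bool]) -> Tuple[List[T], List[T]]:
    """Split seq into (items where pred is True, items where pred is False), keeping order."""
    true_items: List[T] = []
    false_items: List[T] = []
    for item in seq:
        (true_items if pred(item) else false_items).append(item)
    return true_items, false_items


# The predicate above returns True for entries to KEEP (those NOT matching the constraint):
to_keep, to_remove = stable_partition([1, 2, 3, 4], lambda x: x % 2 == 0)
print(to_keep, to_remove)  # -> [2, 4] [1, 3]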
@@ -1,19 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from ..error import SpackError


class CompilerAccessError(SpackError):
    def __init__(self, compiler, paths):
        super().__init__(
            f"Compiler '{compiler.spec}' has executables that are missing"
            f" or are not executable: {paths}"
        )


class UnsupportedCompilerFlag(SpackError):
    """Raised when a compiler does not support a flag type (e.g. a flag to enforce a
    language standard).
    """
79 lib/spack/spack/compilers/fj.py Normal file
@@ -0,0 +1,79 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import spack.compiler


class Fj(spack.compiler.Compiler):
    # Named wrapper links within build_env_path
    link_paths = {
        "cc": os.path.join("fj", "fcc"),
        "cxx": os.path.join("fj", "case-insensitive", "FCC"),
        "f77": os.path.join("fj", "frt"),
        "fc": os.path.join("fj", "frt"),
    }

    version_argument = "--version"
    version_regex = r"\((?:FCC|FRT)\) ([a-z\d.]+)"

    required_libs = ["libfj90i", "libfj90f", "libfjsrcinfo"]

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def debug_flags(self):
        return "-g"

    @property
    def opt_flags(self):
        return ["-O0", "-O1", "-O2", "-O3", "-Ofast"]

    @property
    def openmp_flag(self):
        return "-Kopenmp"

    @property
    def cxx98_flag(self):
        return "-std=c++98"

    @property
    def cxx11_flag(self):
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        return "-std=c++14"

    @property
    def cxx17_flag(self):
        return "-std=c++17"

    @property
    def c99_flag(self):
        return "-std=c99"

    @property
    def c11_flag(self):
        return "-std=c11"

    @property
    def cc_pic_flag(self):
        return "-KPIC"

    @property
    def cxx_pic_flag(self):
        return "-KPIC"

    @property
    def f77_pic_flag(self):
        return "-KPIC"

    @property
    def fc_pic_flag(self):
        return "-KPIC"
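The Fujitsu `version_regex` keys off the "(FCC)" or "(FRT)" marker in the banner; a standalone check against an illustrative banner (sample string, not captured output):

import re

banner = "fcc (FCC) 4.8.1 20230101"
match = re.search(r"\((?:FCC|FRT)\) ([a-z\d.]+)", banner)
print(match.group(1) if match else None)  # -> 4.8.1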
@@ -1,26 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import List, Tuple


def tokenize_flags(flags_values: str, propagate: bool = False) -> List[Tuple[str, bool]]:
    """Given a compiler flag specification as a string, this returns a list
    where the entries are the flags. For compiler options which set values
    using the syntax "-flag value", this function groups flags and their
    values together. Any token not preceded by a "-" is considered the
    value of a prior flag."""
    tokens = flags_values.split()
    if not tokens:
        return []
    flag = tokens[0]
    flags_with_propagation = []
    for token in tokens[1:]:
        if not token.startswith("-"):
            flag += " " + token
        else:
            flags_with_propagation.append((flag, propagate))
            flag = token
    flags_with_propagation.append((flag, propagate))
    return flags_with_propagation
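Since `tokenize_flags` is pure Python, its grouping behavior is easy to demonstrate on a hypothetical flag string:

print(tokenize_flags("-O2 -I /usr/include -g"))
# -> [('-O2', False), ('-I /usr/include', False), ('-g', False)]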
191 lib/spack/spack/compilers/gcc.py Normal file
@@ -0,0 +1,191 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

from llnl.util.filesystem import ancestor

import spack.compiler
import spack.compilers.apple_clang as apple_clang
import spack.util.executable
from spack.version import Version


class Gcc(spack.compiler.Compiler):
    # MacPorts builds gcc versions with prefixes and -mp-X or -mp-X.Y suffixes.
    # Homebrew and Linuxbrew may build gcc with -X, -X.Y suffixes.
    # Old compatibility versions may contain XY suffixes.
    suffixes = [r"-mp-\d+(?:\.\d+)?", r"-\d+(?:\.\d+)?", r"\d\d"]

    # Named wrapper links within build_env_path
    link_paths = {
        "cc": os.path.join("gcc", "gcc"),
        "cxx": os.path.join("gcc", "g++"),
        "f77": os.path.join("gcc", "gfortran"),
        "fc": os.path.join("gcc", "gfortran"),
    }

    @property
    def verbose_flag(self):
        return "-v"

    @property
    def debug_flags(self):
        return ["-g", "-gstabs+", "-gstabs", "-gxcoff+", "-gxcoff", "-gvms"]

    @property
    def opt_flags(self):
        return ["-O", "-O0", "-O1", "-O2", "-O3", "-Os", "-Ofast", "-Og"]

    @property
    def openmp_flag(self):
        return "-fopenmp"

    @property
    def cxx98_flag(self):
        if self.real_version < Version("6.0"):
            return ""
        else:
            return "-std=c++98"

    @property
    def cxx11_flag(self):
        if self.real_version < Version("4.3"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++11 standard", "cxx11_flag", "< 4.3"
            )
        elif self.real_version < Version("4.7"):
            return "-std=c++0x"
        else:
            return "-std=c++11"

    @property
    def cxx14_flag(self):
        if self.real_version < Version("4.8"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "< 4.8"
            )
        elif self.real_version < Version("4.9"):
            return "-std=c++1y"
        else:
            return "-std=c++14"

    @property
    def cxx17_flag(self):
        if self.real_version < Version("5.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "< 5.0"
            )
        elif self.real_version < Version("6.0"):
            return "-std=c++1z"
        else:
            return "-std=c++17"

    @property
    def cxx20_flag(self):
        if self.real_version < Version("8.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++20 standard", "cxx20_flag", "< 8.0"
            )
        elif self.real_version < Version("11.0"):
            return "-std=c++2a"
        else:
            return "-std=c++20"

    @property
    def cxx23_flag(self):
        if self.real_version < Version("11.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++23 standard", "cxx23_flag", "< 11.0"
            )
        elif self.real_version < Version("14.0"):
            return "-std=c++2b"
        else:
            return "-std=c++23"

    @property
    def c99_flag(self):
        if self.real_version < Version("4.5"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C99 standard", "c99_flag", "< 4.5"
            )
        return "-std=c99"

    @property
    def c11_flag(self):
        if self.real_version < Version("4.7"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C11 standard", "c11_flag", "< 4.7"
            )
        return "-std=c11"

    @property
    def cc_pic_flag(self):
        return "-fPIC"

    @property
    def cxx_pic_flag(self):
        return "-fPIC"

    @property
    def f77_pic_flag(self):
        return "-fPIC"

    @property
    def fc_pic_flag(self):
        return "-fPIC"

    required_libs = ["libgcc", "libgfortran"]

    @classmethod
    def default_version(cls, cc):
        """Older versions of gcc use the ``-dumpversion`` option.
        Output looks like this::

            4.4.7

        In GCC 7, this option was changed to only return the major
        version of the compiler::

            7

        A new ``-dumpfullversion`` option was added that gives us
        what we want::

            7.2.0
        """
        # Apple's gcc is actually apple clang, so skip it. Returning
        # "unknown" ensures this compiler is not detected by default.
        # Users can add it manually to compilers.yaml at their own risk.
        if apple_clang.AppleClang.default_version(cc) != "unknown":
            return "unknown"

        version = super(Gcc, cls).default_version(cc)
        if Version(version) >= Version("7"):
            output = spack.compiler.get_compiler_version_output(cc, "-dumpfullversion")
            version = cls.extract_version_from_output(output)
        return version

    @property
    def stdcxx_libs(self):
        return ("-lstdc++",)

    @property
    def prefix(self):
        # GCC reports its install prefix when running ``-print-search-dirs``
        # on the first line ``install: <prefix>``.
        cc = spack.util.executable.Executable(self.cc)
        with self.compiler_environment():
            gcc_output = cc("-print-search-dirs", output=str, error=str)

        for line in gcc_output.splitlines():
            if line.startswith("install:"):
                gcc_prefix = line.split(":")[1].strip()
                # Go from <prefix>/lib/gcc/<triplet>/<version>/ to <prefix>
                return ancestor(gcc_prefix, 4)

        raise RuntimeError(
            "could not find install prefix of GCC from output:\n\t{}".format(gcc_output)
        )
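The prefix logic above parses the `install:` line of `-print-search-dirs` and walks four directory levels up. A self-contained sketch of that arithmetic, with a stand-in for `llnl.util.filesystem.ancestor` and an illustrative output line (both are assumptions, not Spack's code):

import os


def ancestor(path: str, n: int) -> str:
    # Stand-in for llnl.util.filesystem.ancestor: walk n directory levels up.
    for _ in range(n):
        path = os.path.dirname(path.rstrip("/"))
    return path


line = "install: /usr/lib/gcc/x86_64-linux-gnu/12/"  # illustrative gcc output
prefix = ancestor(line.split(":")[1].strip(), 4)
print(prefix)  # -> /usr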
Some files were not shown because too many files have changed in this diff.