Compare commits

..

23 Commits

Author SHA1 Message Date
psakievich
2cae2d0f27 Update lib/spack/spack/test/cmd/build_env.py 2024-12-17 15:47:28 -07:00
psakievich
b68b5811b8 Update build_env.py
Co-authored-by: John W. Parent <45471568+johnwparent@users.noreply.github.com>
2024-12-16 20:27:52 -07:00
psakiev
3f88278d48 Update imports 2024-12-10 22:54:56 -07:00
psakiev
1d536ce143 Revert prompt changes for environment activation
I tried consolidating but I am not convinced it will work
correctly
2024-12-10 22:40:34 -07:00
psakiev
20d148e464 Style 2024-12-10 16:24:24 -07:00
Aaron Young
88b5a12e16 Add prompt support for fish. (#47609)
To avoid changing the fish_prompt function, create a SPACK_PROMPT
environment variable, which users can then manually incorporate into
their fish_prompt.fish function.
2024-11-14 16:14:44 -07:00
psakievich
f5fef81779 [@spackbot] updating style on behalf of psakievich 2024-11-14 04:53:17 +00:00
psakiev
8c6773f33e Add a cd test 2024-11-13 16:13:01 -07:00
psakiev
7a79fe88e2 Re-write test and update docs some more 2024-11-12 20:57:32 -07:00
psakiev
5cbbed42b4 Add docs 2024-11-12 18:12:21 -07:00
psakiev
d6b937d94b Style 2024-11-12 17:44:11 -07:00
psakiev
8eb4354b4b Add navigation to dive feature 2024-11-12 17:32:28 -07:00
psakiev
707a8daaea Compatibility 2024-11-12 16:20:39 -07:00
psakiev
ab5c2d5f7c Fixes 2024-11-12 16:09:16 -07:00
psakiev
4d49a658c8 Add completion 2024-11-12 15:28:34 -07:00
psakiev
973f59abbc Merge remote-tracking branch 'origin' into psakiev/dev-build 2024-11-12 13:18:16 -07:00
psakiev
4216a06cd8 Add build-env --dive 2024-11-12 13:16:59 -07:00
psakiev
2acf90f7b7 WIP unify 2024-11-11 17:06:04 -07:00
psakiev
8d5e71f66b Merge remote-tracking branch 'origin' into psakiev/dev-build 2024-11-11 16:46:55 -07:00
psakiev
9b3c200a07 Add prompt capability to dev_build drop-in 2024-11-11 16:44:48 -07:00
Philip Sakievich
43604f639e Update help 2024-11-08 10:01:17 -07:00
Philip Sakievich
33ae096a8b Fix some things from review 2024-11-08 06:32:55 -07:00
Philip Sakievich
259a1d9268 Make dev-build compatible with spack develop 2024-11-07 21:59:21 -07:00
1056 changed files with 12693 additions and 22746 deletions

View File

@@ -66,7 +66,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
with:
name: coverage-audits-${{ matrix.system.os }}

View File

@@ -57,13 +57,7 @@ jobs:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Determine latest release tag
id: latest
run: |
git fetch --quiet --tags
echo "tag=$(git tag --list --sort=-v:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)" | tee -a $GITHUB_OUTPUT
- uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
id: docker_meta
with:
images: |
@@ -77,7 +71,6 @@ jobs:
type=semver,pattern={{major}}
type=ref,event=branch
type=ref,event=pr
type=raw,value=latest,enable=${{ github.ref == format('refs/tags/{0}', steps.latest.outputs.tag) }}
- name: Generate the Dockerfile
env:
@@ -94,7 +87,7 @@ jobs:
fi
- name: Upload Dockerfile
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: dockerfiles_${{ matrix.dockerfile[0] }}
path: dockerfiles
@@ -103,7 +96,7 @@ jobs:
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349
- name: Log in to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
@@ -120,7 +113,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
@@ -133,7 +126,7 @@ jobs:
needs: deploy-images
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@6f51ac03b9356f520e9adb1b1b7802705f340c2b
uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: dockerfiles
pattern: dockerfiles_*

View File

@@ -29,7 +29,6 @@ jobs:
- run: coverage xml
- name: "Upload coverage report to CodeCov"
uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
with:
verbose: true
fail_ci_if_error: false

View File

@@ -3,5 +3,5 @@ clingo==5.7.1
flake8==7.1.1
isort==5.13.2
mypy==1.8.0
types-six==1.17.0.20241205
types-six==1.16.21.20241105
vermin==1.6.0

View File

@@ -14,7 +14,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-22.04]
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
@@ -24,19 +24,19 @@ jobs:
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.7'
os: ubuntu-22.04
os: ubuntu-latest
on_develop: false
- python-version: '3.8'
os: ubuntu-22.04
os: ubuntu-latest
on_develop: false
- python-version: '3.9'
os: ubuntu-22.04
os: ubuntu-latest
on_develop: false
- python-version: '3.10'
os: ubuntu-22.04
os: ubuntu-latest
on_develop: false
- python-version: '3.11'
os: ubuntu-22.04
os: ubuntu-latest
on_develop: false
steps:
@@ -80,14 +80,14 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
include-hidden-files: true
# Test shell integration
shell:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
@@ -113,7 +113,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-shell
path: coverage
@@ -134,7 +134,7 @@ jobs:
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory '*'
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
useradd spack-test
@@ -175,7 +175,7 @@ jobs:
spack bootstrap status
spack solve zlib
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-clingo-cffi
path: coverage
@@ -213,7 +213,7 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
@@ -244,7 +244,7 @@ jobs:
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-windows
path: coverage

View File

@@ -13,7 +13,8 @@ concurrency:
jobs:
# Validate that the code can be run on all the Python versions supported by Spack
# Validate that the code can be run on all the Python versions
# supported by Spack
validate:
runs-on: ubuntu-latest
steps:
@@ -73,7 +74,7 @@ jobs:
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory '*'
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
useradd spack-test
@@ -86,7 +87,6 @@ jobs:
spack -d bootstrap now --dev
spack -d style -t black
spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
@@ -146,21 +146,3 @@ jobs:
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi
# Further style checks from pylint
pylint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: '3.13'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pylint
- name: Pylint (Spack Core)
run: |
pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

View File

@@ -102,6 +102,6 @@ PackageName: sbang
PackageHomePage: https://github.com/spack/sbang
PackageLicenseDeclared: Apache-2.0 OR MIT
PackageName: typing_extensions
PackageHomePage: https://pypi.org/project/typing-extensions/
PackageLicenseDeclared: Python-2.0
PackageName: six
PackageHomePage: https://pypi.python.org/pypi/six
PackageLicenseDeclared: MIT

View File

@@ -70,7 +70,7 @@ Tutorial
----------------
We maintain a
[**hands-on tutorial**](https://spack-tutorial.readthedocs.io/).
[**hands-on tutorial**](https://spack.readthedocs.io/en/latest/tutorial.html).
It covers basic to advanced usage, packaging, developer features, and large HPC
deployments. You can do all of the exercises on your own laptop using a
Docker container.

View File

@@ -39,8 +39,7 @@ concretizer:
# Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
duplicates:
# "none": allows a single node for any package in the DAG.
# "minimal": allows the duplication of 'build-tools' nodes only
# (e.g. py-setuptools, cmake etc.)
# "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
# Option to specify compatibility between operating systems for reuse of compilers and packages
@@ -48,18 +47,3 @@ concretizer:
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
# requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
os_compatible: {}
# Option to specify whether to support splicing. Splicing allows for
# the relinking of concrete package dependencies in order to better
# reuse already built packages with ABI compatible dependencies
splice:
explicit: []
automatic: false
# Maximum time, in seconds, allowed for the 'solve' phase. If set to 0, there is no time limit.
timeout: 0
# If set to true, exceeding the timeout will always result in a concretization error. If false,
# the best (suboptimal) model computed before the timeout is used.
#
# Setting this to false yields unreproducible results, so we advise to use that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true

View File

@@ -194,12 +194,6 @@ config:
# executables with many dependencies, in particular on slow filesystems.
bind: false
# Controls the handling of missing dynamic libraries after installation.
# Options are ignore (default), warn, or error. If set to error, the
# installation fails if installed binaries reference dynamic libraries that
# are not found in their specified rpaths.
missing_library_policy: ignore
# Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
# manipulation by unprivileged user (e.g. AFS)
@@ -226,3 +220,4 @@ config:
concretise: concretize
containerise: containerize
rm: remove
dev-dive: build-env --cd build-dir --dive

View File

@@ -76,8 +76,6 @@ packages:
buildable: false
cray-mvapich2:
buildable: false
egl:
buildable: false
fujitsu-mpi:
buildable: false
hpcx-mpi:

View File

@@ -265,30 +265,25 @@ infrastructure, or to cache Spack built binaries in Github Actions and
GitLab CI.
To get started, configure an OCI mirror using ``oci://`` as the scheme,
and optionally specify variables that hold the username and password (or
personal access token) for the registry:
and optionally specify a username and password (or personal access token):
.. code-block:: console
$ spack mirror add --oci-username-variable REGISTRY_USER \
--oci-password-variable REGISTRY_TOKEN \
my_registry oci://example.com/my_image
$ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
Spack follows the naming conventions of Docker, with Dockerhub as the default
registry. To use Dockerhub, you can omit the registry domain:
.. code-block:: console
$ spack mirror add ... my_registry oci://username/my_image
$ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
From here, you can use the mirror as any other build cache:
.. code-block:: console
$ export REGISTRY_USER=...
$ export REGISTRY_TOKEN=...
$ spack buildcache push my_registry <specs...> # push to the registry
$ spack install <specs...> # or install from the registry
$ spack install <specs...> # install from the registry
A unique feature of buildcaches on top of OCI registries is that it's incredibly
easy to generate a runnable container image with the binaries installed. This

View File

@@ -237,35 +237,3 @@ is optional -- by default, splices will be transitive.
``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
will warn the user in this case, but will not fail the
concretization.
.. _automatic_splicing:
^^^^^^^^^^^^^^^^^^
Automatic Splicing
^^^^^^^^^^^^^^^^^^
The Spack solver can be configured to do automatic splicing for
ABI-compatible packages. Automatic splices are enabled in the concretizer
config section
.. code-block:: yaml
concretizer:
splice:
automatic: True
Packages can include ABI-compatibility information using the
``can_splice`` directive. See :ref:`the packaging
guide<abi_compatibility>` for instructions on specifying ABI
compatibility using the ``can_splice`` directive.
.. note::
The ``can_splice`` directive is experimental and may be changed in
future versions.
When automatic splicing is enabled, the concretizer will combine any
number of ABI-compatible specs if possible to reuse installed packages
and packages available from binary caches. The end result of these
specs is equivalent to a series of transitive/intransitive splices,
but the series may be non-obvious.

View File

@@ -25,14 +25,6 @@ QMake does not appear to have a standardized way of specifying
the installation directory, so you may have to set environment
variables or edit ``*.pro`` files to get things working properly.
QMake packages will depend on the virtual ``qmake`` package which
is provided by multiple versions of Qt: ``qt`` provides Qt up to
Qt5, and ``qt-base`` provides Qt from version Qt6 onwards. This
split was motivated by the desire to split the single Qt package
into its components to allow for more fine-grained installation.
To depend on a specific version, refer to the documentation on
:ref:`virtual-dependencies`.
^^^^^^
Phases
^^^^^^

View File

@@ -210,7 +210,7 @@ def setup(sphinx):
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),
("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
("py:class", "spack.build_systems._checks.BaseBuilder"),
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"),

View File

@@ -38,11 +38,9 @@ just have to configure an OCI registry and run ``spack buildcache push``.
spack -e . install
# Configure the registry
spack -e . mirror add --oci-username-variable REGISTRY_USER \
--oci-password-variable REGISTRY_TOKEN \
container-registry oci://example.com/name/image
spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image
# Push the image (do set REGISTRY_USER and REGISTRY_TOKEN)
# Push the image
spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry
The resulting container image can then be run as follows:

View File

@@ -178,8 +178,8 @@ Spec-related modules
Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
of specs.
:mod:`spack.spec_parser`
Contains :class:`~spack.spec_parser.SpecParser` and functions related to parsing specs.
:mod:`spack.parser`
Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
:mod:`spack.version`
Implements a simple :class:`~spack.version.Version` class with simple

View File

@@ -470,6 +470,48 @@ The supplied location will become the build-directory for that package in all fu
developers to only redirect the build directory if they understand their package's
build-system.
When doing active development it can often be nice to work inside the build environment
to run tests and compile the code natively (i.e. run ``make`` or ``ninja``) without the
overhead of calling ``spack install``.
The ``spack build-env`` command allows users to run processes inside the build environment
or to dive directly into it. An additional convenience alias is ``spack dev-dive [spec]``,
which navigates to the package's build directory and then launches a subshell with the
build environment active. Users can check whether they are currently in a build environment
subshell by running ``spack build-env --status``. When they are finished exploring or
working in the subshell, they can call ``exit`` to leave it.
.. code-block:: console
# create and setup a spack environment for development
# ====
$ spack env activate --temp
$ spack add zlib-ng
$ spack stage -p $SPACK_ENV zlib-ng
$ spack develop zlib-ng
# build and make changes
# =====
$ spack install -u autoreconf zlib-ng
# [ fails ]
$ spack dev-dive zlib-ng
# prompt changed, confirm the location is the build directory
zlib-ng-build-env $ pwd
/private/var/folders/ln/1_3kxbwd35s_ylsjlm3zmqmc00307v/T/spack-ne8_m488/zlib-ng
# confirm build-env in case we forget where we are in subshell hierarchy
zlib-ng-build-env $ spack build-env --status
==> In build env zlib-ng-wrsaadvkbg7rjj7kfjw5rhdrrfdswcmm
# [ fix code ]
zlib-ng-build-env $ make -j6
# [ builds now ]
zlib-ng-build-env $ exit
# exit subshell and rebuild environment
# ====
$ spack build-env --status
==> build environment not detected
$ spack install
^^^^^^^
Loading
^^^^^^^
@@ -1042,7 +1084,7 @@ file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
excludes those built with the GCC compiler at version 18.5.
excludes those built with the PGI compiler at version 18.5.
The root specs with their (transitive) link and run type dependencies
will be put in the view due to the ``link: all`` option,
and the files in the view will be symlinks to the spack install
@@ -1056,7 +1098,7 @@ directories.
mpis:
root: /path/to/view
select: [^mpi]
exclude: ['%gcc@18.5']
exclude: ['%pgi@18.5']
projections:
all: '{name}/{version}-{compiler.name}'
link: all

View File

@@ -283,6 +283,10 @@ compilers`` or ``spack compiler list``:
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
-- clang -------------------------------------------------------
clang@3.4 clang@3.3 clang@3.2 clang@3.1
-- pgi ---------------------------------------------------------
pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1
pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3
pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6
Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`.
@@ -802,6 +806,65 @@ flags to the ``icc`` command:
spec: intel@15.0.24.4.9.3
^^^
PGI
^^^
PGI comes with two sets of compilers for C++ and Fortran,
distinguishable by their names. "Old" compilers:
.. code-block:: yaml
cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90
"New" compilers:
.. code-block:: yaml
cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
Older installations of PGI contain just the old compilers, whereas
newer installations contain both the old and the new. The new compiler is
considered preferable, as some packages
(``hdf``) will not build with the old compiler.
When auto-detecting a PGI compiler, there are cases where Spack will
find the old compilers, when you really want it to find the new
compilers. It is best to check this ``compilers.yaml``; and if the old
compilers are being used, change ``pgf77`` and ``pgf90`` to
``pgfortran``.
Other issues:
* There are reports that some packages will not build with PGI,
including ``libpciaccess`` and ``openssl``. A workaround is to
build these packages with another compiler and then use them as
dependencies for PGI-built packages. For example:
.. code-block:: console
$ spack install openmpi%pgi ^libpciaccess%gcc
* PGI requires a license to use; see :ref:`licensed-compilers` for more
information on installation.
.. note::
It is believed the problem with HDF 4 is that everything is
compiled with the ``F77`` compiler, but at some point some Fortran
90 code slipped in there. So compilers that can handle both FORTRAN
77 and Fortran 90 (``gfortran``, ``pgfortran``, etc) are fine. But
compilers specific to one or the other (``pgf77``, ``pgf90``) won't
work.
^^^
NAG
^^^
@@ -1326,7 +1389,6 @@ Required:
* Microsoft Visual Studio
* Python
* Git
* 7z
Optional:
* Intel Fortran (needed for some packages)
@@ -1392,13 +1454,6 @@ as the project providing Git support on Windows. This is additionally the recomm
for installing Git on Windows, a link to which can be found above. Spack requires the
utilities vendored by this project.
"""
7zip
"""
A tool for extracting ``.xz`` files is required for extracting source tarballs. The latest 7zip
can be located at https://sourceforge.net/projects/sevenzip/.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 2: Install and setup Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
large repositories that have separate portions that can be built independently.
If paths provided are directories then all the subdirectories and associated files
will also be cloned.
will also be cloned.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1367,8 +1367,8 @@ Submodules
git-submodule``.
Sparse-Checkout
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
sparse-checkout feature. This will only clone the paths that are specified in the
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
sparse-checkout feature. This will only clone the paths that are specified in the
``git_sparse_paths`` attribute for the package along with the files in the top level directory.
This feature allows you to only clone what you need from a large repository.
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
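The ``git_sparse_paths`` attribute described above can be supplied at the package level (or per version, as the text notes). A minimal, hedged sketch follows; the package name, URL, and paths are hypothetical and not taken from this diff:

.. code-block:: python

    from spack.package import *


    class MyProj(Package):
        """Hypothetical package using git sparse-checkout."""

        git = "https://example.com/org/myproj.git"

        # Package-level attribute: every version clones only these paths,
        # plus the files in the repository's top-level directory.
        git_sparse_paths = ["docs", "lib/core"]

        version("1.2.0", tag="v1.2.0")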
@@ -1928,29 +1928,71 @@ to the empty list.
String. A URL pointing to license setup instructions for the software.
Defaults to the empty string.
For example, let's take a look at the Arm Forge package.
For example, let's take a look at the package for the PGI compilers.
.. code-block:: python
# Licensing
license_required = True
license_comment = "#"
license_files = ["licences/Licence"]
license_vars = [
"ALLINEA_LICENSE_DIR",
"ALLINEA_LICENCE_DIR",
"ALLINEA_LICENSE_FILE",
"ALLINEA_LICENCE_FILE",
]
license_url = "https://developer.arm.com/documentation/101169/latest/Use-Arm-Licence-Server"
license_comment = "#"
license_files = ["license.dat"]
license_vars = ["PGROUPD_LICENSE_FILE", "LM_LICENSE_FILE"]
license_url = "http://www.pgroup.com/doc/pgiinstall.pdf"
Arm Forge requires a license. Its license manager uses the ``#`` symbol to denote a comment.
It expects the license file to be named ``Licence`` and to be located in a ``licences`` directory
in the installation prefix.
As you can see, PGI requires a license. Its license manager, FlexNet, uses
the ``#`` symbol to denote a comment. It expects the license file to be
named ``license.dat`` and to be located directly in the installation prefix.
If you would like the installation file to be located elsewhere, simply set
``PGROUPD_LICENSE_FILE`` or ``LM_LICENSE_FILE`` after installation. For
further instructions on installation and licensing, see the URL provided.
If you would like the installation file to be located elsewhere, simply set ``ALLINEA_LICENSE_DIR`` or
one of the other license variables after installation. For further instructions on installation and
licensing, see the URL provided.
Let's walk through a sample PGI installation to see exactly what Spack is
and isn't capable of. Since PGI does not provide a download URL, it must
be downloaded manually. It can either be added to a mirror or located in
the current directory when ``spack install pgi`` is run. See :ref:`mirrors`
for instructions on setting up a mirror.
After running ``spack install pgi``, the first thing that will happen is
Spack will create a global license file located at
``$SPACK_ROOT/etc/spack/licenses/pgi/license.dat``. It will then open up the
file using :ref:`your favorite editor <controlling-the-editor>`. It will look like
this:
.. code-block:: sh
# A license is required to use pgi.
#
# The recommended solution is to store your license key in this global
# license file. After installation, the following symlink(s) will be
# added to point to this file (relative to the installation prefix):
#
# license.dat
#
# Alternatively, use one of the following environment variable(s):
#
# PGROUPD_LICENSE_FILE
# LM_LICENSE_FILE
#
# If you choose to store your license in a non-standard location, you may
# set one of these variable(s) to the full pathname to the license file, or
# port@host if you store your license keys on a dedicated license server.
# You will likely want to set this variable in a module file so that it
# gets loaded every time someone tries to use pgi.
#
# For further information on how to acquire a license, please refer to:
#
# http://www.pgroup.com/doc/pgiinstall.pdf
#
# You may enter your license below.
You can add your license directly to this file, or tell FlexNet to use a
license stored on a separate license server. Here is an example that
points to a license server called licman1:
.. code-block:: none
SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
USE_SERVER
If your package requires the license to install, you can reference the
location of this global license using ``self.global_license_file``.
@@ -2350,7 +2392,7 @@ by the ``--jobs`` option:
.. code-block:: python
:emphasize-lines: 7, 11
:linenos:
class Xios(Package):
...
def install(self, spec, prefix):
@@ -2925,9 +2967,9 @@ make sense during the build phase may not be needed at runtime, and vice versa.
it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package:
1. :meth:`setup_build_environment <spack.builder.BaseBuilder.setup_build_environment>`
1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.BaseBuilder.setup_dependent_build_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
The Qt package, for instance, uses this call:
@@ -5137,7 +5179,7 @@ other checks.
- Not applicable
* - :ref:`PythonPackage <pythonpackage>`
- Not applicable
- ``test_imports`` (module imports)
- ``test`` (module imports)
* - :ref:`QMakePackage <qmakepackage>`
- ``check`` (``make check``)
- Not applicable
@@ -5146,7 +5188,7 @@ other checks.
- Not applicable
* - :ref:`SIPPackage <sippackage>`
- Not applicable
- ``test_imports`` (module imports)
- ``test`` (module imports)
* - :ref:`WafPackage <wafpackage>`
- ``build_test`` (must be overridden)
- ``install_test`` (must be overridden)
@@ -5378,7 +5420,7 @@ by build recipes. Examples of checking :ref:`variant settings <variants>` and
determine whether it needs to also set up build dependencies (see
:ref:`test-build-tests`).
The ``MyPackage`` package below provides two basic test examples:
The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming
@@ -5695,7 +5737,7 @@ subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.
.. tip::
.. tip::
*Perform test-related conversions once when copying files.*
@@ -7039,7 +7081,34 @@ provide them after the spec argument to ``spack build-env``:
$ spack build-env mpileaks@1.1%intel ./configure
This will cd to the build directory and then run ``configure`` in the
package's build environment.
package's build environment. This could also be done in one command as follows.
.. code-block:: console
$ spack build-env --cd build-dir mpileaks@1.1%intel ./configure
Furthermore, ``spack build-env`` has the ability to dive into the build environment
in a subshell to allow you to work natively without polluting your current shell.
.. code-block:: console
$ spack build-env --cd build-dir --dive mpileaks@1.1%intel
mpileaks-build-env $ ./configure
Note that the command prompt changed in this case. This happens automatically in
Bourne-compatible and C shells to indicate that the user is in the build environment subshell.
While automatic prompt changes only occur for a selection of shells, any shell can
check whether it is in the build environment with the ``spack build-env --status``
flag.
.. code-block:: console
$ spack build-env --dive mpileaks@1.1%intel
$ spack build-env --status
==> In build env mpileaks-wrsaadvkbg7rjj7kfjw5rhdrrfdswcmm
$ exit
$ spack build-env --status
==> build environment not detected
.. _cmd-spack-location:
@@ -7071,46 +7140,6 @@ might write:
CXXFLAGS += -I$DWARF_PREFIX/include
CXXFLAGS += -L$DWARF_PREFIX/lib
.. _abi_compatibility:
----------------------------
Specifying ABI Compatibility
----------------------------
Packages can include ABI-compatibility information using the
``can_splice`` directive. For example, if ``Foo`` version 1.1 can
always replace version 1.0, then the package could have:
.. code-block:: python
can_splice("foo@1.0", when="@1.1")
For virtual packages, packages can also specify ABI-compatibility with
other packages providing the same virtual. For example, ``zlib-ng``
could specify:
.. code-block:: python
can_splice("zlib@1.3.1", when="@2.2+compat")
Some packages have ABI-compatibility that is dependent on matching
variant values, either for all variants or for some set of
ABI-relevant variants. In those cases, it is not necessary to specify
the full combinatorial explosion. The ``match_variants`` keyword can
cover all single-value variants.
.. code-block:: python
can_splice("foo@1.1", when="@1.2", match_variants=["bar"]) # any value for bar as long as they're the same
can_splice("foo@1.2", when="@1.3", match_variants="*") # any variant values if all single-value variants match
The concretizer will use ABI compatibility to determine automatic
splices when :ref:`automatic splicing<automatic_splicing>` is enabled.
.. note::
The ``can_splice`` directive is experimental, and may be replaced
by a higher-level interface in future versions of Spack.
.. _package_class_structure:
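The four environment-setup hooks listed earlier in this file's diff (``setup_build_environment`` and friends) are typically overridden at the package level. Below is a minimal, hedged sketch of the first and third hooks; the package name and environment variables are hypothetical, not taken from this diff:

.. code-block:: python

    from spack.package import *


    class Mylib(Package):
        """Hypothetical package illustrating the environment setup hooks."""

        def setup_build_environment(self, env):
            # Runs before this package's own build; env collects the
            # environment modifications to apply.
            env.set("MYLIB_BUILD_MODE", "release")

        def setup_dependent_build_environment(self, env, dependent_spec):
            # Runs before building packages that depend on this one.
            env.prepend_path("CMAKE_PREFIX_PATH", self.prefix)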

View File

@@ -1,12 +1,12 @@
sphinx==8.1.3
sphinxcontrib-programoutput==0.18
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
sphinx-rtd-theme==3.0.1
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.4
pytest==8.3.3
isort==5.13.2
black==24.10.0
flake8==7.1.1

View File

@@ -1,254 +0,0 @@
A. HISTORY OF THE SOFTWARE
==========================
Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.
In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.
In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations (now Zope
Corporation, see http://www.zope.com). In 2001, the Python Software
Foundation (PSF, see http://www.python.org/psf/) was formed, a
non-profit organization created specifically to own Python-related
Intellectual Property. Zope Corporation is a sponsoring member of
the PSF.
All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition). Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.
Release Derived Year Owner GPL-
from compatible? (1)
0.9.0 thru 1.2 1991-1995 CWI yes
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
1.6 1.5.2 2000 CNRI no
2.0 1.6 2000 BeOpen.com no
1.6.1 1.6 2001 CNRI yes (2)
2.1 2.0+1.6.1 2001 PSF no
2.0.1 2.0+1.6.1 2001 PSF yes
2.1.1 2.1+2.0.1 2001 PSF yes
2.1.2 2.1.1 2002 PSF yes
2.1.3 2.1.2 2002 PSF yes
2.2 and above 2.1.1 2001-now PSF yes
Footnotes:
(1) GPL-compatible doesn't mean that we're distributing Python under
the GPL. All Python licenses, unlike the GPL, let you distribute
a modified version without making your changes open source. The
GPL-compatible licenses make it possible to combine Python with
other software that is released under the GPL; the others don't.
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
because its license has a choice of law clause. According to
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
is "not incompatible" with the GPL.
Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
retained in Python alone or in any derivative version prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.
7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------
1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.
8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.
ACCEPT
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

File diff suppressed because it is too large

View File

@@ -1 +0,0 @@
from typing_extensions import *

View File

@@ -8,4 +8,3 @@ six==1.16.0
macholib==1.16.2
altgraph==0.17.3
ruamel.yaml==0.17.21
typing_extensions==4.1.1

View File

@@ -66,7 +66,7 @@ def _is_url(path_or_url: str) -> bool:
return result
def _system_path_filter(_func=None, arg_slice: Optional[slice] = None):
def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
"""Filters function arguments to account for platform path separators.
Optional slicing range can be specified to select specific arguments
@@ -100,16 +100,6 @@ def path_filter_caller(*args, **kwargs):
return holder_func
def _noop_decorator(_func=None, arg_slice: Optional[slice] = None):
return _func if _func else lambda x: x
if sys.platform == "win32":
system_path_filter = _system_path_filter
else:
system_path_filter = _noop_decorator
def sanitize_win_longpath(path: str) -> str:
"""Strip Windows extended path prefix from strings
Returns sanitized string.

View File

@@ -24,7 +24,6 @@
Callable,
Deque,
Dict,
Generator,
Iterable,
List,
Match,
@@ -301,32 +300,35 @@ def filter_file(
ignore_absent: bool = False,
start_at: Optional[str] = None,
stop_at: Optional[str] = None,
encoding: Optional[str] = "utf-8",
) -> None:
r"""Like sed, but uses python regular expressions.
Filters every line of each file through regex and replaces the file with a filtered version.
Preserves mode of filtered files.
Filters every line of each file through regex and replaces the file
with a filtered version. Preserves mode of filtered files.
As with re.sub, ``repl`` can be either a string or a callable. If it is a callable, it is
passed the match object and should return a suitable replacement string. If it is a string, it
can contain ``\1``, ``\2``, etc. to represent back-substitution as sed would allow.
As with re.sub, ``repl`` can be either a string or a callable.
If it is a callable, it is passed the match object and should
return a suitable replacement string. If it is a string, it
can contain ``\1``, ``\2``, etc. to represent back-substitution
as sed would allow.
Args:
regex: The regular expression to search for
repl: The string to replace matches with
*filenames: One or more files to search and replace
string: Treat regex as a plain string. Default is False
backup: Make backup file(s) suffixed with ``~``. Default is False
ignore_absent: Ignore any files that don't exist. Default is False
start_at: Marker used to start applying the replacements. If a text line matches this
marker filtering is started at the next line. All contents before the marker and the
marker itself are copied verbatim. Default is to start filtering from the first line of
the file.
stop_at: Marker used to stop scanning the file further. If a text line matches this marker
filtering is stopped and the rest of the file is copied verbatim. Default is to filter
until the end of the file.
encoding: The encoding to use when reading and writing the files. Default is None, which
uses the system's default encoding.
regex (str): The regular expression to search for
repl (str): The string to replace matches with
*filenames: One or more files to search and replace
string (bool): Treat regex as a plain string. Default it False
backup (bool): Make backup file(s) suffixed with ``~``. Default is False
ignore_absent (bool): Ignore any files that don't exist.
Default is False
start_at (str): Marker used to start applying the replacements. If a
text line matches this marker filtering is started at the next line.
All contents before the marker and the marker itself are copied
verbatim. Default is to start filtering from the first line of the
file.
stop_at (str): Marker used to stop scanning the file further. If a text
line matches this marker filtering is stopped and the rest of the
file is copied verbatim. Default is to filter until the end of the
file.
"""
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
@@ -342,56 +344,72 @@ def groupid_to_group(x):
if string:
regex = re.escape(regex)
regex_compiled = re.compile(regex)
for path in path_to_os_path(*filenames):
if ignore_absent and not os.path.exists(path):
tty.debug(f'FILTER FILE: file "{path}" not found. Skipping to next file.')
for filename in path_to_os_path(*filenames):
msg = 'FILTER FILE: {0} [replacing "{1}"]'
tty.debug(msg.format(filename, regex))
backup_filename = filename + "~"
tmp_filename = filename + ".spack~"
if ignore_absent and not os.path.exists(filename):
msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
tty.debug(msg.format(filename))
continue
else:
tty.debug(f'FILTER FILE: {path} [replacing "{regex}"]')
fd, temp_path = tempfile.mkstemp(
prefix=f"{os.path.basename(path)}.", dir=os.path.dirname(path)
)
os.close(fd)
# Create backup file. Don't overwrite an existing backup
# file in case this file is being filtered multiple times.
if not os.path.exists(backup_filename):
shutil.copy(filename, backup_filename)
shutil.copy(path, temp_path)
errored = False
# Create a temporary file to read from. We cannot use backup_filename
# in case filter_file is invoked multiple times on the same file.
shutil.copy(filename, tmp_filename)
try:
# Open as a text file and filter until the end of the file is reached, or we found a
# marker in the line if it was specified. To avoid translating line endings (\n to
# \r\n and vice-versa) use newline="".
with open(
temp_path, mode="r", errors="surrogateescape", newline="", encoding=encoding
) as input_file, open(
path, mode="w", errors="surrogateescape", newline="", encoding=encoding
) as output_file:
if start_at is None and stop_at is None: # common case, avoids branching in loop
for line in input_file:
output_file.write(re.sub(regex_compiled, repl, line))
else:
# state is -1 before start_at; 0 between; 1 after stop_at
state = 0 if start_at is None else -1
for line in input_file:
if state == 0:
# Open as a text file and filter until the end of the file is
# reached, or we found a marker in the line if it was specified
#
# To avoid translating line endings (\n to \r\n and vice-versa)
# we force os.open to ignore translations and use the line endings
# the file comes with
with open(tmp_filename, mode="r", errors="surrogateescape", newline="") as input_file:
with open(filename, mode="w", errors="surrogateescape", newline="") as output_file:
do_filtering = start_at is None
# Using iter and readline is a workaround needed not to
# disable input_file.tell(), which will happen if we call
# input_file.next() implicitly via the for loop
for line in iter(input_file.readline, ""):
if stop_at is not None:
current_position = input_file.tell()
if stop_at == line.strip():
state = 1
else:
line = re.sub(regex_compiled, repl, line)
elif state == -1 and start_at == line.strip():
state = 0
output_file.write(line)
output_file.write(line)
break
if do_filtering:
filtered_line = re.sub(regex, repl, line)
output_file.write(filtered_line)
else:
do_filtering = start_at == line.strip()
output_file.write(line)
else:
current_position = None
# If we stopped filtering at some point, reopen the file in
# binary mode and copy verbatim the remaining part
if current_position and stop_at:
with open(tmp_filename, mode="rb") as input_binary_buffer:
input_binary_buffer.seek(current_position)
with open(filename, mode="ab") as output_binary_buffer:
output_binary_buffer.writelines(input_binary_buffer.readlines())
except BaseException:
# restore the original file
os.rename(temp_path, path)
errored = True
# clean up the original file on failure.
shutil.move(backup_filename, filename)
raise
finally:
if not errored and not backup:
os.unlink(temp_path)
os.remove(tmp_filename)
if not backup and os.path.exists(backup_filename):
os.remove(backup_filename)
class FileFilter:
@@ -1096,12 +1114,12 @@ def hash_directory(directory, ignore=[]):
@contextmanager
@system_path_filter
def write_tmp_and_move(filename: str, *, encoding: Optional[str] = None):
def write_tmp_and_move(filename):
"""Write to a temporary file, then move into place."""
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
tmp = os.path.join(dirname, ".%s.tmp" % basename)
with open(tmp, "w", encoding=encoding) as f:
with open(tmp, "w") as f:
yield f
shutil.move(tmp, filename)
@@ -2754,6 +2772,22 @@ def prefixes(path):
return paths
@system_path_filter
def md5sum(file):
"""Compute the MD5 sum of a file.
Args:
file (str): file to be checksummed
Returns:
MD5 sum of the file's content
"""
md5 = hashlib.md5()
with open(file, "rb") as f:
md5.update(f.read())
return md5.digest()
@system_path_filter
def remove_directory_contents(dir):
"""Remove all contents of a directory."""
@@ -2804,25 +2838,6 @@ def temporary_dir(
remove_directory_contents(tmp_dir)
@contextmanager
def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
"""Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
for functions or external tools that do not support in-place editing. Notice that this function
is unsafe in that it works with paths instead of a file descriptors, but this is by design,
since we assume the call site will create a new inode at the same path."""
tmp_fd, tmp_path = tempfile.mkstemp(
dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
)
# windows cannot replace a file with open fds, so close since the call site needs to replace.
os.close(tmp_fd)
try:
shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
yield tmp_path
shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
finally:
os.unlink(tmp_path)
def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
"""Create a small summary of the given file. Does not error
when file does not exist.
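The ``filter_file`` hunk earlier in this file's diff shows both variants of the docstring. Here is a minimal usage sketch using only arguments documented on both sides, assuming the function is imported from ``llnl.util.filesystem`` as in Spack's tree; the file name and pattern are hypothetical:

.. code-block:: python

    from llnl.util.filesystem import filter_file

    # Rewrite the compiler assignment in a Makefile in place, keep no "~"
    # backup, and skip the file silently if it does not exist.
    filter_file(
        r"^CC\s*=.*",
        "CC = mpicc",
        "Makefile",
        backup=False,
        ignore_absent=True,
    )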

View File

@@ -96,8 +96,8 @@ def get_fh(self, path: str) -> IO:
Arguments:
path: path to lock file we want a filehandle for
"""
# Open writable files as rb+ so we can upgrade to write later
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "rb+"
# Open writable files as 'r+' so we can upgrade to write later
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
pid = os.getpid()
open_file = None # OpenFile object, if there is one
@@ -124,7 +124,7 @@ def get_fh(self, path: str) -> IO:
# we know path exists but not if it's writable. If it's read-only,
# only open the file for reading (and fail if we're trying to get
# an exclusive (write) lock on it)
os_mode, fh_mode = os.O_RDONLY, "rb"
os_mode, fh_mode = os.O_RDONLY, "r"
fd = os.open(path, os_mode)
fh = os.fdopen(fd, fh_mode)
@@ -243,7 +243,7 @@ def __init__(
helpful for distinguishing between different Spack locks.
"""
self.path = path
self._file: Optional[IO[bytes]] = None
self._file: Optional[IO] = None
self._reads = 0
self._writes = 0
@@ -329,9 +329,9 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
self._ensure_parent_directory()
self._file = FILE_TRACKER.get_fh(self.path)
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "rb":
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
# Attempt to upgrade to write lock w/a read-only file.
# If the file were writable, we'd have opened it rb+
# If the file were writable, we'd have opened it 'r+'
raise LockROFileError(self.path)
self._log_debug(
@@ -426,7 +426,7 @@ def _read_log_debug_data(self) -> None:
line = self._file.read()
if line:
pid, host = line.decode("utf-8").strip().split(",")
pid, host = line.strip().split(",")
_, _, pid = pid.rpartition("=")
_, _, self.host = host.rpartition("=")
self.pid = int(pid)
@@ -442,7 +442,7 @@ def _write_log_debug_data(self) -> None:
# write pid, host to disk to sync over FS
self._file.seek(0)
self._file.write(f"pid={self.pid},host={self.host}".encode("utf-8"))
self._file.write("pid=%s,host=%s" % (self.pid, self.host))
self._file.truncate()
self._file.flush()
os.fsync(self._file.fileno())
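The hunks above switch lock files between binary ("rb+"/"rb") and text ("r+"/"r") modes. A minimal sketch, independent of Spack's Lock class, of the underlying read-to-write lock upgrade with fcntl in the binary-mode variant (POSIX only; the path is illustrative).

import fcntl, os

path = "/tmp/example.lock"                      # illustrative
fd = os.open(path, os.O_RDWR | os.O_CREAT)      # writable, so an upgrade is allowed
fh = os.fdopen(fd, "rb+")

fcntl.lockf(fh, fcntl.LOCK_SH)                  # start with a shared (read) lock
fcntl.lockf(fh, fcntl.LOCK_EX)                  # upgrade to an exclusive (write) lock
fh.seek(0)
fh.write(f"pid={os.getpid()}".encode("utf-8"))  # binary mode needs bytes
fh.truncate()
fcntl.lockf(fh, fcntl.LOCK_UN)
fh.close()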

View File

@@ -161,7 +161,7 @@ def _err_check(result, func, args):
)
# Use conout$ here to handle a redirected stdout/get active console associated
# with spack
with open(r"\\.\CONOUT$", "w", encoding="utf-8") as conout:
with open(r"\\.\CONOUT$", "w") as conout:
# Link above would use kernel32.GetStdHandle(-11) however this would not handle
# a redirected stdout appropriately, so we always refer to the current CONSOLE out
# which is defined as conout$ on Windows.

View File

@@ -762,7 +762,7 @@ def __enter__(self):
self.reader = open(self.logfile, mode="rb+")
# Dup stdout so we can still write to it after redirection
self.echo_writer = open(os.dup(sys.stdout.fileno()), "w", encoding=sys.stdout.encoding)
self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
# Redirect stdout and stderr to write to logfile
self.stderr.redirect_stream(self.writer.fileno())
self.stdout.redirect_stream(self.writer.fileno())
@@ -879,13 +879,10 @@ def _writer_daemon(
write_fd.close()
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O. [needs citation]
# 2. Enforce a UTF-8 interpretation of build process output with errors replaced by '?'.
# The downside is that the log file will not contain the exact output of the build process.
# that prevents unbuffered text I/O.
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
# 3. closefd=False because Connection has "ownership"
read_file = os.fdopen(
read_fd.fileno(), "r", 1, encoding="utf-8", errors="replace", closefd=False
)
read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
if stdin_fd:
stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
@@ -931,7 +928,11 @@ def _writer_daemon(
try:
while line_count < 100:
# Handle output from the calling process.
line = _retry(read_file.readline)()
try:
line = _retry(read_file.readline)()
except UnicodeDecodeError:
# installs like --test=root gpgme produce non-UTF8 logs
line = "<line lost: output was not encoded as UTF-8>\n"
if not line:
return
@@ -945,13 +946,6 @@ def _writer_daemon(
output_line = clean_line
if filter_fn:
output_line = filter_fn(clean_line)
enc = sys.stdout.encoding
if enc != "utf-8":
# On Python 3.6 and 3.7-3.14 with non-{utf-8,C} locale stdout
# may not be able to handle utf-8 output. We do an inefficient
# dance of re-encoding with errors replaced, so stdout.write
# does not raise.
output_line = output_line.encode(enc, "replace").decode(enc)
sys.stdout.write(output_line)
# Stripped output to log file.
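A small standalone sketch of the re-encoding dance described in the removed comment: characters that the current stdout encoding cannot represent become "?" so write() does not raise (the sample string is illustrative).

import sys

output_line = "build output with a non-ASCII char: ✓\n"   # illustrative
enc = sys.stdout.encoding
if enc and enc != "utf-8":
    # Re-encode with errors replaced so sys.stdout.write cannot raise.
    output_line = output_line.encode(enc, "replace").decode(enc)
sys.stdout.write(output_line)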

View File

@@ -11,7 +11,7 @@
import spack.util.git
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.24.0.dev0"
__version__ = "0.23.0.dev0"
spack_version = __version__

View File

@@ -571,13 +571,8 @@ def _search_for_deprecated_package_methods(pkgs, error_cls):
@package_properties
def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
"""Ensure package names are lowercase and consistent"""
reserved_names = ("all",)
badname_regex, errors = re.compile(r"[_A-Z]"), []
for pkg_name in pkgs:
if pkg_name in reserved_names:
error_msg = f"The name '{pkg_name}' is reserved, and cannot be used for packages"
errors.append(error_cls(error_msg, []))
if badname_regex.search(pkg_name):
error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
errors.append(error_cls(error_msg, []))
@@ -656,7 +651,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
for pkg_name in pkgs:
details = []
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
with open(filename, "r", encoding="utf-8") as package_file:
with open(filename, "r") as package_file:
for i, line in enumerate(package_file):
pattern = next((r for r in fixme_regexes if r.search(line)), None)
if pattern:
@@ -693,19 +688,19 @@ def invalid_sha256_digest(fetcher):
return h, True
return None, False
error_msg = f"Package '{pkg_name}' does not use sha256 checksum"
error_msg = "Package '{}' does not use sha256 checksum".format(pkg_name)
details = []
for v, args in pkg.versions.items():
fetcher = spack.fetch_strategy.for_package_version(pkg, v)
digest, is_bad = invalid_sha256_digest(fetcher)
if is_bad:
details.append(f"{pkg_name}@{v} uses {digest}")
details.append("{}@{} uses {}".format(pkg_name, v, digest))
for _, resources in pkg.resources.items():
for resource in resources:
digest, is_bad = invalid_sha256_digest(resource.fetcher)
if is_bad:
details.append(f"Resource in '{pkg_name}' uses {digest}")
details.append("Resource in '{}' uses {}".format(pkg_name, digest))
if details:
errors.append(error_cls(error_msg, details))
@@ -809,7 +804,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
continue
file = spack.repo.PATH.filename_for_package_name(pkg_name)
tree = ast.parse(open(file, "rb").read())
tree = ast.parse(open(file).read())
visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
visitor.visit(tree)
if visitor.references_to_globals:
@@ -1009,6 +1004,20 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
for when, deps_by_name in pkg_cls.dependencies.items():
for dep_name, dep in deps_by_name.items():
# Check if there are nested dependencies declared. We don't want directives like:
#
# depends_on('foo+bar ^fee+baz')
#
# but we'd like to have two dependencies listed instead.
nested_dependencies = dep.spec.dependencies()
if nested_dependencies:
summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
ndir = len(nested_dependencies) + 1
details = [
f"split depends_on('{dep.spec}', when='{when}') into {ndir} directives",
f"in {filename}",
]
errors.append(error_cls(summary=summary, details=details))
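The audit above rejects directives that declare a dependency of a dependency in a single spec. A hedged sketch of the split that the error message asks for (package and variant names are made up).

# Instead of one nested declaration such as:
#     depends_on("foo+bar ^fee+baz")
# the audit asks for two separate directives (names are illustrative):
depends_on("foo+bar")
depends_on("fee+baz")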
def check_virtual_with_variants(spec, msg):
if not spec.virtual or not spec.variants:

View File

@@ -40,7 +40,7 @@
import spack.hash_types as ht
import spack.hooks
import spack.hooks.sbang
import spack.mirrors.mirror
import spack.mirror
import spack.oci.image
import spack.oci.oci
import spack.oci.opener
@@ -69,8 +69,10 @@
Digest,
ImageReference,
default_config,
default_index_tag,
default_manifest,
ensure_valid_tag,
default_tag,
tag_is_spec,
)
from spack.oci.oci import (
copy_missing_layers_with_retry,
@@ -81,11 +83,10 @@
)
from spack.package_prefs import get_package_dir_permissions, get_package_group
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
from spack.util.executable import which
from .enums import InstallRecordStatus
BUILD_CACHE_RELATIVE_PATH = "build_cache"
BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"
@@ -251,7 +252,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
spec_list = [
s
for s in db.query_local(installed=InstallRecordStatus.ANY)
for s in db.query_local(installed=any)
if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
]
@@ -366,7 +367,7 @@ def update(self, with_cooldown=False):
on disk under ``_index_cache_root``)."""
self._init_local_index_cache()
configured_mirror_urls = [
m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values()
m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
]
items_to_remove = []
spec_cache_clear_needed = False
@@ -583,7 +584,7 @@ def buildinfo_file_name(prefix):
def read_buildinfo_file(prefix):
"""Read buildinfo file"""
with open(buildinfo_file_name(prefix), "r", encoding="utf-8") as f:
with open(buildinfo_file_name(prefix), "r") as f:
return syaml.load(f)
@@ -824,10 +825,10 @@ def _read_specs_and_push_index(
contents = read_method(file)
# Need full spec.json name or this gets confused with index.json.
if file.endswith(".json.sig"):
specfile_json = spack.spec.Spec.extract_json_from_clearsig(contents)
fetched_spec = spack.spec.Spec.from_dict(specfile_json)
specfile_json = Spec.extract_json_from_clearsig(contents)
fetched_spec = Spec.from_dict(specfile_json)
elif file.endswith(".json"):
fetched_spec = spack.spec.Spec.from_json(contents)
fetched_spec = Spec.from_json(contents)
else:
continue
@@ -837,17 +838,17 @@ def _read_specs_and_push_index(
# Now generate the index, compute its hash, and push the two files to
# the mirror.
index_json_path = os.path.join(temp_dir, "index.json")
with open(index_json_path, "w", encoding="utf-8") as f:
with open(index_json_path, "w") as f:
db._write_to_file(f)
# Read the index back in and compute its hash
with open(index_json_path, encoding="utf-8") as f:
with open(index_json_path) as f:
index_string = f.read()
index_hash = compute_hash(index_string)
# Write the hash out to a local file
index_hash_path = os.path.join(temp_dir, "index.json.hash")
with open(index_hash_path, "w", encoding="utf-8") as f:
with open(index_hash_path, "w") as f:
f.write(index_hash)
# Push the index itself
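The code above pairs index.json with a small sidecar file holding its checksum. A minimal sketch of the same idea using hashlib.sha256 directly; whether compute_hash uses SHA-256 is an assumption, and the paths are illustrative.

import hashlib, os

temp_dir = "/tmp/example-mirror"                     # illustrative
index_json_path = os.path.join(temp_dir, "index.json")

with open(index_json_path, encoding="utf-8") as f:
    index_string = f.read()

index_hash = hashlib.sha256(index_string.encode("utf-8")).hexdigest()

with open(os.path.join(temp_dir, "index.json.hash"), "w", encoding="utf-8") as f:
    f.write(index_hash)
# Clients can fetch the tiny .hash file first and skip index.json if unchanged.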
@@ -881,7 +882,7 @@ def _specs_from_cache_aws_cli(cache_prefix):
aws = which("aws")
def file_read_method(file_path):
with open(file_path, encoding="utf-8") as fd:
with open(file_path) as fd:
return fd.read()
tmpspecsdir = tempfile.mkdtemp()
@@ -1026,7 +1027,7 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
target = os.path.join(tmpdir, "index.json")
index = {"keys": dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))}
with open(target, "w", encoding="utf-8") as f:
with open(target, "w") as f:
sjson.dump(index, f)
try:
@@ -1097,7 +1098,7 @@ class ExistsInBuildcache(NamedTuple):
class BuildcacheFiles:
def __init__(self, spec: spack.spec.Spec, local: str, remote: str):
def __init__(self, spec: Spec, local: str, remote: str):
"""
Args:
spec: The spec whose tarball and specfile are being managed.
@@ -1127,7 +1128,7 @@ def local_tarball(self) -> str:
return os.path.join(self.local, f"{self.spec.dag_hash()}.tar.gz")
def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
"""returns a tuple of bools (signed, unsigned, tarball) indicating whether specfiles/tarballs
exist in the buildcache"""
files = BuildcacheFiles(spec, tmpdir, out_url)
@@ -1138,11 +1139,7 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E
def _url_upload_tarball_and_specfile(
spec: spack.spec.Spec,
tmpdir: str,
out_url: str,
exists: ExistsInBuildcache,
signing_key: Optional[str],
spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
):
files = BuildcacheFiles(spec, tmpdir, out_url)
tarball = files.local_tarball()
@@ -1160,7 +1157,7 @@ def _url_upload_tarball_and_specfile(
web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False)
specfile = files.local_specfile()
with open(specfile, "w", encoding="utf-8") as f:
with open(specfile, "w") as f:
# Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
# If lines are longer, they are truncated without error. Thanks GPG!
# So, here we still add newlines, but no indent, so save on file size and
@@ -1177,7 +1174,7 @@ def _url_upload_tarball_and_specfile(
class Uploader:
def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
self.mirror = mirror
self.force = force
self.update_index = update_index
@@ -1225,7 +1222,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class OCIUploader(Uploader):
def __init__(
self,
mirror: spack.mirrors.mirror.Mirror,
mirror: spack.mirror.Mirror,
force: bool,
update_index: bool,
base_image: Optional[str],
@@ -1274,7 +1271,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class URLUploader(Uploader):
def __init__(
self,
mirror: spack.mirrors.mirror.Mirror,
mirror: spack.mirror.Mirror,
force: bool,
update_index: bool,
signing_key: Optional[str],
@@ -1298,7 +1295,7 @@ def push(
def make_uploader(
mirror: spack.mirrors.mirror.Mirror,
mirror: spack.mirror.Mirror,
force: bool = False,
update_index: bool = False,
signing_key: Optional[str] = None,
@@ -1315,7 +1312,7 @@ def make_uploader(
)
def _format_spec(spec: spack.spec.Spec) -> str:
def _format_spec(spec: Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
@@ -1338,7 +1335,7 @@ def _progress(self):
return f"[{self.n:{digits}}/{self.total}] "
return ""
def start(self, spec: spack.spec.Spec, running: bool) -> None:
def start(self, spec: Spec, running: bool) -> None:
self.n += 1
self.running = running
self.pre = self._progress()
@@ -1357,18 +1354,18 @@ def fail(self) -> None:
def _url_push(
specs: List[spack.spec.Spec],
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
force: bool,
update_index: bool,
tmpdir: str,
executor: concurrent.futures.Executor,
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
"""Pushes to the provided build cache, and returns a list of skipped specs that were already
present (when force=False), and a list of errors. Does not raise on error."""
skipped: List[spack.spec.Spec] = []
errors: List[Tuple[spack.spec.Spec, BaseException]] = []
skipped: List[Spec] = []
errors: List[Tuple[Spec, BaseException]] = []
exists_futures = [
executor.submit(_exists_in_buildcache, spec, tmpdir, out_url) for spec in specs
@@ -1441,7 +1438,7 @@ def _url_push(
return skipped, errors
def _oci_upload_success_msg(spec: spack.spec.Spec, digest: Digest, size: int, elapsed: float):
def _oci_upload_success_msg(spec: Spec, digest: Digest, size: int, elapsed: float):
elapsed = max(elapsed, 0.001) # guard against division by zero
return (
f"Pushed {_format_spec(spec)}: {digest} ({elapsed:.2f}s, "
@@ -1527,7 +1524,7 @@ def _oci_put_manifest(
):
architecture = _oci_archspec_to_gooarch(specs[0])
expected_blobs: List[spack.spec.Spec] = [
expected_blobs: List[Spec] = [
s
for s in traverse.traverse_nodes(specs, order="topo", deptype=("link", "run"), root=True)
if not s.external
@@ -1571,7 +1568,7 @@ def _oci_put_manifest(
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
with open(config_file, "w", encoding="utf-8") as f:
with open(config_file, "w") as f:
json.dump(config, f, separators=(",", ":"))
config_file_checksum = Digest.from_sha256(
@@ -1641,33 +1638,19 @@ def _oci_update_base_images(
)
def _oci_default_tag(spec: spack.spec.Spec) -> str:
"""Return a valid, default image tag for a spec."""
return ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
#: Default OCI index tag
default_index_tag = "index.spack"
def tag_is_spec(tag: str) -> bool:
"""Check if a tag is likely a Spec"""
return tag.endswith(".spack") and tag != default_index_tag
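The helpers above map a spec to an OCI image tag. A hedged sketch of what a sanitizer like ensure_valid_tag has to guarantee, based on the OCI distribution spec's tag grammar; the regex and fallback below are assumptions for illustration, not Spack's implementation.

import re

# OCI tags must match [a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}; this sketch drops
# offending characters and truncates -- it is NOT Spack's ensure_valid_tag.
_TAG_RE = re.compile(r"[^a-zA-Z0-9._-]")

def sanitize_tag(tag: str) -> str:
    tag = _TAG_RE.sub("", tag)[:128]
    return tag if tag and tag[0] not in ".-" else f"_{tag}"[:128]

print(sanitize_tag("zlib-1.3.1-abcdef7.spack"))   # zlib-1.3.1-abcdef7.spack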
def _oci_push(
*,
target_image: ImageReference,
base_image: Optional[ImageReference],
installed_specs_with_deps: List[spack.spec.Spec],
installed_specs_with_deps: List[Spec],
tmpdir: str,
executor: concurrent.futures.Executor,
force: bool = False,
) -> Tuple[
List[spack.spec.Spec],
List[Spec],
Dict[str, Tuple[dict, dict]],
Dict[str, spack.oci.oci.Blob],
List[Tuple[spack.spec.Spec, BaseException]],
List[Tuple[Spec, BaseException]],
]:
# Spec dag hash -> blob
checksums: Dict[str, spack.oci.oci.Blob] = {}
@@ -1676,15 +1659,13 @@ def _oci_push(
base_images: Dict[str, Tuple[dict, dict]] = {}
# Specs not uploaded because they already exist
skipped: List[spack.spec.Spec] = []
skipped: List[Spec] = []
if not force:
tty.info("Checking for existing specs in the buildcache")
blobs_to_upload = []
tags_to_check = (
target_image.with_tag(_oci_default_tag(s)) for s in installed_specs_with_deps
)
tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
available_blobs = executor.map(_oci_get_blob_info, tags_to_check)
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
@@ -1712,8 +1693,8 @@ def _oci_push(
executor.submit(_oci_push_pkg_blob, target_image, spec, tmpdir) for spec in blobs_to_upload
]
manifests_to_upload: List[spack.spec.Spec] = []
errors: List[Tuple[spack.spec.Spec, BaseException]] = []
manifests_to_upload: List[Spec] = []
errors: List[Tuple[Spec, BaseException]] = []
# And update the spec to blob mapping for successful uploads
for spec, blob_future in zip(blobs_to_upload, blob_futures):
@@ -1739,7 +1720,7 @@ def _oci_push(
base_image_cache=base_images,
)
def extra_config(spec: spack.spec.Spec):
def extra_config(spec: Spec):
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
spec_dict["binary_cache_checksum"] = {
@@ -1755,7 +1736,7 @@ def extra_config(spec: spack.spec.Spec):
_oci_put_manifest,
base_images,
checksums,
target_image.with_tag(_oci_default_tag(spec)),
target_image.with_tag(default_tag(spec)),
tmpdir,
extra_config(spec),
{"org.opencontainers.image.description": spec.format()},
@@ -1772,7 +1753,7 @@ def extra_config(spec: spack.spec.Spec):
manifest_progress.start(spec, manifest_future.running())
if error is None:
manifest_progress.ok(
f"Tagged {_format_spec(spec)} as {target_image.with_tag(_oci_default_tag(spec))}"
f"Tagged {_format_spec(spec)} as {target_image.with_tag(default_tag(spec))}"
)
else:
manifest_progress.fail()
@@ -1807,13 +1788,13 @@ def _oci_update_index(
db = BuildCacheDatabase(db_root_dir)
for spec_dict in spec_dicts:
spec = spack.spec.Spec.from_dict(spec_dict)
spec = Spec.from_dict(spec_dict)
db.add(spec)
db.mark(spec, "in_buildcache", True)
# Create the index.json file
index_json_path = os.path.join(tmpdir, "index.json")
with open(index_json_path, "w", encoding="utf-8") as f:
with open(index_json_path, "w") as f:
db._write_to_file(f)
# Create an empty config.json file
@@ -1922,7 +1903,7 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
try:
as_string = binary_content.decode("utf-8")
if path.endswith(".json.sig"):
spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string)
spec_dict = Spec.extract_json_from_clearsig(as_string)
else:
spec_dict = json.loads(as_string)
except Exception as e:
@@ -1970,9 +1951,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
"signature_verified": "true-if-binary-pkg-was-already-verified"
}
"""
configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = (
spack.mirrors.mirror.MirrorCollection(binary=True).values()
)
configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
binary=True
).values()
if not configured_mirrors:
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
@@ -1997,7 +1978,7 @@ def fetch_url_to_mirror(url):
for mirror in configured_mirrors:
if mirror.fetch_url == url:
return mirror
return spack.mirrors.mirror.Mirror(url)
return spack.mirror.Mirror(url)
mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
@@ -2018,7 +1999,7 @@ def fetch_url_to_mirror(url):
if fetch_url.startswith("oci://"):
ref = spack.oci.image.ImageReference.from_string(
fetch_url[len("oci://") :]
).with_tag(_oci_default_tag(spec))
).with_tag(spack.oci.image.default_tag(spec))
# Fetch the manifest
try:
@@ -2262,8 +2243,7 @@ def relocate_package(spec):
]
if analogs:
# Prefer same-name analogs and prefer higher versions
# This matches the preferences in spack.spec.Spec.splice, so we
# will find same node
# This matches the preferences in Spec.splice, so we will find same node
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
lookup_dag_hash = analog.dag_hash()
@@ -2352,9 +2332,7 @@ def is_backup_file(file):
if not codesign:
return
for binary in changed_files:
# preserve the original inode by running codesign on a copy
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
codesign("-fs-", tmp_binary)
codesign("-fs-", binary)
# If we are installing back to the same location
# relocate the sbang location if the spack directory changed
@@ -2668,7 +2646,7 @@ def try_direct_fetch(spec, mirrors=None):
specfile_is_signed = False
found_specs = []
binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
for mirror in binary_mirrors:
buildcache_fetch_url_json = url_util.join(
@@ -2699,10 +2677,10 @@ def try_direct_fetch(spec, mirrors=None):
# are concrete (as they are built) so we need to mark this spec
# concrete on read-in.
if specfile_is_signed:
specfile_json = spack.spec.Spec.extract_json_from_clearsig(specfile_contents)
fetched_spec = spack.spec.Spec.from_dict(specfile_json)
specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
fetched_spec = Spec.from_dict(specfile_json)
else:
fetched_spec = spack.spec.Spec.from_json(specfile_contents)
fetched_spec = Spec.from_json(specfile_contents)
fetched_spec._mark_concrete()
found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
@@ -2729,7 +2707,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
if spec is None:
return []
if not spack.mirrors.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
tty.debug("No Spack mirrors are currently configured")
return {}
@@ -2768,7 +2746,7 @@ def clear_spec_cache():
def get_keys(install=False, trust=False, force=False, mirrors=None):
"""Get pgp public keys available on mirror with suffix .pub"""
mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True)
mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)
if not mirror_collection:
tty.die("Please add a spack mirror to allow " + "download of build caches.")
@@ -2823,7 +2801,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
def _url_push_keys(
*mirrors: Union[spack.mirrors.mirror.Mirror, str],
*mirrors: Union[spack.mirror.Mirror, str],
keys: List[str],
tmpdir: str,
update_index: bool = False,
@@ -2890,7 +2868,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
"""
rebuilds = {}
for mirror in spack.mirrors.mirror.MirrorCollection(mirrors, binary=True).values():
for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))
rebuild_list = []
@@ -2907,7 +2885,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
}
if output_file:
with open(output_file, "w", encoding="utf-8") as outf:
with open(output_file, "w") as outf:
outf.write(json.dumps(rebuilds))
return 1 if rebuilds else 0
@@ -2934,7 +2912,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
def download_buildcache_entry(file_descriptions, mirror_url=None):
if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True):
if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
tty.die(
"Please provide or add a spack mirror to allow " + "download of buildcache entries."
)
@@ -2943,7 +2921,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
return _download_buildcache_entry(mirror_root, file_descriptions)
for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values():
for mirror in spack.mirror.MirrorCollection(binary=True).values():
mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)
if _download_buildcache_entry(mirror_root, file_descriptions):
@@ -3001,7 +2979,7 @@ def __init__(self, all_architectures):
self.possible_specs = specs
def __call__(self, spec: spack.spec.Spec, **kwargs):
def __call__(self, spec: Spec, **kwargs):
"""
Args:
spec: The spec being searched for
@@ -3139,7 +3117,7 @@ def __init__(self, url: str, local_hash, urlopen=None) -> None:
def conditional_fetch(self) -> FetchIndexResult:
"""Download an index from an OCI registry type mirror."""
url_manifest = self.ref.with_tag(default_index_tag).manifest_url()
url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
try:
response = self.urlopen(
urllib.request.Request(

View File

@@ -146,7 +146,6 @@ def libc_external_spec(self) -> "spack.spec.Spec":
return self._external_spec(result)
def _external_spec(self, initial_spec) -> "spack.spec.Spec":
# TODO: Does this need to be changed?
initial_spec.namespace = "builtin"
initial_spec.compiler = self.host_compiler.spec
initial_spec.architecture = self.host_architecture

View File

@@ -37,7 +37,7 @@
import spack.binary_distribution
import spack.config
import spack.detection
import spack.mirrors.mirror
import spack.mirror
import spack.platforms
import spack.spec
import spack.store
@@ -91,7 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
# Promote (relative) paths to file urls
self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url
@property
def mirror_scope(self) -> spack.config.InternalConfigScope:

View File

@@ -56,6 +56,7 @@
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
import spack.build_systems._checks
import spack.build_systems.cmake
import spack.build_systems.meson
import spack.build_systems.python
@@ -856,6 +857,8 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
load_external_modules(pkg)
env_mods.set("SPACK_BUILD_ENV", f"{pkg.spec.name}-{pkg.spec.dag_hash()}")
# Make sure nothing's strange about the Spack environment.
validate(env_mods, tty.warn)
env_mods.apply_modifications()
@@ -882,9 +885,6 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
elif context == Context.RUN:
self.root_depflag = dt.RUN | dt.LINK
def accept(self, item):
return True
def neighbors(self, item):
spec = item.edge.spec
if spec.dag_hash() in self.root_hashes:
@@ -922,19 +922,19 @@ def effective_deptypes(
a flag specifying in what way they do so. The list is ordered topologically
from root to leaf, meaning that environment modifications should be applied
in reverse so that dependents override dependencies, not the other way around."""
topo_sorted_edges = traverse.traverse_topo_edges_generator(
traverse.with_artificial_edges(specs),
visitor=EnvironmentVisitor(*specs, context=context),
key=traverse.by_dag_hash,
visitor = traverse.TopoVisitor(
EnvironmentVisitor(*specs, context=context),
key=lambda x: x.dag_hash(),
root=True,
all_edges=True,
)
traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)
# Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
use_modes = defaultdict(lambda: UseMode(0))
nodes_with_type = []
for edge in topo_sorted_edges:
for edge in visitor.edges:
parent, child, depflag = edge.parent, edge.spec, edge.depflag
# Mark the starting point
@@ -1377,7 +1377,7 @@ def exitcode_msg(p):
return child_result
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
def get_package_context(traceback, context=3):
@@ -1426,20 +1426,27 @@ def make_stack(tb, stack=None):
# We found obj, the Package implementation we care about.
# Point out the location in the install method where we failed.
filename = inspect.getfile(frame.f_code)
lines = [f"{filename}:{frame.f_lineno}, in {frame.f_code.co_name}:"]
lineno = frame.f_lineno
if os.path.basename(filename) == "package.py":
# subtract 1 because we inject a magic import at the top of package files.
# TODO: get rid of the magic import.
lineno -= 1
lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]
# Build a message showing context in the install method.
sourcelines, start = inspect.getsourcelines(frame)
# Calculate lineno of the error relative to the start of the function.
fun_lineno = frame.f_lineno - start
fun_lineno = lineno - start
start_ctx = max(0, fun_lineno - context)
sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
for i, line in enumerate(sourcelines):
is_error = start_ctx + i == fun_lineno
mark = ">> " if is_error else " "
# Add start to get lineno relative to start of file, not function.
marked = f" {'>> ' if is_error else ' '}{start + start_ctx + i:-6d}{line.rstrip()}"
marked = " {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip())
if is_error:
marked = colorize("@R{%s}" % cescape(marked))
lines.append(marked)
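A self-contained sketch of the context-extraction idea in the hunk above: given a frame and line number, print a few surrounding source lines with a ">>" marker on the failing one. It is not tied to Spack's package machinery and should be run as a script so the source is available to inspect.

import inspect, sys

def show_context(frame, lineno, context=3):
    sourcelines, start = inspect.getsourcelines(frame)
    fun_lineno = lineno - start                  # error line relative to the function start
    start_ctx = max(0, fun_lineno - context)
    for i, line in enumerate(sourcelines[start_ctx:fun_lineno + context + 1]):
        mark = ">> " if start_ctx + i == fun_lineno else "   "
        print(f" {mark}{start + start_ctx + i:-6d}{line.rstrip()}")

def boom():
    x = 1 + 1
    raise RuntimeError("illustrative failure")

try:
    boom()
except RuntimeError:
    tb = sys.exc_info()[2].tb_next               # frame inside boom()
    show_context(tb.tb_frame, tb.tb_lineno)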

View File

@@ -9,7 +9,6 @@
import spack.builder
import spack.error
import spack.phase_callbacks
import spack.relocate
import spack.spec
import spack.store
@@ -64,7 +63,7 @@ def apply_macos_rpath_fixups(builder: spack.builder.Builder):
def ensure_build_dependencies_or_raise(
spec: spack.spec.Spec, dependencies: List[str], error_msg: str
spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
):
"""Ensure that some build dependencies are present in the concrete spec.
@@ -72,7 +71,7 @@ def ensure_build_dependencies_or_raise(
Args:
spec: concrete spec to be checked.
dependencies: list of package names of required build dependencies
dependencies: list of abstract specs to be satisfied
error_msg: brief error message to be prepended to a longer description
Raises:
@@ -128,8 +127,8 @@ def execute_install_time_tests(builder: spack.builder.Builder):
builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)
class BuilderWithDefaults(spack.builder.Builder):
"""Base class for all specific builders with common callbacks registered."""
class BaseBuilder(spack.builder.Builder):
"""Base class for builders to register common checks"""
# Check that self.prefix is there after installation
spack.phase_callbacks.run_after("install")(sanity_check_prefix)
spack.builder.run_after("install")(sanity_check_prefix)

View File

@@ -6,7 +6,7 @@
import os.path
import stat
import subprocess
from typing import Callable, List, Optional, Set, Tuple, Union
from typing import List
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -15,9 +15,6 @@
import spack.builder
import spack.error
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.operating_systems.mac_os import macos_version
@@ -25,7 +22,7 @@
from spack.version import Version
from ._checks import (
BuilderWithDefaults,
BaseBuilder,
apply_macos_rpath_fixups,
ensure_build_dependencies_or_raise,
execute_build_time_tests,
@@ -72,14 +69,14 @@ def flags_to_build_system_args(self, flags):
# Legacy methods (used by too many packages to change them,
# need to forward to the builder)
def enable_or_disable(self, *args, **kwargs):
return spack.builder.create(self).enable_or_disable(*args, **kwargs)
return self.builder.enable_or_disable(*args, **kwargs)
def with_or_without(self, *args, **kwargs):
return spack.builder.create(self).with_or_without(*args, **kwargs)
return self.builder.with_or_without(*args, **kwargs)
@spack.builder.builder("autotools")
class AutotoolsBuilder(BuilderWithDefaults):
class AutotoolsBuilder(BaseBuilder):
"""The autotools builder encodes the default way of installing software built
with autotools. It has four phases that can be overridden, if need be:
@@ -160,7 +157,7 @@ class AutotoolsBuilder(BuilderWithDefaults):
install_libtool_archives = False
@property
def patch_config_files(self) -> bool:
def patch_config_files(self):
"""Whether to update old ``config.guess`` and ``config.sub`` files
distributed with the tarball.
@@ -180,20 +177,23 @@ def patch_config_files(self) -> bool:
)
@property
def _removed_la_files_log(self) -> str:
def _removed_la_files_log(self):
"""File containing the list of removed libtool archives"""
return os.path.join(self.build_directory, "removed_la_files.txt")
build_dir = self.build_directory
if not os.path.isabs(self.build_directory):
build_dir = os.path.join(self.pkg.stage.path, build_dir)
return os.path.join(build_dir, "removed_la_files.txt")
@property
def archive_files(self) -> List[str]:
def archive_files(self):
"""Files to archive for packages based on autotools"""
files = [os.path.join(self.build_directory, "config.log")]
if not self.install_libtool_archives:
files.append(self._removed_la_files_log)
return files
@spack.phase_callbacks.run_after("autoreconf")
def _do_patch_config_files(self) -> None:
@spack.builder.run_after("autoreconf")
def _do_patch_config_files(self):
"""Some packages ship with older config.guess/config.sub files and need to
have these updated when installed on a newer architecture.
@@ -294,7 +294,7 @@ def runs_ok(script_abs_path):
and set the prefix to the directory containing the `config.guess` and
`config.sub` files.
"""
raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.pkg.name))
raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))
# Copy the good files over the bad ones
for abs_path in to_be_patched:
@@ -304,8 +304,8 @@ def runs_ok(script_abs_path):
fs.copy(substitutes[name], abs_path)
os.chmod(abs_path, mode)
@spack.phase_callbacks.run_before("configure")
def _patch_usr_bin_file(self) -> None:
@spack.builder.run_before("configure")
def _patch_usr_bin_file(self):
"""On NixOS file is not available in /usr/bin/file. Patch configure
scripts to use file from path."""
@@ -316,8 +316,8 @@ def _patch_usr_bin_file(self) -> None:
with fs.keep_modification_time(*x.filenames):
x.filter(regex="/usr/bin/file", repl="file", string=True)
@spack.phase_callbacks.run_before("configure")
def _set_autotools_environment_variables(self) -> None:
@spack.builder.run_before("configure")
def _set_autotools_environment_variables(self):
"""Many autotools builds use a version of mknod.m4 that fails when
running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.
@@ -330,8 +330,8 @@ def _set_autotools_environment_variables(self) -> None:
"""
os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"
@spack.phase_callbacks.run_before("configure")
def _do_patch_libtool_configure(self) -> None:
@spack.builder.run_before("configure")
def _do_patch_libtool_configure(self):
"""Patch bugs that propagate from libtool macros into "configure" and
further into "libtool". Note that patches that can be fixed by patching
"libtool" directly should be implemented in the _do_patch_libtool method
@@ -358,8 +358,8 @@ def _do_patch_libtool_configure(self) -> None:
# Support Libtool 2.4.2 and older:
x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
@spack.phase_callbacks.run_after("configure")
def _do_patch_libtool(self) -> None:
@spack.builder.run_after("configure")
def _do_patch_libtool(self):
"""If configure generates a "libtool" script that does not correctly
detect the compiler (and patch_libtool is set), patch in the correct
values for libtool variables.
@@ -507,69 +507,27 @@ def _do_patch_libtool(self) -> None:
)
@property
def configure_directory(self) -> str:
def configure_directory(self):
"""Return the directory where 'configure' resides."""
return self.pkg.stage.source_path
@property
def configure_abs_path(self) -> str:
def configure_abs_path(self):
# Absolute path to configure
configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
return configure_abs_path
@property
def build_directory(self) -> str:
def build_directory(self):
"""Override to provide another place to build the package"""
# Handle the case where the configure directory is set to a non-absolute path
# Non-absolute paths are always relative to the staging source path
build_dir = self.configure_directory
if not os.path.isabs(build_dir):
build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
return build_dir
return self.configure_directory
@spack.phase_callbacks.run_before("autoreconf")
def delete_configure_to_force_update(self) -> None:
@spack.builder.run_before("autoreconf")
def delete_configure_to_force_update(self):
if self.force_autoreconf:
fs.force_remove(self.configure_abs_path)
@property
def autoreconf_search_path_args(self) -> List[str]:
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
of build deps, skips the default path of automake, move external include
flags to the back, since they might pull in unrelated m4 files shadowing
spack dependencies."""
return _autoreconf_search_path_args(self.spec)
@spack.phase_callbacks.run_after("autoreconf")
def set_configure_or_die(self) -> None:
"""Ensure the presence of a "configure" script, or raise. If the "configure"
is found, a module level attribute is set.
Raises:
RuntimeError: if the "configure" script is not found
"""
# Check if the "configure" script is there. If not raise a RuntimeError.
if not os.path.exists(self.configure_abs_path):
msg = "configure script not found in {0}"
raise RuntimeError(msg.format(self.configure_directory))
# Monkey-patch the configure script in the corresponding module
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
globals_for_pkg.configure = Executable(self.configure_abs_path)
globals_for_pkg.propagate_changes_to_mro()
def configure_args(self) -> List[str]:
"""Return the list of all the arguments that must be passed to configure,
except ``--prefix`` which will be pre-pended to the list.
"""
return []
def autoreconf(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def autoreconf(self, pkg, spec, prefix):
"""Not needed usually, configure should be already there"""
# If configure exists nothing needs to be done
@@ -596,12 +554,39 @@ def autoreconf(
autoreconf_args += self.autoreconf_extra_args
self.pkg.module.autoreconf(*autoreconf_args)
def configure(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
@property
def autoreconf_search_path_args(self):
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
of build deps, skips the default path of automake, move external include
flags to the back, since they might pull in unrelated m4 files shadowing
spack dependencies."""
return _autoreconf_search_path_args(self.spec)
@spack.builder.run_after("autoreconf")
def set_configure_or_die(self):
"""Ensure the presence of a "configure" script, or raise. If the "configure"
is found, a module level attribute is set.
Raises:
RuntimeError: if the "configure" script is not found
"""
# Check if the "configure" script is there. If not raise a RuntimeError.
if not os.path.exists(self.configure_abs_path):
msg = "configure script not found in {0}"
raise RuntimeError(msg.format(self.configure_directory))
# Monkey-patch the configure script in the corresponding module
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
globals_for_pkg.configure = Executable(self.configure_abs_path)
globals_for_pkg.propagate_changes_to_mro()
def configure_args(self):
"""Return the list of all the arguments that must be passed to configure,
except ``--prefix`` which will be pre-pended to the list.
"""
return []
def configure(self, pkg, spec, prefix):
"""Run "configure", with the arguments specified by the builder and an
appropriately set prefix.
"""
@@ -612,12 +597,7 @@ def configure(
with fs.working_dir(self.build_directory, create=True):
pkg.module.configure(*options)
def build(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def build(self, pkg, spec, prefix):
"""Run "make" on the build targets specified by the builder."""
# See https://autotools.io/automake/silent.html
params = ["V=1"]
@@ -625,49 +605,41 @@ def build(
with fs.working_dir(self.build_directory):
pkg.module.make(*params)
def install(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def install(self, pkg, spec, prefix):
"""Run "make" on the install targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.make(*self.install_targets)
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.builder.run_after("build")(execute_build_time_tests)
def check(self) -> None:
def check(self):
"""Run "make" on the ``test`` and ``check`` targets, if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_make_target_execute("test")
self.pkg._if_make_target_execute("check")
def _activate_or_not(
self,
name: str,
activation_word: str,
deactivation_word: str,
activation_value: Optional[Union[Callable, str]] = None,
variant=None,
) -> List[str]:
self, name, activation_word, deactivation_word, activation_value=None, variant=None
):
"""This function contain the current implementation details of
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.
Args:
name: name of the option that is being activated or not
activation_word: the default activation word ('with' in the case of
``with_or_without``)
deactivation_word: the default deactivation word ('without' in the case of
``with_or_without``)
activation_value: callable that accepts a single value. This value is either one of the
allowed values for a multi-valued variant or the name of a bool-valued variant.
name (str): name of the option that is being activated or not
activation_word (str): the default activation word ('with' in the
case of ``with_or_without``)
deactivation_word (str): the default deactivation word ('without'
in the case of ``with_or_without``)
activation_value (typing.Callable): callable that accepts a single
value. This value is either one of the allowed values for a
multi-valued variant or the name of a bool-valued variant.
Returns the parameter to be used when the value is activated.
The special value "prefix" can also be assigned and will return
The special value 'prefix' can also be assigned and will return
``spec[name].prefix`` as activation parameter.
variant: name of the variant that is being processed (if different from option name)
variant (str): name of the variant that is being processed
(if different from option name)
Examples:
@@ -675,19 +647,19 @@ def _activate_or_not(
.. code-block:: python
variant("foo", values=("x", "y"), description=")
variant("bar", default=True, description=")
variant("ba_z", default=True, description=")
variant('foo', values=('x', 'y'), description='')
variant('bar', default=True, description='')
variant('ba_z', default=True, description='')
calling this function like:
.. code-block:: python
_activate_or_not(
"foo", "with", "without", activation_value="prefix"
'foo', 'with', 'without', activation_value='prefix'
)
_activate_or_not("bar", "with", "without")
_activate_or_not("ba-z", "with", "without", variant="ba_z")
_activate_or_not('bar', 'with', 'without')
_activate_or_not('ba-z', 'with', 'without', variant='ba_z')
will generate the following configuration options:
@@ -707,8 +679,8 @@ def _activate_or_not(
Raises:
KeyError: if name is not among known variants
"""
spec: spack.spec.Spec = self.pkg.spec
args: List[str] = []
spec = self.pkg.spec
args = []
if activation_value == "prefix":
activation_value = lambda x: spec[x].prefix
@@ -726,7 +698,7 @@ def _activate_or_not(
# Create a list of pairs. Each pair includes a configuration
# option and whether or not that option is activated
vdef = self.pkg.get_variant(variant)
if set(vdef.values) == set((True, False)): # type: ignore
if set(vdef.values) == set((True, False)):
# BoolValuedVariant carry information about a single option.
# Nonetheless, for uniformity of treatment we'll package them
# in an iterable of one element.
@@ -737,12 +709,14 @@ def _activate_or_not(
# package's build system. It excludes values which have special
# meanings and do not correspond to features (e.g. "none")
feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
options = [(v, f"{variant}={v}" in spec) for v in feature_values] # type: ignore
options = [(value, f"{variant}={value}" in spec) for value in feature_values]
# For each allowed value in the list of values
for option_value, activated in options:
# Search for an override in the package for this value
override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
override_name = "{0}_or_{1}_{2}".format(
activation_word, deactivation_word, option_value
)
line_generator = getattr(self, override_name, None) or getattr(
self.pkg, override_name, None
)
@@ -751,24 +725,19 @@ def _activate_or_not(
def _default_generator(is_activated):
if is_activated:
line = f"--{activation_word}-{option_value}"
line = "--{0}-{1}".format(activation_word, option_value)
if activation_value is not None and activation_value(
option_value
): # NOQA=ignore=E501
line = f"{line}={activation_value(option_value)}"
line += "={0}".format(activation_value(option_value))
return line
return f"--{deactivation_word}-{option_value}"
return "--{0}-{1}".format(deactivation_word, option_value)
line_generator = _default_generator
args.append(line_generator(activated))
return args
def with_or_without(
self,
name: str,
activation_value: Optional[Union[Callable, str]] = None,
variant: Optional[str] = None,
) -> List[str]:
def with_or_without(self, name, activation_value=None, variant=None):
"""Inspects a variant and returns the arguments that activate
or deactivate the selected feature(s) for the configure options.
@@ -783,11 +752,12 @@ def with_or_without(
``variant=value`` is in the spec.
Args:
name: name of a valid multi-valued variant
activation_value: callable that accepts a single value and returns the parameter to be
used leading to an entry of the type ``--with-{name}={parameter}``.
name (str): name of a valid multi-valued variant
activation_value (typing.Callable): callable that accepts a single
value and returns the parameter to be used leading to an entry
of the type ``--with-{name}={parameter}``.
The special value "prefix" can also be assigned and will return
The special value 'prefix' can also be assigned and will return
``spec[name].prefix`` as activation parameter.
Returns:
@@ -795,22 +765,18 @@ def with_or_without(
"""
return self._activate_or_not(name, "with", "without", activation_value, variant)
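A hedged sketch of how a package might use these helpers from its configure_args; the variant names, values, and the any_combination_of helper in the comment are illustrative, not taken from a real package.

# Illustrative package fragment (variant names and values are invented):
#
#     variant("mpi", default=True, description="Enable MPI support")
#     variant("codecs", values=any_combination_of("jpeg", "png"), description="codecs")
#
def configure_args(self):
    args = []
    # --with-mpi=<prefix of the mpi provider> or --without-mpi
    args.extend(self.with_or_without("mpi", activation_value="prefix"))
    # --enable-jpeg/--disable-jpeg and --enable-png/--disable-png
    args.extend(self.enable_or_disable("codecs"))
    return args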
def enable_or_disable(
self,
name: str,
activation_value: Optional[Union[Callable, str]] = None,
variant: Optional[str] = None,
) -> List[str]:
def enable_or_disable(self, name, activation_value=None, variant=None):
"""Same as
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
Args:
name: name of a valid multi-valued variant
activation_value: if present accepts a single value and returns the parameter to be
used leading to an entry of the type ``--enable-{name}={parameter}``
name (str): name of a valid multi-valued variant
activation_value (typing.Callable): if present accepts a single value
and returns the parameter to be used leading to an entry of the
type ``--enable-{name}={parameter}``
The special value "prefix" can also be assigned and will return
The special value 'prefix' can also be assigned and will return
``spec[name].prefix`` as activation parameter.
Returns:
@@ -818,15 +784,15 @@ def enable_or_disable(
"""
return self._activate_or_not(name, "enable", "disable", activation_value, variant)
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
spack.builder.run_after("install")(execute_install_time_tests)
def installcheck(self) -> None:
def installcheck(self):
"""Run "make" on the ``installcheck`` target, if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_make_target_execute("installcheck")
@spack.phase_callbacks.run_after("install")
def remove_libtool_archives(self) -> None:
@spack.builder.run_after("install")
def remove_libtool_archives(self):
"""Remove all .la files in prefix sub-folders if the package sets
``install_libtool_archives`` to be False.
"""
@@ -838,7 +804,7 @@ def remove_libtool_archives(self) -> None:
libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
with fs.safe_remove(*libtool_files):
fs.mkdirp(os.path.dirname(self._removed_la_files_log))
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
with open(self._removed_la_files_log, mode="w") as f:
f.write("\n".join(libtool_files))
def setup_build_environment(self, env):
@@ -848,13 +814,12 @@ def setup_build_environment(self, env):
env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
dirs_seen: Set[Tuple[int, int]] = set()
flags_spack: List[str] = []
flags_external: List[str] = []
def _autoreconf_search_path_args(spec):
dirs_seen = set()
flags_spack, flags_external = [], []
# We don't want to add an include flag for automake's default search path.
for automake in spec.dependencies(name="automake", deptype="build"):

View File

@@ -10,7 +10,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import spack.phase_callbacks
import spack.builder
from .cmake import CMakeBuilder, CMakePackage
@@ -192,10 +192,7 @@ def initconfig_mpi_entries(self):
entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc))
entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx))
# not all MPIs have Fortran wrappers
if hasattr(spec["mpi"], "mpifc"):
entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
# Check for slurm
using_slurm = False
@@ -324,7 +321,7 @@ def initconfig(self, pkg, spec, prefix):
+ self.initconfig_package_entries()
)
with open(self.cache_name, "w", encoding="utf-8") as f:
with open(self.cache_name, "w") as f:
for entry in cache_entries:
f.write("%s\n" % entry)
f.write("\n")
@@ -335,7 +332,7 @@ def std_cmake_args(self):
args.extend(["-C", self.cache_path])
return args
@spack.phase_callbacks.run_after("install")
@spack.builder.run_after("install")
def install_cmake_cache(self):
fs.mkdirp(self.pkg.spec.prefix.share.cmake)
fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
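The initconfig phase above writes cache entries to a file that CMake later consumes with -C. A small sketch of the kind of set(... CACHE ...) lines such helpers produce; the stand-in functions below only mimic the cmake_cache_* helpers used above and their exact signatures are assumptions.

# Minimal stand-ins for the cmake_cache_* helpers (assumed behavior):
def cmake_cache_path(name, value, comment=""):
    return f'set({name} "{value}" CACHE PATH "{comment}")'

def cmake_cache_option(name, boolean_value, comment=""):
    value = "ON" if boolean_value else "OFF"
    return f'set({name} {value} CACHE BOOL "{comment}")'

entries = [
    cmake_cache_path("MPI_C_COMPILER", "/tmp/example/mpi/bin/mpicc"),   # illustrative path
    cmake_cache_option("ENABLE_TESTS", False),
]
with open("example-initconfig.cmake", "w", encoding="utf-8") as f:
    f.write("\n".join(entries) + "\n")
# CMake then consumes the file via: cmake -C example-initconfig.cmake ...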

View File

@@ -7,11 +7,10 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on
from spack.multimethod import when
from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests
class CargoPackage(spack.package_base.PackageBase):
@@ -28,7 +27,7 @@ class CargoPackage(spack.package_base.PackageBase):
@spack.builder.builder("cargo")
class CargoBuilder(BuilderWithDefaults):
class CargoBuilder(BaseBuilder):
"""The Cargo builder encodes the most common way of building software with
a rust Cargo.toml file. It has two phases that can be overridden, if need be:
@@ -78,7 +77,7 @@ def install(self, pkg, spec, prefix):
with fs.working_dir(self.build_directory):
fs.install_tree("out", prefix)
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
spack.builder.run_after("install")(execute_install_time_tests)
def check(self):
"""Run "cargo test"."""

View File

@@ -9,7 +9,7 @@
import re
import sys
from itertools import chain
from typing import Any, List, Optional, Tuple
from typing import List, Optional, Set, Tuple
import llnl.util.filesystem as fs
from llnl.util.lang import stable_partition
@@ -18,15 +18,11 @@
import spack.deptypes as dt
import spack.error
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack import traverse
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from spack.util.environment import filter_system_paths
from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests
# Regex to extract the primary generator from the CMake generator
# string.
@@ -52,9 +48,9 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
python_executable = pkg.spec["python"].command.path
args.extend(
[
define("PYTHON_EXECUTABLE", python_executable),
define("Python_EXECUTABLE", python_executable),
define("Python3_EXECUTABLE", python_executable),
CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
CMakeBuilder.define("Python_EXECUTABLE", python_executable),
CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
]
)
@@ -89,7 +85,7 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
ipo = False
if cmake.satisfies("@3.9:"):
args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
# Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
# find_package may search there. This is not what we want.
@@ -97,36 +93,30 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
# Do not populate CMake User Package Registry
if cmake.satisfies("@3.15:"):
# see https://cmake.org/cmake/help/latest/policy/CMP0090.html
args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
elif cmake.satisfies("@3.1:"):
# see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
# Do not use CMake User/System Package Registry
# https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
if cmake.satisfies("@3.16:"):
args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
elif cmake.satisfies("@3.1:3.15"):
args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
# Export a compilation database if supported.
if _supports_compilation_databases(pkg):
args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
# Enable MACOSX_RPATH by default when cmake_minimum_required < 3
# https://cmake.org/cmake/help/latest/policy/CMP0042.html
if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
# Disable find package's config mode for versions of Boost that
# didn't provide it. See https://github.com/spack/spack/issues/20169
# and https://cmake.org/cmake/help/latest/module/FindBoost.html
if pkg.spec.satisfies("^boost@:1.69.0"):
args.append(define("Boost_NO_BOOST_CMAKE", True))
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
def generator(*names: str, default: Optional[str] = None) -> None:
def generator(*names: str, default: Optional[str] = None):
"""The build system generator to use.
See ``cmake --help`` for a list of valid generators.
@@ -167,18 +157,15 @@ def _values(x):
def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
"""Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
attribute of direct build/test and transitive link dependencies."""
edges = traverse.traverse_topo_edges_generator(
traverse.with_artificial_edges([pkg.spec]),
visitor=traverse.MixedDepthVisitor(
direct=dt.BUILD | dt.TEST, transitive=dt.LINK, key=traverse.by_dag_hash
),
key=traverse.by_dag_hash,
root=False,
all_edges=False, # cover all nodes, not all edges
)
ordered_specs = [edge.spec for edge in edges]
# Add direct build/test deps
selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
# Add transitive link deps
selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
# Separate out externals so they do not shadow Spack prefixes
externals, spack_built = stable_partition((s for s in ordered_specs), lambda x: x.external)
externals, spack_built = stable_partition(
(s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
lambda x: x.external,
)
return filter_system_paths(
path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
@@ -276,15 +263,15 @@ def flags_to_build_system_args(self, flags):
# Legacy methods (used by too many packages to change them,
# need to forward to the builder)
def define(self, cmake_var: str, value: Any) -> str:
return define(cmake_var, value)
def define(self, *args, **kwargs):
return self.builder.define(*args, **kwargs)
def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
return define_from_variant(self, cmake_var, variant)
def define_from_variant(self, *args, **kwargs):
return self.builder.define_from_variant(*args, **kwargs)
@spack.builder.builder("cmake")
class CMakeBuilder(BuilderWithDefaults):
class CMakeBuilder(BaseBuilder):
"""The cmake builder encodes the default way of building software with CMake. IT
has three phases that can be overridden:
@@ -334,15 +321,15 @@ class CMakeBuilder(BuilderWithDefaults):
build_time_test_callbacks = ["check"]
@property
def archive_files(self) -> List[str]:
def archive_files(self):
"""Files to archive for packages based on CMake"""
files = [os.path.join(self.build_directory, "CMakeCache.txt")]
if _supports_compilation_databases(self.pkg):
if _supports_compilation_databases(self):
files.append(os.path.join(self.build_directory, "compile_commands.json"))
return files
@property
def root_cmakelists_dir(self) -> str:
def root_cmakelists_dir(self):
"""The relative path to the directory containing CMakeLists.txt
This path is relative to the root of the extracted tarball,
@@ -351,17 +338,16 @@ def root_cmakelists_dir(self) -> str:
return self.pkg.stage.source_path
@property
def generator(self) -> str:
def generator(self):
if self.spec.satisfies("generator=make"):
return "Unix Makefiles"
if self.spec.satisfies("generator=ninja"):
return "Ninja"
raise ValueError(
f'{self.spec.format()} has an unsupported value for the "generator" variant'
)
msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
raise ValueError(msg)
@property
def std_cmake_args(self) -> List[str]:
def std_cmake_args(self):
"""Standard cmake arguments provided as a property for
convenience of package writers
"""
@@ -370,9 +356,7 @@ def std_cmake_args(self) -> List[str]:
return args
@staticmethod
def std_args(
pkg: spack.package_base.PackageBase, generator: Optional[str] = None
) -> List[str]:
def std_args(pkg, generator=None):
"""Computes the standard cmake arguments for a generic package"""
default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
generator = generator or default_generator
@@ -389,6 +373,7 @@ def std_args(
except KeyError:
build_type = "RelWithDebInfo"
define = CMakeBuilder.define
args = [
"-G",
generator,
@@ -420,31 +405,152 @@ def std_args(
return args
@staticmethod
def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
return define_cuda_architectures(pkg)
def define_cuda_architectures(pkg):
"""Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
``cuda_arch`` is a variant composed of a list of target CUDA architectures, and
it is declared in the cuda package.
This method is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.
"""
cmake_flag = str()
if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
cmake_flag = CMakeBuilder.define(
"CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
)
return cmake_flag
@staticmethod
def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
return define_hip_architectures(pkg)
def define_hip_architectures(pkg):
"""Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
``amdgpu_target`` is a variant composed of a list of the target HIP
architectures, and it is declared in the rocm package.
This method is a no-op for cmake<3.21 and when the ``amdgpu_target`` variant is
not set.
"""
cmake_flag = str()
if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
cmake_flag = CMakeBuilder.define(
"CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
)
return cmake_flag
@staticmethod
def define(cmake_var: str, value: Any) -> str:
return define(cmake_var, value)
def define(cmake_var, value):
"""Return a CMake command line argument that defines a variable.
def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
return define_from_variant(self.pkg, cmake_var, variant)
The resulting argument will convert boolean values to OFF/ON
and lists/tuples to CMake semicolon-separated string lists. All other
values will be interpreted as strings.
Examples:
.. code-block:: python
[define('BUILD_SHARED_LIBS', True),
define('CMAKE_CXX_STANDARD', 14),
define('swr', ['avx', 'avx2'])]
will generate the following configuration options:
.. code-block:: console
["-DBUILD_SHARED_LIBS:BOOL=ON",
"-DCMAKE_CXX_STANDARD:STRING=14",
"-DSWR:STRING=avx;avx2]
"""
# Render the value as a typed CMake cache entry: booleans become BOOL ON/OFF,
# sequences become semicolon-separated STRING lists, everything else a STRING
if isinstance(value, bool):
kind = "BOOL"
value = "ON" if value else "OFF"
else:
kind = "STRING"
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
value = ";".join(str(v) for v in value)
else:
value = str(value)
return "".join(["-D", cmake_var, ":", kind, "=", value])
def define_from_variant(self, cmake_var, variant=None):
"""Return a CMake command line argument from the given variant's value.
The optional ``variant`` argument defaults to the lower-case transform
of ``cmake_var``.
This utility function is similar to
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.
Examples:
Given a package with:
.. code-block:: python
variant('cxxstd', default='11', values=('11', '14'),
multi=False, description='')
variant('shared', default=True, description='')
variant('swr', values=any_combination_of('avx', 'avx2'),
description='')
calling this function like:
.. code-block:: python
[self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
self.define_from_variant('SWR')]
will generate the following configuration options:
.. code-block:: console
["-DBUILD_SHARED_LIBS:BOOL=ON",
"-DCMAKE_CXX_STANDARD:STRING=14",
"-DSWR:STRING=avx;avx2]
for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
Note: if the provided variant is conditional, and the condition is not met,
this function returns an empty string. CMake discards empty strings
provided on the command line.
"""
if variant is None:
variant = cmake_var.lower()
if not self.pkg.has_variant(variant):
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
if variant not in self.pkg.spec.variants:
return ""
value = self.pkg.spec.variants[variant].value
if isinstance(value, (tuple, list)):
# Sort multi-valued variants for reproducibility
value = sorted(value)
return self.define(cmake_var, value)
@property
def build_dirname(self) -> str:
def build_dirname(self):
"""Directory name to use when building the package."""
return f"spack-build-{self.pkg.spec.dag_hash(7)}"
return "spack-build-%s" % self.pkg.spec.dag_hash(7)
@property
def build_directory(self) -> str:
def build_directory(self):
"""Full-path to the directory to use when building the package."""
return os.path.join(self.pkg.stage.path, self.build_dirname)
def cmake_args(self) -> List[str]:
def cmake_args(self):
"""List of all the arguments that must be passed to cmake, except:
* CMAKE_INSTALL_PREFIX
@@ -454,12 +560,7 @@ def cmake_args(self) -> List[str]:
"""
return []
def cmake(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def cmake(self, pkg, spec, prefix):
"""Runs ``cmake`` in the build directory"""
# skip cmake phase if it is an incremental develop build
@@ -474,12 +575,7 @@ def cmake(
with fs.working_dir(self.build_directory, create=True):
pkg.module.cmake(*options)
def build(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def build(self, pkg, spec, prefix):
"""Make the build targets"""
with fs.working_dir(self.build_directory):
if self.generator == "Unix Makefiles":
@@ -488,12 +584,7 @@ def build(
self.build_targets.append("-v")
pkg.module.ninja(*self.build_targets)
def install(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def install(self, pkg, spec, prefix):
"""Make the install targets"""
with fs.working_dir(self.build_directory):
if self.generator == "Unix Makefiles":
@@ -501,9 +592,9 @@ def install(
elif self.generator == "Ninja":
pkg.module.ninja(*self.install_targets)
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.builder.run_after("build")(execute_build_time_tests)
def check(self) -> None:
def check(self):
"""Search the CMake-generated files for the targets ``test`` and ``check``,
and runs them if found.
"""
@@ -514,133 +605,3 @@ def check(self) -> None:
elif self.generator == "Ninja":
self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
self.pkg._if_ninja_target_execute("check")
def define(cmake_var: str, value: Any) -> str:
"""Return a CMake command line argument that defines a variable.
The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
semicolon-separated string lists. All other values will be interpreted as strings.
Examples:
.. code-block:: python
[define("BUILD_SHARED_LIBS", True),
define("CMAKE_CXX_STANDARD", 14),
define("swr", ["avx", "avx2"])]
will generate the following configuration options:
.. code-block:: console
["-DBUILD_SHARED_LIBS:BOOL=ON",
"-DCMAKE_CXX_STANDARD:STRING=14",
"-DSWR:STRING=avx;avx2]
"""
# Render the value as a typed CMake cache entry: booleans become BOOL ON/OFF,
# sequences become semicolon-separated STRING lists, everything else a STRING
if isinstance(value, bool):
kind = "BOOL"
value = "ON" if value else "OFF"
else:
kind = "STRING"
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
value = ";".join(str(v) for v in value)
else:
value = str(value)
return "".join(["-D", cmake_var, ":", kind, "=", value])
def define_from_variant(
pkg: spack.package_base.PackageBase, cmake_var: str, variant: Optional[str] = None
) -> str:
"""Return a CMake command line argument from the given variant's value.
The optional ``variant`` argument defaults to the lower-case transform
of ``cmake_var``.
Examples:
Given a package with:
.. code-block:: python
variant("cxxstd", default="11", values=("11", "14"),
multi=False, description="")
variant("shared", default=True, description="")
variant("swr", values=any_combination_of("avx", "avx2"),
description="")
calling this function like:
.. code-block:: python
[
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
self.define_from_variant("SWR"),
]
will generate the following configuration options:
.. code-block:: console
[
"-DBUILD_SHARED_LIBS:BOOL=ON",
"-DCMAKE_CXX_STANDARD:STRING=14",
"-DSWR:STRING=avx;avx2",
]
for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
Note: if the provided variant is conditional, and the condition is not met, this function
returns an empty string. CMake discards empty strings provided on the command line.
"""
if variant is None:
variant = cmake_var.lower()
if not pkg.has_variant(variant):
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))
if variant not in pkg.spec.variants:
return ""
value = pkg.spec.variants[variant].value
if isinstance(value, (tuple, list)):
# Sort multi-valued variants for reproducibility
value = sorted(value)
return define(cmake_var, value)
def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
"""Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
``amdgpu_target`` is a variant composed of a list of the target HIP
architectures, and it is declared in the rocm package.
This method is a no-op for cmake<3.21 and when the ``amdgpu_target`` variant is
not set.
"""
if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)
return ""
def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
"""Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
``cuda_arch`` is a variant composed of a list of target CUDA architectures, and
it is declared in the cuda package.
This method is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.
"""
if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
return ""


@@ -180,6 +180,13 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@7:", when="+cuda ^cuda@:9.1 target=x86_64:")
conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=x86_64:")
conflicts("%gcc@9:", when="+cuda ^cuda@:10.2.89 target=x86_64:")
conflicts("%pgi@:14.8", when="+cuda ^cuda@:7.0.27 target=x86_64:")
conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
conflicts("%clang@:3.7,4:", when="+cuda ^cuda@8.0:9.0 target=x86_64:")
conflicts("%clang@:3.7,4.1:", when="+cuda ^cuda@9.1 target=x86_64:")
@@ -205,6 +212,9 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=ppc64le:")
conflicts("%gcc@9:", when="+cuda ^cuda@:10.1.243 target=ppc64le:")
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
conflicts("%pgi", when="+cuda ^cuda@:8 target=ppc64le:")
conflicts("%pgi@:16", when="+cuda ^cuda@:9.1.185 target=ppc64le:")
conflicts("%pgi@:17", when="+cuda ^cuda@:10 target=ppc64le:")
conflicts("%clang@4:", when="+cuda ^cuda@:9.0.176 target=ppc64le:")
conflicts("%clang@5:", when="+cuda ^cuda@:9.1 target=ppc64le:")
conflicts("%clang@6:", when="+cuda ^cuda@:9.2 target=ppc64le:")


@@ -7,9 +7,8 @@
import spack.builder
import spack.directives
import spack.package_base
import spack.phase_callbacks
from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests
class Package(spack.package_base.PackageBase):
@@ -27,7 +26,7 @@ class Package(spack.package_base.PackageBase):
@spack.builder.builder("generic")
class GenericBuilder(BuilderWithDefaults):
class GenericBuilder(BaseBuilder):
"""A builder for a generic build system, that require packagers
to implement an "install" phase.
"""
@@ -45,7 +44,7 @@ class GenericBuilder(BuilderWithDefaults):
install_time_test_callbacks = []
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
# unconditionally perform any post-install phase tests
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
spack.builder.run_after("install")(execute_install_time_tests)


@@ -7,11 +7,10 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends
from spack.multimethod import when
from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests
class GoPackage(spack.package_base.PackageBase):
@@ -33,7 +32,7 @@ class GoPackage(spack.package_base.PackageBase):
@spack.builder.builder("go")
class GoBuilder(BuilderWithDefaults):
class GoBuilder(BaseBuilder):
"""The Go builder encodes the most common way of building software with
a golang go.mod file. It has two phases that can be overridden, if need be:
@@ -100,7 +99,7 @@ def install(self, pkg, spec, prefix):
fs.mkdirp(prefix.bin)
fs.install(pkg.name, prefix.bin)
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
spack.builder.run_after("install")(execute_install_time_tests)
def check(self):
"""Run ``go test .`` in the source directory"""


@@ -22,8 +22,8 @@
install,
)
import spack.builder
import spack.error
import spack.phase_callbacks
from spack.build_environment import dso_suffix
from spack.error import InstallError
from spack.util.environment import EnvironmentModifications
@@ -1153,7 +1153,7 @@ def _determine_license_type(self):
# The file will have been created upon self.license_required AND
# self.license_files having been populated, so the "if" is usually
# true by the time the present function runs; ../hooks/licensing.py
with open(f, encoding="utf-8") as fh:
with open(f) as fh:
if re.search(r"^[ \t]*[^" + self.license_comment + "\n]", fh.read(), re.MULTILINE):
license_type = {
"ACTIVATION_TYPE": "license_file",
@@ -1163,7 +1163,7 @@ def _determine_license_type(self):
debug_print(license_type)
return license_type
@spack.phase_callbacks.run_before("install")
@spack.builder.run_before("install")
def configure(self):
"""Generates the silent.cfg file to pass to installer.sh.
@@ -1185,7 +1185,7 @@ def configure(self):
# our configuration accordingly. We can do this because the tokens are
# quite long and specific.
validator_code = open("pset/check.awk", "r", encoding="utf-8").read()
validator_code = open("pset/check.awk", "r").read()
# Let's go a little further and distill the tokens (plus some noise).
tokenlike_words = set(re.findall(r"[A-Z_]{4,}", validator_code))
@@ -1222,7 +1222,7 @@ def configure(self):
config_draft.update(self._determine_license_type)
# Write sorted *by token* so the file looks less like a hash dump.
f = open("silent.cfg", "w", encoding="utf-8")
f = open("silent.cfg", "w")
for token, value in sorted(config_draft.items()):
if token in tokenlike_words:
f.write("%s=%s\n" % (token, value))
@@ -1250,7 +1250,7 @@ def install(self, spec, prefix):
for f in glob.glob("%s/intel*log" % tmpdir):
install(f, dst)
@spack.phase_callbacks.run_after("install")
@spack.builder.run_after("install")
def validate_install(self):
# Sometimes the installer exits with an error but doesn't pass a
# non-zero exit code to spack. Check for the existence of a 'bin'
@@ -1258,7 +1258,7 @@ def validate_install(self):
if not os.path.exists(self.prefix.bin):
raise InstallError("The installer has failed to install anything.")
@spack.phase_callbacks.run_after("install")
@spack.builder.run_after("install")
def configure_rpath(self):
if "+rpath" not in self.spec:
return
@@ -1273,10 +1273,10 @@ def configure_rpath(self):
raise InstallError("Cannot find compiler command to configure rpath:\n\t" + f)
compiler_cfg = os.path.abspath(f + ".cfg")
with open(compiler_cfg, "w", encoding="utf-8") as fh:
with open(compiler_cfg, "w") as fh:
fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))
@spack.phase_callbacks.run_after("install")
@spack.builder.run_after("install")
def configure_auto_dispatch(self):
if self._has_compilers:
if "auto_dispatch=none" in self.spec:
@@ -1297,10 +1297,10 @@ def configure_auto_dispatch(self):
ad.append(x)
compiler_cfg = os.path.abspath(f + ".cfg")
with open(compiler_cfg, "a", encoding="utf-8") as fh:
with open(compiler_cfg, "a") as fh:
fh.write("-ax{0}\n".format(",".join(ad)))
@spack.phase_callbacks.run_after("install")
@spack.builder.run_after("install")
def filter_compiler_wrappers(self):
if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
bin_dir = self.component_bin_dir("mpi")
@@ -1308,7 +1308,7 @@ def filter_compiler_wrappers(self):
f = os.path.join(bin_dir, f)
filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)
@spack.phase_callbacks.run_after("install")
@spack.builder.run_after("install")
def uninstall_ism(self):
# The "Intel(R) Software Improvement Program" [ahem] gets installed,
# apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@@ -1340,7 +1340,7 @@ def base_lib_dir(self):
debug_print(d)
return d
@spack.phase_callbacks.run_after("install")
@spack.builder.run_after("install")
def modify_LLVMgold_rpath(self):
"""Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.


@@ -75,7 +75,7 @@ def generate_luarocks_config(self, pkg, spec, prefix):
table_entries.append(self._generate_tree_line(d.name, d.prefix))
path = self._luarocks_config_path()
with open(path, "w", encoding="utf-8") as config:
with open(path, "w") as config:
config.write(
"""
deps_mode="all"


@@ -8,14 +8,11 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from ._checks import (
BuilderWithDefaults,
BaseBuilder,
apply_macos_rpath_fixups,
execute_build_time_tests,
execute_install_time_tests,
@@ -39,7 +36,7 @@ class MakefilePackage(spack.package_base.PackageBase):
@spack.builder.builder("makefile")
class MakefileBuilder(BuilderWithDefaults):
class MakefileBuilder(BaseBuilder):
"""The Makefile builder encodes the most common way of building software with
Makefiles. It has three phases that can be overridden, if need be:
@@ -94,50 +91,35 @@ class MakefileBuilder(BuilderWithDefaults):
install_time_test_callbacks = ["installcheck"]
@property
def build_directory(self) -> str:
def build_directory(self):
"""Return the directory containing the main Makefile."""
return self.pkg.stage.source_path
def edit(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def edit(self, pkg, spec, prefix):
"""Edit the Makefile before calling make. The default is a no-op."""
pass
def build(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def build(self, pkg, spec, prefix):
"""Run "make" on the build targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.make(*self.build_targets)
def install(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def install(self, pkg, spec, prefix):
"""Run "make" on the install targets specified by the builder."""
with fs.working_dir(self.build_directory):
pkg.module.make(*self.install_targets)
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.builder.run_after("build")(execute_build_time_tests)
def check(self) -> None:
def check(self):
"""Run "make" on the ``test`` and ``check`` targets, if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_make_target_execute("test")
self.pkg._if_make_target_execute("check")
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
spack.builder.run_after("install")(execute_install_time_tests)
def installcheck(self) -> None:
def installcheck(self):
"""Searches the Makefile for an ``installcheck`` target
and runs it if found.
"""
@@ -145,4 +127,4 @@ def installcheck(self) -> None:
self.pkg._if_make_target_execute("installcheck")
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)


@@ -10,7 +10,7 @@
from spack.multimethod import when
from spack.util.executable import which
from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder
class MavenPackage(spack.package_base.PackageBase):
@@ -34,7 +34,7 @@ class MavenPackage(spack.package_base.PackageBase):
@spack.builder.builder("maven")
class MavenBuilder(BuilderWithDefaults):
class MavenBuilder(BaseBuilder):
"""The Maven builder encodes the default way to build software with Maven.
It has two phases that can be overridden, if need be:


@@ -9,13 +9,10 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests
class MesonPackage(spack.package_base.PackageBase):
@@ -65,7 +62,7 @@ def flags_to_build_system_args(self, flags):
@spack.builder.builder("meson")
class MesonBuilder(BuilderWithDefaults):
class MesonBuilder(BaseBuilder):
"""The Meson builder encodes the default way to build software with Meson.
The builder has three phases that can be overridden, if need be:
@@ -115,7 +112,7 @@ def archive_files(self):
return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]
@property
def root_mesonlists_dir(self) -> str:
def root_mesonlists_dir(self):
"""Relative path to the directory containing meson.build
This path is relative to the root of the extracted tarball,
@@ -124,7 +121,7 @@ def root_mesonlists_dir(self) -> str:
return self.pkg.stage.source_path
@property
def std_meson_args(self) -> List[str]:
def std_meson_args(self):
"""Standard meson arguments provided as a property for convenience
of package writers.
"""
@@ -135,7 +132,7 @@ def std_meson_args(self) -> List[str]:
return std_meson_args
@staticmethod
def std_args(pkg) -> List[str]:
def std_args(pkg):
"""Standard meson arguments for a generic package."""
try:
build_type = pkg.spec.variants["buildtype"].value
@@ -175,7 +172,7 @@ def build_directory(self):
"""Directory to use when building the package."""
return os.path.join(self.pkg.stage.path, self.build_dirname)
def meson_args(self) -> List[str]:
def meson_args(self):
"""List of arguments that must be passed to meson, except:
* ``--prefix``
@@ -188,12 +185,7 @@ def meson_args(self) -> List[str]:
"""
return []
def meson(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def meson(self, pkg, spec, prefix):
"""Run ``meson`` in the build directory"""
options = []
if self.spec["meson"].satisfies("@0.64:"):
@@ -204,31 +196,21 @@ def meson(
with fs.working_dir(self.build_directory, create=True):
pkg.module.meson(*options)
def build(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def build(self, pkg, spec, prefix):
"""Make the build targets"""
options = ["-v"]
options += self.build_targets
with fs.working_dir(self.build_directory):
pkg.module.ninja(*options)
def install(
self,
pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
prefix: spack.util.prefix.Prefix,
) -> None:
def install(self, pkg, spec, prefix):
"""Make the install targets"""
with fs.working_dir(self.build_directory):
pkg.module.ninja(*self.install_targets)
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.builder.run_after("build")(execute_build_time_tests)
def check(self) -> None:
def check(self):
"""Search Meson-generated files for the target ``test`` and run it if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_ninja_target_execute("test")


@@ -10,7 +10,7 @@
import spack.package_base
from spack.directives import build_system, conflicts
from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder
class MSBuildPackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class MSBuildPackage(spack.package_base.PackageBase):
@spack.builder.builder("msbuild")
class MSBuildBuilder(BuilderWithDefaults):
class MSBuildBuilder(BaseBuilder):
"""The MSBuild builder encodes the most common way of building software with
Microsoft's MSBuild tool. It has two phases that can be overridden, if need be:


@@ -10,7 +10,7 @@
import spack.package_base
from spack.directives import build_system, conflicts
from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder
class NMakePackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class NMakePackage(spack.package_base.PackageBase):
@spack.builder.builder("nmake")
class NMakeBuilder(BuilderWithDefaults):
class NMakeBuilder(BaseBuilder):
"""The NMake builder encodes the most common way of building software with
Microsoft's NMake tool. It has two phases that can be overridden, if need be:


@@ -7,7 +7,7 @@
from spack.directives import build_system, extends
from spack.multimethod import when
from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder
class OctavePackage(spack.package_base.PackageBase):
@@ -29,7 +29,7 @@ class OctavePackage(spack.package_base.PackageBase):
@spack.builder.builder("octave")
class OctaveBuilder(BuilderWithDefaults):
class OctaveBuilder(BaseBuilder):
"""The octave builder provides the following phases that can be overridden:
1. :py:meth:`~.OctaveBuilder.install`


@@ -32,9 +32,6 @@ class IntelOneApiPackage(Package):
# organization (e.g. University/Company).
redistribute(source=False, binary=False)
# contains precompiled binaries without rpaths
unresolved_libraries = ["*"]
for c in [
"target=ppc64:",
"target=ppc64le:",
@@ -258,7 +255,7 @@ def libs(self):
return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)
class IntelOneApiLibraryPackageWithSdk(IntelOneApiLibraryPackage):
class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
"""Base class for Intel oneAPI library packages with SDK components.
Contains some convenient default implementations for libraries


@@ -10,12 +10,11 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends
from spack.install_test import SkipTest, test_part
from spack.util.executable import Executable
from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests
class PerlPackage(spack.package_base.PackageBase):
@@ -85,7 +84,7 @@ def test_use(self):
@spack.builder.builder("perl")
class PerlBuilder(BuilderWithDefaults):
class PerlBuilder(BaseBuilder):
"""The perl builder provides four phases that can be overridden, if required:
1. :py:meth:`~.PerlBuilder.configure`
@@ -164,7 +163,7 @@ def configure(self, pkg, spec, prefix):
# Build.PL may be too long causing the build to fail. Patching the shebang
# does not happen until after install so set '/usr/bin/env perl' here in
# the Build script.
@spack.phase_callbacks.run_after("configure")
@spack.builder.run_after("configure")
def fix_shebang(self):
if self.build_method == "Build.PL":
pattern = "#!{0}".format(self.spec["perl"].command.path)
@@ -176,7 +175,7 @@ def build(self, pkg, spec, prefix):
self.build_executable()
# Ensure that tests run after build (if requested):
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.builder.run_after("build")(execute_build_time_tests)
def check(self):
"""Runs built-in tests of a Perl package."""


@@ -24,7 +24,6 @@
import spack.detection
import spack.multimethod
import spack.package_base
import spack.phase_callbacks
import spack.platforms
import spack.repo
import spack.spec
@@ -35,7 +34,7 @@
from spack.spec import Spec
from spack.util.prefix import Prefix
from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests
def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
@@ -375,7 +374,7 @@ def list_url(cls) -> Optional[str]: # type: ignore[override]
return None
@property
def python_spec(self) -> Spec:
def python_spec(self):
"""Get python-venv if it exists or python otherwise."""
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
return python
@@ -426,7 +425,7 @@ def libs(self) -> LibraryList:
@spack.builder.builder("python_pip")
class PythonPipBuilder(BuilderWithDefaults):
class PythonPipBuilder(BaseBuilder):
phases = ("install",)
#: Names associated with package methods in the old build-system format
@@ -544,4 +543,4 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
with fs.working_dir(self.build_directory):
pip(*args)
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
spack.builder.run_after("install")(execute_install_time_tests)


@@ -6,10 +6,9 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on
from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests
class QMakePackage(spack.package_base.PackageBase):
@@ -31,7 +30,7 @@ class QMakePackage(spack.package_base.PackageBase):
@spack.builder.builder("qmake")
class QMakeBuilder(BuilderWithDefaults):
class QMakeBuilder(BaseBuilder):
"""The qmake builder provides three phases that can be overridden:
1. :py:meth:`~.QMakeBuilder.qmake`
@@ -82,4 +81,4 @@ def check(self):
with working_dir(self.build_directory):
self.pkg._if_make_target_execute("check")
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.builder.run_after("build")(execute_build_time_tests)


@@ -8,7 +8,7 @@
import spack.package_base
from spack.directives import build_system, extends, maintainers
from ._checks import BuilderWithDefaults
from ._checks import BaseBuilder
class RubyPackage(spack.package_base.PackageBase):
@@ -28,7 +28,7 @@ class RubyPackage(spack.package_base.PackageBase):
@spack.builder.builder("ruby")
class RubyBuilder(BuilderWithDefaults):
class RubyBuilder(BaseBuilder):
"""The Ruby builder provides two phases that can be overridden if required:
#. :py:meth:`~.RubyBuilder.build`


@@ -4,10 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on
from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests
class SConsPackage(spack.package_base.PackageBase):
@@ -29,7 +28,7 @@ class SConsPackage(spack.package_base.PackageBase):
@spack.builder.builder("scons")
class SConsBuilder(BuilderWithDefaults):
class SConsBuilder(BaseBuilder):
"""The Scons builder provides the following phases that can be overridden:
1. :py:meth:`~.SConsBuilder.build`
@@ -80,4 +79,4 @@ def build_test(self):
"""
pass
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.builder.run_after("build")(execute_build_time_tests)


@@ -11,12 +11,11 @@
import spack.builder
import spack.install_test
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when
from spack.util.executable import Executable
from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests
class SIPPackage(spack.package_base.PackageBase):
@@ -104,7 +103,7 @@ def test_imports(self):
@spack.builder.builder("sip")
class SIPBuilder(BuilderWithDefaults):
class SIPBuilder(BaseBuilder):
"""The SIP builder provides the following phases that can be overridden:
* configure
@@ -171,4 +170,4 @@ def install_args(self):
"""Arguments to pass to install."""
return []
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
spack.builder.run_after("install")(execute_install_time_tests)


@@ -6,10 +6,9 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on
from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests
class WafPackage(spack.package_base.PackageBase):
@@ -31,7 +30,7 @@ class WafPackage(spack.package_base.PackageBase):
@spack.builder.builder("waf")
class WafBuilder(BuilderWithDefaults):
class WafBuilder(BaseBuilder):
"""The WAF builder provides the following phases that can be overridden:
* configure
@@ -137,7 +136,7 @@ def build_test(self):
"""
pass
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
spack.builder.run_after("build")(execute_build_time_tests)
def install_test(self):
"""Run unit tests after install.
@@ -147,4 +146,4 @@ def install_test(self):
"""
pass
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
spack.builder.run_after("install")(execute_install_time_tests)


@@ -6,30 +6,44 @@
import collections.abc
import copy
import functools
from typing import Dict, List, Optional, Tuple, Type
from typing import List, Optional, Tuple
from llnl.util import lang
import spack.error
import spack.multimethod
import spack.package_base
import spack.phase_callbacks
import spack.repo
import spack.spec
import spack.util.environment
#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS: Dict[str, Type["Builder"]] = {}
BUILDER_CLS = {}
#: An object of this kind is a shared global state used to collect callbacks during
#: class definition time, and is flushed when the class object is created at the end
#: of the class definition
#:
#: Args:
#: attribute_name (str): name of the attribute that will be attached to the builder
#: callbacks (list): container used to temporarily aggregate the callbacks
CallbackTemporaryStage = collections.namedtuple(
"CallbackTemporaryStage", ["attribute_name", "callbacks"]
)
#: Shared global state to aggregate "@run_before" callbacks
_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
#: Shared global state to aggregate "@run_after" callbacks
_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])
#: Map id(pkg) to a builder, to avoid creating multiple
#: builders for the same package object.
_BUILDERS: Dict[int, "Builder"] = {}
_BUILDERS = {}
def builder(build_system_name: str):
def builder(build_system_name):
"""Class decorator used to register the default builder
for a given build-system.
Args:
build_system_name: name of the build-system
build_system_name (str): name of the build-system
"""
def _decorator(cls):
@@ -40,9 +54,13 @@ def _decorator(cls):
return _decorator
def create(pkg: spack.package_base.PackageBase) -> "Builder":
"""Given a package object with an associated concrete spec, return the builder object that can
install it."""
def create(pkg):
"""Given a package object with an associated concrete spec,
return the builder object that can install it.
Args:
pkg (spack.package_base.PackageBase): package for which we want the builder
"""
if id(pkg) not in _BUILDERS:
_BUILDERS[id(pkg)] = _create(pkg)
return _BUILDERS[id(pkg)]
@@ -57,7 +75,7 @@ def __call__(self, spec, prefix):
return self.phase_fn(self.builder.pkg, spec, prefix)
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
def get_builder_class(pkg, name: str) -> Optional[type]:
"""Return the builder class if a package module defines it."""
cls = getattr(pkg.module, name, None)
if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
@@ -65,7 +83,7 @@ def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
return None
def _create(pkg: spack.package_base.PackageBase) -> "Builder":
def _create(pkg):
"""Return a new builder object for the package object being passed as argument.
The function inspects the build-system used by the package object and tries to:
@@ -85,7 +103,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
to look for build-related methods in the ``*Package``.
Args:
pkg: package object for which we need a builder
pkg (spack.package_base.PackageBase): package object for which we need a builder
"""
package_buildsystem = buildsystem_name(pkg)
default_builder_cls = BUILDER_CLS[package_buildsystem]
@@ -150,8 +168,8 @@ def __forward(self, *args, **kwargs):
# with the same name is defined in the Package, it will override this definition
# (when _ForwardToBaseBuilder is initialized)
for method_name in (
base_cls.phases # type: ignore
+ base_cls.legacy_methods # type: ignore
base_cls.phases
+ base_cls.legacy_methods
+ getattr(base_cls, "legacy_long_methods", tuple())
+ ("setup_build_environment", "setup_dependent_build_environment")
):
@@ -163,14 +181,14 @@ def __forward(self):
return __forward
for attribute_name in base_cls.legacy_attributes: # type: ignore
for attribute_name in base_cls.legacy_attributes:
setattr(
_ForwardToBaseBuilder,
attribute_name,
property(forward_property_to_getattr(attribute_name)),
)
class Adapter(base_cls, metaclass=_PackageAdapterMeta): # type: ignore
class Adapter(base_cls, metaclass=_PackageAdapterMeta):
def __init__(self, pkg):
# Deal with custom phases in packages here
if hasattr(pkg, "phases"):
@@ -195,18 +213,99 @@ def setup_dependent_build_environment(self, env, dependent_spec):
return Adapter(pkg)
def buildsystem_name(pkg: spack.package_base.PackageBase) -> str:
def buildsystem_name(pkg):
"""Given a package object with an associated concrete spec,
return the name of its build system."""
return the name of its build system.
Args:
pkg (spack.package_base.PackageBase): package for which we want
the build system name
"""
try:
return pkg.spec.variants["build_system"].value
except KeyError:
# We are reading an old spec without the build_system variant
return pkg.legacy_buildsystem # type: ignore
return pkg.legacy_buildsystem
class PhaseCallbacksMeta(type):
"""Permit to register arbitrary functions during class definition and run them
later, before or after a given install phase.
Each method decorated with ``run_before`` or ``run_after`` gets temporarily
stored in a global shared state when a class being defined is parsed by the Python
interpreter. At class definition time that temporary storage gets flushed and a list
of callbacks is attached to the class being defined.
"""
def __new__(mcs, name, bases, attr_dict):
for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
staged_callbacks = temporary_stage.callbacks
# Here we have an adapter from an old-style package. This means there is no
# hierarchy of builders, and every callback that had to be combined between
# *Package and *Builder has been combined already by _PackageAdapterMeta
if name == "Adapter":
continue
# If we are here we have callbacks. To get a complete list, we accumulate all the
# callbacks from base classes, we deduplicate them, then prepend what we have
# registered here.
#
# The order should be:
# 1. Callbacks are registered in order within the same class
# 2. Callbacks defined in derived classes precede those defined in base
# classes
callbacks_from_base = []
for base in bases:
current_callbacks = getattr(base, temporary_stage.attribute_name, None)
if not current_callbacks:
continue
callbacks_from_base.extend(current_callbacks)
callbacks_from_base = list(lang.dedupe(callbacks_from_base))
# Set the callbacks in this class and flush the temporary stage
attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
del temporary_stage.callbacks[:]
return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)
@staticmethod
def run_after(phase, when=None):
"""Decorator to register a function for running after a given phase.
Args:
phase (str): phase after which the function must run.
when (str): condition under which the function is run (if None, it is always run).
"""
def _decorator(fn):
key = (phase, when)
item = (key, fn)
_RUN_AFTER.callbacks.append(item)
return fn
return _decorator
@staticmethod
def run_before(phase, when=None):
"""Decorator to register a function for running before a given phase.
Args:
phase (str): phase before which the function must run.
when (str): condition under which the function is run (if None, it is always run).
"""
def _decorator(fn):
key = (phase, when)
item = (key, fn)
_RUN_BEFORE.callbacks.append(item)
return fn
return _decorator
class BuilderMeta(
spack.phase_callbacks.PhaseCallbacksMeta,
PhaseCallbacksMeta,
spack.multimethod.MultiMethodMeta,
type(collections.abc.Sequence), # type: ignore
):
@@ -301,12 +400,8 @@ def __new__(mcs, name, bases, attr_dict):
)
combine_callbacks = _PackageAdapterMeta.combine_callbacks
attr_dict[spack.phase_callbacks._RUN_BEFORE.attribute_name] = combine_callbacks(
spack.phase_callbacks._RUN_BEFORE.attribute_name
)
attr_dict[spack.phase_callbacks._RUN_AFTER.attribute_name] = combine_callbacks(
spack.phase_callbacks._RUN_AFTER.attribute_name
)
attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)
return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)
@@ -326,8 +421,8 @@ def __init__(self, name, builder):
self.name = name
self.builder = builder
self.phase_fn = self._select_phase_fn()
self.run_before = self._make_callbacks(spack.phase_callbacks._RUN_BEFORE.attribute_name)
self.run_after = self._make_callbacks(spack.phase_callbacks._RUN_AFTER.attribute_name)
self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)
def _make_callbacks(self, callbacks_attribute):
result = []
@@ -388,103 +483,15 @@ def copy(self):
return copy.deepcopy(self)
class BaseBuilder(metaclass=BuilderMeta):
"""An interface for builders, without any phases defined. This class is exposed in the package
API, so that packagers can create a single class to define ``setup_build_environment`` and
``@run_before`` and ``@run_after`` callbacks that can be shared among different builders.
class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
"""A builder is a class that, given a package object (i.e. associated with
concrete spec), knows how to install it.
Example:
The builder behaves like a sequence, and when iterated over returns the
"phases" of the installation in the correct order.
.. code-block:: python
class AnyBuilder(BaseBuilder):
@run_after("install")
def fixup_install(self):
# do something after the package is installed
pass
def setup_build_environment(self, env):
env.set("MY_ENV_VAR", "my_value")
class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
pass
class AutotoolsBuilder(autotools.AutotoolsBuilder, AnyBuilder):
pass
"""
def __init__(self, pkg: spack.package_base.PackageBase) -> None:
self.pkg = pkg
@property
def spec(self) -> spack.spec.Spec:
return self.pkg.spec
@property
def stage(self):
return self.pkg.stage
@property
def prefix(self):
return self.pkg.prefix
def setup_build_environment(
self, env: spack.util.environment.EnvironmentModifications
) -> None:
"""Sets up the build environment for a package.
This method will be called before the current package prefix exists in
Spack's store.
Args:
env: environment modifications to be applied when the package is built. Package authors
can call methods on it to alter the build environment.
"""
if not hasattr(super(), "setup_build_environment"):
return
super().setup_build_environment(env) # type: ignore
def setup_dependent_build_environment(
self, env: spack.util.environment.EnvironmentModifications, dependent_spec: spack.spec.Spec
) -> None:
"""Sets up the build environment of a package that depends on this one.
This is similar to ``setup_build_environment``, but it is used to modify the build
environment of a package that *depends* on this one.
This gives packages the ability to set environment variables for the build of the
dependent, which can be useful to provide search hints for headers or libraries if they are
not in standard locations.
This method will be called before the dependent package prefix exists in Spack's store.
Args:
env: environment modifications to be applied when the dependent package is built.
Package authors can call methods on it to alter the build environment.
dependent_spec: the spec of the dependent package about to be built. This allows the
extendee (self) to query the dependent's state. Note that *this* package's spec is
available as ``self.spec``
"""
if not hasattr(super(), "setup_dependent_build_environment"):
return
super().setup_dependent_build_environment(env, dependent_spec) # type: ignore
def __repr__(self):
fmt = "{name}{/hash:7}"
return f"{self.__class__.__name__}({self.spec.format(fmt)})"
def __str__(self):
fmt = "{name}{/hash:7}"
return f'"{self.__class__.__name__}" builder for "{self.spec.format(fmt)}"'
class Builder(BaseBuilder, collections.abc.Sequence):
"""A builder is a class that, given a package object (i.e. associated with concrete spec),
knows how to install it.
The builder behaves like a sequence, and when iterated over return the "phases" of the
installation in the correct order.
Args:
pkg (spack.package_base.PackageBase): package object to be built
"""
#: Sequence of phases. Must be defined in derived classes
@@ -499,22 +506,95 @@ class Builder(BaseBuilder, collections.abc.Sequence):
build_time_test_callbacks: List[str]
install_time_test_callbacks: List[str]
#: List of glob expressions. Each expression must either be absolute or relative to the package
#: source path. Matching artifacts found at the end of the build process will be copied in the
#: same directory tree as _spack_build_logfile and _spack_build_envfile.
@property
def archive_files(self) -> List[str]:
return []
#: List of glob expressions. Each expression must either be
#: absolute or relative to the package source path.
#: Matching artifacts found at the end of the build process will be
#: copied in the same directory tree as _spack_build_logfile and
#: _spack_build_envfile.
archive_files: List[str] = []
def __init__(self, pkg: spack.package_base.PackageBase) -> None:
super().__init__(pkg)
def __init__(self, pkg):
self.pkg = pkg
self.callbacks = {}
for phase in self.phases:
self.callbacks[phase] = InstallationPhase(phase, self)
@property
def spec(self):
return self.pkg.spec
@property
def stage(self):
return self.pkg.stage
@property
def prefix(self):
return self.pkg.prefix
def setup_build_environment(self, env):
"""Sets up the build environment for a package.
This method will be called before the current package prefix exists in
Spack's store.
Args:
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the package is built. Package authors
can call methods on it to alter the build environment.
"""
if not hasattr(super(), "setup_build_environment"):
return
super().setup_build_environment(env)
def setup_dependent_build_environment(self, env, dependent_spec):
"""Sets up the build environment of packages that depend on this one.
This is similar to ``setup_build_environment``, but it is used to
modify the build environments of packages that *depend* on this one.
This gives packages like Python and others that follow the extension
model a way to implement common environment or compile-time settings
for dependencies.
This method will be called before the dependent package prefix exists
in Spack's store.
Examples:
1. Installing python modules generally requires ``PYTHONPATH``
to point to the ``lib/pythonX.Y/site-packages`` directory in the
module's install prefix. This method could be used to set that
variable.
Args:
env (spack.util.environment.EnvironmentModifications): environment
modifications to be applied when the dependent package is built.
Package authors can call methods on it to alter the build environment.
dependent_spec (spack.spec.Spec): the spec of the dependent package
about to be built. This allows the extendee (self) to query
the dependent's state. Note that *this* package's spec is
available as ``self.spec``
"""
if not hasattr(super(), "setup_dependent_build_environment"):
return
super().setup_dependent_build_environment(env, dependent_spec)
def __getitem__(self, idx):
key = self.phases[idx]
return self.callbacks[key]
def __len__(self):
return len(self.phases)
def __repr__(self):
msg = "{0}({1})"
return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))
def __str__(self):
msg = '"{0}" builder for "{1}"'
return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))
# Export these names as standalone to be used in packages
run_after = PhaseCallbacksMeta.run_after
run_before = PhaseCallbacksMeta.run_before

2264
lib/spack/spack/ci.py Normal file

File diff suppressed because it is too large


@@ -1,41 +0,0 @@
# Spack CI generators
This document describes how the ci module can be extended to provide novel
ci generators. The module currently has only a single generator for gitlab.
The unit-tests for the ci module define a small custom generator for testing
purposes as well.
The process of generating a pipeline involves creating a ci-enabled spack
environment, activating it, and running `spack ci generate`, possibly with
arguments describing things like where the output should be written.
Internally pipeline generation is broken into two components: general and
ci platform specific.
## General pipeline functionality
General pipeline functionality includes building a pipeline graph (really,
a forest), pruning it in a variety of ways, and gathering attributes for all
the generated spec build jobs from the spack configuration.
All of the above functionality is defined in the `__init__.py` of the top-level
ci module, and should be roughly the same for pipelines generated for any
platform.
## CI platform specific functionality
Functionality specific to CI platforms (e.g. gitlab, gha, etc.) should be
defined in a dedicated module. In order to define a generator for a new
platform, there are only a few requirements:
1. add a file under `ci` in which you define a generator method decorated with
the `@generator` attribute (see the sketch after this list).
1. import it from `lib/spack/spack/ci/__init__.py`, so that your new generator
is registered.
1. the generator method must take as arguments PipelineDag, SpackCIConfig,
and PipelineOptions objects, in that order.
1. the generator method must produce an output file containing the
generated pipeline.
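Putting those requirements together, a new generator might look roughly like the sketch below. This is illustrative only: the import locations, the traversal helper on the `PipelineDag` object, and the attribute read from `PipelineOptions` are assumptions, not the actual Spack API.

```python
# Illustrative sketch of a custom CI generator; import paths and the
# PipelineDag/PipelineOptions members used here are assumptions.
import json

from spack.ci import generator  # assumed location of the @generator decorator


@generator("myci")
def generate_myci_yaml(pipeline_dag, spack_ci_config, options):
    """Emit a trivial pipeline file: one job per spec, listing its dependencies."""
    jobs = {}
    for node in pipeline_dag.traverse():  # hypothetical traversal helper
        name = node.spec.format("{name}-{version}")
        jobs[name] = {
            "needs": [dep.format("{name}-{version}") for dep in node.spec.dependencies()]
        }
    with open(options.output_file, "w", encoding="utf-8") as fd:  # assumed attribute
        json.dump(jobs, fd, indent=2)
```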

File diff suppressed because it is too large


@@ -1,825 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import copy
import json
import os
import re
import ssl
import sys
import time
from collections import deque
from enum import Enum
from typing import Dict, Generator, List, Optional, Set, Tuple
from urllib.parse import quote, urlencode, urlparse
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import Singleton, memoized
import spack.binary_distribution as bindist
import spack.config as cfg
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.mirrors.mirror
import spack.schema
import spack.spec
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp
def _urlopen():
error_handler = web_util.SpackHTTPDefaultErrorHandler()
# One opener with HTTPS ssl enabled
with_ssl = build_opener(
HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
)
# One opener with HTTPS ssl disabled
without_ssl = build_opener(
HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
)
# And dynamically dispatch based on the config:verify_ssl.
def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
opener = with_ssl if verify_ssl else without_ssl
timeout = timeout or cfg.get("config:connect_timeout", 1)
return opener.open(fullurl, data, timeout)
return dispatch_open
IS_WINDOWS = sys.platform == "win32"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
_dyn_mapping_urlopener = Singleton(_urlopen)
def copy_files_to_artifacts(src, artifacts_dir):
"""
Copy file(s) to the given artifacts directory
Parameters:
src (str): the glob-friendly path expression for the file(s) to copy
artifacts_dir (str): the destination directory
"""
try:
fs.copy(src, artifacts_dir)
except Exception as err:
msg = (
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
f"exception: {str(err)}"
)
tty.warn(msg)
def win_quote(quote_str: str) -> str:
if IS_WINDOWS:
quote_str = f'"{quote_str}"'
return quote_str
def _spec_matches(spec, match_string):
return spec.intersects(match_string)
def _noop(x):
return x
def unpack_script(script_section, op=_noop):
script = []
for cmd in script_section:
if isinstance(cmd, list):
for subcmd in cmd:
script.append(op(subcmd))
else:
script.append(op(cmd))
return script
def ensure_expected_target_path(path: str) -> str:
"""Returns passed paths with all Windows path separators exchanged
for posix separators
TODO (johnwparent): Refactor config + cli read/write to deal only in posix style paths
"""
if path:
return path.replace("\\", "/")
return path
def update_env_scopes(
env: ev.Environment,
cli_scopes: List[str],
output_file: str,
transform_windows_paths: bool = False,
) -> None:
"""Add any config scopes from cli_scopes which aren't already included in the
environment, by reading the yaml, adding the missing includes, and writing the
updated yaml back to the same location.
"""
with open(env.manifest_path, "r", encoding="utf-8") as env_fd:
env_yaml_root = syaml.load(env_fd)
# Add config scopes to environment
env_includes = env_yaml_root["spack"].get("include", [])
include_scopes: List[str] = []
for scope in cli_scopes:
if scope not in include_scopes and scope not in env_includes:
include_scopes.insert(0, scope)
env_includes.extend(include_scopes)
env_yaml_root["spack"]["include"] = [
ensure_expected_target_path(i) if transform_windows_paths else i for i in env_includes
]
with open(output_file, "w", encoding="utf-8") as fd:
syaml.dump_config(env_yaml_root, fd, default_flow_style=False)
def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):
"""Write out the file describing specs that should be copied"""
buildcache_copies = {}
for release_spec in specs:
release_spec_dag_hash = release_spec.dag_hash()
# TODO: This assumes signed version of the spec
buildcache_copies[release_spec_dag_hash] = [
{
"src": url_util.join(
src_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_name(release_spec, ".spec.json.sig"),
),
"dest": url_util.join(
dest_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_name(release_spec, ".spec.json.sig"),
),
},
{
"src": url_util.join(
src_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_path_name(release_spec, ".spack"),
),
"dest": url_util.join(
dest_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_path_name(release_spec, ".spack"),
),
},
]
target_dir = os.path.dirname(output_file)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
with open(output_file, "w", encoding="utf-8") as fd:
fd.write(json.dumps(buildcache_copies))
class CDashHandler:
"""
Class for managing CDash data and processing.
"""
def __init__(self, ci_cdash):
# start with the gitlab ci configuration
self.url = ci_cdash.get("url")
self.build_group = ci_cdash.get("build-group")
self.project = ci_cdash.get("project")
self.site = ci_cdash.get("site")
# grab the authorization token when available
self.auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
if self.auth_token:
tty.verbose("Using CDash auth token from environment")
# append runner description to the site if available
runner = os.environ.get("CI_RUNNER_DESCRIPTION")
if runner:
self.site += f" ({runner})"
def args(self):
return [
"--cdash-upload-url",
win_quote(self.upload_url),
"--cdash-build",
win_quote(self.build_name()),
"--cdash-site",
win_quote(self.site),
"--cdash-buildstamp",
win_quote(self.build_stamp),
]
def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:
"""Returns the CDash build name.
A name will be generated if the `spec` is provided,
otherwise, the value will be retrieved from the environment
through the `SPACK_CDASH_BUILD_NAME` variable.
Returns: (str) given spec's CDash build name."""
if spec:
build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
return build_name
env_build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
tty.debug(f"Using CDash build name ({env_build_name}) from the environment")
return env_build_name
@property # type: ignore
def build_stamp(self):
"""Returns the CDash build stamp.
The one defined by SPACK_CDASH_BUILD_STAMP environment variable
is preferred due to the representation of timestamps; otherwise,
one will be built.
Returns: (str) current CDash build stamp"""
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
if build_stamp:
tty.debug(f"Using build stamp ({build_stamp}) from the environment")
return build_stamp
build_stamp = cdash_build_stamp(self.build_group, time.time())
tty.debug(f"Generated new build stamp ({build_stamp})")
return build_stamp
@property # type: ignore
@memoized
def project_enc(self):
tty.debug(f"Encoding project ({type(self.project)}): {self.project})")
encode = urlencode({"project": self.project})
index = encode.find("=") + 1
return encode[index:]
@property
def upload_url(self):
url_format = f"{self.url}/submit.php?project={self.project_enc}"
return url_format
def copy_test_results(self, source, dest):
"""Copy test results to artifacts directory."""
reports = fs.join_path(source, "*_Test*.xml")
copy_files_to_artifacts(reports, dest)
def create_buildgroup(self, opener, headers, url, group_name, group_type):
data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers)
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code not in [200, 201]:
msg = f"Creating buildgroup failed (response code = {response_code})"
tty.warn(msg)
return None
response_text = response.read()
response_json = json.loads(response_text)
build_group_id = response_json["id"]
return build_group_id
def populate_buildgroup(self, job_names):
url = f"{self.url}/api/v1/buildgroup.php"
headers = {
"Authorization": f"Bearer {self.auth_token}",
"Content-Type": "application/json",
}
opener = build_opener(HTTPHandler)
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
group_id = self.create_buildgroup(
opener, headers, url, f"Latest {self.build_group}", "Latest"
)
if not parent_group_id or not group_id:
msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
tty.warn(msg)
return
data = {
"dynamiclist": [
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
for name in job_names
]
}
enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers)
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code != 200:
msg = f"Error response code ({response_code}) in populate_buildgroup"
tty.warn(msg)
def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
"""Explicitly report skipping testing of a spec (e.g., it's CI
configuration identifies it as known to have broken tests or
the CI installation failed).
Args:
spec: spec being tested
report_dir: directory where the report will be written
reason: reason the test is being skipped
"""
configuration = CDashConfiguration(
upload_url=self.upload_url,
packages=[spec.name],
build=self.build_name(),
site=self.site,
buildstamp=self.build_stamp,
track=None,
)
reporter = CDash(configuration=configuration)
reporter.test_skipped_report(report_dir, spec, reason)
class PipelineType(Enum):
COPY_ONLY = 1
spack_copy_only = 1
PROTECTED_BRANCH = 2
spack_protected_branch = 2
PULL_REQUEST = 3
spack_pull_request = 3
class PipelineOptions:
"""A container for all pipeline options that can be specified (whether
via cli, config/yaml, or environment variables)"""
def __init__(
self,
env: ev.Environment,
buildcache_destination: spack.mirrors.mirror.Mirror,
artifacts_root: str = "jobs_scratch_dir",
print_summary: bool = True,
output_file: Optional[str] = None,
check_index_only: bool = False,
broken_specs_url: Optional[str] = None,
rebuild_index: bool = True,
untouched_pruning_dependent_depth: Optional[int] = None,
prune_untouched: bool = False,
prune_up_to_date: bool = True,
prune_external: bool = True,
stack_name: Optional[str] = None,
pipeline_type: Optional[PipelineType] = None,
require_signing: bool = False,
cdash_handler: Optional["CDashHandler"] = None,
):
"""
Args:
env: Active spack environment
buildcache_destination: The mirror where built binaries should be pushed
artifacts_root: Path to location where artifacts should be stored
print_summary: Print a summary of the scheduled pipeline
output_file: Path where output file should be written
check_index_only: Only fetch the index or fetch all spec files
broken_specs_url: URL where broken specs (on develop) should be reported
rebuild_index: Generate a job to rebuild mirror index after rebuilds
untouched_pruning_dependent_depth: How many parents to traverse from changed pkg specs
prune_untouched: Prune jobs for specs that were unchanged in git history
prune_up_to_date: Prune specs from pipeline if binary exists on the mirror
prune_external: Prune specs from pipeline if they are external
stack_name: Name of spack stack
pipeline_type: Type of pipeline running (optional)
require_signing: Require buildcache to be signed (fail w/out signing key)
cdash_handler: Object for communicating build information with CDash
"""
self.env = env
self.buildcache_destination = buildcache_destination
self.artifacts_root = artifacts_root
self.print_summary = print_summary
self.output_file = output_file
self.check_index_only = check_index_only
self.broken_specs_url = broken_specs_url
self.rebuild_index = rebuild_index
self.untouched_pruning_dependent_depth = untouched_pruning_dependent_depth
self.prune_untouched = prune_untouched
self.prune_up_to_date = prune_up_to_date
self.prune_external = prune_external
self.stack_name = stack_name
self.pipeline_type = pipeline_type
self.require_signing = require_signing
self.cdash_handler = cdash_handler
class PipelineNode:
spec: spack.spec.Spec
parents: Set[str]
children: Set[str]
def __init__(self, spec: spack.spec.Spec):
self.spec = spec
self.parents = set()
self.children = set()
@property
def key(self):
"""Return key of the stored spec"""
return PipelineDag.key(self.spec)
class PipelineDag:
"""Turn a list of specs into a simple directed graph, that doesn't keep track
of edge types."""
@classmethod
def key(cls, spec: spack.spec.Spec) -> str:
return spec.dag_hash()
def __init__(self, specs: List[spack.spec.Spec]) -> None:
# Build dictionary of nodes
self.nodes: Dict[str, PipelineNode] = {
PipelineDag.key(s): PipelineNode(s)
for s in traverse.traverse_nodes(specs, deptype=dt.ALL_TYPES, root=True)
}
# Create edges
for edge in traverse.traverse_edges(
specs, deptype=dt.ALL_TYPES, root=False, cover="edges"
):
parent_key = PipelineDag.key(edge.parent)
child_key = PipelineDag.key(edge.spec)
self.nodes[parent_key].children.add(child_key)
self.nodes[child_key].parents.add(parent_key)
def prune(self, node_key: str):
"""Remove a node from the graph, and reconnect its parents and children"""
node = self.nodes[node_key]
for parent in node.parents:
self.nodes[parent].children.remove(node_key)
self.nodes[parent].children |= node.children
for child in node.children:
self.nodes[child].parents.remove(node_key)
self.nodes[child].parents |= node.parents
del self.nodes[node_key]
def traverse_nodes(
self, direction: str = "children"
) -> Generator[Tuple[int, PipelineNode], None, None]:
"""Yields (depth, node) from the pipeline graph. Traversal is topologically
ordered from the roots if ``direction`` is ``children``, or from the leaves
if ``direction`` is ``parents``. The yielded depth is the length of the
longest path from the starting point to the yielded node."""
if direction == "children":
get_in_edges = lambda node: node.parents
get_out_edges = lambda node: node.children
else:
get_in_edges = lambda node: node.children
get_out_edges = lambda node: node.parents
sort_key = lambda k: self.nodes[k].spec.name
out_edges = {k: sorted(get_out_edges(n), key=sort_key) for k, n in self.nodes.items()}
num_in_edges = {k: len(get_in_edges(n)) for k, n in self.nodes.items()}
# Populate a queue with all the nodes that have no incoming edges
nodes = deque(
sorted(
[(0, key) for key in self.nodes.keys() if num_in_edges[key] == 0],
key=lambda item: item[1],
)
)
while nodes:
# Remove the next node, n, from the queue and yield it
depth, n_key = nodes.pop()
yield (depth, self.nodes[n_key])
# Remove an in-edge from every node, m, pointed to by an
# out-edge from n. If any of those nodes are left with
# 0 remaining in-edges, add them to the queue.
for m in out_edges[n_key]:
num_in_edges[m] -= 1
if num_in_edges[m] == 0:
nodes.appendleft((depth + 1, m))
def get_dependencies(self, node: PipelineNode) -> List[PipelineNode]:
"""Returns a list of nodes corresponding to the direct dependencies
of the given node."""
return [self.nodes[k] for k in node.children]
class SpackCIConfig:
"""Spack CI object used to generate intermediate representation
used by the CI generator(s).
"""
def __init__(self, ci_config):
"""Given the information from the ci section of the config
and the staged jobs, set up meta data needed for generating Spack
CI IR.
"""
self.ci_config = ci_config
self.named_jobs = ["any", "build", "copy", "cleanup", "noop", "reindex", "signing"]
self.ir = {
"jobs": {},
"rebuild-index": self.ci_config.get("rebuild-index", True),
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
"target": self.ci_config.get("target", "gitlab"),
}
jobs = self.ir["jobs"]
for name in self.named_jobs:
# Skip the special named jobs
if name not in ["any", "build"]:
jobs[name] = self.__init_job("")
def __init_job(self, release_spec):
"""Initialize job object"""
job_object = {"spec": release_spec, "attributes": {}}
if release_spec:
job_vars = job_object["attributes"].setdefault("variables", {})
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
return job_object
def __is_named(self, section):
"""Check if a pipeline-gen configuration section is for a named job,
and if so return the name otherwise return none.
"""
for _name in self.named_jobs:
keys = [f"{_name}-job", f"{_name}-job-remove"]
if any([key for key in keys if key in section]):
return _name
return None
@staticmethod
def __job_name(name, suffix=""):
"""Compute the name of a named job with appropriate suffix.
Valid suffixes are either '-remove' or empty string or None
"""
assert isinstance(name, str)
jname = name
if suffix:
jname = f"{name}-job{suffix}"
else:
jname = f"{name}-job"
return jname
def __apply_submapping(self, dest, spec, section):
"""Apply submapping setion to the IR dict"""
matched = False
only_first = section.get("match_behavior", "first") == "first"
for match_attrs in reversed(section["submapping"]):
attrs = cfg.InternalConfigScope._process_dict_keyname_overrides(match_attrs)
for match_string in match_attrs["match"]:
if _spec_matches(spec, match_string):
matched = True
if "build-job-remove" in match_attrs:
spack.config.remove_yaml(dest, attrs["build-job-remove"])
if "build-job" in match_attrs:
spack.schema.merge_yaml(dest, attrs["build-job"])
break
if matched and only_first:
break
return dest
# Create jobs for all the pipeline specs
def init_pipeline_jobs(self, pipeline: PipelineDag):
for _, node in pipeline.traverse_nodes():
dag_hash = node.spec.dag_hash()
self.ir["jobs"][dag_hash] = self.__init_job(node.spec)
# Generate IR from the configs
def generate_ir(self):
"""Generate the IR from the Spack CI configurations."""
jobs = self.ir["jobs"]
# Implicit job defaults
defaults = [
{
"build-job": {
"script": [
"cd {env_dir}",
"spack env activate --without-view .",
"spack ci rebuild",
]
}
},
{"noop-job": {"script": ['echo "All specs already up to date, nothing to rebuild."']}},
]
# Job overrides
overrides = [
# Reindex script
{
"reindex-job": {
"script:": ["spack buildcache update-index --keys {index_target_mirror}"]
}
},
# Cleanup script
{
"cleanup-job": {
"script:": ["spack -d mirror destroy {mirror_prefix}/$CI_PIPELINE_ID"]
}
},
# Add signing job tags
{"signing-job": {"tags": ["aws", "protected", "notary"]}},
# Remove reserved tags
{"any-job-remove": {"tags": SPACK_RESERVED_TAGS}},
]
pipeline_gen = overrides + self.ci_config.get("pipeline-gen", []) + defaults
for section in reversed(pipeline_gen):
name = self.__is_named(section)
has_submapping = "submapping" in section
has_dynmapping = "dynamic-mapping" in section
section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)
if name:
remove_job_name = self.__job_name(name, suffix="-remove")
merge_job_name = self.__job_name(name)
do_remove = remove_job_name in section
do_merge = merge_job_name in section
def _apply_section(dest, src):
if do_remove:
dest = spack.config.remove_yaml(dest, src[remove_job_name])
if do_merge:
dest = copy.copy(spack.schema.merge_yaml(dest, src[merge_job_name]))
if name == "build":
# Apply attributes to all build jobs
for _, job in jobs.items():
if job["spec"]:
_apply_section(job["attributes"], section)
elif name == "any":
# Apply section attributes to all jobs
for _, job in jobs.items():
_apply_section(job["attributes"], section)
else:
# Create a signing job if there is script and the job hasn't
# been initialized yet
if name == "signing" and name not in jobs:
if "signing-job" in section:
if "script" not in section["signing-job"]:
continue
else:
jobs[name] = self.__init_job("")
# Apply attributes to named job
_apply_section(jobs[name]["attributes"], section)
elif has_submapping:
# Apply section jobs with specs to match
for _, job in jobs.items():
if job["spec"]:
job["attributes"] = self.__apply_submapping(
job["attributes"], job["spec"], section
)
elif has_dynmapping:
mapping = section["dynamic-mapping"]
dynmap_name = mapping.get("name")
# Check if this section should be skipped
dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
if dynmap_name and dynmap_skip:
if re.match(dynmap_skip, dynmap_name):
continue
# Get the endpoint
endpoint = mapping["endpoint"]
endpoint_url = urlparse(endpoint)
# Configure the request header
header = {"User-Agent": web_util.SPACK_USER_AGENT}
header.update(mapping.get("header", {}))
# Expand header environment variables
# ie. if tokens are passed
for value in header.values():
value = os.path.expandvars(value)
verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
required = mapping.get("require", [])
allowed = mapping.get("allow", [])
ignored = mapping.get("ignore", [])
# required keys are implicitly allowed
allowed = sorted(set(allowed + required))
ignored = sorted(set(ignored))
required = sorted(set(required))
# Make sure required things are not also ignored
assert not any([ikey in required for ikey in ignored])
def job_query(job):
job_vars = job["attributes"]["variables"]
query = (
"{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
# The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
" {SPACK_JOB_SPEC_VARIANTS}"
" arch={SPACK_JOB_SPEC_ARCH}"
"%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
).format_map(job_vars)
return f"spec={quote(query)}"
for job in jobs.values():
if not job["spec"]:
continue
# Create request for this job
query = job_query(job)
request = Request(
endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
)
try:
response = _dyn_mapping_urlopener(
request, verify_ssl=verify_ssl, timeout=timeout
)
except Exception as e:
# For now just ignore any errors from dynamic mapping and continue
# This is still experimental, and failures should not stop CI
# from running normally
tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
tty.warn(f"{e}")
continue
config = json.load(codecs.getreader("utf-8")(response))
# Strip ignore keys
if ignored:
for key in ignored:
if key in config:
config.pop(key)
# Only keep allowed keys
clean_config = {}
if allowed:
for key in allowed:
if key in config:
clean_config[key] = config[key]
else:
clean_config = config
# Verify all of the required keys are present
if required:
missing_keys = []
for key in required:
if key not in clean_config.keys():
missing_keys.append(key)
if missing_keys:
tty.warn(f"Response missing required keys: {missing_keys}")
if clean_config:
job["attributes"] = spack.schema.merge_yaml(
job.get("attributes", {}), clean_config
)
for _, job in jobs.items():
if job["spec"]:
job["spec"] = job["spec"].name
return self.ir
class SpackCIError(spack.error.SpackError):
def __init__(self, msg):
super().__init__(msg)

View File

@@ -1,36 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# Holds all known formatters
"""Generators that support writing out pipelines for various CI platforms,
using a common pipeline graph definition.
"""
import spack.error
_generators = {}
def generator(name):
"""Decorator to register a pipeline generator method.
A generator method should take PipelineDag, SpackCIConfig, and
PipelineOptions arguments, and should produce a pipeline file.
"""
def _decorator(generate_method):
_generators[name] = generate_method
return generate_method
return _decorator
def get_generator(name):
try:
return _generators[name]
except KeyError:
raise UnknownGeneratorException(name)
class UnknownGeneratorException(spack.error.SpackError):
def __init__(self, generator_name):
super().__init__(f"No registered generator for {generator_name}")

View File

@@ -1,416 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import os
import shutil
from typing import List, Optional
import ruamel.yaml
import llnl.util.tty as tty
import spack
import spack.binary_distribution as bindist
import spack.config as cfg
import spack.mirrors.mirror
import spack.schema
import spack.spec
import spack.util.spack_yaml as syaml
from .common import (
SPACK_RESERVED_TAGS,
PipelineDag,
PipelineOptions,
PipelineType,
SpackCIConfig,
SpackCIError,
ensure_expected_target_path,
unpack_script,
update_env_scopes,
write_pipeline_manifest,
)
from .generator_registry import generator
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
# "always",
"unknown_failure",
"script_failure",
"api_failure",
"stuck_or_timeout_failure",
"runner_system_failure",
"runner_unsupported",
"stale_schedule",
# "job_execution_timeout",
"archived_failure",
"unmet_prerequisites",
"scheduler_failure",
"data_integrity_failure",
]
JOB_NAME_FORMAT = "{name}{@version} {/hash}"
def _remove_reserved_tags(tags):
"""Convenience function to strip reserved tags from jobs"""
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
def get_job_name(spec: spack.spec.Spec, build_group: Optional[str] = None) -> str:
"""Given a spec and possibly a build group, return the job name. If the
resulting name is longer than 255 characters, it will be truncated.
Arguments:
spec: Spec job will build
build_group: Name of build group this job belongs to (a CDash notion)
Returns: The job name
"""
job_name = spec.format(JOB_NAME_FORMAT)
if build_group:
job_name = f"{job_name} {build_group}"
return job_name[:255]
def maybe_generate_manifest(pipeline: PipelineDag, options: PipelineOptions, manifest_path):
# TODO: Consider including only hashes of rebuilt specs in the manifest,
# instead of full source and destination urls. Also, consider renaming
# the variable that controls whether or not to write the manifest from
# "SPACK_COPY_BUILDCACHE" to "SPACK_WRITE_PIPELINE_MANIFEST" or similar.
spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
if spack_buildcache_copy:
buildcache_copy_src_prefix = options.buildcache_destination.fetch_url
buildcache_copy_dest_prefix = spack_buildcache_copy
if options.pipeline_type == PipelineType.COPY_ONLY:
manifest_specs = [s for s in options.env.all_specs() if not s.external]
else:
manifest_specs = [n.spec for _, n in pipeline.traverse_nodes(direction="children")]
write_pipeline_manifest(
manifest_specs, buildcache_copy_src_prefix, buildcache_copy_dest_prefix, manifest_path
)
@generator("gitlab")
def generate_gitlab_yaml(pipeline: PipelineDag, spack_ci: SpackCIConfig, options: PipelineOptions):
"""Given a pipeline graph, job attributes, and pipeline options,
write a pipeline that can be consumed by GitLab to the given output file.
Arguments:
pipeline: An already pruned graph of jobs representing all the specs to build
spack_ci: An object containing the configured attributes of all jobs in the pipeline
options: An object containing all the pipeline options gathered from yaml, env, etc...
"""
ci_project_dir = os.environ.get("CI_PROJECT_DIR") or os.getcwd()
generate_job_name = os.environ.get("CI_JOB_NAME", "job-does-not-exist")
generate_pipeline_id = os.environ.get("CI_PIPELINE_ID", "pipeline-does-not-exist")
artifacts_root = options.artifacts_root
if artifacts_root.startswith(ci_project_dir):
artifacts_root = os.path.relpath(artifacts_root, ci_project_dir)
pipeline_artifacts_dir = os.path.join(ci_project_dir, artifacts_root)
output_file = options.output_file
if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
else:
output_file_path = os.path.abspath(output_file)
gen_ci_dir = os.path.dirname(output_file_path)
if not os.path.exists(gen_ci_dir):
os.makedirs(gen_ci_dir)
spack_ci_ir = spack_ci.generate_ir()
concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")
# Now that we've added the mirrors we know about, they should be properly
# reflected in the environment manifest file, so copy that into the
# concrete environment directory, along with the spack.lock file.
if not os.path.exists(concrete_env_dir):
os.makedirs(concrete_env_dir)
shutil.copyfile(options.env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
shutil.copyfile(options.env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))
update_env_scopes(
options.env,
[
os.path.relpath(s.path, concrete_env_dir)
for s in cfg.scopes().values()
if not s.writable
and isinstance(s, (cfg.DirectoryConfigScope))
and os.path.exists(s.path)
],
os.path.join(concrete_env_dir, "spack.yaml"),
# Here transforming windows paths is only required in the special case
# of copy_only_pipelines, a unique scenario where the generate job and
# child pipelines are run on different platforms. To make this compatible
# w/ Windows, we cannot write Windows style path separators that will be
# consumed by the Posix copy job runner.
#
# TODO (johnwparent): Refactor config + cli read/write to deal only in
# posix style paths
transform_windows_paths=(options.pipeline_type == PipelineType.COPY_ONLY),
)
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
# We communicate relative paths to the downstream jobs to avoid issues in
# situations where the CI_PROJECT_DIR varies between the pipeline
# generation job and the rebuild jobs. This can happen when gitlab
# checks out the project into a runner-specific directory, for example,
# and different runners are picked for generate and rebuild jobs.
rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
def main_script_replacements(cmd):
return cmd.replace("{env_dir}", rel_concrete_env_dir)
output_object = {}
job_id = 0
stage_id = 0
stages: List[List] = []
stage_names = []
max_length_needs = 0
max_needs_job = ""
if not options.pipeline_type == PipelineType.COPY_ONLY:
for level, node in pipeline.traverse_nodes(direction="parents"):
stage_id = level
if len(stages) == stage_id:
stages.append([])
stages[stage_id].append(node.spec)
stage_name = f"stage-{level}"
if stage_name not in stage_names:
stage_names.append(stage_name)
release_spec = node.spec
release_spec_dag_hash = release_spec.dag_hash()
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
if not job_object:
tty.warn(f"No match found for {release_spec}, skipping it")
continue
if options.pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
if options.pipeline_type == PipelineType.PROTECTED_BRANCH:
job_object["tags"].extend(["protected"])
elif options.pipeline_type == PipelineType.PULL_REQUEST:
job_object["tags"].extend(["public"])
if "script" not in job_object:
raise AttributeError
job_object["script"] = unpack_script(job_object["script"], op=main_script_replacements)
if "before_script" in job_object:
job_object["before_script"] = unpack_script(job_object["before_script"])
if "after_script" in job_object:
job_object["after_script"] = unpack_script(job_object["after_script"])
build_group = options.cdash_handler.build_group if options.cdash_handler else None
job_name = get_job_name(release_spec, build_group)
dep_nodes = pipeline.get_dependencies(node)
job_object["needs"] = [
{"job": get_job_name(dep_node.spec, build_group), "artifacts": False}
for dep_node in dep_nodes
]
job_object["needs"].append(
{"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}
)
job_vars = job_object["variables"]
# Let downstream jobs know whether the spec needed rebuilding, regardless
# whether DAG pruning was enabled or not.
already_built = bindist.get_mirrors_for_spec(spec=release_spec, index_only=True)
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = "False" if already_built else "True"
if options.cdash_handler:
build_name = options.cdash_handler.build_name(release_spec)
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
build_stamp = options.cdash_handler.build_stamp
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
job_object["artifacts"] = spack.schema.merge_yaml(
job_object.get("artifacts", {}),
{
"when": "always",
"paths": [
rel_job_log_dir,
rel_job_repro_dir,
rel_job_test_dir,
rel_user_artifacts_dir,
],
},
)
job_object["stage"] = stage_name
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
job_object["interruptible"] = True
length_needs = len(job_object["needs"])
if length_needs > max_length_needs:
max_length_needs = length_needs
max_needs_job = job_name
output_object[job_name] = job_object
job_id += 1
tty.debug(f"{job_id} build jobs generated in {stage_id} stages")
if job_id > 0:
tty.debug(f"The max_needs_job is {max_needs_job}, with {max_length_needs} needs")
service_job_retries = {
"max": 2,
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
}
# In some cases, pipeline generation should write a manifest. Currently
# the only purpose is to specify a list of sources and destinations for
# everything that should be copied.
distinguish_stack = options.stack_name if options.stack_name else "rebuilt"
manifest_path = os.path.join(
pipeline_artifacts_dir, "specs_to_copy", f"copy_{distinguish_stack}_specs.json"
)
maybe_generate_manifest(pipeline, options, manifest_path)
if options.pipeline_type == PipelineType.COPY_ONLY:
stage_names.append("copy")
sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
sync_job["stage"] = "copy"
sync_job["needs"] = [{"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}]
if "variables" not in sync_job:
sync_job["variables"] = {}
sync_job["variables"][
"SPACK_COPY_ONLY_DESTINATION"
] = options.buildcache_destination.fetch_url
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
if "buildcache-source" not in pipeline_mirrors:
raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")
buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
sync_job["dependencies"] = []
output_object["copy"] = sync_job
job_id += 1
if job_id > 0:
if (
"script" in spack_ci_ir["jobs"]["signing"]["attributes"]
and options.pipeline_type == PipelineType.PROTECTED_BRANCH
):
# External signing: generate a job to check and sign binary pkgs
stage_names.append("stage-sign-pkgs")
signing_job = spack_ci_ir["jobs"]["signing"]["attributes"]
signing_job["script"] = unpack_script(signing_job["script"])
signing_job["stage"] = "stage-sign-pkgs"
signing_job["when"] = "always"
signing_job["retry"] = {"max": 2, "when": ["always"]}
signing_job["interruptible"] = True
if "variables" not in signing_job:
signing_job["variables"] = {}
signing_job["variables"][
"SPACK_BUILDCACHE_DESTINATION"
] = options.buildcache_destination.push_url
signing_job["dependencies"] = []
output_object["sign-pkgs"] = signing_job
if options.rebuild_index:
# Add a final job to regenerate the index
stage_names.append("stage-rebuild-index")
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
final_job["stage"] = "stage-rebuild-index"
target_mirror = options.buildcache_destination.push_url
final_job["script"] = unpack_script(
final_job["script"],
op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
)
final_job["when"] = "always"
final_job["retry"] = service_job_retries
final_job["interruptible"] = True
final_job["dependencies"] = []
output_object["rebuild-index"] = final_job
output_object["stages"] = stage_names
# Capture the version of Spack used to generate the pipeline, that can be
# passed to `git checkout` for version consistency. If we aren't in a Git
# repository, presume we are a Spack release and use the Git tag instead.
spack_version = spack.get_version()
version_to_clone = spack.get_spack_commit() or f"v{spack.spack_version}"
rebuild_everything = not options.prune_up_to_date and not options.prune_untouched
output_object["variables"] = {
"SPACK_ARTIFACTS_ROOT": artifacts_root,
"SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
"SPACK_VERSION": spack_version,
"SPACK_CHECKOUT_VERSION": version_to_clone,
"SPACK_JOB_LOG_DIR": rel_job_log_dir,
"SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
"SPACK_JOB_TEST_DIR": rel_job_test_dir,
"SPACK_PIPELINE_TYPE": options.pipeline_type.name if options.pipeline_type else "None",
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(options.prune_up_to_date),
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
"SPACK_REQUIRE_SIGNING": str(options.require_signing),
}
if options.stack_name:
output_object["variables"]["SPACK_CI_STACK_NAME"] = options.stack_name
output_vars = output_object["variables"]
for item, val in output_vars.items():
output_vars[item] = ensure_expected_target_path(val)
else:
# No jobs were generated
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
# If this job fails ignore the status and carry on
noop_job["retry"] = 0
noop_job["allow_failure"] = True
tty.debug("No specs to rebuild, generating no-op job")
output_object = {"no-specs-to-rebuild": noop_job}
# Ensure the child pipeline always runs
output_object["workflow"] = {"rules": [{"when": "always"}]}
sorted_output = {}
for output_key, output_value in sorted(output_object.items()):
sorted_output[output_key] = output_value
# Minimize yaml output size through use of anchors
syaml.anchorify(sorted_output)
with open(output_file, "w", encoding="utf-8") as f:
ruamel.yaml.YAML().dump(sorted_output, f)

View File

@@ -4,13 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import difflib
import importlib
import os
import re
import sys
from collections import Counter
from typing import List, Optional, Union
from typing import List, Union
import llnl.string
import llnl.util.tty as tty
@@ -24,18 +22,15 @@
import spack.environment as ev
import spack.error
import spack.extensions
import spack.parser
import spack.paths
import spack.repo
import spack.spec
import spack.spec_parser
import spack.store
import spack.traverse as traverse
import spack.user_environment as uenv
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
from ..enums import InstallRecordStatus
# cmd has a submodule called "list" so preserve the python list module
python_list = list
@@ -126,8 +121,6 @@ def get_module(cmd_name):
tty.debug("Imported {0} from built-in commands".format(pname))
except ImportError:
module = spack.extensions.get_module(cmd_name)
if not module:
raise CommandNotFoundError(cmd_name)
attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "")
@@ -163,12 +156,12 @@ def quote_kvp(string: str) -> str:
or ``name==``, and we assume the rest of the argument is the value. This covers the
common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
"""
match = spack.spec_parser.SPLIT_KVP.match(string)
match = spack.parser.SPLIT_KVP.match(string)
if not match:
return string
key, delim, value = match.groups()
return f"{key}{delim}{spack.spec_parser.quote_if_needed(value)}"
return f"{key}{delim}{spack.parser.quote_if_needed(value)}"
def parse_specs(
@@ -180,7 +173,7 @@ def parse_specs(
args = [args] if isinstance(args, str) else args
arg_string = " ".join([quote_kvp(arg) for arg in args])
specs = spack.spec_parser.parse(arg_string)
specs = spack.parser.parse(arg_string)
if not concretize:
return specs
@@ -196,43 +189,6 @@ def _concretize_spec_pairs(to_concretize, tests=False):
rules from config."""
unify = spack.config.get("concretizer:unify", False)
# Special case for concretizing a single spec
if len(to_concretize) == 1:
abstract, concrete = to_concretize[0]
return [concrete or abstract.concretized()]
# Special case if every spec is either concrete or has an abstract hash
if all(
concrete or abstract.concrete or abstract.abstract_hash
for abstract, concrete in to_concretize
):
# Get all the concrete specs
ret = [
concrete or (abstract if abstract.concrete else abstract.lookup_hash())
for abstract, concrete in to_concretize
]
# If unify: true, check that specs don't conflict
# Since all concrete, "when_possible" is not relevant
if unify is True: # True, "when_possible", False are possible values
runtimes = spack.repo.PATH.packages_with_tags("runtime")
specs_per_name = Counter(
spec.name
for spec in traverse.traverse_nodes(
ret, deptype=("link", "run"), key=traverse.by_dag_hash
)
if spec.name not in runtimes # runtimes are allowed multiple times
)
conflicts = sorted(name for name, count in specs_per_name.items() if count > 1)
if conflicts:
raise spack.error.SpecError(
"Specs conflict and `concretizer:unify` is configured true.",
f" specs depend on multiple versions of {', '.join(conflicts)}",
)
return ret
# Standard case
concretize_method = spack.concretize.concretize_separately # unify: false
if unify is True:
concretize_method = spack.concretize.concretize_together
@@ -272,48 +228,39 @@ def matching_specs_from_env(specs):
return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]
def disambiguate_spec(
spec: spack.spec.Spec,
env: Optional[ev.Environment],
local: bool = False,
installed: Union[bool, InstallRecordStatus] = True,
first: bool = False,
) -> spack.spec.Spec:
def disambiguate_spec(spec, env, local=False, installed=True, first=False):
"""Given a spec, figure out which installed package it refers to.
Args:
spec: a spec to disambiguate
env: a spack environment, if one is active, or None if no environment is active
local: do not search chained spack instances
installed: install status argument passed to database query.
first: returns the first matching spec, even if more than one match is found
Arguments:
spec (spack.spec.Spec): a spec to disambiguate
env (spack.environment.Environment): a spack environment,
if one is active, or None if no environment is active
local (bool): do not search chained spack instances
installed (bool or spack.database.InstallStatus or typing.Iterable):
install status argument passed to database query.
See ``spack.database.Database._query`` for details.
"""
hashes = env.all_hashes() if env else None
return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
def disambiguate_spec_from_hashes(
spec: spack.spec.Spec,
hashes: List[str],
local: bool = False,
installed: Union[bool, InstallRecordStatus] = True,
first: bool = False,
) -> spack.spec.Spec:
def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, first=False):
"""Given a spec and a list of hashes, get concrete spec the spec refers to.
Arguments:
spec: a spec to disambiguate
hashes: a set of hashes of specs among which to disambiguate
local: if True, do not search chained spack instances
installed: install status argument passed to database query.
first: returns the first matching spec, even if more than one match is found
spec (spack.spec.Spec): a spec to disambiguate
hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
local (bool): do not search chained spack instances
installed (bool or spack.database.InstallStatus or typing.Iterable):
install status argument passed to database query.
See ``spack.database.Database._query`` for details.
"""
if local:
matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
else:
matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
if not matching_specs:
tty.die(f"Spec '{spec}' matches no installed packages.")
tty.die("Spec '%s' matches no installed packages." % spec)
elif first:
return matching_specs[0]
@@ -694,24 +641,3 @@ def find_environment(args):
def first_line(docstring):
"""Return the first line of the docstring."""
return docstring.split("\n")[0]
class CommandNotFoundError(spack.error.SpackError):
"""Exception class thrown when a requested command is not recognized as
such.
"""
def __init__(self, cmd_name):
msg = (
f"{cmd_name} is not a recognized Spack command or extension command; "
"check with `spack commands`."
)
long_msg = None
similar = difflib.get_close_matches(cmd_name, all_commands())
if 1 <= len(similar) <= 5:
long_msg = "\nDid you mean one of the following commands?\n "
long_msg += "\n ".join(similar)
super().__init__(msg, long_msg)

View File

@@ -16,7 +16,7 @@
import spack.bootstrap.config
import spack.bootstrap.core
import spack.config
import spack.mirrors.utils
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path
@@ -400,7 +400,7 @@ def _mirror(args):
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirrors.utils.create(mirror_dir, [node])
spack.mirror.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)
if args.binary_packages:
@@ -419,7 +419,7 @@ def write_metadata(subdir, metadata):
metadata_rel_dir = os.path.join("metadata", subdir)
metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir, "metadata.yaml")
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
with open(metadata_yaml, mode="w", encoding="utf-8") as f:
with open(metadata_yaml, mode="w") as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir

View File

@@ -6,7 +6,7 @@
from spack.context import Context
description = (
"run a command in a spec's install environment, or dump its environment to screen or file"
"use a spec's build environment to run a command, dump to screen or file, or dive into it"
)
section = "build"
level = "long"

View File

@@ -21,7 +21,7 @@
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.mirrors.mirror
import spack.mirror
import spack.oci.oci
import spack.spec
import spack.stage
@@ -34,8 +34,6 @@
from spack.cmd.common import arguments
from spack.spec import Spec, save_dependency_specfiles
from ..enums import InstallRecordStatus
description = "create, download and install binary packages"
section = "packaging"
level = "long"
@@ -310,10 +308,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
def _matching_specs(specs: List[Spec]) -> List[Spec]:
"""Disambiguate specs and return a list of matching specs"""
return [
spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=InstallRecordStatus.ANY)
for s in specs
]
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
def _format_spec(spec: Spec) -> str:
@@ -392,7 +387,7 @@ def push_fn(args):
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
mirror = args.mirror
assert isinstance(mirror, spack.mirrors.mirror.Mirror)
assert isinstance(mirror, spack.mirror.Mirror)
push_url = mirror.push_url
@@ -731,7 +726,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
deduped_manifest = {}
for manifest_path in manifest_file_list:
with open(manifest_path, encoding="utf-8") as fd:
with open(manifest_path) as fd:
manifest = json.loads(fd.read())
for spec_hash, copy_list in manifest.items():
# Last duplicate hash wins
@@ -750,7 +745,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
copy_buildcache_file(copy_file["src"], dest)
def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
# Special case OCI images for now.
try:
image_ref = spack.oci.oci.image_from_mirror(mirror)

View File

@@ -253,7 +253,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool
if match:
new_versions.append((Version(match.group(1)), ver_line))
with open(filename, "r+", encoding="utf-8") as f:
with open(filename, "r+") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)

View File

@@ -6,6 +6,7 @@
import json
import os
import shutil
import warnings
from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs
@@ -16,11 +17,10 @@
import spack.ci as spack_ci
import spack.cmd
import spack.cmd.buildcache as buildcache
import spack.cmd.common.arguments
import spack.config as cfg
import spack.environment as ev
import spack.hash_types as ht
import spack.mirrors.mirror
import spack.mirror
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
@@ -62,8 +62,22 @@ def setup_parser(subparser):
"path to the file where generated jobs file should be written. "
"default is .gitlab-ci.yml in the root of the repository",
)
prune_dag_group = generate.add_mutually_exclusive_group()
prune_dag_group.add_argument(
generate.add_argument(
"--optimize",
action="store_true",
default=False,
help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
"run the generated document through a series of optimization passes "
"designed to reduce the size of the generated file",
)
generate.add_argument(
"--dependencies",
action="store_true",
default=False,
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
)
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
"--prune-dag",
action="store_true",
dest="prune_dag",
@@ -71,7 +85,7 @@ def setup_parser(subparser):
help="skip up-to-date specs\n\n"
"do not generate jobs for specs that are up-to-date on the mirror",
)
prune_dag_group.add_argument(
prune_group.add_argument(
"--no-prune-dag",
action="store_false",
dest="prune_dag",
@@ -79,23 +93,6 @@ def setup_parser(subparser):
help="process up-to-date specs\n\n"
"generate jobs for specs even when they are up-to-date on the mirror",
)
prune_ext_group = generate.add_mutually_exclusive_group()
prune_ext_group.add_argument(
"--prune-externals",
action="store_true",
dest="prune_externals",
default=True,
help="skip external specs\n\n"
"do not generate jobs for specs that are marked as external",
)
prune_ext_group.add_argument(
"--no-prune-externals",
action="store_false",
dest="prune_externals",
default=True,
help="process external specs\n\n"
"generate jobs for specs even when they are marked as external",
)
generate.add_argument(
"--check-index-only",
action="store_true",
@@ -111,18 +108,14 @@ def setup_parser(subparser):
)
generate.add_argument(
"--artifacts-root",
default="jobs_scratch_dir",
default=None,
help="path to the root of the artifacts directory\n\n"
"The spack ci module assumes it will normally be run from within your project "
"directory, wherever that is checked out to run your ci. The artifacts root directory "
"should specifiy a name that can safely be used for artifacts within your project "
"directory.",
"if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
"this directory. their location will be passed to generated child jobs through the "
"SPACK_CONCRETE_ENVIRONMENT_PATH variable",
)
generate.set_defaults(func=ci_generate)
spack.cmd.common.arguments.add_concretizer_args(generate)
spack.cmd.common.arguments.add_common_arguments(generate, ["jobs"])
# Rebuild the buildcache index associated with the mirror in the
# active, gitlab-enabled environment.
index = subparsers.add_parser(
@@ -152,7 +145,6 @@ def setup_parser(subparser):
help="stop stand-alone tests after the first failure",
)
rebuild.set_defaults(func=ci_rebuild)
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])
# Facilitate reproduction of a failed CI build job
reproduce = subparsers.add_parser(
@@ -195,8 +187,42 @@ def ci_generate(args):
before invoking this command. the value must be the CDash authorization token needed to create
a build group and register all generated jobs under it
"""
if args.optimize:
warnings.warn(
"The --optimize option has been deprecated, and currently has no effect. "
"It will be removed in Spack v0.24."
)
if args.dependencies:
warnings.warn(
"The --dependencies option has been deprecated, and currently has no effect. "
"It will be removed in Spack v0.24."
)
env = spack.cmd.require_active_env(cmd_name="ci generate")
spack_ci.generate_pipeline(env, args)
output_file = args.output_file
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
else:
output_file_path = os.path.abspath(output_file)
gen_ci_dir = os.path.dirname(output_file_path)
if not os.path.exists(gen_ci_dir):
os.makedirs(gen_ci_dir)
# Generate the jobs
spack_ci.generate_gitlab_ci_yaml(
env,
True,
output_file,
prune_dag=prune_dag,
check_index_only=index_only,
artifacts_root=artifacts_root,
)
def ci_reindex(args):
@@ -214,7 +240,7 @@ def ci_reindex(args):
ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
mirror = spack.mirrors.mirror.Mirror(remote_mirror_url)
mirror = spack.mirror.Mirror(remote_mirror_url)
buildcache.update_index(mirror, update_keys=True)
@@ -302,7 +328,7 @@ def ci_rebuild(args):
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
buildcache_destination = None
if "buildcache-destination" not in pipeline_mirrors:
tty.die("spack ci rebuild requires a mirror named 'buildcache-destination")
@@ -361,7 +387,7 @@ def ci_rebuild(args):
# Write this job's spec json into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
with open(job_spec_json_path, "w", encoding="utf-8") as fd:
with open(job_spec_json_path, "w") as fd:
fd.write(job_spec.to_json(hash=ht.dag_hash))
# Write some other details to aid in reproduction into an artifact
@@ -371,7 +397,7 @@ def ci_rebuild(args):
"job_spec_json": job_spec_json_file,
"ci_project_dir": ci_project_dir,
}
with open(repro_file, "w", encoding="utf-8") as fd:
with open(repro_file, "w") as fd:
fd.write(json.dumps(repro_details))
# Write information about spack into an artifact in the repro dir
@@ -407,19 +433,14 @@ def ci_rebuild(args):
if not config["verify_ssl"]:
spack_cmd.append("-k")
install_args = [
f'--use-buildcache={spack_ci.common.win_quote("package:never,dependencies:only")}'
]
install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
can_verify = spack_ci.can_verify_binaries()
verify_binaries = can_verify and spack_is_pr_pipeline is False
if not verify_binaries:
install_args.append("--no-check-signature")
if args.jobs:
install_args.append(f"-j{args.jobs}")
slash_hash = spack_ci.common.win_quote("/" + job_spec.dag_hash())
slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
@@ -584,7 +605,7 @@ def ci_rebuild(args):
rebuild_timer.stop()
try:
with open("install_timers.json", "w", encoding="utf-8") as timelog:
with open("install_timers.json", "w") as timelog:
extra_attributes = {"name": ".ci-rebuild"}
rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:

View File

@@ -743,7 +743,7 @@ def rst(args: Namespace, out: IO) -> None:
# extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
documented_commands: Set[str] = set()
for filename in args.rst_files:
with open(filename, encoding="utf-8") as f:
with open(filename) as f:
for line in f:
match = re.match(r"\.\. _cmd-(spack-.*):", line)
if match:
@@ -815,7 +815,7 @@ def prepend_header(args: Namespace, out: IO) -> None:
if not args.header:
return
with open(args.header, encoding="utf-8") as header:
with open(args.header) as header:
out.write(header.read())
@@ -836,7 +836,7 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
if args.update:
tty.msg(f"Updating file: {args.update}")
with open(args.update, "w", encoding="utf-8") as f:
with open(args.update, "w") as f:
prepend_header(args, f)
formatter(args, f)

View File

@@ -14,8 +14,7 @@
import spack.config
import spack.deptypes as dt
import spack.environment as ev
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.mirror
import spack.reporters
import spack.spec
import spack.store
@@ -169,7 +168,7 @@ def installed_specs(args):
else:
packages = []
for file in args.specfiles:
with open(file, "r", encoding="utf-8") as f:
with open(file, "r") as f:
s = spack.spec.Spec.from_yaml(f)
packages.append(s.format())
return packages
@@ -690,31 +689,31 @@ def mirror_name_or_url(m):
# If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m or m in (".", ".."):
return spack.mirrors.mirror.Mirror(m)
return spack.mirror.Mirror(m)
# Otherwise, the named mirror is required to exist.
try:
return spack.mirrors.utils.require_mirror_name(m)
return spack.mirror.require_mirror_name(m)
except ValueError as e:
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e
def mirror_url(url):
try:
return spack.mirrors.mirror.Mirror.from_url(url)
return spack.mirror.Mirror.from_url(url)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e
def mirror_directory(path):
try:
return spack.mirrors.mirror.Mirror.from_local_path(path)
return spack.mirror.Mirror.from_local_path(path)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e
def mirror_name(name):
try:
return spack.mirrors.utils.require_mirror_name(name)
return spack.mirror.require_mirror_name(name)
except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e

View File

@@ -10,12 +10,15 @@
import spack.cmd
import spack.deptypes as dt
import spack.error
import spack.prompt
import spack.spec
import spack.store
from spack import build_environment, traverse
from spack.cmd.common import arguments
from spack.cmd.location import location_emulator
from spack.context import Context
from spack.util.environment import dump_environment, pickle_environment
from spack.util.shell_detection import active_shell_type
def setup_parser(subparser):
@@ -26,6 +29,17 @@ def setup_parser(subparser):
subparser.add_argument(
"--pickle", metavar="FILE", help="dump a pickled source-able environment to FILE"
)
subparser.add_argument(
"-d", "--dive", action="store_true", help="dive into the build-env in a subshell"
)
subparser.add_argument(
"-c",
"--cd",
help="location to dive to or run command from (takes arguments from 'spack cd')",
)
subparser.add_argument(
"--status", action="store_true", help="check shell for an active build environment"
)
subparser.add_argument(
"spec",
nargs=argparse.REMAINDER,
@@ -75,7 +89,38 @@ def neighbors(self, item):
return item.edge.spec.edges_to_dependencies(depflag=depflag)
def run_command_in_subshell(
spec, context, cmd, prompt=False, dirty=False, cd_arg=None, shell=active_shell_type()
):
mods = build_environment.setup_package(spec.package, dirty, context)
if prompt:
mods.extend(spack.prompt.prompt_modifications(f"{spec.name}-{str(context)}-env", shell))
mods.apply_modifications()
if cd_arg:
prefix = "-" if len(cd_arg) == 1 else "--"
loc_args = [f"{prefix}{cd_arg}"]
# don't add spec for cd if using env since spec hash is not the env
if not (cd_arg == "e" or cd_arg == "env"):
loc_args.append(f"/{spec.dag_hash()}")
location = location_emulator(*loc_args)
os.chdir(location)
os.execvp(cmd[0], cmd)
def emulate_env_utility(cmd_name, context: Context, args):
if args.status:
context_var = os.environ.get(f"SPACK_{str(context).upper()}_ENV", None)
if context_var:
tty.msg(f"In {str(context)} env {context_var}")
else:
tty.msg(f"{str(context)} environment not detected")
exit(0)
if not args.spec:
tty.die("spack %s requires a spec." % cmd_name)
@@ -92,6 +137,12 @@ def emulate_env_utility(cmd_name, context: Context, args):
spec = args.spec[0]
cmd = args.spec[1:]
if args.dive:
if cmd:
tty.die("--dive and additional commands can't be run together")
else:
cmd = [active_shell_type()]
if not spec:
tty.die("spack %s requires a spec." % cmd_name)
@@ -106,6 +157,7 @@ def emulate_env_utility(cmd_name, context: Context, args):
visitor = AreDepsInstalledVisitor(context=context)
# Mass install check needs read transaction.
# FIXME: this command is slow
with spack.store.STORE.db.read_transaction():
traverse.traverse_breadth_first_with_visitor([spec], traverse.CoverNodesVisitor(visitor))
@@ -123,7 +175,11 @@ def emulate_env_utility(cmd_name, context: Context, args):
),
)
build_environment.setup_package(spec.package, args.dirty, context)
if cmd:
run_command_in_subshell(spec, context, cmd, prompt=args.dive, cd_arg=args.cd)
else:
# setup build env if no command to run
build_environment.setup_package(spec.package, args.dirty, context)
if args.dump:
# Dump a source-able environment to a text file.
@@ -135,10 +191,6 @@ def emulate_env_utility(cmd_name, context: Context, args):
tty.msg("Pickling a source-able environment to {0}".format(args.pickle))
pickle_environment(args.pickle)
if cmd:
# Execute the command with the new environment
os.execvp(cmd[0], cmd)
elif not bool(args.pickle or args.dump):
# If no command or dump/pickle option then act like the "env" command
# and print out env vars.
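
Taken together, the hunks above give spack build-env two new paths into the same helper: --dive exec's the user's shell inside the package's build environment, and --cd reuses the spack cd / spack location flags to change directory first. A minimal, hedged sketch of driving run_command_in_subshell directly (the spec string and shell are illustrative assumptions, not part of the diff):

# Hedged sketch only: exercises run_command_in_subshell as defined above;
# "zlib" and "bash" are illustrative, and execvp replaces the current process.
import spack.cmd
from spack.cmd.common.env_utility import run_command_in_subshell
from spack.context import Context

spec = spack.cmd.parse_specs(["zlib"], concretize=True)[0]

# Roughly what `spack build-env --dive --cd b zlib` does: set up the build
# environment, cd to the build directory, then exec an interactive shell.
run_command_in_subshell(spec, Context.BUILD, ["bash"], prompt=True, cd_arg="b")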

View File

@@ -14,7 +14,6 @@
import spack.config
import spack.environment as ev
import spack.error
import spack.schema
import spack.schema.env
import spack.spec
import spack.store
@@ -567,7 +566,7 @@ def config_prefer_upstream(args):
# Simply write the config to the specified file.
existing = spack.config.get("packages", scope=scope)
new = spack.schema.merge_yaml(existing, pkgs)
new = spack.config.merge_yaml(existing, pkgs)
spack.config.set("packages", new, scope)
config_file = spack.config.CONFIG.get_config_filename(scope, section)

View File

@@ -110,7 +110,7 @@ def write(self, pkg_path):
all_deps.append(self.dependencies)
# Write out a template for the file
with open(pkg_path, "w", encoding="utf-8") as pkg_file:
with open(pkg_path, "w") as pkg_file:
pkg_file.write(
package_template.format(
name=self.name,

View File

@@ -23,10 +23,9 @@
import spack.installer
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
from spack.error import SpackError
from ..enums import InstallRecordStatus
description = "replace one package with another via symlinks"
section = "admin"
level = "long"
@@ -96,12 +95,8 @@ def deprecate(parser, args):
if len(specs) != 2:
raise SpackError("spack deprecate requires exactly two specs")
deprecate = spack.cmd.disambiguate_spec(
specs[0],
env,
local=True,
installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
)
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
deprecate = spack.cmd.disambiguate_spec(specs[0], env, local=True, installed=install_query)
if args.install:
deprecator = specs[1].concretized()

View File

@@ -8,15 +8,18 @@
import llnl.util.tty as tty
import spack.build_environment
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.environment as ev
import spack.repo
import spack.version
from spack.cmd.common import arguments
from spack.cmd.common.env_utility import run_command_in_subshell
from spack.context import Context
from spack.installer import PackageInstaller
description = "developer build: build from code in current working directory"
description = "developer build: build from user managed code"
section = "build"
level = "long"
@@ -28,7 +31,11 @@ def setup_parser(subparser):
"--source-path",
dest="source_path",
default=None,
help="path to source directory (defaults to the current directory)",
help=(
"path to source directory (defaults to the current directory)."
" ignored when using an active environment since the path is determined"
" by the develop section of the environment manifest."
),
)
subparser.add_argument(
"-i",
@@ -53,12 +60,19 @@ def setup_parser(subparser):
help="do not display verbose build output while installing",
)
subparser.add_argument(
"-D",
"--drop-in",
type=str,
dest="shell",
default=None,
help="drop into a build environment in a new shell, e.g., bash",
)
subparser.add_argument(
"-p",
"--prompt",
action="store_true",
help="change the prompt when droping into the build-env",
)
subparser.add_argument(
"--test",
default=None,
@@ -102,25 +116,35 @@ def dev_build(self, args):
if not spack.repo.PATH.exists(spec.name):
raise spack.repo.UnknownPackageError(spec.name)
if not spec.versions.concrete_range_as_version:
tty.die(
"spack dev-build spec must have a single, concrete version. "
"Did you forget a package version number?"
)
env = ev.active_environment()
if env:
matches = env.all_matching_specs(spec)
dev_matches = [m for m in matches if m.is_develop]
if len(dev_matches) > 1:
tty.die("Too many matching develop specs in the active environment")
elif len(dev_matches) < 1:
tty.die("No matching develop specs found in the active environment")
else:
spec = dev_matches[0]
else:
if not spec.versions.concrete_range_as_version:
version = max(spec.package_class.versions.keys())
spec.versions = spack.version.VersionList([version])
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
source_path = args.source_path
if source_path is None:
source_path = os.getcwd()
source_path = os.path.abspath(source_path)
source_path = args.source_path
if source_path is None:
source_path = os.getcwd()
source_path = os.path.abspath(source_path)
# Forces the build to run out of the source directory.
spec.constrain("dev_path=%s" % source_path)
spec.concretize()
# Forces the build to run out of the source directory.
spec.constrain("dev_path=%s" % source_path)
spec.concretize()
if spec.installed:
tty.error("Already installed in %s" % spec.prefix)
tty.msg("Uninstall or try adding a version suffix for this dev build.")
sys.exit(1)
if spec.installed:
tty.error("Already installed in %s" % spec.prefix)
tty.msg("Uninstall or try adding a version suffix for this dev build.")
sys.exit(1)
# disable checksumming if requested
if args.no_checksum:
@@ -146,5 +170,6 @@ def dev_build(self, args):
# drop into the build environment of the package?
if args.shell is not None:
spack.build_environment.setup_package(spec.package, dirty=False)
os.execvp(args.shell, [args.shell])
run_command_in_subshell(
spec, Context.BUILD, [args.shell], prompt=args.prompt, shell=args.shell
)

View File

@@ -76,7 +76,7 @@ def locate_package(name: str, repo: spack.repo.Repo) -> str:
path = repo.filename_for_package_name(name)
try:
with open(path, "r", encoding="utf-8"):
with open(path, "r"):
return path
except OSError as e:
if e.errno == errno.ENOENT:
@@ -93,7 +93,7 @@ def locate_file(name: str, path: str) -> str:
# Try to open direct match.
try:
with open(file_path, "r", encoding="utf-8"):
with open(file_path, "r"):
return file_path
except OSError as e:
if e.errno != errno.ENOENT:

View File

@@ -865,7 +865,7 @@ def env_loads(args):
args.recurse_dependencies = False
loads_file = fs.join_path(env.path, "loads")
with open(loads_file, "w", encoding="utf-8") as f:
with open(loads_file, "w") as f:
specs = env._get_environment_specs(recurse_dependencies=recurse_dependencies)
spack.cmd.modules.loads(module_type, specs, args, f)
@@ -1053,7 +1053,7 @@ def env_depfile(args):
# Finally write to stdout/file.
if args.output:
with open(args.output, "w", encoding="utf-8") as f:
with open(args.output, "w") as f:
f.write(makefile)
else:
sys.stdout.write(makefile)

View File

@@ -17,8 +17,7 @@
import spack.spec
import spack.store
from spack.cmd.common import arguments
from ..enums import InstallRecordStatus
from spack.database import InstallStatuses
description = "list and search installed packages"
section = "basic"
@@ -138,22 +137,21 @@ def setup_parser(subparser):
subparser.add_argument(
"--loaded", action="store_true", help="show only packages loaded in the user environment"
)
only_missing_or_deprecated = subparser.add_mutually_exclusive_group()
only_missing_or_deprecated.add_argument(
subparser.add_argument(
"-M",
"--only-missing",
action="store_true",
dest="only_missing",
help="show only missing dependencies",
)
only_missing_or_deprecated.add_argument(
"--only-deprecated", action="store_true", help="show only deprecated packages"
)
subparser.add_argument(
"--deprecated",
action="store_true",
help="show deprecated packages as well as installed specs",
)
subparser.add_argument(
"--only-deprecated", action="store_true", help="show only deprecated packages"
)
subparser.add_argument(
"--install-tree",
action="store",
@@ -167,23 +165,14 @@ def setup_parser(subparser):
def query_arguments(args):
if args.only_missing and (args.deprecated or args.missing):
raise RuntimeError("cannot use --only-missing with --deprecated, or --missing")
if args.only_deprecated and (args.deprecated or args.missing):
raise RuntimeError("cannot use --only-deprecated with --deprecated, or --missing")
installed = InstallRecordStatus.INSTALLED
if args.only_missing:
installed = InstallRecordStatus.MISSING
elif args.only_deprecated:
installed = InstallRecordStatus.DEPRECATED
if args.missing:
installed |= InstallRecordStatus.MISSING
if args.deprecated:
installed |= InstallRecordStatus.DEPRECATED
# Set up query arguments.
installed = []
if not (args.only_missing or args.only_deprecated):
installed.append(InstallStatuses.INSTALLED)
if (args.deprecated or args.only_deprecated) and not args.only_missing:
installed.append(InstallStatuses.DEPRECATED)
if (args.missing or args.only_missing) and not args.only_deprecated:
installed.append(InstallStatuses.MISSING)
predicate_fn = None
if args.unknown:
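
One side of this hunk expresses installation status as a combinable InstallRecordStatus flag instead of a list of InstallStatuses entries. A short sketch of how those flags compose, assuming InstallRecordStatus behaves like a standard enum.Flag as the |= usage above implies (the final query call is shown only as a comment):

# Hedged sketch of the flag-style status query; the flags chosen are illustrative.
from spack.enums import InstallRecordStatus  # the "..enums" relative import above

installed = InstallRecordStatus.INSTALLED
installed |= InstallRecordStatus.MISSING      # what `spack find --missing` adds
installed |= InstallRecordStatus.DEPRECATED   # what `spack find --deprecated` adds

# A single flag value then replaces the old list argument, e.g.:
# spack.store.STORE.db.query_local(spec, installed=installed)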

View File

@@ -8,7 +8,7 @@
import tempfile
import spack.binary_distribution
import spack.mirrors.mirror
import spack.mirror
import spack.paths
import spack.stage
import spack.util.gpg
@@ -217,11 +217,11 @@ def gpg_publish(args):
mirror = None
if args.directory:
url = spack.util.url.path_to_file_url(args.directory)
mirror = spack.mirrors.mirror.Mirror(url, url)
mirror = spack.mirror.Mirror(url, url)
elif args.mirror_name:
mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
elif args.mirror_url:
mirror = spack.mirrors.mirror.Mirror(args.mirror_url, args.mirror_url)
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution._url_push_keys(

View File

@@ -78,8 +78,8 @@
boxlib @B{dim=2} boxlib built for 2 dimensions
libdwarf @g{%intel} ^libelf@g{%gcc}
libdwarf, built with intel compiler, linked to libelf built with gcc
mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
mvapich2, built with gcc compiler, with support for multiple fabrics
mvapich2 @g{%pgi} @B{fabrics=psm,mrail,sock}
mvapich2, built with pgi compiler, with support for multiple fabrics
"""

View File

@@ -11,7 +11,6 @@
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify
import spack.builder
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test
@@ -203,13 +202,11 @@ def print_namespace(pkg, args):
def print_phases(pkg, args):
"""output installation phases"""
builder = spack.builder.create(pkg)
if hasattr(builder, "phases") and builder.phases:
if hasattr(pkg.builder, "phases") and pkg.builder.phases:
color.cprint("")
color.cprint(section_title("Installation Phases:"))
phase_str = ""
for phase in builder.phases:
for phase in pkg.builder.phases:
phase_str += " {0}".format(phase)
color.cprint(phase_str)

View File

@@ -291,7 +291,7 @@ def _dump_log_on_error(e: InstallError):
tty.error("'spack install' created no log.")
else:
sys.stderr.write("Full build log:\n")
with open(e.pkg.log_path, errors="replace", encoding="utf-8") as log:
with open(e.pkg.log_path, errors="replace") as log:
shutil.copyfileobj(log, sys.stderr)
@@ -445,7 +445,7 @@ def concrete_specs_from_file(args):
"""Return the list of concrete specs read from files."""
result = []
for file in args.specfiles:
with open(file, "r", encoding="utf-8") as f:
with open(file, "r") as f:
if file.endswith("yaml") or file.endswith("yml"):
s = spack.spec.Spec.from_yaml(f)
else:

View File

@@ -191,7 +191,7 @@ def verify(args):
for relpath in _licensed_files(args):
path = os.path.join(args.root, relpath)
with open(path, encoding="utf-8") as f:
with open(path) as f:
lines = [line for line in f][:license_lines]
error = _check_license(lines, path)

View File

@@ -340,7 +340,7 @@ def list(parser, args):
return
tty.msg("Updating file: %s" % args.update)
with open(args.update, "w", encoding="utf-8") as f:
with open(args.update, "w") as f:
formatter(sorted_packages, f)
elif args.count:

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import os
import llnl.util.tty as tty
@@ -15,7 +16,7 @@
import spack.stage
from spack.cmd.common import arguments
description = "print out locations of packages and spack directories"
description = "location = str out locations of packages and spack directories"
section = "basic"
level = "long"
@@ -86,15 +87,11 @@ def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["spec"])
def location(parser, args):
def _location(parser, args):
if args.module_dir:
print(spack.paths.module_path)
return
return spack.paths.module_path
if args.spack_root:
print(spack.paths.prefix)
return
return spack.paths.prefix
# no -e corresponds to False, -e without arg to None, -e name to the string name.
if args.location_env is not False:
if args.location_env is None:
@@ -106,16 +103,13 @@ def location(parser, args):
if not ev.exists(args.location_env):
tty.die("no such environment: '%s'" % args.location_env)
path = ev.root(args.location_env)
print(path)
return
return path
if args.packages:
print(spack.repo.PATH.first_repo().root)
return
return spack.repo.PATH.first_repo().root
if args.stages:
print(spack.stage.get_stage_root())
return
return spack.stage.get_stage_root()
specs = spack.cmd.parse_specs(args.spec)
@@ -129,15 +123,13 @@ def location(parser, args):
if args.install_dir:
env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env, first=args.find_first)
print(spec.prefix)
return
return spec.prefix
spec = specs[0]
# Package dir just needs the spec name
if args.package_dir:
print(spack.repo.PATH.dirname_for_package_name(spec.name))
return
return spack.repo.PATH.dirname_for_package_name(spec.name)
# Either concretize or filter from already concretized environment
spec = spack.cmd.matching_spec_from_env(spec)
@@ -145,20 +137,17 @@ def location(parser, args):
builder = spack.builder.create(pkg)
if args.stage_dir:
print(pkg.stage.path)
return
return pkg.stage.path
if args.build_dir:
# Out of source builds have build_directory defined
if hasattr(builder, "build_directory"):
# build_directory can be either absolute or relative to the stage path
# in either case os.path.join makes it absolute
print(os.path.normpath(os.path.join(pkg.stage.path, builder.build_directory)))
return
return os.path.normpath(os.path.join(pkg.stage.path, builder.build_directory))
# Otherwise assume in-source builds
print(pkg.stage.source_path)
return
return pkg.stage.source_path
# source dir remains, which requires the spec to be staged
if not pkg.stage.expanded:
@@ -168,4 +157,15 @@ def location(parser, args):
)
# Default to source dir.
print(pkg.stage.source_path)
return pkg.stage.source_path
# Is this too hacky? I don't want to reproduce the parser for an internal function
def location_emulator(*args):
parser = argparse.ArgumentParser()
setup_parser(parser)
return _location(parser, parser.parse_args(args))
def location(parser, args):
print(_location(parser, args))
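
Splitting location into a path-returning _location plus a thin printing wrapper is what makes the command reusable from other code; location_emulator rebuilds the parser so callers can pass the same flags spack location accepts. A hedged sketch of that programmatic use (the flag and the already-concretized spec are assumptions for illustration):

# Hedged sketch: reuse `spack location` flags programmatically, as build-env --cd does.
import os
from spack.cmd.location import location_emulator

# Equivalent of `spack location -b /<hash>` for some already-concretized `spec`:
build_dir = location_emulator("-b", f"/{spec.dag_hash()}")
os.chdir(build_dir)  # run_command_in_subshell does exactly this for --cd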

View File

@@ -31,7 +31,7 @@ def line_to_rtf(str):
return str.replace("\n", "\\par")
contents = ""
with open(file_path, "r+", encoding="utf-8") as f:
with open(file_path, "r+") as f:
for line in f.readlines():
contents += line_to_rtf(line)
return rtf_header.format(contents)
@@ -93,7 +93,7 @@ def make_installer(parser, args):
rtf_spack_license = txt_to_rtf(spack_license)
spack_license = posixpath.join(source_dir, "LICENSE.rtf")
with open(spack_license, "w", encoding="utf-8") as rtf_license:
with open(spack_license, "w") as rtf_license:
written = rtf_license.write(rtf_spack_license)
if written == 0:
raise RuntimeError("Failed to generate properly formatted license file")

View File

@@ -10,8 +10,7 @@
import spack.cmd
import spack.store
from spack.cmd.common import arguments
from ..enums import InstallRecordStatus
from spack.database import InstallStatuses
description = "mark packages as explicitly or implicitly installed"
section = "admin"
@@ -68,7 +67,8 @@ def find_matching_specs(specs, allow_multiple_matches=False):
has_errors = False
for spec in specs:
matching = spack.store.STORE.db.query_local(spec, installed=InstallRecordStatus.INSTALLED)
install_query = [InstallStatuses.INSTALLED]
matching = spack.store.STORE.db.query_local(spec, installed=install_query)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:

View File

@@ -14,8 +14,7 @@
import spack.concretize
import spack.config
import spack.environment as ev
import spack.mirrors.mirror
import spack.mirrors.utils
import spack.mirror
import spack.repo
import spack.spec
import spack.util.web as web_util
@@ -366,15 +365,15 @@ def mirror_add(args):
connection["autopush"] = args.autopush
if args.signed is not None:
connection["signed"] = args.signed
mirror = spack.mirrors.mirror.Mirror(connection, name=args.name)
mirror = spack.mirror.Mirror(connection, name=args.name)
else:
mirror = spack.mirrors.mirror.Mirror(args.url, name=args.name)
spack.mirrors.utils.add(mirror, args.scope)
mirror = spack.mirror.Mirror(args.url, name=args.name)
spack.mirror.add(mirror, args.scope)
def mirror_remove(args):
"""remove a mirror by name"""
spack.mirrors.utils.remove(args.name, args.scope)
spack.mirror.remove(args.name, args.scope)
def _configure_mirror(args):
@@ -383,7 +382,7 @@ def _configure_mirror(args):
if args.name not in mirrors:
tty.die(f"No mirror found with name {args.name}.")
entry = spack.mirrors.mirror.Mirror(mirrors[args.name], args.name)
entry = spack.mirror.Mirror(mirrors[args.name], args.name)
direction = "fetch" if args.fetch else "push" if args.push else None
changes = {}
if args.url:
@@ -450,7 +449,7 @@ def mirror_set_url(args):
def mirror_list(args):
"""print out available mirrors to the console"""
mirrors = spack.mirrors.mirror.MirrorCollection(scope=args.scope)
mirrors = spack.mirror.MirrorCollection(scope=args.scope)
if not mirrors:
tty.msg("No mirrors configured.")
return
@@ -468,7 +467,7 @@ def specs_from_text_file(filename, concretize=False):
concretize (bool): if True concretize the specs before returning
the list.
"""
with open(filename, "r", encoding="utf-8") as f:
with open(filename, "r") as f:
specs_in_file = f.readlines()
specs_in_file = [s.strip() for s in specs_in_file]
return spack.cmd.parse_specs(" ".join(specs_in_file), concretize=concretize)
@@ -490,9 +489,9 @@ def concrete_specs_from_user(args):
def extend_with_additional_versions(specs, num_versions):
if num_versions == "all":
mirror_specs = spack.mirrors.utils.get_all_versions(specs)
mirror_specs = spack.mirror.get_all_versions(specs)
else:
mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
mirror_specs = spack.mirror.get_matching_versions(specs, num_versions=num_versions)
mirror_specs = [x.concretized() for x in mirror_specs]
return mirror_specs
@@ -571,7 +570,7 @@ def concrete_specs_from_environment():
def all_specs_with_all_versions():
specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
mirror_specs = spack.mirrors.utils.get_all_versions(specs)
mirror_specs = spack.mirror.get_all_versions(specs)
mirror_specs.sort(key=lambda s: (s.name, s.version))
return mirror_specs
@@ -660,21 +659,19 @@ def _specs_and_action(args):
def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
mirror_cache, mirror_stats = spack.mirrors.utils.mirror_cache_and_stats(
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
path, skip_unstable_versions=skip_unstable_versions
)
for candidate in mirror_specs:
pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
pkg_obj = pkg_cls(spack.spec.Spec(candidate))
mirror_stats.next_spec(pkg_obj.spec)
spack.mirrors.utils.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
process_mirror_stats(*mirror_stats.stats())
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirrors.utils.create(
path, mirror_specs, skip_unstable_versions
)
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)
@@ -684,7 +681,7 @@ def mirror_destroy(args):
mirror_url = None
if args.mirror_name:
result = spack.mirrors.mirror.MirrorCollection().lookup(args.mirror_name)
result = spack.mirror.MirrorCollection().lookup(args.mirror_name)
mirror_url = result.push_url
elif args.mirror_url:
mirror_url = args.mirror_url

View File

@@ -8,7 +8,6 @@
import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules
import spack.modules.lmod

View File

@@ -7,7 +7,6 @@
import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules
import spack.modules.tcl

View File

@@ -150,7 +150,7 @@ def pkg_source(args):
content = ph.canonical_source(spec)
else:
message = "Source for %s:" % filename
with open(filename, encoding="utf-8") as f:
with open(filename) as f:
content = f.read()
if sys.stdout.isatty():

View File

@@ -94,7 +94,7 @@ def ipython_interpreter(args):
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file, encoding="utf-8") as startup:
with open(startup_file) as startup:
exec(startup.read())
# IPython can also support running a script OR command, not both
@@ -126,7 +126,7 @@ def python_interpreter(args):
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file, encoding="utf-8") as startup:
with open(startup_file) as startup:
console.runsource(startup.read(), startup_file, "exec")
if args.python_command:
propagate_exceptions_from(console)

View File

@@ -82,6 +82,14 @@ def spec(parser, args):
if args.namespaces:
fmt = "{namespace}." + fmt
tree_kwargs = {
"cover": args.cover,
"format": fmt,
"hashlen": None if args.very_long else 7,
"show_types": args.types,
"status_fn": install_status_fn if args.install_status else None,
}
# use a read transaction if we are getting install status for every
# spec in the DAG. This avoids repeatedly querying the DB.
tree_context = lang.nullcontext
@@ -91,35 +99,46 @@ def spec(parser, args):
env = ev.active_environment()
if args.specs:
concrete_specs = spack.cmd.parse_specs(args.specs, concretize=True)
input_specs = spack.cmd.parse_specs(args.specs)
concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
specs = list(zip(input_specs, concretized_specs))
elif env:
env.concretize()
concrete_specs = env.concrete_roots()
specs = env.concretized_specs()
if not args.format:
# environments are printed together in a combined tree() invocation,
# except when using --yaml or --json, which we print spec by spec below.
tree_kwargs["key"] = spack.traverse.by_dag_hash
tree_kwargs["hashes"] = args.long or args.very_long
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
return
else:
tty.die("spack spec requires at least one spec or an active environment")
# With --yaml, --json, or --format, just print the raw specs to output
if args.format:
for spec in concrete_specs:
for input, output in specs:
# With --yaml or --json, just print the raw specs to output
if args.format:
if args.format == "yaml":
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
elif args.format == "json":
print(spec.to_json(hash=ht.dag_hash))
print(output.to_json(hash=ht.dag_hash))
else:
print(spec.format(args.format))
return
print(output.format(args.format))
continue
with tree_context():
print(
spack.spec.tree(
concrete_specs,
cover=args.cover,
format=fmt,
hashlen=None if args.very_long else 7,
show_types=args.types,
status_fn=install_status_fn if args.install_status else None,
hashes=args.long or args.very_long,
key=spack.traverse.by_dag_hash,
)
)
with tree_context():
# Only show the headers for input specs that are not concrete to avoid
# repeated output. This happens because parse_specs outputs concrete
# specs for `/hash` inputs.
if not input.concrete:
tree_kwargs["hashes"] = False # Always False for input spec
print("Input spec")
print("--------------------------------")
print(input.tree(**tree_kwargs))
print("Concretized")
print("--------------------------------")
tree_kwargs["hashes"] = args.long or args.very_long
print(output.tree(**tree_kwargs))

View File

@@ -19,48 +19,11 @@
level = "long"
class StageFilter:
"""
Encapsulation of reasons to skip staging
"""
def __init__(self, exclusions, skip_installed):
"""
:param exclusions: A list of specs to skip if satisfied.
:param skip_installed: A boolean indicating whether to skip already installed specs.
"""
self.exclusions = exclusions
self.skip_installed = skip_installed
def __call__(self, spec):
"""filter action, true means spec should be filtered"""
if spec.external:
return True
if self.skip_installed and spec.installed:
return True
if any(spec.satisfies(exclude) for exclude in self.exclusions):
return True
return False
def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
subparser.add_argument(
"-p", "--path", dest="path", help="path to stage package, does not add to spack tree"
)
subparser.add_argument(
"-e",
"--exclude",
action="append",
default=[],
help="exclude packages that satisfy the specified specs",
)
subparser.add_argument(
"-s", "--skip-installed", action="store_true", help="dont restage already installed specs"
)
arguments.add_concretizer_args(subparser)
@@ -68,14 +31,11 @@ def stage(parser, args):
if args.no_checksum:
spack.config.set("config:checksum", False, scope="command_line")
exclusion_specs = spack.cmd.parse_specs(args.exclude, concretize=False)
filter = StageFilter(exclusion_specs, args.skip_installed)
if not args.specs:
env = ev.active_environment()
if not env:
tty.die("`spack stage` requires a spec or an active environment")
return _stage_env(env, filter)
return _stage_env(env)
specs = spack.cmd.parse_specs(args.specs, concretize=False)
@@ -89,11 +49,6 @@ def stage(parser, args):
specs = spack.cmd.matching_specs_from_env(specs)
for spec in specs:
spec = spack.cmd.matching_spec_from_env(spec)
if filter(spec):
continue
pkg = spec.package
if custom_path:
@@ -102,13 +57,9 @@ def stage(parser, args):
_stage(pkg)
def _stage_env(env: ev.Environment, filter):
def _stage_env(env: ev.Environment):
tty.msg(f"Staging specs from environment {env.name}")
for spec in spack.traverse.traverse_nodes(env.concrete_roots()):
if filter(spec):
continue
_stage(spec.package)
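
The StageFilter class shown in this file's hunks folds the three skip conditions (external spec, already installed with --skip-installed, or matching an --exclude spec) into one callable shared by the spec loop and _stage_env. A brief, hedged sketch of its use (the spec strings are made up):

# Hedged sketch of StageFilter semantics; the spec strings are illustrative.
import spack.cmd
from spack.cmd.stage import StageFilter

exclusions = spack.cmd.parse_specs(["openssl"], concretize=False)
skip = StageFilter(exclusions, skip_installed=True)

# Given some iterable `concrete_specs` of concrete specs, externals, installed
# specs, and anything satisfying an exclusion drop out before staging.
to_stage = [s for s in concrete_specs if not skip(s)]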

View File

@@ -3,21 +3,18 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import ast
import os
import re
import sys
from itertools import zip_longest
from typing import Dict, List, Optional
import llnl.util.tty as tty
import llnl.util.tty.color as color
from llnl.util.filesystem import working_dir
import spack.paths
import spack.repo
import spack.util.git
from spack.util.executable import Executable, which
from spack.util.executable import which
description = "runs source code style checks on spack"
section = "developer"
@@ -39,7 +36,10 @@ def grouper(iterable, n, fillvalue=None):
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a method to ensure the tool is
#: bootstrapped or present in the environment.
tool_names = ["import", "isort", "black", "flake8", "mypy"]
tool_names = ["isort", "black", "flake8", "mypy"]
#: tools we run in spack style
tools = {}
#: warnings to ignore in mypy
mypy_ignores = [
@@ -61,28 +61,14 @@ def is_package(f):
#: decorator for adding tools to the list
class tool:
def __init__(self, name: str, required: bool = False, external: bool = True) -> None:
def __init__(self, name, required=False):
self.name = name
self.external = external
self.required = required
def __call__(self, fun):
self.fun = fun
tools[self.name] = self
tools[self.name] = (fun, self.required)
return fun
@property
def installed(self) -> bool:
return bool(which(self.name)) if self.external else True
@property
def executable(self) -> Optional[Executable]:
return which(self.name) if self.external else None
#: tools we run in spack style
tools: Dict[str, tool] = {}
def changed_files(base="develop", untracked=True, all_files=False, root=None):
"""Get list of changed files in the Spack repository.
@@ -190,22 +176,22 @@ def setup_parser(subparser):
"-t",
"--tool",
action="append",
help="specify which tools to run (default: %s)" % ", ".join(tool_names),
help="specify which tools to run (default: %s)" % ",".join(tool_names),
)
tool_group.add_argument(
"-s",
"--skip",
metavar="TOOL",
action="append",
help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
help="specify tools to skip (choose from %s)" % ",".join(tool_names),
)
subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
def cwd_relative(path, root, initial_working_dir):
def cwd_relative(path, args):
"""Translate prefix-relative path to current working directory-relative."""
return os.path.relpath(os.path.join(root, path), initial_working_dir)
return os.path.relpath(os.path.join(args.root, path), args.initial_working_dir)
def rewrite_and_print_output(
@@ -215,10 +201,7 @@ def rewrite_and_print_output(
# print results relative to current working directory
def translate(match):
return replacement.format(
cwd_relative(match.group(1), args.root, args.initial_working_dir),
*list(match.groups()[1:]),
)
return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))
for line in output.split("\n"):
if not line:
@@ -237,7 +220,7 @@ def print_style_header(file_list, args, tools_to_run):
# translate modified paths to cwd_relative if needed
paths = [filename.strip() for filename in file_list]
if not args.root_relative:
paths = [cwd_relative(filename, args.root, args.initial_working_dir) for filename in paths]
paths = [cwd_relative(filename, args) for filename in paths]
tty.msg("Modified files", *paths)
sys.stdout.flush()
@@ -323,6 +306,8 @@ def process_files(file_list, is_args):
rewrite_and_print_output(output, args, pat, replacement)
packages_isort_args = (
"--rm",
"spack",
"--rm",
"spack.pkgkit",
"--rm",
@@ -367,137 +352,17 @@ def run_black(black_cmd, file_list, args):
return returncode
def _module_part(root: str, expr: str):
parts = expr.split(".")
# spack.pkg is for repositories, don't try to resolve it here.
if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
return None
while parts:
f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
f2 = os.path.join(root, "lib", "spack", *parts, "__init__.py")
if (
os.path.exists(f1)
# ensure case sensitive match
and f"{parts[-1]}.py" in os.listdir(os.path.dirname(f1))
or os.path.exists(f2)
):
return ".".join(parts)
parts.pop()
return None
def _run_import_check(
file_list: List[str],
*,
fix: bool,
root_relative: bool,
root=spack.paths.prefix,
working_dir=spack.paths.prefix,
out=sys.stdout,
):
if sys.version_info < (3, 9):
print("import check requires Python 3.9 or later")
return 0
is_use = re.compile(r"(?<!from )(?<!import )(?:llnl|spack)\.[a-zA-Z0-9_\.]+")
# redundant imports followed by a `# comment` are ignored, because there can be legitimate reasons
# to import a module: execute module scope init code, or to deal with circular imports.
is_abs_import = re.compile(r"^import ((?:llnl|spack)\.[a-zA-Z0-9_\.]+)$", re.MULTILINE)
exit_code = 0
for file in file_list:
to_add = set()
to_remove = []
pretty_path = file if root_relative else cwd_relative(file, root, working_dir)
try:
with open(file, "r", encoding="utf-8") as f:
contents = f.read()
parsed = ast.parse(contents)
except Exception:
exit_code = 1
print(f"{pretty_path}: could not parse", file=out)
continue
for m in is_abs_import.finditer(contents):
if contents.count(m.group(1)) == 1:
to_remove.append(m.group(0))
exit_code = 1
print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
# Clear all strings to avoid matching comments/strings etc.
for node in ast.walk(parsed):
if isinstance(node, ast.Constant) and isinstance(node.value, str):
node.value = ""
filtered_contents = ast.unparse(parsed) # novermin
for m in is_use.finditer(filtered_contents):
module = _module_part(root, m.group(0))
if not module or module in to_add:
continue
if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
continue
to_add.add(module)
exit_code = 1
print(f"{pretty_path}: missing import: {module} ({m.group(0)})", file=out)
if not fix or not to_add and not to_remove:
continue
with open(file, "r", encoding="utf-8") as f:
lines = f.readlines()
if to_add:
# insert missing imports before the first import, delegate ordering to isort
for node in parsed.body:
if isinstance(node, (ast.Import, ast.ImportFrom)):
first_line = node.lineno
break
else:
print(f"{pretty_path}: could not fix", file=out)
continue
lines.insert(first_line, "\n".join(f"import {x}" for x in to_add) + "\n")
new_contents = "".join(lines)
# remove redundant imports
for statement in to_remove:
new_contents = new_contents.replace(f"{statement}\n", "")
with open(file, "w", encoding="utf-8") as f:
f.write(new_contents)
return exit_code
@tool("import", external=False)
def run_import_check(import_check_cmd, file_list, args):
exit_code = _run_import_check(
file_list,
fix=args.fix,
root_relative=args.root_relative,
root=args.root,
working_dir=args.initial_working_dir,
)
print_tool_result("import", exit_code)
return exit_code
def validate_toolset(arg_value):
"""Validate --tool and --skip arguments (sets of optionally comma-separated tools)."""
tools = set(",".join(arg_value).split(",")) # allow args like 'isort,flake8'
for tool in tools:
if tool not in tool_names:
tty.die("Invalid tool: '%s'" % tool, "Choose from: %s" % ", ".join(tool_names))
tty.die("Invaild tool: '%s'" % tool, "Choose from: %s" % ", ".join(tool_names))
return tools
def missing_tools(tools_to_run: List[str]) -> List[str]:
return [t for t in tools_to_run if not tools[t].installed]
def missing_tools(tools_to_run):
return [t for t in tools_to_run if which(t) is None]
def _bootstrap_dev_dependencies():
@@ -552,9 +417,9 @@ def prefix_relative(path):
print_style_header(file_list, args, tools_to_run)
for tool_name in tools_to_run:
tool = tools[tool_name]
run_function, required = tools[tool_name]
print_tool_header(tool_name)
return_code |= tool.fun(tool.executable, file_list, args)
return_code |= run_function(which(tool_name), file_list, args)
if return_code == 0:
tty.msg(color.colorize("@*{spack style checks were clean}"))
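
The _run_import_check helper in this file's hunks flags two problems per file: a fully-qualified spack.*/llnl.* attribute use with no matching import statement, and an absolute import that nothing else in the file references; with fix=True it also rewrites the file. A hedged sketch of calling it directly, on the side of the diff that carries it (the file path is illustrative):

# Hedged sketch: invoke the import check helper on a specific file list.
import sys
from spack.cmd.style import _run_import_check

exit_code = _run_import_check(
    ["lib/spack/spack/cmd/location.py"],  # illustrative path, resolved from the current directory
    fix=False,            # report only; fix=True inserts/removes imports in place
    root_relative=False,  # print paths relative to the current working directory
    out=sys.stdout,
)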

Some files were not shown because too many files have changed in this diff.