Compare commits


3 Commits

Author        SHA1        Message                   Date
Todd Gamblin  a8a776b5b7  WIP                       2023-10-26 06:10:04 +02:00
Todd Gamblin  3b859363cb  add target_names method   2023-10-26 06:10:04 +02:00
Todd Gamblin  e3f3e3943c  WIP                       2023-10-26 06:10:04 +02:00
1007 changed files with 6932 additions and 25027 deletions


@@ -23,7 +23,7 @@ jobs:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages


@@ -159,9 +159,6 @@ jobs:
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
with:
python-version: "3.12"
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh


@@ -38,11 +38,12 @@ jobs:
# Meaning of the various items in the matrix list
# 0: Container name (e.g. ubuntu-bionic)
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
# 2: Base image (e.g. ubuntu:18.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
@@ -57,20 +58,18 @@ jobs:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: docker/metadata-action@31cebacef4805868f9ce9a0cb03ee36c32df2ac4
id: docker_meta
with:
images: |
ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
tags: |
type=schedule,pattern=nightly
type=schedule,pattern=develop
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=ref,event=branch
type=ref,event=pr
- name: Set Container Tag Normal (Nightly)
run: |
container="${{ matrix.dockerfile[0] }}:latest"
echo "container=${container}" >> $GITHUB_ENV
echo "versioned=${container}" >> $GITHUB_ENV
# On a new release create a container with the same tag as the release.
- name: Set Container Tag on Release
if: github.event_name == 'release'
run: |
versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
echo "versioned=${versioned}" >> $GITHUB_ENV
- name: Generate the Dockerfile
env:
@@ -93,13 +92,13 @@ jobs:
path: dockerfiles
- name: Set up QEMU
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1
- name: Log in to GitHub Container Registry
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -107,18 +106,21 @@ jobs:
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
tags: |
spack/${{ env.container }}
spack/${{ env.versioned }}
ghcr.io/spack/${{ env.container }}
ghcr.io/spack/${{ env.versioned }}


@@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages


@@ -1,7 +1,7 @@
black==23.11.0
black==23.9.1
clingo==5.6.2
flake8==6.1.0
isort==5.12.0
mypy==1.7.1
mypy==1.6.1
types-six==1.16.21.9
vermin==1.6.0
vermin==1.5.2


@@ -54,7 +54,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -101,7 +101,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -159,7 +159,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -194,7 +194,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
@@ -215,7 +215,7 @@ jobs:
$(which spack) bootstrap disable spack-install
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
$(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,macos


@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: '3.11'
cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: '3.11'
cache: 'pip'


@@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
@@ -42,7 +42,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
@@ -66,7 +66,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages


@@ -1,320 +1,3 @@
# v0.21.0 (2023-11-11)
`v0.21.0` is a major feature release.
## Features in this release
1. **Better error messages with condition chaining**
In v0.18, we added better error messages that could tell you what problem happened,
but they couldn't tell you *why* it happened. `0.21` adds *condition chaining* to the
solver, and Spack can now trace back through the conditions that led to an error and
build a tree of potential causes and where they came from. For example:
```console
$ spack solve hdf5 ^cmake@3.0.1
==> Error: concretization failed for the following reasons:
1. Cannot satisfy 'cmake@3.0.1'
2. Cannot satisfy 'cmake@3.0.1'
required because hdf5 ^cmake@3.0.1 requested from CLI
3. Cannot satisfy 'cmake@3.18:' and 'cmake@3.0.1
required because hdf5 ^cmake@3.0.1 requested from CLI
required because hdf5 depends on cmake@3.18: when @1.13:
required because hdf5 ^cmake@3.0.1 requested from CLI
4. Cannot satisfy 'cmake@3.12:' and 'cmake@3.0.1
required because hdf5 depends on cmake@3.12:
required because hdf5 ^cmake@3.0.1 requested from CLI
required because hdf5 ^cmake@3.0.1 requested from CLI
```
More details in #40173.
2. **OCI build caches**
You can now use an arbitrary [OCI](https://opencontainers.org) registry as a build
cache:
```console
$ spack mirror add my_registry oci://user/image # Dockerhub
$ spack mirror add my_registry oci://ghcr.io/haampie/spack-test # GHCR
$ spack mirror set --push --oci-username ... --oci-password ... my_registry # set login creds
$ spack buildcache push my_registry [specs...]
```
And you can optionally add a base image to get *runnable* images:
```console
$ spack buildcache push --base-image ubuntu:23.04 my_registry python
Pushed ... as [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack
$ docker run --rm -it [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack
```
This creates a container image from the Spack installations on the host system,
without the need to run `spack install` from a `Dockerfile` or `sif` file. It also
addresses the inconvenience of losing binaries of dependencies when `RUN spack
install` fails inside `docker build`.
Further, the container image layers and build cache tarballs are the same files. This
means that `spack install` and `docker pull` use the exact same underlying binaries.
If you previously used `spack install` inside of `docker build`, this feature helps
you save storage by a factor of two.
More details in #38358.
3. **Multiple versions of build dependencies**
Increasingly, complex package builds require multiple versions of some build
dependencies. For example, Python packages frequently require very specific versions
of `setuptools`, `cython`, and sometimes different physics packages require different
versions of Python to build. The concretizer enforced that every solve was *unified*,
i.e., that there only be one version of every package. The concretizer now supports
"duplicate" nodes for *build dependencies*, but enforces unification through
transitive link and run dependencies. This will allow it to better resolve complex
dependency graphs in ecosystems like Python, and it also gets us very close to
modeling compilers as proper dependencies.
This change required a major overhaul of the concretizer, as well as a number of
performance optimizations. See #38447, #39621.
4. **Cherry-picking virtual dependencies**
You can now select only a subset of virtual dependencies from a spec that may provide
more. For example, if you want `mpich` to be your `mpi` provider, you can be explicit
by writing:
```
hdf5 ^[virtuals=mpi] mpich
```
Or, if you want to use, e.g., `intel-parallel-studio` for `blas` along with an external
`lapack` like `openblas`, you could write:
```
strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas
```
The `virtuals=mpi` is an edge attribute, and dependency edges in Spack graphs now
track which virtuals they satisfied. More details in #17229 and #35322.
Note for packaging: in Spack 0.21 `spec.satisfies("^virtual")` is true if and only if
the package specifies `depends_on("virtual")`. This is different from Spack 0.20,
where depending on a provider implied depending on the virtual provided. See #41002
for an example where `^mkl` was being used to test for several `mkl` providers in a
package that did not depend on `mkl`.
5. **License directive**
Spack packages can now have license metadata, with the new `license()` directive:
```python
license("Apache-2.0")
```
Licenses use [SPDX identifiers](https://spdx.org/licenses), and you can use SPDX
expressions to combine them:
```python
license("Apache-2.0 OR MIT")
```
Like other directives in Spack, it's conditional, so you can handle complex cases like
Spack itself:
```python
license("LGPL-2.1", when="@:0.11")
license("Apache-2.0 OR MIT", when="@0.12:")
```
More details in #39346, #40598.
6. **`spack deconcretize` command**
We are getting close to having a `spack update` command for environments, but we're
not quite there yet. This is the next best thing. `spack deconcretize` gives you
control over what you want to update in an already concrete environment. If you have
an environment built with, say, `meson`, and you want to update your `meson` version,
you can run:
```console
spack deconcretize meson
```
and have everything that depends on `meson` rebuilt the next time you run `spack
concretize`. In a future Spack version, we'll handle all of this in a single command,
but for now you can use this to drop bits of your lockfile and resolve your
dependencies again. More in #38803.
7. **UI Improvements**
The venerable `spack info` command was looking shabby compared to the rest of Spack's
UI, so we reworked it to have a bit more flair. `spack info` now makes much better
use of terminal space and shows variants, their values, and their descriptions much
more clearly. Conditional variants are grouped separately so you can more easily
understand how packages are structured. More in #40998.
`spack checksum` now allows you to filter versions from your editor, or by version
range. It also notifies you about potential download URL changes. See #40403.
8. **Environments can include definitions**
Spack did not previously support using `include:` with the
[definitions](https://spack.readthedocs.io/en/latest/environments.html#spec-list-references)
section of an environment, but now it does. You can use this to curate lists of specs
and more easily reuse them across environments. See #33960.
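For example, a minimal sketch of how an included definitions file can be reused
(the file and list names here are hypothetical):
```yaml
# definitions.yaml -- a curated list of specs
definitions:
- my_packages: [zlib, hdf5]
```
```yaml
# spack.yaml -- an environment that includes and references the list
spack:
  include:
  - definitions.yaml
  specs:
  - $my_packages
```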
9. **Aliases**
You can now add aliases to Spack commands in `config.yaml`, e.g. this might enshrine
your favorite args to `spack find` as `spack f`:
```yaml
config:
aliases:
f: find -lv
```
See #17229.
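With that alias in place, the shorthand expands to the full command (a hypothetical
invocation):
```console
$ spack f hdf5    # equivalent to: spack find -lv hdf5
```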
10. **Improved autoloading of modules**
Spack 0.20 was the first release to enable autoloading of direct dependencies in
module files.
The downside of this was that `module avail` and `module load` tab completion would
show users too many modules to choose from, and many users disabled generating
modules for dependencies through `exclude_implicits: true`. Further, it was
necessary to keep hashes in module names to avoid file name clashes.
In this release, you can start using `hide_implicits: true` instead, which exposes
only explicitly installed packages to the user, while still autoloading
dependencies. On top of that, you can safely use `hash_length: 0`, as this config
now only applies to the modules exposed to the user -- you don't have to worry about
file name clashes for hidden dependencies.
Note: for `tcl` this feature requires Modules 4.7 or higher.
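A sketch of the relevant configuration, assuming the standard `modules.yaml` layout:
```yaml
modules:
  default:
    tcl:
      hide_implicits: true  # expose only explicitly installed packages
      hash_length: 0        # now applies only to user-visible module names
```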
11. **Updated container labeling**
Nightly Docker images from the `develop` branch will now be tagged as `:develop` and
`:nightly`. The `:latest` tag is no longer associated with `:develop`, but with the
latest stable release. Releases will be tagged with `:{major}`, `:{major}.{minor}`
and `:{major}.{minor}.{patch}`. `ubuntu:18.04` has also been removed from the list of
generated Docker images, as it is no longer supported. See #40593.
## Other new commands and directives
* `spack env activate` without arguments now loads a `default` environment that you do
not have to create (#40756).
* `spack find -H` / `--hashes`: a new shortcut for piping `spack find` output to
other commands (#38663)
* Add `spack checksum --verify`, fix `--add` (#38458)
* New `default_args` context manager factors out common args for directives (#39964); see the sketch after this list
* `spack compiler find --[no]-mixed-toolchain` lets you easily mix `clang` and
`gfortran` on Linux (#40902)
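As an illustration of the `default_args` context manager above, a minimal sketch
(the package and its dependencies are hypothetical):
```python
class MyPackage(Package):
    # Factor the shared type="build" argument out of several directives:
    with default_args(type="build"):
        depends_on("cmake")
        depends_on("ninja")
```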
## Performance improvements
* `spack external find` execution is now much faster (#39843)
* `spack location -i` now much faster on success (#40898)
* Drop redundant rpaths post install (#38976)
* ASP-based solver: avoid cycles in clingo using hidden directive (#40720)
* Fix multiple quadratic complexity issues in environments (#38771)
## Other new features of note
* archspec: update to v0.2.2, support for Sapphire Rapids, Power10, Neoverse V2 (#40917)
* Propagate variants across nodes that don't have that variant (#38512)
* Implement fish completion (#29549)
* Can now distinguish between source/binary mirror; don't ping mirror.spack.io as much (#34523)
* Improve status reporting on install (add [n/total] display) (#37903)
## Windows
This release has the best Windows support of any Spack release yet, with numerous
improvements and much larger swaths of tests passing:
* MSVC and SDK improvements (#37711, #37930, #38500, #39823, #39180)
* Windows external finding: update default paths; treat .bat as executable on Windows (#39850)
* Windows decompression: fix removal of intermediate file (#38958)
* Windows: executable/path handling (#37762)
* Windows build systems: use ninja and enable tests (#33589)
* Windows testing (#36970, #36972, #36973, #36840, #36977, #36792, #36834, #34696, #36971)
* Windows PowerShell support (#39118, #37951)
* Windows symlinking and libraries (#39933, #38599, #34701, #38578, #34701)
## Notable refactors
* User-specified flags take precedence over others in Spack compiler wrappers (#37376)
* Improve setup of build, run, and test environments (#35737, #40916)
* `make` is no longer a required system dependency of Spack (#40380)
* Support Python 3.12 (#40404, #40155, #40153)
* docs: Replace package list with packages.spack.io (#40251)
* Drop Python 2 constructs in Spack (#38720, #38718, #38703)
## Binary cache and stack updates
* e4s arm stack: duplicate and target neoverse v1 (#40369)
* Add macOS ML CI stacks (#36586)
* E4S Cray CI Stack (#37837)
* e4s cray: expand spec list (#38947)
* e4s cray sles ci: expand spec list (#39081)
## Removals, deprecations, and syntax changes
* ASP: targets, compilers and providers soft-preferences are only global (#31261)
* Parser: fix ambiguity with whitespace in version ranges (#40344)
* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
* Remove deprecated "extra_instructions" option for containers (#40365)
* Stand-alone test feature deprecation postponed to v0.22 (#40600)
* buildcache push: make `--allow-root` the default and deprecate the option (#38878)
## Notable Bugfixes
* Bugfix: propagation of multivalued variants (#39833)
* Allow `/` in git versions (#39398)
* Fetch & patch: actually acquire stage lock, and many more issues (#38903)
* Environment/depfile: better escaping of targets with Git versions (#37560)
* Prevent "spack external find" to error out on wrong permissions (#38755)
* lmod: allow core compiler to be specified with a version range (#37789)
## Spack community stats
* 7,469 total packages, 303 new since `v0.20.0`
* 150 new Python packages
* 34 new R packages
* 353 people contributed to this release
* 336 committers to packages
* 65 committers to core
# v0.20.3 (2023-10-31)
## Bugfixes
- Fix a bug where `spack mirror set-url` would drop configured connection info (reverts #34210)
- Fix a minor issue with package hash computation for Python 3.12 (#40328)
# v0.20.2 (2023-10-03)
## Features in this release
Spack now supports Python 3.12 (#40155)
## Bugfixes
- Improve escaping in Tcl module files (#38375)
- Make repo cache work on repositories with zero mtime (#39214)
- Ignore errors for newer, incompatible buildcache version (#40279)
- Print an error when git is required, but missing (#40254)
- Ensure missing build dependencies get installed when using `spack install --overwrite` (#40252)
- Fix an issue where Spack freezes when the build process unexpectedly exits (#39015)
- Fix a bug where installation failures cause an unrelated `NameError` to be thrown (#39017)
- Fix an issue where Spack package versions would be incorrectly derived from git tags (#39414)
- Fix a bug triggered when file locking fails internally (#39188)
- Prevent "spack external find" to error out when a directory cannot be accessed (#38755)
- Fix multiple performance regressions in environments (#38771)
- Add more ignored modules to `pyproject.toml` for `mypy` (#38769)
# v0.20.1 (2023-07-10)
## Spack Bugfixes


@@ -66,11 +66,10 @@ Resources:
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
for Q&A and discussions. Note the pinned discussions for announcements.
not just for discussions, also Q&A.
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
only for announcements. Please use other venues for discussions.
Contributing
------------------------


@@ -229,11 +229,3 @@ config:
flags:
# Whether to keep -Werror flags active in package builds.
keep_werror: 'none'
# A mapping of aliases that can be used to define new commands. For instance,
# `sp: spec -I` will define a new command `sp` that will execute `spec` with
# the `-I` argument. Aliases cannot override existing commands.
aliases:
concretise: concretize
containerise: containerize
rm: remove


@@ -50,4 +50,4 @@ packages:
# Apple bundles libuuid in libsystem_c version 1353.100.2,
# although the version number used here isn't critical
- spec: apple-libuuid@1353.100.2
prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
prefix: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk


@@ -1526,30 +1526,6 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Explicit binding of virtual dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
There are packages that provide more than just one virtual dependency. When interacting with them, users
might want to utilize just a subset of what they could provide, and use other providers for virtuals they
need.
It is possible to be more explicit and tell Spack which dependency should provide which virtual, using a
special syntax:
.. code-block:: console
$ spack spec strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas
Concretizing the spec above produces the following DAG:
.. figure:: images/strumpack_virtuals.svg
:scale: 60 %
:align: center
where ``intel-parallel-studio`` *could* provide ``mpi``, ``lapack``, and ``blas`` but is used only for the former. The ``lapack``
and ``blas`` dependencies are satisfied by ``openblas``.
^^^^^^^^^^^^^^^^^^^^^^^^
Specifying Specs by Hash
^^^^^^^^^^^^^^^^^^^^^^^^


@@ -153,147 +153,18 @@ keyring, and trusting all downloaded keys.
List of popular build caches
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_'
* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_
-------------------
Build cache signing
-------------------
By default, Spack will add a cryptographic signature to each package pushed to
a build cache, and verifies the signature when installing from a build cache.
Keys for signing can be managed with the :ref:`spack gpg <cmd-spack-gpg>` command,
as well as ``spack buildcache keys`` as mentioned above.
You can disable signing when pushing with ``spack buildcache push --unsigned``,
and disable verification when installing from any build cache with
``spack install --no-check-signature``.
Alternatively, signing and verification can be enabled or disabled on a per build cache
basis:
.. code-block:: console
$ spack mirror add --signed <name> <url> # enable signing and verification
$ spack mirror add --unsigned <name> <url> # disable signing and verification
$ spack mirror set --signed <name> # enable signing and verification for an existing mirror
$ spack mirror set --unsigned <name> # disable signing and verification for an existing mirror
Or you can directly edit the ``mirrors.yaml`` configuration file:
.. code-block:: yaml
mirrors:
<name>:
url: <url>
signed: false # disable signing and verification
See also :ref:`mirrors`.
----------
Relocation
----------
When using buildcaches across different machines, it is likely that the install
root will be different from the one used to build the binaries.
To address this issue, Spack automatically relocates all paths encoded in binaries
and scripts to their new location upon install.
Note that there are some cases where this is not possible: if binaries are built in
a relatively short path, and then installed to a longer path, there may not be enough
space in the binary to encode the new path. In this case, Spack will fail to install
the package from the build cache, and a source build is required.
To reduce the likelihood of this happening, it is highly recommended to add padding to
the install root during the build, as specified in the :ref:`config <config-yaml>`
section of the configuration:
.. code-block:: yaml
config:
install_tree:
root: /opt/spack
padded_length: 128
.. _binary_caches_oci:
-----------------------------------------
OCI / Docker V2 registries as build cache
-----------------------------------------
Spack can also use OCI or Docker V2 registries such as Dockerhub, Quay.io,
Github Packages, GitLab Container Registry, JFrog Artifactory, and others
as build caches. This is a convenient way to share binaries using public
infrastructure, or to cache Spack built binaries in Github Actions and
GitLab CI.
To get started, configure an OCI mirror using ``oci://`` as the scheme,
and optionally specify a username and password (or personal access token):
.. code-block:: console
$ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
Spack follows the naming conventions of Docker, with Dockerhub as the default
registry. To use Dockerhub, you can omit the registry domain:
.. code-block:: console
$ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
From here, you can use the mirror as any other build cache:
.. code-block:: console
$ spack buildcache push my_registry <specs...> # push to the registry
$ spack install <specs...> # install from the registry
A unique feature of buildcaches on top of OCI registries is that it's incredibly
easy to get a runnable container image with the binaries installed. This
is a great way to make applications available to users without requiring them to
install Spack -- all you need is Docker, Podman or any other OCI-compatible container
runtime.
To produce container images, all you need to do is add the ``--base-image`` flag
when pushing to the build cache:
.. code-block:: console
$ spack buildcache push --base-image ubuntu:20.04 my_registry ninja
Pushed to example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
$ docker run -it example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
root@e4c2b6f6b3f4:/# ninja --version
1.11.1
If ``--base-image`` is not specified, distroless images are produced. In practice,
you won't be able to run these as containers, since they don't come with libc and
other system dependencies. However, they are still compatible with tools like
``skopeo``, ``podman``, and ``docker`` for pulling and pushing.
.. note::
The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
``max depth exceeded`` error may be produced when pulling the image. There
are `alternative drivers <https://docs.docker.com/storage/storagedriver/>`_.
------------------------------------
Spack build cache for GitHub Actions
------------------------------------
To significantly speed up Spack in GitHub Actions, binaries can be cached in
GitHub Packages. This service is an OCI registry that can be linked to a GitHub
repository.
Spack offers a public build cache for GitHub Actions with a set of common packages,
which lets you get started quickly. See the following resources for more information; a minimal workflow sketch follows the list:
* `spack/setup-spack <https://github.com/spack/setup-spack>`_ for setting up Spack in GitHub
Actions
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_ for
more details on the public build cache
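As a starting point, a minimal workflow sketch (the action tags and the exact steps
are assumptions; consult the ``setup-spack`` README for authoritative usage):

.. code-block:: yaml

   steps:
   - uses: actions/checkout@v4
   - uses: spack/setup-spack@v2  # tag is an assumption; check the action's README
   - run: spack install --no-check-signature zlib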
Initial build and later installation do not necessarily happen at the same
location. Spack provides a relocation capability and corrects for RPATHs and
non-relocatable scripts. However, many packages compile paths into binary
artifacts directly. In such cases, the build instructions of the package would
need to be adjusted for better relocatability.
.. _cmd-spack-buildcache:


@@ -37,11 +37,7 @@ to enable reuse for a single installation, and you can use:
spack install --fresh <spec>
to do a fresh install if ``reuse`` is enabled by default.
``reuse: dependencies`` is the default.
.. seealso::
FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`
``reuse: true`` is the default.
------------------------------------------
Selection of the target microarchitectures
@@ -103,3 +99,551 @@ while `py-numpy` still needs an older version:
Up to Spack v0.20, ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21, the
default behavior is ``duplicates:strategy:minimal``.
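A sketch of setting the strategy explicitly, assuming the option names above:

.. code-block:: yaml

   concretizer:
     duplicates:
       # "none" unifies build dependencies too; "minimal" allows duplicate
       # nodes among build-only dependencies.
       strategy: minimal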
.. _build-settings:
================================
Package Settings (packages.yaml)
================================
Spack allows you to customize how your software is built through the
``packages.yaml`` file. Using it, you can make Spack prefer particular
implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK),
or you can make it prefer to build with particular compilers. You can
also tell Spack to use *external* software installations already
present on your system.
At a high level, the ``packages.yaml`` file is structured like this:
.. code-block:: yaml
packages:
package1:
# settings for package1
package2:
# settings for package2
# ...
all:
# settings that apply to all packages.
So you can either set build preferences specifically for *one* package,
or you can specify that certain settings should apply to *all* packages.
The types of settings you can customize are described in detail below.
Spack's build defaults are in the default
``etc/spack/defaults/packages.yaml`` file. You can override them in
``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more
details on how this works, see :ref:`configuration-scopes`.
.. _sec-external-packages:
-----------------
External Packages
-----------------
Spack can be configured to use externally-installed
packages rather than building its own packages. This may be desirable
if machines ship with system packages, such as a customized MPI
that should be used instead of Spack building its own MPI.
External packages are configured through the ``packages.yaml`` file.
Here's an example of an external configuration:
.. code-block:: yaml
packages:
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel
This example lists three installations of OpenMPI, one built with GCC,
one built with GCC and debug information, and another built with Intel.
If Spack is asked to build a package that uses one of these MPIs as a
dependency, it will use the pre-installed OpenMPI in
the given directory. Note that the specified path is the top-level
install prefix, not the ``bin`` subdirectory.
``packages.yaml`` can also be used to specify modules to load instead
of the installation prefixes. The following example says that module
``CMake/3.7.2`` provides cmake version 3.7.2.
.. code-block:: yaml
cmake:
externals:
- spec: cmake@3.7.2
modules:
- CMake/3.7.2
Each ``packages.yaml`` begins with a ``packages:`` attribute, followed
by a list of package names. To specify externals, add an ``externals:``
attribute under the package name, which lists externals.
Each external should specify a ``spec:`` string that should be as
well-defined as reasonably possible. If a
package lacks a spec component, such as missing a compiler or
package version, then Spack will guess the missing component based
on its most-favored packages, and it may guess incorrectly.
Each package version and compiler listed in an external should
have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prevent packages from being built from sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding an external spec in ``packages.yaml`` allows Spack to use an external location,
but it does not prevent Spack from building packages from sources. In the above example,
Spack might choose for many valid reasons to start building and linking with the
latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions.
To prevent this, the ``packages.yaml`` configuration also allows packages
to be flagged as non-buildable. The previous example could be modified to
be:
.. code-block:: yaml
packages:
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel
buildable: False
The addition of the ``buildable`` flag tells Spack that it should never build
its own version of OpenMPI from sources, and it will instead always rely on a pre-built
OpenMPI.
.. note::
If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag)
pre-built specs include specs already available from a local store, an upstream store, a registered
buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only
external specs in ``packages.yaml`` are included in the list of pre-built specs.
If an external module is specified as not buildable, then Spack will load the
external module into the build environment which can be used for linking.
The ``buildable`` flag does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Non-buildable virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Virtual packages in Spack can also be specified as not buildable, and
external implementations can be provided. In the example above,
OpenMPI is configured as not buildable, but Spack will often prefer
other MPI implementations over the externally available OpenMPI. Spack
could be configured with every MPI provider marked non-buildable individually,
but it is more convenient to mark the virtual package instead:
.. code-block:: yaml
packages:
mpi:
buildable: False
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel
Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.
In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers
(available via stores or buildcaches) are not wanted, Spack can be configured to require
specs matching only the available externals:
.. code-block:: yaml
packages:
mpi:
buildable: False
require:
- one_of: [
"openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64",
"openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug",
"openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
]
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel
This configuration prevents any spec using MPI and originating from stores or buildcaches from being reused,
unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see
:ref:`package-requirements`.
.. _cmd-spack-external-find:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Automatically Find External Packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
You can run the :ref:`spack external find <spack-external-find>` command
to search for system-provided packages and add them to ``packages.yaml``.
After running this command your ``packages.yaml`` may include new entries:
.. code-block:: yaml
packages:
cmake:
externals:
- spec: cmake@3.17.2
prefix: /usr
Generally this is useful for detecting a small set of commonly-used packages;
for now it is mostly limited to finding build-only dependencies.
Specific limitations include:
* Packages are not discoverable by default: For a package to be
discoverable with ``spack external find``, it needs to add special
logic. See :ref:`here <make-package-findable>` for more details.
* The logic does not search through module files; it can only detect
packages with executables defined in ``PATH``. You can help Spack locate
externals which use module files by loading any associated modules for
packages that you want Spack to know about before running
``spack external find`` (see the example after this list).
* Spack does not overwrite existing entries in the package configuration:
If there is an external defined for a spec at any configuration scope,
then Spack will not add a new external entry (``spack config blame packages``
can help locate all external entries).
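For instance, to help Spack detect a module-provided CMake (the module name here is
hypothetical):

.. code-block:: console

   $ module load CMake/3.23.1
   $ spack external find cmake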
.. _package-requirements:
--------------------
Package Requirements
--------------------
Spack can be configured to always use certain compilers, package
versions, and variants during concretization through package
requirements.
Package requirements are useful when you find yourself repeatedly
specifying the same constraints on the command line, and wish that
Spack respects these constraints whether you mention them explicitly
or not. Another use case is specifying constraints that should apply
to all root specs in an environment, without having to repeat the
constraint everywhere.
Apart from that, requirements config is more flexible than constraints
on the command line, because it can specify constraints on packages
*when they occur* as a dependency. In contrast, on the command line it
is not possible to specify constraints on dependencies while also keeping
those dependencies optional.
^^^^^^^^^^^^^^^^^^^
Requirements syntax
^^^^^^^^^^^^^^^^^^^
The package requirements configuration is specified in ``packages.yaml``,
keyed by package name and expressed using the Spec syntax. In the simplest
case you can specify attributes that you always want the package to have
by providing a single spec string to ``require``:
.. code-block:: yaml
packages:
libfabric:
require: "@1.13.2"
In the above example, ``libfabric`` will always build with version 1.13.2. If you
need to compose multiple configuration scopes, ``require`` accepts a list of
strings:
.. code-block:: yaml
packages:
libfabric:
require:
- "@1.13.2"
- "%gcc"
In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
as a compiler.
For more complex use cases, ``require`` also accepts a list of objects. These objects
must have either an ``any_of`` or a ``one_of`` field, containing a list of spec strings,
and they can optionally have a ``when`` and a ``message`` attribute:
.. code-block:: yaml
packages:
openmpi:
require:
- any_of: ["@4.1.5", "%gcc"]
message: "in this example only 4.1.5 can build with other compilers"
``any_of`` is a list of specs. One of those specs must be satisfied
and it is also allowed for the concretized spec to match more than one.
In the above example, that means you could build ``openmpi@4.1.5%gcc``,
``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
not ``openmpi@3.9%clang``.
If a custom message is provided, and the requirement is not satisfiable,
Spack will print the custom error message:
.. code-block:: console
$ spack spec openmpi@3.9%clang
==> Error: in this example only 4.1.5 can build with other compilers
We could express a similar requirement using the ``when`` attribute:
.. code-block:: yaml
packages:
openmpi:
require:
- any_of: ["%gcc"]
when: "@:4.1.4"
message: "in this example only 4.1.5 can build with other compilers"
In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
constraint:
.. code-block:: yaml
packages:
openmpi:
require:
- spec: "%gcc"
when: "@:4.1.4"
message: "in this example only 4.1.5 can build with other compilers"
This code snippet and the one before it are semantically equivalent.
Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final
concretized spec must match one and only one of them:
.. code-block:: yaml
packages:
mpich:
require:
- one_of: ["+cuda", "+rocm"]
In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.
.. note::
For ``any_of`` and ``one_of``, the order of specs indicates a
preference: items that appear earlier in the list are preferred
(note that these preferences can be ignored in favor of others).
.. note::
When using a conditional requirement, Spack is allowed to actively avoid the triggering
condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
the optimization criteria. To check the current optimization criteria and their
priorities you can run ``spack solve zlib``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
You can also set default requirements for all packages under ``all``
like this:
.. code-block:: yaml
packages:
all:
require: '%clang'
which means every spec will be required to use ``clang`` as a compiler.
Note that in this case ``all`` represents a *default set of requirements* -
if there are specific package requirements, then the default requirements
under ``all`` are disregarded. For example, with a configuration like this:
.. code-block:: yaml
packages:
all:
require: '%clang'
cmake:
require: '%gcc'
Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
dependencies) to use ``clang``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting requirements on virtual specs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A requirement on a virtual spec applies whenever that virtual is present in the DAG.
This can be useful for fixing which virtual provider you want to use:
.. code-block:: yaml
packages:
mpi:
require: 'mvapich2 %gcc'
With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.
Requirements on the virtual spec and on the specific provider are both applied, if
present. For instance with a configuration like:
.. code-block:: yaml
packages:
mpi:
require: 'mvapich2 %gcc'
mvapich2:
require: '~cuda'
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
.. _package-preferences:
-------------------
Package Preferences
-------------------
In some cases package requirements can be too strong, and package
preferences are the better option. Package preferences do not impose
constraints on packages for particular versions or variant values;
rather, they only set defaults -- the concretizer is free to change
them if it must due to other constraints. Also note that package
preferences are of lower priority than reuse of already installed
packages.
Here's an example ``packages.yaml`` file that sets preferred packages:
.. code-block:: yaml
packages:
opencv:
compiler: [gcc@4.9]
variants: +debug
gperftools:
version: [2.2, 2.4, 2.3]
all:
compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
target: [sandybridge]
providers:
mpi: [mvapich2, mpich, openmpi]
At a high level, this example is specifying how packages are preferably
concretized. The opencv package should prefer using GCC 4.9 and
be built with debug options. The gperftools package should prefer version
2.2 over 2.4. Every package on the system should prefer mvapich2 for
its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
These options are used to fill in implicit defaults. Any of them can be overwritten
on the command line if explicitly requested.
Package preferences accept the following keys or components under
the specific package (or ``all``) section: ``compiler``, ``variants``,
``version``, ``providers``, and ``target``. Each component has an
ordered list of spec ``constraints``, with earlier entries in the
list being preferred over later entries.
Sometimes a package installation may have constraints that forbid
the first concretization rule, in which case Spack will use the first
legal concretization rule. Going back to the example, if a user
requests gperftools 2.3 or later, then Spack will install version 2.4
as the 2.4 version of gperftools is preferred over 2.3.
An explicit concretization rule in the preferred section will always
take preference over unlisted concretizations. In the above example,
xlc isn't listed in the compiler list. Every listed compiler from
gcc to pgi will thus be preferred over the xlc compiler.
The syntax for the ``provider`` section differs slightly from other
concretization rules. A provider lists a value that packages may
``depends_on`` (e.g., MPI) and a list of rules for fulfilling that
dependency.
.. _package_permissions:
-------------------
Package Permissions
-------------------
Spack can be configured to assign permissions to the files installed
by a package.
In the ``packages.yaml`` file under ``permissions``, the attributes
``read``, ``write``, and ``group`` control the package
permissions. These attributes can be set per-package, or for all
packages under ``all``. If permissions are set under ``all`` and for a
specific package, the package-specific settings take precedence.
The ``read`` and ``write`` attributes take one of ``user``, ``group``,
and ``world``.
.. code-block:: yaml
packages:
all:
permissions:
write: group
group: spack
my_app:
permissions:
read: group
group: my_team
The permissions settings describe the broadest level of access to
installations of the specified packages. The execute permissions of
the file are set to the same level as read permissions for those files
that are executable. The default setting for ``read`` is ``world``,
and for ``write`` is ``user``. In the example above, installations of
``my_app`` will be installed with user and group permissions but no
world permissions, and owned by the group ``my_team``. All other
packages will be installed with user and group write privileges, and
world read privileges. Those packages will be owned by the group
``spack``.
The ``group`` attribute assigns a Unix-style group to a package. All
files installed by the package will be owned by the assigned group,
and the sticky group bit will be set on the install prefix and all
directories inside the install prefix. This will ensure that even
manually placed files within the install prefix are owned by the
assigned group. If no group is assigned, Spack will allow the OS
default behavior to go as expected.
----------------------------
Assigning Package Attributes
----------------------------
You can assign class-level attributes in the configuration:
.. code-block:: yaml
packages:
mpileaks:
# Override existing attributes
url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
# ... or add new ones
x: 1
Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
attributes may be any value parseable by YAML.
These can only be applied to specific packages, not "all" or
virtual packages.


@@ -82,7 +82,7 @@ class already contains:
.. code-block:: python
depends_on("cmake", type="build")
depends_on('cmake', type='build')
If you need to specify a particular version requirement, you can
@@ -90,7 +90,7 @@ override this in your package:
.. code-block:: python
depends_on("cmake@2.8.12:", type="build")
depends_on('cmake@2.8.12:', type='build')
^^^^^^^^^^^^^^^^^^^
@@ -137,10 +137,10 @@ and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
def cmake_args(self):
args = [
"-DWHATEVER:STRING=somevalue",
self.define("ENABLE_BROKEN_FEATURE", False),
self.define_from_variant("DETECT_HDF5", "hdf5"),
self.define_from_variant("THREADS"), # True if +threads
'-DWHATEVER:STRING=somevalue',
self.define('ENABLE_BROKEN_FEATURE', False),
self.define_from_variant('DETECT_HDF5', 'hdf5'),
self.define_from_variant('THREADS'), # True if +threads
]
return args
@@ -151,10 +151,10 @@ and CMake simply ignores the empty command line argument. For example the follow
.. code-block:: python
variant("example", default=True, when="@2.0:")
variant('example', default=True, when='@2.0:')
def cmake_args(self):
return [self.define_from_variant("EXAMPLE", "example")]
return [self.define_from_variant('EXAMPLE', 'example')]
will generate ``'cmake' '-DEXAMPLE=ON' ...`` when `@2.0: +example` is met, but will
result in ``'cmake' '' ...`` when the spec version is below ``2.0``.
@@ -193,9 +193,9 @@ a variant to control this:
.. code-block:: python
variant("build_type", default="RelWithDebInfo",
description="CMake build type",
values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"))
variant('build_type', default='RelWithDebInfo',
description='CMake build type',
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
@@ -205,9 +205,9 @@ package overrides the default variant with:
.. code-block:: python
variant("build_type", default="DebugRelease",
description="The build type to build",
values=("Debug", "Release", "DebugRelease"))
variant('build_type', default='DebugRelease',
description='The build type to build',
values=('Debug', 'Release', 'DebugRelease'))
For more information on ``CMAKE_BUILD_TYPE``, see:
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:
.. code-block:: python
generator = "Ninja"
generator = 'Ninja'
``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
@@ -258,7 +258,7 @@ Ninja generator, make sure to add:
.. code-block:: python
depends_on("ninja", type="build")
depends_on('ninja', type='build')
to the package as well. Aside from that, you shouldn't need to do
anything else. Spack will automatically detect that you are using
@@ -288,7 +288,7 @@ like so:
.. code-block:: python
root_cmakelists_dir = "src"
root_cmakelists_dir = 'src'
Note that this path is relative to the root of the extracted tarball,
@@ -304,7 +304,7 @@ different sub-directory, simply override ``build_directory`` like so:
.. code-block:: python
build_directory = "my-build"
build_directory = 'my-build'
^^^^^^^^^^^^^^^^^^^^^^^^^
Build and install targets
@@ -324,8 +324,8 @@ library or build the documentation, you can add these like so:
.. code-block:: python
build_targets = ["all", "docs"]
install_targets = ["install", "docs"]
build_targets = ['all', 'docs']
install_targets = ['install', 'docs']
^^^^^^^
Testing


@@ -53,24 +53,18 @@ Install the oneAPI compilers::
Add the compilers to your ``compilers.yaml`` so spack can use them::
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
Verify that the compilers are available::
spack compiler list
Note that 2024 and later releases do not include ``icc``. Before 2024,
the package layout was different::
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
The ``intel-oneapi-compilers`` package includes two families of
compilers:
* ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic*
compilers. 2024 and later releases contain ``ifort``, but not
``icc`` and ``icpc``.
compilers.
* ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of
compilers based on LLVM.
@@ -95,8 +89,8 @@ Install the oneAPI compilers::
Add the compilers to your ``compilers.yaml`` so Spack can use them::
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
Verify that the compilers are available::
@@ -152,7 +146,8 @@ Compilers
To use the compilers, add some information about the installation to
``compilers.yaml``. For most users, it is sufficient to do::
spack compiler add /opt/intel/oneapi/compiler/latest/bin
spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin/intel64
spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin
Adapt the paths above if you did not install the tools in the default
location. After adding the compilers, using them is the same
@@ -161,12 +156,6 @@ Another option is to manually add the configuration to
``compilers.yaml`` as described in :ref:`Compiler configuration
<compiler-config>`.
Before 2024, the directory structure was different::
spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin/intel64
spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin
Libraries
---------


@@ -392,7 +392,7 @@ See section
:ref:`Configuration Scopes <configuration-scopes>`
for an explanation about the different files
and section
:ref:`Build customization <packages-config>`
:ref:`Build customization <build-settings>`
for specifics and examples for ``packages.yaml`` files.
.. If your system administrator did not provide modules for pre-installed Intel
@@ -934,9 +934,9 @@ a *virtual* ``mkl`` package is declared in Spack.
.. code-block:: python
# Examples for absolute and conditional dependencies:
depends_on("mkl")
depends_on("mkl", when="+mkl")
depends_on("mkl", when="fftw=mkl")
depends_on('mkl')
depends_on('mkl', when='+mkl')
depends_on('mkl', when='fftw=mkl')
The ``MKLROOT`` environment variable (part of the documented API) will be set
during all stages of client package installation, and is available to both
@@ -972,8 +972,8 @@ a *virtual* ``mkl`` package is declared in Spack.
def configure_args(self):
args = []
...
args.append("--with-blas=%s" % self.spec["blas"].libs.ld_flags)
args.append("--with-lapack=%s" % self.spec["lapack"].libs.ld_flags)
args.append('--with-blas=%s' % self.spec['blas'].libs.ld_flags)
args.append('--with-lapack=%s' % self.spec['lapack'].libs.ld_flags)
...
.. tip::
@@ -989,13 +989,13 @@ a *virtual* ``mkl`` package is declared in Spack.
.. code-block:: python
self.spec["blas"].headers.include_flags
self.spec['blas'].headers.include_flags
and to generate linker options (``-L<dir> -llibname ...``), use the same as above,
.. code-block:: python
self.spec["blas"].libs.ld_flags
self.spec['blas'].libs.ld_flags
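Put together in a hypothetical ``configure_args`` (a sketch only; the
variable names passed to ``configure`` depend on the client package):

.. code-block:: python

   def configure_args(self):
       spec = self.spec
       return [
           "CPPFLAGS=%s" % spec["blas"].headers.include_flags,
           "LIBS=%s" % spec["blas"].libs.ld_flags,
       ]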
See
:ref:`MakefilePackage <makefilepackage>`

View File

@@ -88,7 +88,7 @@ override the ``luarocks_args`` method like so:
.. code-block:: python
def luarocks_args(self):
return ["flag1", "flag2"]
return ['flag1', 'flag2']
One common use of this is to override warnings or flags for newer compilers, as in:
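A sketch of such an override, assuming the flag below is what the package
needs (adapt it to the actual warnings your compiler emits):

.. code-block:: python

   def luarocks_args(self):
       return ["CFLAGS='-Wno-error=implicit-function-declaration'"]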

View File

@@ -48,8 +48,8 @@ class automatically adds the following dependencies:
.. code-block:: python
depends_on("java", type=("build", "run"))
depends_on("maven", type="build")
depends_on('java', type=('build', 'run'))
depends_on('maven', type='build')
In the ``pom.xml`` file, you may see sections like:
@@ -72,8 +72,8 @@ should add:
.. code-block:: python
depends_on("java@7:", type="build")
depends_on("maven@3.5.4:", type="build")
depends_on('java@7:', type='build')
depends_on('maven@3.5.4:', type='build')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -88,9 +88,9 @@ the build phase. For example:
def build_args(self):
return [
"-Pdist,native",
"-Dtar",
"-Dmaven.javadoc.skip=true"
'-Pdist,native',
'-Dtar',
'-Dmaven.javadoc.skip=true'
]

View File

@@ -86,8 +86,8 @@ the ``MesonPackage`` base class already contains:
.. code-block:: python
depends_on("meson", type="build")
depends_on("ninja", type="build")
depends_on('meson', type='build')
depends_on('ninja', type='build')
If you need to specify a particular version requirement, you can
@@ -95,8 +95,8 @@ override this in your package:
.. code-block:: python
depends_on("meson@0.43.0:", type="build")
depends_on("ninja", type="build")
depends_on('meson@0.43.0:', type='build')
depends_on('ninja', type='build')
^^^^^^^^^^^^^^^^^^^
@@ -121,7 +121,7 @@ override the ``meson_args`` method like so:
.. code-block:: python
def meson_args(self):
return ["--warnlevel=3"]
return ['--warnlevel=3']
This method can be used to pass flags as well as variables.
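For example, a hypothetical package could combine a warning level with a
``-D`` build variable (a sketch; the ``tests`` option is illustrative):

.. code-block:: python

   def meson_args(self):
       return ["--warnlevel=3", "-Dtests=false"]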

View File

@@ -118,7 +118,7 @@ so ``PerlPackage`` contains:
.. code-block:: python
extends("perl")
extends('perl')
If your package requires a specific version of Perl, you should
@@ -132,14 +132,14 @@ properly. If your package uses ``Makefile.PL`` to build, add:
.. code-block:: python
depends_on("perl-extutils-makemaker", type="build")
depends_on('perl-extutils-makemaker', type='build')
If your package uses ``Build.PL`` to build, add:
.. code-block:: python
depends_on("perl-module-build", type="build")
depends_on('perl-module-build', type='build')
^^^^^^^^^^^^^^^^^
@@ -165,11 +165,11 @@ arguments to ``Makefile.PL`` or ``Build.PL`` by overriding
.. code-block:: python
def configure_args(self):
expat = self.spec["expat"].prefix
expat = self.spec['expat'].prefix
return [
"EXPATLIBPATH={0}".format(expat.lib),
"EXPATINCPATH={0}".format(expat.include),
'EXPATLIBPATH={0}'.format(expat.lib),
'EXPATINCPATH={0}'.format(expat.include),
]

View File

@@ -83,7 +83,7 @@ base class already contains:
.. code-block:: python
depends_on("qt", type="build")
depends_on('qt', type='build')
If you want to specify a particular version requirement, or need to
@@ -91,7 +91,7 @@ link to the ``qt`` libraries, you can override this in your package:
.. code-block:: python
depends_on("qt@5.6.0:")
depends_on('qt@5.6.0:')
^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to qmake
@@ -103,7 +103,7 @@ override the ``qmake_args`` method like so:
.. code-block:: python
def qmake_args(self):
return ["-recursive"]
return ['-recursive']
This method can be used to pass flags as well as variables.
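For example, a hypothetical package could pass both a flag and a qmake
variable assignment (a sketch only):

.. code-block:: python

   def qmake_args(self):
       return ["-recursive", "CONFIG+=release"]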
@@ -118,7 +118,7 @@ sub-directory by adding the following to the package:
.. code-block:: python
build_directory = "src"
build_directory = 'src'
^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -163,28 +163,28 @@ attributes that can be used to set ``homepage``, ``url``, ``list_url``, and
.. code-block:: python
cran = "caret"
cran = 'caret'
is equivalent to:
.. code-block:: python
homepage = "https://cloud.r-project.org/package=caret"
url = "https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/caret"
homepage = 'https://cloud.r-project.org/package=caret'
url = 'https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz'
list_url = 'https://cloud.r-project.org/src/contrib/Archive/caret'
Likewise, the following ``bioc`` attribute:
.. code-block:: python
bioc = "BiocVersion"
bioc = 'BiocVersion'
is equivalent to:
.. code-block:: python
homepage = "https://bioconductor.org/packages/BiocVersion/"
git = "https://git.bioconductor.org/packages/BiocVersion"
homepage = 'https://bioconductor.org/packages/BiocVersion/'
git = 'https://git.bioconductor.org/packages/BiocVersion'
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -200,7 +200,7 @@ base class contains:
.. code-block:: python
extends("r")
extends('r')
Take a close look at the homepage for ``caret``. If you look at the
@@ -209,7 +209,7 @@ You should add this to your package like so:
.. code-block:: python
depends_on("r@3.2.0:", type=("build", "run"))
depends_on('r@3.2.0:', type=('build', 'run'))
^^^^^^^^^^^^^^
@@ -227,7 +227,7 @@ and list all of their dependencies in the following sections:
* LinkingTo
As far as Spack is concerned, all 3 of these dependency types
correspond to ``type=("build", "run")``, so you don't have to worry
correspond to ``type=('build', 'run')``, so you don't have to worry
about the details. If you are curious what they mean,
https://github.com/spack/spack/issues/2951 has a pretty good summary:
@@ -330,7 +330,7 @@ the dependency:
.. code-block:: python
depends_on("r-lattice@0.20:", type=("build", "run"))
depends_on('r-lattice@0.20:', type=('build', 'run'))
^^^^^^^^^^^^^^^^^^
@@ -361,20 +361,20 @@ like so:
.. code-block:: python
def configure_args(self):
mpi_name = self.spec["mpi"].name
mpi_name = self.spec['mpi'].name
# The type of MPI. Supported values are:
# OPENMPI, LAM, MPICH, MPICH2, or CRAY
if mpi_name == "openmpi":
Rmpi_type = "OPENMPI"
elif mpi_name == "mpich":
Rmpi_type = "MPICH2"
if mpi_name == 'openmpi':
Rmpi_type = 'OPENMPI'
elif mpi_name == 'mpich':
Rmpi_type = 'MPICH2'
else:
raise InstallError("Unsupported MPI type")
raise InstallError('Unsupported MPI type')
return [
"--with-Rmpi-type={0}".format(Rmpi_type),
"--with-mpi={0}".format(spec["mpi"].prefix),
'--with-Rmpi-type={0}'.format(Rmpi_type),
'--with-mpi={0}'.format(spec['mpi'].prefix),
]

View File

@@ -84,8 +84,8 @@ The ``*.gemspec`` file may contain something like:
.. code-block:: ruby
summary = "An implementation of the AsciiDoc text processor and publishing toolchain"
description = "A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats."
summary = 'An implementation of the AsciiDoc text processor and publishing toolchain'
description = 'A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats.'
Either of these can be used for the description of the Spack package.
@@ -98,7 +98,7 @@ The ``*.gemspec`` file may contain something like:
.. code-block:: ruby
homepage = "https://asciidoctor.org"
homepage = 'https://asciidoctor.org'
This should be used as the official homepage of the Spack package.
@@ -112,21 +112,21 @@ the base class contains:
.. code-block:: python
extends("ruby")
extends('ruby')
The ``*.gemspec`` file may contain something like:
.. code-block:: ruby
required_ruby_version = ">= 2.3.0"
required_ruby_version = '>= 2.3.0'
This can be added to the Spack package using:
.. code-block:: python
depends_on("ruby@2.3.0:", type=("build", "run"))
depends_on('ruby@2.3.0:', type=('build', 'run'))
^^^^^^^^^^^^^^^^^

View File

@@ -124,7 +124,7 @@ are wrong, you can provide the names yourself by overriding
.. code-block:: python
import_modules = ["PyQt5"]
import_modules = ['PyQt5']
These tests often catch missing dependencies and non-RPATHed libraries.

View File

@@ -63,8 +63,8 @@ run package-specific unit tests.
.. code-block:: python
def installtest(self):
with working_dir("test"):
pytest = which("py.test")
with working_dir('test'):
pytest = which('py.test')
pytest()
@@ -93,7 +93,7 @@ the following dependency automatically:
.. code-block:: python
depends_on("python@2.5:", type="build")
depends_on('python@2.5:', type='build')
Waf only supports Python 2.5 and up.
@@ -113,7 +113,7 @@ phase, you can use:
args = []
if self.run_tests:
args.append("--test")
args.append('--test')
return args

View File

@@ -204,7 +204,6 @@ def setup(sphinx):
("py:class", "clingo.Control"),
("py:class", "six.moves.urllib.parse.ParseResult"),
("py:class", "TextIO"),
("py:class", "hashlib._Hash"),
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),

View File

@@ -304,17 +304,3 @@ To work properly, this requires your terminal to reset its title after
Spack has finished its work, otherwise Spack's status information will
remain in the terminal's title indefinitely. Most terminals should already
be set up this way and clear Spack's status information.
-----------
``aliases``
-----------
Aliases can be used to define new Spack commands. They can be either shortcuts
for longer commands or include specific arguments for convenience. For instance,
if users want to use ``spack install``'s ``-v`` argument all the time, they can
create a new alias called ``inst`` that will always call ``install -v``:
.. code-block:: yaml
aliases:
inst: install -v
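With that alias defined, the two command lines below are equivalent
(``libelf`` is just a placeholder package):

.. code-block:: console

   $ spack inst libelf
   $ spack install -v libelf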

View File

@@ -17,7 +17,7 @@ case you want to skip directly to specific docs:
* :ref:`config.yaml <config-yaml>`
* :ref:`mirrors.yaml <mirrors>`
* :ref:`modules.yaml <modules>`
* :ref:`packages.yaml <packages-config>`
* :ref:`packages.yaml <build-settings>`
* :ref:`repos.yaml <repositories>`
You can also add any of these as inline configuration in the YAML
@@ -243,11 +243,9 @@ lower-precedence settings. Completely ignoring higher-level configuration
options is supported with the ``::`` notation for keys (see
:ref:`config-overrides` below).
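For instance, a ``packages::`` key (note the double colon) discards any
``packages`` settings coming from lower-precedence scopes; a minimal
sketch with illustrative values:

.. code-block:: yaml

   packages::
     all:
       compiler: [gcc]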
There are also special notations for string concatenation and precedence override:

* ``+:`` will force *prepending* strings or lists. For lists, this is the default behavior.
* ``-:`` works similarly, but for *appending* values.
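A minimal sketch, assuming a list-valued key such as ``extensions`` and a
hypothetical path:

.. code-block:: yaml

   config:
     extensions+:
     - /path/to/high/priority/extension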
:ref:`config-prepend-append`
^^^^^^^^^^^

View File

@@ -24,16 +24,6 @@ image, or to set up a proper entrypoint to run the image. These tasks are
usually both necessary and repetitive, so Spack comes with a command
to generate recipes for container images starting from a ``spack.yaml``.
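Assuming a ``spack.yaml`` in the current directory, the recipe can be
generated and redirected to a file, e.g.:

.. code-block:: console

   $ spack containerize > Dockerfile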
.. seealso::
This page is a reference for generating recipes to build container images.
It means that your environment is built from scratch inside the container
runtime.
Since v0.21, Spack can also create container images from existing package installations
on your host system. See :ref:`binary_caches_oci` for more information on
that topic.
--------------------
A Quick Introduction
--------------------

View File

@@ -9,42 +9,46 @@
Custom Extensions
=================
*Spack extensions* allow you to extend Spack capabilities by deploying your
own custom commands or logic in an arbitrary location on your filesystem.
This might be extremely useful, e.g., to develop and maintain a command whose
purpose is too specific to be considered for reintegration into the mainline,
or to evolve a command through its early stages before starting a discussion
to merge it upstream.
From Spack's point of view an extension is any path in your filesystem which
respects the following naming and layout for files:
.. code-block:: console
spack-scripting/ # The top level directory must match the format 'spack-{extension_name}'
├── pytest.ini # Optional file if the extension ships its own tests
├── scripting # Folder that may contain modules that are needed for the extension commands
│   ├── cmd # Folder containing extension commands
│   │   └── filter.py # A new command that will be available
│   └── functions.py # Module with internal details
├── tests # Tests for this extension
│   ├── conftest.py
│   └── test_filter.py
└── templates # Templates that may be needed by the extension
In the example above, the extension is named *scripting*. It adds an additional command
(``spack filter``) and unit tests to verify its behavior.

The extension can import any core Spack module in its implementation. When loaded by
the ``spack`` command, the extension itself is imported as a Python package in the
``spack.extensions`` namespace. In the example above, since the extension is named
"scripting", the corresponding Python module is ``spack.extensions.scripting``.

The code for this example extension can be obtained by cloning the corresponding git repository:
.. TODO: write an ad-hoc "hello world" extension and make it part of the spack organization
.. code-block:: console
$ git -C /tmp clone https://github.com/spack/spack-scripting.git
---------------------------------
Configure Spack to Use Extensions
@@ -57,7 +61,7 @@ paths to ``config.yaml``. In the case of our example this means ensuring that:
config:
extensions:
- /tmp/spack-scripting
is part of your configuration file. Once this is set up, any command that the extension provides
will be available from the command line:
@@ -82,32 +86,37 @@ will be available from the command line:
--implicit select specs that are not installed or were installed implicitly
--output OUTPUT where to dump the result
The corresponding unit tests can be run giving the appropriate options to ``spack unit-test``:
.. code-block:: console
$ spack unit-test --extension=scripting
========================================== test session starts ===========================================
platform linux -- Python 3.11.5, pytest-7.4.3, pluggy-1.3.0
rootdir: /home/culpo/github/spack-scripting
configfile: pytest.ini
testpaths: tests
plugins: xdist-3.5.0
collected 5 items

tests/test_filter.py .....                                                                         [100%]

========================================== slowest 30 durations ==========================================
2.31s setup    tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
0.57s call     tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
0.56s call     tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
0.54s call     tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]
0.54s call     tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
0.48s call     tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
0.01s setup    tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
0.01s setup    tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
0.01s setup    tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
0.01s setup    tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]

(5 durations < 0.005s hidden.  Use -vv to show these durations.)
=========================================== 5 passed in 5.06s ============================================

View File

@@ -1,77 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
==========================
Frequently Asked Questions
==========================
This page contains answers to frequently asked questions about Spack.
If you have questions that are not answered here, feel free to ask on
`Slack <https://slack.spack.io>`_ or `GitHub Discussions
<https://github.com/spack/spack/discussions>`_. If you've learned the
answer to a question that you think should be here, please consider
contributing to this page.
.. _faq-concretizer-precedence:
-----------------------------------------------------
Why does Spack pick particular versions and variants?
-----------------------------------------------------
This question comes up in a variety of forms:
1. Why does Spack seem to ignore my package preferences from ``packages.yaml`` config?
2. Why does Spack toggle a variant instead of using the default from the ``package.py`` file?
The short answer is that Spack always picks an optimal configuration
based on a complex set of criteria\ [#f1]_. These criteria are more nuanced
than always choosing the latest versions or default variants.
.. note::
As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
The following set of criteria (from lowest to highest precedence) explain
common cases where concretization output may seem surprising at first.
1. :ref:`Package preferences <package-preferences>` configured in ``packages.yaml``
override variant defaults from ``package.py`` files, and influence the optimal
ordering of versions. Preferences are specified as follows:
.. code-block:: yaml
packages:
foo:
version: [1.0, 1.1]
variants: ~mpi
2. :ref:`Reuse concretization <concretizer-options>` configured in ``concretizer.yaml``
overrides preferences, since it's typically faster to reuse an existing spec than to
build a preferred one from sources. When build caches are enabled, specs may be reused
from a remote location too. Reuse concretization is configured as follows:
.. code-block:: yaml
concretizer:
reuse: dependencies # other options are 'true' and 'false'
3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
and constraints from the command line as well as ``package.py`` files override all
of the above. Requirements are specified as follows:
.. code-block:: yaml
packages:
foo:
require:
- "@1.2: +mpi"
Requirements and constraints restrict the set of possible solutions, while reuse
behavior and preferences influence what an optimal solution looks like.
.. rubric:: Footnotes
.. [#f1] The exact list of criteria can be retrieved with the ``spack solve`` command

View File

@@ -111,28 +111,3 @@ CUDA is split into fewer components and is simpler to specify:
prefix: /opt/cuda/cuda-11.0.2/
where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
-----------------------------------
Using an External OpenGL API
-----------------------------------
Depending on whether we have a graphics card or not, we may choose to use OSMesa or GLX to implement the OpenGL API.
If a graphics card is unavailable, OSMesa is recommended and can typically be built with Spack.
However, if we prefer to utilize the system GLX tailored to our graphics card, we need to declare it as an external. Here's how to do it:
.. code-block:: yaml
packages:
libglx:
require: [opengl]
opengl:
buildable: false
externals:
- prefix: /usr/
spec: opengl@4.6
Note that the prefix has to be the root of both the libraries and the headers, i.e. ``/usr``, not the path to the lib directory.
To find out which ``opengl`` spec is available, use ``cd /usr/include/GL && grep -Ri gl_version``.

Binary file not shown.


View File

@@ -1,534 +0,0 @@
[Deleted file: a 534-line Graphviz-generated SVG showing the dependency graph of a ``strumpack@7.0.1%gcc@9.4.0`` spec (netlib-scalapack, openblas, intel-parallel-studio, parmetis, metis, butterflypack, slate, zfp, cmake, perl, and related dependencies, with ``virtuals=blas,lapack,mpi,scalapack`` edge labels); raw SVG markup not shown.]
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;hkcrbrtf2qex6rvzuok5tzdrbam55pdn -->
<g id="edge34" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;hkcrbrtf2qex6rvzuok5tzdrbam55pdn</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1865.2226,-1269.4691C1922.6966,-1248.2438 1991.964,-1222.6632 2050.6644,-1200.985"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1865.9154,-1271.3453C1923.3894,-1250.12 1992.6569,-1224.5394 2051.3572,-1202.8612"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2052.5441,-1205.088 2060.7123,-1198.3403 2050.119,-1198.5215 2052.5441,-1205.088"/>
<text text-anchor="middle" x="1910.9073" y="-1238.6056" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=scalapack</text>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge52" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1519.9696,-1290.6844C1394.6018,-1273.3057 1237.6631,-1244.7294 1102.7507,-1199.3478 1021.8138,-1171.8729 1008.1992,-1149.8608 932.6248,-1112.4956 887.1715,-1089.9216 836.578,-1065.4054 793.6914,-1044.8018"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1520.2442,-1288.7034C1394.9601,-1271.3381 1238.0214,-1242.7618 1103.3885,-1197.4522 1023.5148,-1170.8208 1009.9002,-1148.8087 933.5144,-1110.7044 888.0436,-1088.1218 837.4502,-1063.6056 794.5574,-1042.999"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="795.6235,-1040.7377 785.0938,-1039.565 792.5939,-1047.0482 795.6235,-1040.7377"/>
<text text-anchor="middle" x="1046.8307" y="-1202.5988" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
</g>
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef -->
<g id="node21" class="node">
<title>lfh3aovn65e66cs24qiehq3nd2ddojef</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1547.9922,-1198.3002C1547.9922,-1198.3002 1144.147,-1198.3002 1144.147,-1198.3002 1138.147,-1198.3002 1132.147,-1192.3002 1132.147,-1186.3002 1132.147,-1186.3002 1132.147,-1123.6998 1132.147,-1123.6998 1132.147,-1117.6998 1138.147,-1111.6998 1144.147,-1111.6998 1144.147,-1111.6998 1547.9922,-1111.6998 1547.9922,-1111.6998 1553.9922,-1111.6998 1559.9922,-1117.6998 1559.9922,-1123.6998 1559.9922,-1123.6998 1559.9922,-1186.3002 1559.9922,-1186.3002 1559.9922,-1192.3002 1553.9922,-1198.3002 1547.9922,-1198.3002"/>
<text text-anchor="middle" x="1346.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">arpack-ng@3.8.0%gcc@9.4.0/lfh3aov</text>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;lfh3aovn65e66cs24qiehq3nd2ddojef -->
<g id="edge46" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;lfh3aovn65e66cs24qiehq3nd2ddojef</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1637.8539,-1271.3373C1584.2332,-1250.1557 1519.6324,-1224.6368 1464.827,-1202.9873"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1638.5887,-1269.4771C1584.968,-1248.2956 1520.3672,-1222.7767 1465.5618,-1201.1272"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1466.3716,-1198.7592 1455.785,-1198.3403 1463.7998,-1205.2696 1466.3716,-1198.7592"/>
</g>
<!-- 57joith2sqq6sehge54vlloyolm36mdu -->
<g id="node22" class="node">
<title>57joith2sqq6sehge54vlloyolm36mdu</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1906.2352,-1198.3002C1906.2352,-1198.3002 1589.904,-1198.3002 1589.904,-1198.3002 1583.904,-1198.3002 1577.904,-1192.3002 1577.904,-1186.3002 1577.904,-1186.3002 1577.904,-1123.6998 1577.904,-1123.6998 1577.904,-1117.6998 1583.904,-1111.6998 1589.904,-1111.6998 1589.904,-1111.6998 1906.2352,-1111.6998 1906.2352,-1111.6998 1912.2352,-1111.6998 1918.2352,-1117.6998 1918.2352,-1123.6998 1918.2352,-1123.6998 1918.2352,-1186.3002 1918.2352,-1186.3002 1918.2352,-1192.3002 1912.2352,-1198.3002 1906.2352,-1198.3002"/>
<text text-anchor="middle" x="1748.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">sed@4.8%gcc@9.4.0/57joith</text>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;57joith2sqq6sehge54vlloyolm36mdu -->
<g id="edge27" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;57joith2sqq6sehge54vlloyolm36mdu</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1748.0696,-1270.4072C1748.0696,-1251.3263 1748.0696,-1228.7257 1748.0696,-1208.6046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1751.5697,-1208.3403 1748.0696,-1198.3403 1744.5697,-1208.3404 1751.5697,-1208.3403"/>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge24" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1975.9734,-1301.684C2148.2819,-1288.3961 2365.6859,-1259.5384 2428.3689,-1197.6866 2466.9261,-1160.1438 2472.9783,-1095.7153 2471.5152,-1049.9701"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1976.1272,-1303.678C2148.5451,-1290.3788 2365.949,-1261.521 2429.7703,-1199.1134 2468.9173,-1160.3309 2474.9695,-1095.9024 2473.5142,-1049.9065"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2476.0078,-1049.7027 2472.0657,-1039.8686 2469.0147,-1050.0146 2476.0078,-1049.7027"/>
<text text-anchor="middle" x="2207.8884" y="-1273.0053" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge6" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1520.1614,-1301.6771C1362.9712,-1287.992 1173.582,-1259.0928 1123.0696,-1198.4 1098.3914,-1168.7481 1103.0165,-1144.5563 1123.0696,-1111.6 1140.5998,-1082.79 1167.9002,-1060.8539 1197.4647,-1044.2681"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1199.1408,-1047.3408 1206.2789,-1039.5114 1195.8163,-1041.1806 1199.1408,-1047.3408"/>
</g>
<!-- ogcucq2eod3xusvvied5ol2iobui4nsb -->
<g id="node18" class="node">
<title>ogcucq2eod3xusvvied5ol2iobui4nsb</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M400.2088,-245.5002C400.2088,-245.5002 11.9304,-245.5002 11.9304,-245.5002 5.9304,-245.5002 -.0696,-239.5002 -.0696,-233.5002 -.0696,-233.5002 -.0696,-170.8998 -.0696,-170.8998 -.0696,-164.8998 5.9304,-158.8998 11.9304,-158.8998 11.9304,-158.8998 400.2088,-158.8998 400.2088,-158.8998 406.2088,-158.8998 412.2088,-164.8998 412.2088,-170.8998 412.2088,-170.8998 412.2088,-233.5002 412.2088,-233.5002 412.2088,-239.5002 406.2088,-245.5002 400.2088,-245.5002"/>
<text text-anchor="middle" x="206.0696" y="-195" font-family="Monaco" font-size="24.00" fill="#000000">libiconv@1.17%gcc@9.4.0/ogcucq2</text>
</g>
<!-- xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6&#45;&gt;ogcucq2eod3xusvvied5ol2iobui4nsb -->
<g id="edge47" class="edge">
<title>xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6-&gt;ogcucq2eod3xusvvied5ol2iobui4nsb</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M205.0696,-317.6072C205.0696,-298.5263 205.0696,-275.9257 205.0696,-255.8046"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M207.0696,-317.6072C207.0696,-298.5263 207.0696,-275.9257 207.0696,-255.8046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="209.5697,-255.5403 206.0696,-245.5403 202.5697,-255.5404 209.5697,-255.5403"/>
<text text-anchor="middle" x="165.5739" y="-285.8482" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=iconv</text>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;mujlx42xgttdc6u6rmiftsktpsrcmpbs -->
<g id="edge42" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;mujlx42xgttdc6u6rmiftsktpsrcmpbs</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M672.6614,-1430.2151C600.7916,-1411.3548 534.1254,-1386.9583 512.2667,-1357.7962 489.0909,-1326.029 493.54,-1304.0273 512.1928,-1269.9192 527.5256,-1242.0821 552.3382,-1220.1508 578.9347,-1203.0434"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M673.169,-1428.2806C601.4789,-1409.4766 534.8127,-1385.0802 513.8725,-1356.6038 491.0512,-1326.4254 495.5003,-1304.4237 513.9464,-1270.8808 528.8502,-1243.5806 553.6627,-1221.6493 580.016,-1204.7259"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="581.46,-1206.7724 588.1193,-1198.532 577.7747,-1200.8211 581.46,-1206.7724"/>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge43" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M680.4783,-1430.2246C600.8632,-1410.3933 522.8724,-1385.2921 493.3877,-1357.9314 411.1392,-1281.1573 374.1678,-1206.1582 435.2305,-1111.0561 454.3431,-1081.6726 482.5021,-1059.8261 513.5088,-1043.3725"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M680.9617,-1428.2839C601.476,-1408.4895 523.4851,-1383.3883 494.7515,-1356.4686 412.9331,-1280.273 375.9616,-1205.2739 436.9087,-1112.1439 455.569,-1083.2528 483.728,-1061.4063 514.4455,-1045.1396"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="515.8631,-1047.2236 523.1893,-1039.5699 512.6893,-1040.9844 515.8631,-1047.2236"/>
<text text-anchor="middle" x="453.0969" y="-1356.92" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas</text>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;xiro2z6na56qdd4czjhj54eag3ekbiow -->
<g id="edge38" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;xiro2z6na56qdd4czjhj54eag3ekbiow</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M857.6892,-1429.8521C840.9235,-1409.9835 820.9375,-1386.2985 803.4466,-1365.5705"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M859.2178,-1428.5623C842.4521,-1408.6937 822.466,-1385.0087 804.9751,-1364.2807"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="806.7654,-1362.5258 797.6414,-1357.1403 801.4156,-1367.0402 806.7654,-1362.5258"/>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge13" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1118.1783,-1450.5735C1412.4221,-1422.447 1902.6188,-1374.0528 1984.8578,-1356.2227 2203.916,-1308.9943 2329.6342,-1377.1305 2461.2658,-1197.8052 2492.3675,-1156.1664 2488.743,-1094.1171 2480.3694,-1050.0521"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1118.3686,-1452.5644C1412.6186,-1424.4374 1902.8153,-1376.0432 1985.2814,-1358.1773 2202.963,-1310.7526 2328.6812,-1378.8889 2462.8734,-1198.9948 2494.3641,-1156.0498 2490.7395,-1094.0005 2482.3343,-1049.6791"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2484.7438,-1048.9818 2479.3189,-1039.8812 2477.8845,-1050.3784 2484.7438,-1048.9818"/>
<text text-anchor="middle" x="1820.4407" y="-1379.7188" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge32" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M947.2173,-1428.5496C968.7089,-1408.5917 992.2747,-1383.3345 1008.2117,-1356.6861 1067.0588,-1259.8646 1008.3745,-1197.6371 1084.3226,-1110.9351 1110.3076,-1081.7965 1144.7149,-1059.7578 1180.1804,-1043.0531"/>
<path fill="none" stroke="#daa520" stroke-width="2" d="M948.5783,-1430.0151C970.1712,-1409.9561 993.737,-1384.6989 1009.9275,-1357.7139 1068.5139,-1258.4924 1009.8295,-1196.2649 1085.8166,-1112.2649 1111.3864,-1083.4807 1145.7936,-1061.442 1181.0322,-1044.8626"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1182.4567,-1046.9607 1190.1008,-1039.6246 1179.5503,-1040.5926 1182.4567,-1046.9607"/>
</g>
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo -->
<g id="node17" class="node">
<title>5xerf6imlgo4xlubacr4mljacc3edexo</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1822.3657,-880.7002C1822.3657,-880.7002 1437.7735,-880.7002 1437.7735,-880.7002 1431.7735,-880.7002 1425.7735,-874.7002 1425.7735,-868.7002 1425.7735,-868.7002 1425.7735,-806.0998 1425.7735,-806.0998 1425.7735,-800.0998 1431.7735,-794.0998 1437.7735,-794.0998 1437.7735,-794.0998 1822.3657,-794.0998 1822.3657,-794.0998 1828.3657,-794.0998 1834.3657,-800.0998 1834.3657,-806.0998 1834.3657,-806.0998 1834.3657,-868.7002 1834.3657,-868.7002 1834.3657,-874.7002 1828.3657,-880.7002 1822.3657,-880.7002"/>
<text text-anchor="middle" x="1630.0696" y="-830.2" font-family="Monaco" font-size="24.00" fill="#000000">openssl@1.1.1s%gcc@9.4.0/5xerf6i</text>
</g>
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo&#45;&gt;ywrpvv2hgooeepdke33exkqrtdpd5gkl -->
<g id="edge22" class="edge">
<title>5xerf6imlgo4xlubacr4mljacc3edexo-&gt;ywrpvv2hgooeepdke33exkqrtdpd5gkl</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1425.7129,-803.7711C1262.7545,-776.9548 1035.5151,-739.5603 871.9084,-712.6373"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="872.1525,-709.1305 861.7169,-710.9602 871.0158,-716.0376 872.1525,-709.1305"/>
</g>
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo&#45;&gt;4vsmjofkhntilgzh4zebluqak5mdsu3x -->
<g id="edge48" class="edge">
<title>5xerf6imlgo4xlubacr4mljacc3edexo-&gt;4vsmjofkhntilgzh4zebluqak5mdsu3x</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1644.2788,-794.0072C1650.5843,-774.7513 1658.0636,-751.9107 1664.6976,-731.6514"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1668.0917,-732.533 1667.8776,-721.9403 1661.4393,-730.3546 1668.0917,-732.533"/>
</g>
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo&#45;&gt;nizxi5u5bbrzhzwfy2qb7hatlhuswlrz -->
<g id="edge41" class="edge">
<title>5xerf6imlgo4xlubacr4mljacc3edexo-&gt;nizxi5u5bbrzhzwfy2qb7hatlhuswlrz</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1834.3289,-793.5645C1906.6817,-774.1673 1975.9199,-749.2273 1998.2925,-721.3707 2031.5218,-680.681 2032.1636,-617.9031 2027.044,-573.3921"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1834.8468,-795.4962C1907.3595,-776.0489 1976.5977,-751.1089 1999.8467,-722.6293 2033.5217,-680.7015 2034.1635,-617.9235 2029.0309,-573.1639"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2031.4885,-572.6712 2026.7474,-563.1964 2024.5451,-573.5598 2031.4885,-572.6712"/>
</g>
<!-- v32wejd4d5lc6uka4qlrogwh5xae2h3r -->
<g id="node26" class="node">
<title>v32wejd4d5lc6uka4qlrogwh5xae2h3r</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1306.1776,-404.3002C1306.1776,-404.3002 929.9616,-404.3002 929.9616,-404.3002 923.9616,-404.3002 917.9616,-398.3002 917.9616,-392.3002 917.9616,-392.3002 917.9616,-329.6998 917.9616,-329.6998 917.9616,-323.6998 923.9616,-317.6998 929.9616,-317.6998 929.9616,-317.6998 1306.1776,-317.6998 1306.1776,-317.6998 1312.1776,-317.6998 1318.1776,-323.6998 1318.1776,-329.6998 1318.1776,-329.6998 1318.1776,-392.3002 1318.1776,-392.3002 1318.1776,-398.3002 1312.1776,-404.3002 1306.1776,-404.3002"/>
<text text-anchor="middle" x="1118.0696" y="-353.8" font-family="Monaco" font-size="24.00" fill="#000000">readline@8.2%gcc@9.4.0/v32wejd</text>
</g>
<!-- uabgssx6lsgrevwbttslldnr5nzguprj&#45;&gt;v32wejd4d5lc6uka4qlrogwh5xae2h3r -->
<g id="edge7" class="edge">
<title>uabgssx6lsgrevwbttslldnr5nzguprj-&gt;v32wejd4d5lc6uka4qlrogwh5xae2h3r</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1117.0696,-476.4072C1117.0696,-457.3263 1117.0696,-434.7257 1117.0696,-414.6046"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1119.0696,-476.4072C1119.0696,-457.3263 1119.0696,-434.7257 1119.0696,-414.6046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1121.5697,-414.3403 1118.0696,-404.3403 1114.5697,-414.3404 1121.5697,-414.3403"/>
</g>
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge14" class="edge">
<title>lfh3aovn65e66cs24qiehq3nd2ddojef-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1167.6711,-1112.5788C1078.9073,-1090.9596 971.5916,-1064.822 881.5513,-1042.892"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1168.1444,-1110.6356C1079.3806,-1089.0165 972.0649,-1062.8788 882.0246,-1040.9488"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="882.5603,-1038.5062 872.016,-1039.5403 880.9038,-1045.3074 882.5603,-1038.5062"/>
<text text-anchor="middle" x="963.904" y="-1079.817" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
</g>
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge31" class="edge">
<title>lfh3aovn65e66cs24qiehq3nd2ddojef-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1559.7922,-1112.1043C1562.8511,-1111.5975 1565.8904,-1111.1002 1568.9103,-1110.6128 1759.2182,-1079.8992 1973.2397,-1052.1328 2144.6143,-1031.5343"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1560.1191,-1114.0774C1563.1741,-1113.5712 1566.2134,-1113.0739 1569.2289,-1112.5872 1759.4755,-1081.8826 1973.497,-1054.1161 2144.8529,-1033.52"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2145.1529,-1036.002 2154.6648,-1031.3357 2144.3191,-1029.0518 2145.1529,-1036.002"/>
<text text-anchor="middle" x="1828.178" y="-1072.4692" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge21" class="edge">
<title>lfh3aovn65e66cs24qiehq3nd2ddojef-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1346.0696,-1111.6072C1346.0696,-1092.5263 1346.0696,-1069.9257 1346.0696,-1049.8046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-1049.5403 1346.0696,-1039.5403 1342.5697,-1049.5404 1349.5697,-1049.5403"/>
</g>
<!-- 2w3nq3n3hcj2tqlvcpewsryamltlu5tw&#45;&gt;htzjns66gmq6pjofohp26djmjnpbegho -->
<g id="edge30" class="edge">
<title>2w3nq3n3hcj2tqlvcpewsryamltlu5tw-&gt;htzjns66gmq6pjofohp26djmjnpbegho</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2467.0696,-952.8072C2467.0696,-933.7263 2467.0696,-911.1257 2467.0696,-891.0046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2470.5697,-890.7403 2467.0696,-880.7403 2463.5697,-890.7404 2470.5697,-890.7403"/>
</g>
<!-- 7rzbmgoxhmm2jhellkgcjmn62uklf22x&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge2" class="edge">
<title>7rzbmgoxhmm2jhellkgcjmn62uklf22x-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1422.351,-1429.2133C1312.2528,-1388.8872 1171.1589,-1316.8265 1103.0696,-1198.4 1083.8409,-1164.956 1082.4563,-1144.2088 1103.0696,-1111.6 1121.4102,-1082.5864 1149.2483,-1060.7204 1179.6189,-1044.2895"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1181.4205,-1047.2977 1188.6801,-1039.5809 1178.1927,-1041.0863 1181.4205,-1047.2977"/>
</g>
<!-- v32wejd4d5lc6uka4qlrogwh5xae2h3r&#45;&gt;j5rupoqliu7kasm6xndl7ui32wgawkru -->
<g id="edge39" class="edge">
<title>v32wejd4d5lc6uka4qlrogwh5xae2h3r-&gt;j5rupoqliu7kasm6xndl7ui32wgawkru</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1179.8001,-316.7866C1209.2065,-296.3053 1244.4355,-271.7686 1274.8343,-250.5961"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1180.9431,-318.4278C1210.3495,-297.9465 1245.5785,-273.4098 1275.9774,-252.2373"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1277.6375,-254.1277 1283.8429,-245.5403 1273.6367,-248.3836 1277.6375,-254.1277"/>
</g>
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if&#45;&gt;j5rupoqliu7kasm6xndl7ui32wgawkru -->
<g id="edge18" class="edge">
<title>gguve5icmo5e4cw5o3hvvfsxremc46if-&gt;j5rupoqliu7kasm6xndl7ui32wgawkru</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1345.0696,-952.7909C1345.0696,-891.6316 1345.0696,-776.6094 1345.0696,-678.6 1345.0696,-678.6 1345.0696,-678.6 1345.0696,-519.8 1345.0696,-426.9591 1345.0696,-318.8523 1345.0696,-255.7237"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1347.0696,-952.7909C1347.0696,-891.6316 1347.0696,-776.6094 1347.0696,-678.6 1347.0696,-678.6 1347.0696,-678.6 1347.0696,-519.8 1347.0696,-426.9591 1347.0696,-318.8523 1347.0696,-255.7237"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-255.6091 1346.0696,-245.6091 1342.5697,-255.6092 1349.5697,-255.6091"/>
</g>
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if&#45;&gt;5xerf6imlgo4xlubacr4mljacc3edexo -->
<g id="edge40" class="edge">
<title>gguve5icmo5e4cw5o3hvvfsxremc46if-&gt;5xerf6imlgo4xlubacr4mljacc3edexo</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1423.1858,-951.9344C1460.2844,-931.1905 1504.8229,-906.2866 1543.0151,-884.9312"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1424.1619,-953.68C1461.2605,-932.9361 1505.799,-908.0322 1543.9912,-886.6769"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1545.5391,-888.6757 1552.5592,-880.7403 1542.1228,-882.5659 1545.5391,-888.6757"/>
</g>
</g>
</svg>


View File

@@ -55,7 +55,6 @@ or refer to the full manual below.
getting_started
basic_usage
replace_conda_homebrew
frequently_asked_questions
.. toctree::
:maxdepth: 2
@@ -71,7 +70,7 @@ or refer to the full manual below.
configuration
config_yaml
packages_yaml
bootstrapping
build_settings
environments
containers
@@ -79,7 +78,6 @@ or refer to the full manual below.
module_file_support
repositories
binary_caches
bootstrapping
command_index
chain
extensions

View File

@@ -519,11 +519,11 @@ inspections and customize them per-module-set.
modules:
prefix_inspections:
./bin:
bin:
- PATH
./man:
man:
- MANPATH
./:
'':
- CMAKE_PREFIX_PATH
Prefix inspections are only applied if the relative path inside the
@@ -579,7 +579,7 @@ the view.
view_relative_modules:
use_view: my_view
prefix_inspections:
./bin:
bin:
- PATH
view:
my_view:

View File

@@ -1,591 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _packages-config:
================================
Package Settings (packages.yaml)
================================
Spack allows you to customize how your software is built through the
``packages.yaml`` file. Using it, you can make Spack prefer particular
implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK),
or you can make it prefer to build with particular compilers. You can
also tell Spack to use *external* software installations already
present on your system.
At a high level, the ``packages.yaml`` file is structured like this:
.. code-block:: yaml
packages:
package1:
# settings for package1
package2:
# settings for package2
# ...
all:
# settings that apply to all packages.
So you can either set build preferences specifically for *one* package,
or you can specify that certain settings should apply to *all* packages.
The types of settings you can customize are described in detail below.
Spack's build defaults are in the default
``etc/spack/defaults/packages.yaml`` file. You can override them in
``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more
details on how this works, see :ref:`configuration-scopes`.
.. _sec-external-packages:
-----------------
External Packages
-----------------
Spack can be configured to use externally installed
packages rather than building its own. This may be desirable
if machines ship with system packages, such as a customized MPI
that should be used instead of a Spack-built one.
External packages are configured through the ``packages.yaml`` file.
Here's an example of an external configuration:
.. code-block:: yaml
packages:
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel
This example lists three installations of OpenMPI, one built with GCC,
one built with GCC and debug information, and another built with Intel.
If Spack is asked to build a package that uses one of these MPIs as a
dependency, it will use the pre-installed OpenMPI in
the given directory. Note that the specified path is the top-level
install prefix, not the ``bin`` subdirectory.
``packages.yaml`` can also be used to specify modules to load instead
of the installation prefixes. The following example says that module
``CMake/3.7.2`` provides cmake version 3.7.2.
.. code-block:: yaml
cmake:
externals:
- spec: cmake@3.7.2
modules:
- CMake/3.7.2
Each ``packages.yaml`` begins with a ``packages:`` attribute, followed
by a list of package names. To specify externals, add an ``externals:``
attribute under the package name, which lists externals.
Each external should specify a ``spec:`` string that is as
well-defined as reasonably possible. If a
package lacks a spec component, such as a compiler or
package version, Spack will guess the missing component based
on its most-favored packages, and it may guess incorrectly.
Each package version and compiler listed in an external should
have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prevent packages from being built from sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding an external spec in ``packages.yaml`` allows Spack to use an external location,
but it does not prevent Spack from building packages from sources. In the above example,
Spack might choose for many valid reasons to start building and linking with the
latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions.
To prevent this, the ``packages.yaml`` configuration also allows packages
to be flagged as non-buildable. The previous example could be modified to
be:
.. code-block:: yaml
packages:
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel
buildable: False
The addition of the ``buildable`` flag tells Spack that it should never build
its own version of OpenMPI from sources, and it will instead always rely on a pre-built
OpenMPI.
.. note::
If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag)
pre-built specs include specs already available from a local store, an upstream store, a registered
buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only
external specs in ``packages.yaml`` are included in the list of pre-built specs.
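For reference, the flag lives in Spack's concretizer configuration; a minimal
sketch of turning reuse off (file and configuration scope are up to you) is:

.. code-block:: yaml

   concretizer:
     reuse: false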
If an external package is specified via modules and marked as not buildable,
Spack will load the module into the build environment so the package can be
used for linking.
The ``buildable`` flag does not need to be paired with external packages.
It can also be used on its own to forbid packages that may be
buggy or otherwise undesirable.
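For instance, a minimal sketch that simply forbids a package outright (the
package name here is hypothetical):

.. code-block:: yaml

   packages:
     fragile-lib:
       buildable: False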
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Non-buildable virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Virtual packages in Spack can also be specified as not buildable, and
external implementations can be provided. In the example above,
OpenMPI is configured as not buildable, but Spack will often prefer
other MPI implementations over the externally available OpenMPI. Each
MPI provider could be marked as not buildable individually, but it is
more convenient to flag the virtual package itself:
.. code-block:: yaml
packages:
mpi:
buildable: False
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel
Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.
In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers
(available via stores or buildcaches) are not wanted, Spack can be configured to require
specs matching only the available externals:
.. code-block:: yaml
packages:
mpi:
buildable: False
require:
- one_of: [
"openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64",
"openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug",
"openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
]
openmpi:
externals:
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.4.3
- spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
prefix: /opt/openmpi-1.4.3-debug
- spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
prefix: /opt/openmpi-1.6.5-intel
This configuration prevents any spec that uses MPI and originates from stores or buildcaches
from being reused, unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see
:ref:`package-requirements`.
.. _cmd-spack-external-find:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Automatically Find External Packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
You can run the :ref:`spack external find <spack-external-find>` command
to search for system-provided packages and add them to ``packages.yaml``.
After running this command your ``packages.yaml`` may include new entries:
.. code-block:: yaml
packages:
cmake:
externals:
- spec: cmake@3.17.2
prefix: /usr
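The search can also be limited to specific packages. A typical session might
look like the following (output abbreviated and purely illustrative):

.. code-block:: console

   $ spack external find cmake
   ==> The following specs have been detected on this system and added to /home/user/.spack/packages.yaml
   cmake@3.17.2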
This is generally useful for detecting a small set of commonly used packages;
for now it is mostly limited to finding build-only dependencies.
Specific limitations include:
* Packages are not discoverable by default: For a package to be
discoverable with ``spack external find``, it needs to add special
logic. See :ref:`here <make-package-findable>` for more details.
* The logic does not search through module files, it can only detect
packages with executables defined in ``PATH``; you can help Spack locate
externals which use module files by loading any associated modules for
packages that you want Spack to know about before running
``spack external find``.
* Spack does not overwrite existing entries in the package configuration:
If there is an external defined for a spec at any configuration scope,
then Spack will not add a new external entry (``spack config blame packages``
can help locate all external entries).
.. _package-requirements:
--------------------
Package Requirements
--------------------
Spack can be configured to always use certain compilers, package
versions, and variants during concretization through package
requirements.
Package requirements are useful when you find yourself repeatedly
specifying the same constraints on the command line, and wish Spack
would respect these constraints whether or not you mention them
explicitly. Another use case is specifying constraints that should apply
to all root specs in an environment, without having to repeat the
constraint everywhere.
Apart from that, requirements config is more flexible than constraints
on the command line, because it can specify constraints on packages
*when they occur* as a dependency. In contrast, on the command line it
is not possible to specify constraints on dependencies while also keeping
those dependencies optional.
.. seealso::
FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`
^^^^^^^^^^^^^^^^^^^
Requirements syntax
^^^^^^^^^^^^^^^^^^^
The package requirements configuration is specified in ``packages.yaml``,
keyed by package name and expressed using the Spec syntax. In the simplest
case you can specify attributes that you always want the package to have
by providing a single spec string to ``require``:
.. code-block:: yaml
packages:
libfabric:
require: "@1.13.2"
In the above example, ``libfabric`` will always build with version 1.13.2. If you
need to compose multiple configuration scopes, ``require`` accepts a list of
strings:
.. code-block:: yaml
packages:
libfabric:
require:
- "@1.13.2"
- "%gcc"
In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
as a compiler.
For more complex use cases, ``require`` also accepts a list of objects. These objects
must have either an ``any_of`` or a ``one_of`` field containing a list of spec strings,
and they may optionally have ``when`` and ``message`` attributes:
.. code-block:: yaml
packages:
openmpi:
require:
- any_of: ["@4.1.5", "%gcc"]
message: "in this example only 4.1.5 can build with other compilers"
``any_of`` is a list of specs, at least one of which must be satisfied;
the concretized spec is also allowed to match more than one.
In the above example, that means you could build ``openmpi@4.1.5%gcc``,
``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
not ``openmpi@3.9%clang``.
If a custom message is provided, and the requirement is not satisfiable,
Spack will print the custom error message:
.. code-block:: console
$ spack spec openmpi@3.9%clang
==> Error: in this example only 4.1.5 can build with other compilers
We could express a similar requirement using the ``when`` attribute:
.. code-block:: yaml
packages:
openmpi:
require:
- any_of: ["%gcc"]
when: "@:4.1.4"
message: "in this example only 4.1.5 can build with other compilers"
In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
constraint:
.. code-block:: yaml
packages:
openmpi:
require:
- spec: "%gcc"
when: "@:4.1.4"
message: "in this example only 4.1.5 can build with other compilers"
This code snippet and the one before it are semantically equivalent.
Finally, instead of ``any_of`` you can use ``one_of``, which also takes a list of specs. The final
concretized spec must match exactly one of them:
.. code-block:: yaml
packages:
mpich:
require:
- one_of: ["+cuda", "+rocm"]
In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.
.. note::
For ``any_of`` and ``one_of``, the order of specs indicates a
preference: items that appear earlier in the list are preferred
(note that these preferences can be ignored in favor of others).
.. note::
When using a conditional requirement, Spack is allowed to actively avoid the triggering
condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
the optimization criteria. To check the current optimization criteria and their
priorities you can run ``spack solve zlib``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
You can also set default requirements for all packages under ``all``
like this:
.. code-block:: yaml
packages:
all:
require: '%clang'
which means every spec will be required to use ``clang`` as a compiler.
Requirements on variants for all packages are possible too, but note that they
are only enforced for the packages that define those variants; otherwise they
are disregarded. For example:
.. code-block:: yaml
packages:
all:
require:
- "+shared"
- "+cuda"
will just enforce ``+shared`` on ``zlib``, which has a boolean ``shared`` variant but
no ``cuda`` variant.
Constraints in a single spec literal are always considered as a whole, so in a case like:
.. code-block:: yaml
packages:
all:
require: "+shared +cuda"
the default requirement will be enforced only if a package has both a ``cuda`` and
a ``shared`` variant, and will never be partially enforced.
Finally, ``all`` represents a *default set of requirements* -
if there are specific package requirements, then the default requirements
under ``all`` are disregarded. For example, with a configuration like this:
.. code-block:: yaml
packages:
all:
require:
- 'build_type=Debug'
- '%clang'
cmake:
require:
- 'build_type=Debug'
- '%gcc'
Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
dependencies) to use ``clang``. If enforcing ``build_type=Debug`` is needed also
on ``cmake``, it must be repeated in the specific ``cmake`` requirements.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting requirements on virtual specs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A requirement on a virtual spec applies whenever that virtual is present in the DAG.
This can be useful for fixing which virtual provider you want to use:
.. code-block:: yaml
packages:
mpi:
require: 'mvapich2 %gcc'
With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.
Requirements on the virtual spec and on the specific provider are both applied, if
present. For instance with a configuration like:
.. code-block:: yaml
packages:
mpi:
require: 'mvapich2 %gcc'
mvapich2:
require: '~cuda'
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
.. _package-preferences:
-------------------
Package Preferences
-------------------
In some cases package requirements can be too strong, and package
preferences are the better option. Package preferences do not impose
constraints on packages for particular versions or variant values;
they only set defaults. The concretizer is free to change
them if it must, due to other constraints, and it also prefers reusing
installed packages over building new ones that are a better match for
preferences.
.. seealso::
FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`
Most package preferences (``compilers``, ``target`` and ``providers``)
can only be set globally under the ``all`` section of ``packages.yaml``:
.. code-block:: yaml
packages:
all:
compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
target: [x86_64_v3]
providers:
mpi: [mvapich2, mpich, openmpi]
These preferences override Spack's default and effectively reorder priorities
when looking for the best compiler, target or virtual package provider. Each
preference takes an ordered list of spec constraints, with earlier entries in
the list being preferred over later entries.
In the example above all packages prefer to be compiled with ``gcc@12.2.0``,
to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they
depend on ``mpi``.
The ``variants`` and ``version`` preferences can be set under
package specific sections of the ``packages.yaml`` file:
.. code-block:: yaml
packages:
opencv:
variants: +debug
gperftools:
version: [2.2, 2.4, 2.3]
In this case, the preference for ``opencv`` is to build with debug options, while
``gperftools`` prefers version 2.2 over 2.4.
Any preference can be overridden on the command line if explicitly requested.
Preferences cannot overcome explicit constraints, as they only set a preferred
ordering among homogeneous attribute values. Going back to the example, if
``gperftools@2.3:`` is requested, Spack will install version 2.4,
since the most-preferred version 2.2 is prohibited by the version constraint.
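A quick way to observe this is to concretize with and without the constraint;
given the preferences above, a session might look like this (hypothetical,
output elided):

.. code-block:: console

   $ spack spec gperftools       # concretizes to the preferred gperftools@2.2
   $ spack spec gperftools@2.3:  # concretizes to gperftools@2.4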
.. _package_permissions:
-------------------
Package Permissions
-------------------
Spack can be configured to assign permissions to the files installed
by a package.
In the ``packages.yaml`` file under ``permissions``, the attributes
``read``, ``write``, and ``group`` control the package
permissions. These attributes can be set per-package, or for all
packages under ``all``. If permissions are set under ``all`` and for a
specific package, the package-specific settings take precedence.
The ``read`` and ``write`` attributes take one of ``user``, ``group``,
or ``world``.
.. code-block:: yaml
packages:
all:
permissions:
write: group
group: spack
my_app:
permissions:
read: group
group: my_team
The permissions settings describe the broadest level of access to
installations of the specified packages. The execute permissions of
the file are set to the same level as read permissions for those files
that are executable. The default setting for ``read`` is ``world``,
and for ``write`` is ``user``. In the example above, installations of
``my_app`` will be installed with user and group permissions but no
world permissions, and owned by the group ``my_team``. All other
packages will be installed with user and group write privileges, and
world read privileges. Those packages will be owned by the group
``spack``.
The ``group`` attribute assigns a Unix-style group to a package. All
files installed by the package will be owned by the assigned group,
and the sticky group bit will be set on the install prefix and all
directories inside the install prefix. This will ensure that even
manually placed files within the install prefix are owned by the
assigned group. If no group is assigned, Spack will allow the OS
default behavior to go as expected.
----------------------------
Assigning Package Attributes
----------------------------
You can assign class-level attributes in the configuration:
.. code-block:: yaml
packages:
mpileaks:
# Override existing attributes
url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
# ... or add new ones
x: 1
Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
attributes may be any value parseable by YAML.
These can only be applied to specific packages, not to ``all`` or to
virtual packages.
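For illustration, a method in a hypothetical ``mpileaks`` ``package.py`` could
then read the attribute directly (a sketch, not the actual package):

.. code-block:: python

   class Mpileaks(Package):
       def install(self, spec, prefix):
           # self.x holds the custom attribute assigned in packages.yaml above
           configure(f"--prefix={prefix}", f"--with-stack-start={self.x}")
           make()
           make("install")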

View File

@@ -2337,7 +2337,7 @@ window while a batch job is running ``spack install`` on the same or
overlapping dependencies without any process trying to re-do the work of
another.
For example, if you are using Slurm, you could launch an installation
For example, if you are using SLURM, you could launch an installation
of ``mpich`` using the following command:
.. code-block:: console
@@ -2352,7 +2352,7 @@ the following at the command line of a bash shell:
.. code-block:: console
$ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done
$ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 &; done
.. note::
@@ -2688,6 +2688,60 @@ appear in the package file (or in this case, in the list).
right version. If two packages depend on ``binutils`` patched *the
same* way, they can both use a single installation of ``binutils``.
.. _setup-dependent-environment:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Influence how dependents are built or run
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
or the
:meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
methods.
The Qt package, for instance, uses this call:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
:pyobject: Qt.setup_dependent_build_environment
:linenos:
to set the ``QTDIR`` environment variable so that packages
that depend on a particular Qt installation will find it.
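Conceptually, the body of that method boils down to something like the
following sketch (simplified; see the real package for details):

.. code-block:: python

   def setup_dependent_build_environment(self, env, dependent_spec):
       # Point dependents at this particular Qt installation
       env.set("QTDIR", self.prefix)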
Another good example of how a dependency can influence
the build environment of dependents is the Python package:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_build_environment
:linenos:
The method above ensures that any package that depends on Python
will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
variables set appropriately before starting the installation. To make things
even simpler, the ``python setup.py`` command is also inserted into the module
scope of dependents by overriding a third method called
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_package
:linenos:
This allows most Python packages to have a very simple install procedure,
like the following:
.. code-block:: python
def install(self, spec, prefix):
setup_py("install", "--prefix={0}".format(prefix))
Finally, the Python package also takes care of modifying ``PYTHONPATH``
so that dependent packages run correctly:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_run_environment
:linenos:
.. _packaging_conflicts:
@@ -2832,70 +2886,6 @@ variant(s) are selected. This may be accomplished with conditional
extends("python", when="+python")
...
.. _setup-environment:
--------------------------------------------
Runtime and build time environment variables
--------------------------------------------
Spack provides a few methods to help package authors set up the required environment variables for
their package. Environment variables typically depend on how the package is used: variables that
make sense during the build phase may not be needed at runtime, and vice versa. Further, sometimes
it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package:
1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
The Qt package, for instance, uses this call:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
:pyobject: Qt.setup_dependent_build_environment
:linenos:
to set the ``QTDIR`` environment variable so that packages that depend on a particular Qt
installation will find it.
The following diagram will give you an idea of when each of these methods is called in a build
context:
.. image:: images/setup_env.png
:align: center
Notice that ``setup_dependent_run_environment`` can be called multiple times, once for each
dependent package, whereas ``setup_run_environment`` is called only once for the package itself.
This means that the former should only be used if the environment variables depend on the dependent
package, whereas the latter should be used if the environment variables depend only on the package
itself.
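As a sketch of the distinction (hypothetical package, using the method
signatures documented above):

.. code-block:: python

   class MyLib(Package):
       def setup_run_environment(self, env):
           # Called once, for this package itself
           env.set("MYLIB_ROOT", self.prefix)

       def setup_dependent_run_environment(self, env, dependent_spec):
           # Called once per dependent; may inspect dependent_spec
           env.prepend_path("PATH", self.prefix.bin)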
--------------------------------
Setting package module variables
--------------------------------
Apart from modifying environment variables of the dependent package, you can also define Python
variables to be used by the dependent. This is done by implementing
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
example of this can be found in the ``Python`` package:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_package
:linenos:
This allows Python packages to directly use these variables:
.. code-block:: python
def install(self, spec, prefix):
...
install("script.py", python_platlib)
.. note::
We recommend using ``setup_dependent_package`` sparingly, as it is not always clear where
global variables are coming from when editing a ``package.py`` file.
-----
Views
-----
@@ -2974,33 +2964,6 @@ The ``provides("mpi")`` call tells Spack that the ``mpich`` package
can be used to satisfy the dependency of any package that
``depends_on("mpi")``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Providing multiple virtuals simultaneously
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Packages can provide more than one virtual dependency. Sometimes, due to implementation details,
there are subsets of those virtuals that need to be provided together by the same package.
A well-known example is ``openblas``, which provides both the ``lapack`` and ``blas`` API in a single ``libopenblas``
library. A package that needs ``lapack`` and ``blas`` must either use ``openblas`` to provide both, or not use
``openblas`` at all. It cannot pick one or the other.
To express this constraint in a package, the two virtual dependencies must be listed in the same ``provides`` directive:
.. code-block:: python
provides('blas', 'lapack')
This makes it impossible to select ``openblas`` as a provider for one of the two
virtual dependencies and not for the other. If you try to, Spack will report an error:
.. code-block:: console
$ spack spec netlib-scalapack ^[virtuals=lapack] openblas ^[virtuals=blas] atlas
==> Error: concretization failed for the following reasons:
1. Package 'openblas' needs to provide both 'lapack' and 'blas' together, but provides only 'lapack'
^^^^^^^^^^^^^^^^^^^^
Versioned Interfaces
^^^^^^^^^^^^^^^^^^^^
@@ -3503,56 +3466,6 @@ is equivalent to:
Constraints from nested context managers are also combined together, but they are rarely
needed or recommended.
.. _default_args:
------------------------
Common default arguments
------------------------
Similarly, if directives have a common set of default arguments, you can
group them together in a ``with default_args()`` block:
.. code-block:: python
class PyExample(PythonPackage):
with default_args(type=("build", "run")):
depends_on("py-foo")
depends_on("py-foo@2:", when="@2:")
depends_on("py-bar")
depends_on("py-bz")
The above is short for:
.. code-block:: python
class PyExample(PythonPackage):
depends_on("py-foo", type=("build", "run"))
depends_on("py-foo@2:", when="@2:", type=("build", "run"))
depends_on("py-bar", type=("build", "run"))
depends_on("py-bz", type=("build", "run"))
.. note::
The ``with when()`` context manager is composable, while ``with default_args()``
merely overrides the default. For example:
.. code-block:: python
with default_args(when="+feature"):
depends_on("foo")
depends_on("bar")
depends_on("baz", when="+baz")
is equivalent to:
.. code-block:: python
depends_on("foo", when="+feature")
depends_on("bar", when="+feature")
depends_on("baz", when="+baz") # Note: not when="+feature+baz"
.. _install-method:
------------------
@@ -3852,7 +3765,7 @@ Similarly, ``spack install example +feature build_system=autotools`` will pick
the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``.
Dependencies are always specified in the package class. When some dependencies
depend on the choice of the build system, it is possible to use when conditions as
usual:
.. code-block:: python
@@ -3870,7 +3783,7 @@ usual:
depends_on("cmake@3.18:", when="@2.0:", type="build")
depends_on("cmake@3:", type="build")
# Specify extra build dependencies used only in the configure script
with when("build_system=autotools"):
depends_on("perl", type="build")
depends_on("pkgconfig", type="build")
@@ -6918,58 +6831,25 @@ the adapter role is to "emulate" a method resolution order like the one represen
Specifying License Information
------------------------------
Most of the software in Spack is open source, and most open source software is released
under one or more `common open source licenses <https://opensource.org/licenses/>`_.
Specifying the license that a package is released under in a project's
`package.py` is good practice. To specify a license, find the `SPDX identifier
<https://spdx.org/licenses/>`_ for a project and then add it using the license
directive:
A significant portion of software that Spack packages is open source. Most open
source software is released under one or more common open source licenses.
Specifying the specific license that a package is released under in a project's
`package.py` is good practice. To specify a license, find the SPDX identifier for
a project and then add it using the license directive:
.. code-block:: python
license("<SPDX Identifier HERE>")
For example, the SPDX ID for the Apache Software License, version 2.0 is ``Apache-2.0``,
so you'd write:
.. code-block:: python
license("Apache-2.0")
Or, for a dual-licensed package like Spack, you would use an `SPDX Expression
<https://spdx.github.io/spdx-spec/v2-draft/SPDX-license-expressions/>`_ with both of its
licenses:
.. code-block:: python
license("Apache-2.0 OR MIT")
Note that specifying a license without a when clause makes it apply to all
versions and variants of the package, which might not actually be the case.
For example, a project might have switched licenses at some point or have
certain build configurations that include files that are licensed differently.
Spack itself used to be under the ``LGPL-2.1`` license, until it was relicensed
in version ``0.12`` in 2018.
You can specify when a ``license()`` directive applies using with a ``when=``
clause, just like other directives. For example, to specify that a specific
license identifier should only apply to versions up to ``0.11``, but another
license should apply for later versions, you could write:
To account for this, you can specify when licenses should be applied. For
example, to specify that a specific license identifier should only apply
to versions up to and including 1.5, you could write the following directive:
.. code-block:: python
license("LGPL-2.1", when="@:0.11")
license("Apache-2.0 OR MIT", when="@0.12:")
license("...", when="@:1.5")
Note that unlike for most other directives, the ``when=`` constraints in the
``license()`` directive can't intersect. Spack needs to be able to resolve
exactly one license identifier expression for any given version. To specify
*multiple* licenses, use SPDX expressions and operators as above. The operators
you probably care most about are:
* ``OR``: user chooses one license to adhere to; and
* ``AND``: user has to adhere to all the licenses.
You may also care about `license exceptions
<https://spdx.org/licenses/exceptions-index.html>`_ that use the ``WITH`` operator,
e.g. ``Apache-2.0 WITH LLVM-exception``.
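These operators can be combined in a single directive. As a minimal sketch for a hypothetical package that must satisfy two licenses at once, one of which carries an exception (the expression is illustrative, not taken from any real package):
.. code-block:: python
license("MIT AND Apache-2.0 WITH LLVM-exception")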

View File

@@ -1,13 +1,13 @@
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.23.0
docutils==0.20.1
pygments==2.17.2
urllib3==2.1.0
pytest==7.4.3
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.7
pytest==7.4.2
isort==5.12.0
black==23.11.0
black==23.9.1
flake8==6.1.0
mypy==1.7.1
mypy==1.6.1

View File

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e)
* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)
astunparse
----------------

View File

@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.2"
__version__ = "0.2.1"

View File

@@ -2318,26 +2318,6 @@
]
}
},
"power10": {
"from": ["power9"],
"vendor": "IBM",
"generation": 10,
"features": [],
"compilers": {
"gcc": [
{
"versions": "11.1:",
"flags": "-mcpu={name} -mtune={name}"
}
],
"clang": [
{
"versions": "11.0:",
"flags": "-mcpu={name} -mtune={name}"
}
]
}
},
"ppc64le": {
"from": [],
"vendor": "generic",
@@ -2425,29 +2405,6 @@
]
}
},
"power10le": {
"from": ["power9le"],
"vendor": "IBM",
"generation": 10,
"features": [],
"compilers": {
"gcc": [
{
"name": "power10",
"versions": "11.1:",
"flags": "-mcpu={name} -mtune={name}"
}
],
"clang": [
{
"versions": "11.0:",
"family": "ppc64le",
"name": "power10",
"flags": "-mcpu={name} -mtune={name}"
}
]
}
},
"aarch64": {
"from": [],
"vendor": "generic",
@@ -2635,37 +2592,6 @@
]
}
},
"armv9.0a": {
"from": ["armv8.5a"],
"vendor": "generic",
"features": [],
"compilers": {
"gcc": [
{
"versions": "12:",
"flags": "-march=armv9-a -mtune=generic"
}
],
"clang": [
{
"versions": "14:",
"flags": "-march=armv9-a -mtune=generic"
}
],
"apple-clang": [
{
"versions": ":",
"flags": "-march=armv9-a -mtune=generic"
}
],
"arm": [
{
"versions": ":",
"flags": "-march=armv9-a -mtune=generic"
}
]
}
},
"thunderx2": {
"from": ["armv8.1a"],
"vendor": "Cavium",
@@ -2887,12 +2813,8 @@
],
"arm" : [
{
"versions": "20:21.9",
"versions": "20:",
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
},
{
"versions": "22:",
"flags" : "-mcpu=neoverse-n1"
}
],
"nvhpc" : [
@@ -3020,7 +2942,7 @@
},
{
"versions": "22:",
"flags" : "-mcpu=neoverse-v1"
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
}
],
"nvhpc" : [
@@ -3032,126 +2954,6 @@
]
}
},
"neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"paca",
"pacg",
"dcpodp",
"sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16",
"dgh",
"bti"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
},
{
"versions": "13.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-v2",
"flags": "-tp {name}"
}
]
}
},
"m1": {
"from": ["armv8.4a"],
"vendor": "Apple",

View File

@@ -1047,9 +1047,9 @@ def __bool__(self):
"""Whether any exceptions were handled."""
return bool(self.exceptions)
def forward(self, context: str, base: type = BaseException) -> "GroupedExceptionForwarder":
def forward(self, context: str) -> "GroupedExceptionForwarder":
"""Return a contextmanager which extracts tracebacks and prefixes a message."""
return GroupedExceptionForwarder(context, self, base)
return GroupedExceptionForwarder(context, self)
def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
self.exceptions.append((context, exc, tb))
@@ -1072,18 +1072,15 @@ class GroupedExceptionForwarder:
"""A contextmanager to capture exceptions and forward them to a
GroupedExceptionHandler."""
def __init__(self, context: str, handler: GroupedExceptionHandler, base: type):
def __init__(self, context: str, handler: GroupedExceptionHandler):
self._context = context
self._handler = handler
self._base = base
def __enter__(self):
return None
def __exit__(self, exc_type, exc_value, tb):
if exc_value is not None:
if not issubclass(exc_type, self._base):
return False
self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))
# Suppress any exception from being re-raised:

View File

@@ -211,7 +211,6 @@ def info(message, *args, **kwargs):
stream.write(line + "\n")
else:
stream.write(indent + _output_filter(str(arg)) + "\n")
stream.flush()
def verbose(message, *args, **kwargs):

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.22.0.dev0"
__version__ = "0.21.0.dev0"
spack_version = __version__

View File

@@ -40,7 +40,6 @@ def _search_duplicate_compilers(error_cls):
import collections.abc
import glob
import inspect
import io
import itertools
import pathlib
import pickle
@@ -55,7 +54,6 @@ def _search_duplicate_compilers(error_cls):
import spack.repo
import spack.spec
import spack.util.crypto
import spack.util.spack_yaml as syaml
import spack.variant
#: Map an audit tag to a list of callables implementing checks
@@ -252,88 +250,6 @@ def _search_duplicate_specs_in_externals(error_cls):
return errors
@config_packages
def _deprecated_preferences(error_cls):
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
# TODO (v0.22): remove this audit as the attributes will not be allowed in config
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
def make_error(attribute_name, config_data, summary):
s = io.StringIO()
s.write("Occurring in the following file:\n")
dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
syaml.dump_config(dict_view, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])
if "all" in packages_yaml and "version" in packages_yaml["all"]:
summary = "Using the deprecated 'version' attribute under 'packages:all'"
errors.append(make_error("version", packages_yaml["all"], summary))
for package_name in packages_yaml:
if package_name == "all":
continue
package_conf = packages_yaml[package_name]
for attribute in ("compiler", "providers", "target"):
if attribute not in package_conf:
continue
summary = (
f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
)
errors.append(make_error(attribute, package_conf, summary))
return errors
@config_packages
def _avoid_mismatched_variants(error_cls):
"""Warns if variant preferences have mismatched types or names."""
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
def make_error(config_data, summary):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])
for pkg_name in packages_yaml:
# 'all:' must be more forgiving, since it is setting defaults for everything
if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]:
continue
preferences = packages_yaml[pkg_name]["variants"]
if not isinstance(preferences, list):
preferences = [preferences]
for variants in preferences:
current_spec = spack.spec.Spec(variants)
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for variant in current_spec.variants.values():
# Variant does not exist at all
if variant.name not in pkg_cls.variants:
summary = (
f"Setting a preference for the '{pkg_name}' package to the "
f"non-existing variant '{variant.name}'"
)
errors.append(make_error(preferences, summary))
continue
# Variant cannot accept this value
s = spack.spec.Spec(pkg_name)
try:
s.update_variant_validate(variant.name, variant.value)
except Exception:
summary = (
f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
f"to the invalid value '{str(variant)}'"
)
errors.append(make_error(preferences, summary))
return errors
#: Sanity checks on package directives
package_directives = AuditClass(
group="packages",
@@ -726,37 +642,13 @@ def _unknown_variants_in_directives(pkgs, error_cls):
@package_directives
def _issues_in_depends_on_directive(pkgs, error_cls):
"""Reports issues with 'depends_on' directives.
Issues might be unknown dependencies, unknown variants or variant values, or declaration
of nested dependencies.
"""
def _unknown_variants_in_dependencies(pkgs, error_cls):
"""Report unknown dependencies and wrong variants for dependencies"""
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
for dependency_name, dependency_data in pkg_cls.dependencies.items():
# Check if there are nested dependencies declared. We don't want directives like:
#
# depends_on('foo+bar ^fee+baz')
#
# but we'd like to have two dependencies listed instead.
for when, dependency_edge in dependency_data.items():
dependency_spec = dependency_edge.spec
nested_dependencies = dependency_spec.dependencies()
if nested_dependencies:
summary = (
f"{pkg_name}: invalid nested dependency "
f"declaration '{str(dependency_spec)}'"
)
details = [
f"split depends_on('{str(dependency_spec)}', when='{str(when)}') "
f"into {len(nested_dependencies) + 1} directives",
f"in {filename}",
]
errors.append(error_cls(summary=summary, details=details))
# No need to analyze virtual packages
if spack.repo.PATH.is_virtual(dependency_name):
continue
@@ -884,7 +776,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
)
except Exception:
summary = (
"{0}: dependency on {1} cannot be satisfied by known versions of {1.name}"
"{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
).format(pkg_name, s)
details = ["happening in " + filename]
if dependency_pkg_cls is not None:
@@ -926,53 +818,6 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
return errors
@package_directives
def _named_specs_in_when_arguments(pkgs, error_cls):
"""Reports named specs in the 'when=' attribute of a directive.
Note that 'conflicts' is the only directive allowing that.
"""
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
def _extracts_errors(triggers, summary):
_errors = []
for trigger in list(triggers):
when_spec = spack.spec.Spec(trigger)
if when_spec.name is not None and when_spec.name != pkg_name:
details = [f"using '{trigger}', should be '^{trigger}'"]
_errors.append(error_cls(summary=summary, details=details))
return _errors
for dname, triggers in pkg_cls.dependencies.items():
summary = f"{pkg_name}: wrong 'when=' condition for the '{dname}' dependency"
errors.extend(_extracts_errors(triggers, summary))
for vname, (variant, triggers) in pkg_cls.variants.items():
summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
errors.extend(_extracts_errors(triggers, summary))
for provided, triggers in pkg_cls.provided.items():
summary = f"{pkg_name}: wrong 'when=' condition for the '{provided}' virtual"
errors.extend(_extracts_errors(triggers, summary))
for _, triggers in pkg_cls.requirements.items():
triggers = [when_spec for when_spec, _, _ in triggers]
summary = f"{pkg_name}: wrong 'when=' condition in 'requires' directive"
errors.extend(_extracts_errors(triggers, summary))
triggers = list(pkg_cls.patches)
summary = f"{pkg_name}: wrong 'when=' condition in 'patch' directives"
errors.extend(_extracts_errors(triggers, summary))
triggers = list(pkg_cls.resources)
summary = f"{pkg_name}: wrong 'when=' condition in 'resource' directives"
errors.extend(_extracts_errors(triggers, summary))
return llnl.util.lang.dedupe(errors)
#: Sanity checks on package directives
external_detection = AuditClass(
group="externals",

View File

@@ -5,13 +5,11 @@
import codecs
import collections
import errno
import hashlib
import io
import itertools
import json
import os
import pathlib
import re
import shutil
import sys
@@ -25,7 +23,7 @@
import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
from typing import Dict, List, NamedTuple, Optional, Tuple, Union
from urllib.error import HTTPError, URLError
import llnl.util.filesystem as fsys
@@ -33,7 +31,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
import spack.caches
import spack.cmd
import spack.config as config
import spack.database as spack_db
@@ -41,9 +38,6 @@
import spack.hooks
import spack.hooks.sbang
import spack.mirror
import spack.oci.image
import spack.oci.oci
import spack.oci.opener
import spack.platforms
import spack.relocate as relocate
import spack.repo
@@ -53,7 +47,6 @@
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
@@ -66,9 +59,8 @@
from spack.stage import Stage
from spack.util.executable import which
BUILD_CACHE_RELATIVE_PATH = "build_cache"
BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 1
_build_cache_relative_path = "build_cache"
_build_cache_keys_relative_path = "_pgp"
class BuildCacheDatabase(spack_db.Database):
@@ -132,25 +124,25 @@ class BinaryCacheIndex:
mean we should have paid the price to update the cache earlier?
"""
def __init__(self, cache_root: Optional[str] = None):
self._index_cache_root: str = cache_root or binary_index_location()
def __init__(self, cache_root):
self._index_cache_root = cache_root
# the key associated with the serialized _local_index_cache
self._index_contents_key = "contents.json"
# a FileCache instance storing copies of remote binary cache indices
self._index_file_cache: Optional[file_cache.FileCache] = None
self._index_file_cache = None
# stores a map of mirror URL to index hash and cache key (index path)
self._local_index_cache: Optional[dict] = None
self._local_index_cache = None
# hashes of remote indices already ingested into the concrete spec
# cache (_mirrors_for_spec)
self._specs_already_associated: Set[str] = set()
self._specs_already_associated = set()
# mapping from mirror urls to the time.time() of the last index fetch and a bool indicating
# whether the fetch succeeded or not.
self._last_fetch_times: Dict[str, float] = {}
self._last_fetch_times = {}
# _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
# entries indicating mirrors where that concrete spec can be found.
@@ -160,7 +152,7 @@ def __init__(self, cache_root: Optional[str] = None):
# - the concrete spec itself, keyed by ``spec`` (including the
# full hash, since the dag hash may match but we want to
# use the updated source if available)
self._mirrors_for_spec: Dict[str, dict] = {}
self._mirrors_for_spec = {}
def _init_local_index_cache(self):
if not self._index_file_cache:
@@ -230,11 +222,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
)
return
spec_list = [
s
for s in db.query_local(installed=any, in_buildcache=any)
if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
]
spec_list = db.query_local(installed=False, in_buildcache=True)
for indexed_spec in spec_list:
dag_hash = indexed_spec.dag_hash()
@@ -483,18 +471,14 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
FetchIndexError
"""
# TODO: get rid of this request, handle 404 better
scheme = urllib.parse.urlparse(mirror_url).scheme
if scheme != "oci" and not web_util.url_exists(
url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
if not web_util.url_exists(
url_util.join(mirror_url, _build_cache_relative_path, "index.json")
):
return False
if scheme == "oci":
# TODO: Actually etag and OCI are not mutually exclusive...
fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None))
elif cache_entry.get("etag"):
fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"])
etag = cache_entry.get("etag", None)
if etag:
fetcher = EtagIndexFetcher(mirror_url, etag)
else:
fetcher = DefaultIndexFetcher(
mirror_url, local_hash=cache_entry.get("index_hash", None)
@@ -535,8 +519,15 @@ def binary_index_location():
return spack.util.path.canonicalize_path(cache_root)
#: Default binary cache index instance
BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex) # type: ignore
def _binary_index():
"""Get the singleton store instance."""
return BinaryCacheIndex(binary_index_location())
#: Singleton binary_index instance
binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
_binary_index
)
class NoOverwriteException(spack.error.SpackError):
@@ -605,10 +596,6 @@ def __init__(self, msg):
super().__init__(msg)
class InvalidMetadataFile(spack.error.SpackError):
pass
class UnsignedPackageException(spack.error.SpackError):
"""
Raised if installation of unsigned package is attempted without
@@ -623,11 +610,11 @@ def compute_hash(data):
def build_cache_relative_path():
return BUILD_CACHE_RELATIVE_PATH
return _build_cache_relative_path
def build_cache_keys_relative_path():
return BUILD_CACHE_KEYS_RELATIVE_PATH
return _build_cache_keys_relative_path
def build_cache_prefix(prefix):
@@ -635,14 +622,21 @@ def build_cache_prefix(prefix):
def buildinfo_file_name(prefix):
"""Filename of the binary package meta-data file"""
return os.path.join(prefix, ".spack", "binary_distribution")
"""
Filename of the binary package meta-data file
"""
return os.path.join(prefix, ".spack/binary_distribution")
def read_buildinfo_file(prefix):
"""Read buildinfo file"""
with open(buildinfo_file_name(prefix), "r") as f:
return syaml.load(f)
"""
Read buildinfo file
"""
filename = buildinfo_file_name(prefix)
with open(filename, "r") as inputfile:
content = inputfile.read()
buildinfo = syaml.load(content)
return buildinfo
class BuildManifestVisitor(BaseDirectoryVisitor):
@@ -825,6 +819,18 @@ def tarball_path_name(spec, ext):
return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext))
def checksum_tarball(file):
# calculate sha256 hash of tar file
block_size = 65536
hasher = hashlib.sha256()
with open(file, "rb") as tfile:
buf = tfile.read(block_size)
while len(buf) > 0:
hasher.update(buf)
buf = tfile.read(block_size)
return hasher.hexdigest()
def select_signing_key(key=None):
if key is None:
keys = spack.util.gpg.signing_keys()
@@ -1141,17 +1147,14 @@ def gzip_compressed_tarfile(path):
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing(
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum)
) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile(
name="", mode="w", fileobj=outer_checksum
) as tar:
yield tar, inner_checksum, outer_checksum
with open(path, "wb") as fileobj, closing(
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
yield tar
def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str:
"""Compute tarfile entry name as the relative path from the (system) root."""
return _path(*_path(absolute_path).parts[1:]).as_posix()
def _tarinfo_name(p: str):
return p.lstrip("/")
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
@@ -1231,88 +1234,8 @@ def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical
class ChecksumWriter(io.BufferedIOBase):
"""Checksum writer computes a checksum while writing to a file."""
myfileobj = None
def __init__(self, fileobj, algorithm=hashlib.sha256):
self.fileobj = fileobj
self.hasher = algorithm()
self.length = 0
def hexdigest(self):
return self.hasher.hexdigest()
def write(self, data):
if isinstance(data, (bytes, bytearray)):
length = len(data)
else:
data = memoryview(data)
length = data.nbytes
if length > 0:
self.fileobj.write(data)
self.hasher.update(data)
self.length += length
return length
def read(self, size=-1):
raise OSError(errno.EBADF, "read() on write-only object")
def read1(self, size=-1):
raise OSError(errno.EBADF, "read1() on write-only object")
def peek(self, n):
raise OSError(errno.EBADF, "peek() on write-only object")
@property
def closed(self):
return self.fileobj is None
def close(self):
fileobj = self.fileobj
if fileobj is None:
return
self.fileobj.close()
self.fileobj = None
def flush(self):
self.fileobj.flush()
def fileno(self):
return self.fileobj.fileno()
def rewind(self):
raise OSError("Can't rewind while computing checksum")
def readable(self):
return False
def writable(self):
return True
def seekable(self):
return True
def tell(self):
return self.fileobj.tell()
def seek(self, offset, whence=io.SEEK_SET):
# In principle forward seek is possible with b"0" padding,
# but this is not implemented.
if offset == 0 and whence == io.SEEK_CUR:
return
raise OSError("Can't seek while computing checksum")
def readline(self, size=-1):
raise OSError(errno.EBADF, "readline() on write-only object")
def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum):
with gzip_compressed_tarfile(tarfile_path) as tar:
# Tarball the install prefix
tarfile_of_spec_prefix(tar, binaries_dir)
@@ -1324,8 +1247,6 @@ def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
tarinfo.mode = 0o644
tar.addfile(tarinfo, io.BytesIO(bstring))
return inner_checksum.hexdigest(), outer_checksum.hexdigest()
class PushOptions(NamedTuple):
#: Overwrite existing tarball/metadata files in buildcache
@@ -1401,16 +1322,20 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
# create info for later relocation and create tar
buildinfo = get_buildinfo_dict(spec)
checksum, _ = _do_create_tarball(tarfile_path, binaries_dir, buildinfo)
_do_create_tarball(tarfile_path, binaries_dir, buildinfo)
# get the sha256 checksum of the tarball
checksum = checksum_tarball(tarfile_path)
# add sha256 checksum to spec.json
with open(spec_file, "r") as inputfile:
content = inputfile.read()
if spec_file.endswith(".json"):
spec_dict = sjson.load(content)
else:
raise ValueError("{0} not a valid spec file type".format(spec_file))
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
spec_dict["buildcache_layout_version"] = 1
spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}
with open(specfile_path, "w") as outfile:
@@ -1446,21 +1371,10 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
return None
class NotInstalledError(spack.error.SpackError):
"""Raised when a spec is not installed but picked to be packaged."""
def __init__(self, specs: List[Spec]):
super().__init__(
"Cannot push non-installed packages",
", ".join(s.cformat("{name}{@version}{/hash:7}") for s in specs),
)
def specs_to_be_packaged(
specs: List[Spec], root: bool = True, dependencies: bool = True
) -> List[Spec]:
"""Return the list of nodes to be packaged, given a list of specs.
Raises NotInstalledError if a spec is not installed but picked to be packaged.
Args:
specs: list of root specs to be processed
@@ -1468,35 +1382,19 @@ def specs_to_be_packaged(
dependencies: include the dependencies of each
spec in the nodes
"""
if not root and not dependencies:
return []
elif dependencies:
nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
else:
nodes = set(specs)
# Filter packageable roots
# Limit to installed non-externals.
packageable = lambda n: not n.external and n.installed
# Mass install check
with spack.store.STORE.db.read_transaction():
if root:
# Error on uninstalled roots, when roots are requested
uninstalled_roots = list(s for s in specs if not s.installed)
if uninstalled_roots:
raise NotInstalledError(uninstalled_roots)
roots = specs
else:
roots = []
if dependencies:
# Error on uninstalled deps, when deps are requested
deps = list(
traverse.traverse_nodes(
specs, deptype="all", order="breadth", root=False, key=traverse.by_dag_hash
)
)
uninstalled_deps = list(s for s in deps if not s.installed)
if uninstalled_deps:
raise NotInstalledError(uninstalled_deps)
else:
deps = []
return [s for s in itertools.chain(roots, deps) if not s.external]
return list(filter(packageable, nodes))
def push(spec: Spec, mirror_url: str, options: PushOptions):
@@ -1569,50 +1467,14 @@ def _delete_staged_downloads(download_result):
download_result["specfile_stage"].destroy()
def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, int]:
"""Read and validate a spec file, returning the spec dict with its layout version, or raising
InvalidMetadataFile if invalid."""
try:
with open(path, "rb") as f:
binary_content = f.read()
except OSError:
raise InvalidMetadataFile(f"No such file: {path}")
# In the future we may support transparently decompressing compressed spec files.
if binary_content[:2] == b"\x1f\x8b":
raise InvalidMetadataFile("Compressed spec files are not supported")
try:
as_string = binary_content.decode("utf-8")
if path.endswith(".json.sig"):
spec_dict = Spec.extract_json_from_clearsig(as_string)
else:
spec_dict = json.loads(as_string)
except Exception as e:
raise InvalidMetadataFile(f"Could not parse {path} due to: {e}") from e
# Ensure this version is not too new.
try:
layout_version = int(spec_dict.get("buildcache_layout_version", 0))
except ValueError as e:
raise InvalidMetadataFile("Could not parse layout version") from e
if layout_version > max_supported_layout:
raise InvalidMetadataFile(
f"Layout version {layout_version} is too new for this version of Spack"
)
return spec_dict, layout_version
def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None):
def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
"""
Download binary tarball for given package into stage area, returning
path to downloaded tarball if successful, None otherwise.
Args:
spec (spack.spec.Spec): Concrete spec
unsigned: if ``True`` or ``False``, override the mirror signature verification defaults
unsigned (bool): Whether or not to require signed binaries
mirrors_for_spec (list): Optional list of concrete specs and mirrors
obtained by calling binary_distribution.get_mirrors_for_spec().
These will be checked in order first before looking in other
@@ -1633,15 +1495,15 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
"signature_verified": "true-if-binary-pkg-was-already-verified"
}
"""
configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
binary=True
).values()
configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
if not configured_mirrors:
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
tarball = tarball_path_name(spec, ".spack")
specfile_prefix = tarball_name(spec, ".spec")
mirrors_to_try = []
# Note on try_first and try_next:
# mirrors_for_spec most likely came from spack caching remote
# mirror indices locally and adding their specs to a local data
@@ -1653,156 +1515,64 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
# mirror for the spec twice though.
try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
mirror_urls = try_first + try_next
# TODO: turn `mirrors_for_spec` into a list of Mirror instances, instead of doing that here.
def fetch_url_to_mirror(url):
for mirror in configured_mirrors:
if mirror.fetch_url == url:
return mirror
return spack.mirror.Mirror(url)
mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
for url in try_first + try_next:
mirrors_to_try.append(
{
"specfile": url_util.join(url, _build_cache_relative_path, specfile_prefix),
"spackfile": url_util.join(url, _build_cache_relative_path, tarball),
}
)
tried_to_verify_sigs = []
# Assumes we care more about finding a spec file by preferred ext
# than by mirror priority. This can be made less complicated as
# we remove support for deprecated spec formats and buildcache layouts.
for try_signed in (True, False):
for mirror in mirrors:
# Override mirror's default if
currently_unsigned = unsigned if unsigned is not None else not mirror.signed
for ext in ["json.sig", "json"]:
for mirror_to_try in mirrors_to_try:
specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
spackfile_url = mirror_to_try["spackfile"]
local_specfile_stage = try_fetch(specfile_url)
if local_specfile_stage:
local_specfile_path = local_specfile_stage.save_filename
signature_verified = False
# If it's an OCI index, do things differently, since we cannot compose URLs.
fetch_url = mirror.fetch_url
if ext.endswith(".sig") and not unsigned:
# If we found a signed specfile at the root, try to verify
# the signature immediately. We will not download the
# tarball if we could not verify the signature.
tried_to_verify_sigs.append(specfile_url)
signature_verified = try_verify(local_specfile_path)
if not signature_verified:
tty.warn("Failed to verify: {0}".format(specfile_url))
# TODO: refactor this to some "nice" place.
if fetch_url.startswith("oci://"):
ref = spack.oci.image.ImageReference.from_string(
fetch_url[len("oci://") :]
).with_tag(spack.oci.image.default_tag(spec))
if unsigned or signature_verified or not ext.endswith(".sig"):
# We will download the tarball in one of three cases:
# 1. user asked for --no-check-signature
# 2. user didn't ask for --no-check-signature, but we
# found a spec.json.sig and verified the signature already
# 3. neither of the first two cases are true, but this file
# is *not* a signed json (not a spec.json.sig file). That
# means we already looked at all the mirrors and either didn't
# find any .sig files or couldn't verify any of them. But it
# is still possible to find an old style binary package where
# the signature is a detached .asc file in the outer archive
# of the tarball, and in that case, the only way to know is to
# download the tarball. This is a deprecated use case, so if
# something goes wrong during the extraction process (can't
# verify signature, checksum doesn't match) we will fail at
# that point instead of trying to download more tarballs from
# the remaining mirrors, looking for one we can use.
tarball_stage = try_fetch(spackfile_url)
if tarball_stage:
return {
"tarball_stage": tarball_stage,
"specfile_stage": local_specfile_stage,
"signature_verified": signature_verified,
}
# Fetch the manifest
try:
response = spack.oci.opener.urlopen(
urllib.request.Request(
url=ref.manifest_url(),
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
except Exception:
continue
# Download the config = spec.json and the relevant tarball
try:
manifest = json.loads(response.read())
spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
tarball_digest = spack.oci.image.Digest.from_string(
manifest["layers"][-1]["digest"]
)
except Exception:
continue
with spack.oci.oci.make_stage(
ref.blob_url(spec_digest), spec_digest, keep=True
) as local_specfile_stage:
try:
local_specfile_stage.fetch()
local_specfile_stage.check()
try:
_get_valid_spec_file(
local_specfile_stage.save_filename,
CURRENT_BUILD_CACHE_LAYOUT_VERSION,
)
except InvalidMetadataFile as e:
tty.warn(
f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
f"from {fetch_url} due to invalid metadata file: {e}"
)
local_specfile_stage.destroy()
continue
except Exception:
continue
local_specfile_stage.cache_local()
with spack.oci.oci.make_stage(
ref.blob_url(tarball_digest), tarball_digest, keep=True
) as tarball_stage:
try:
tarball_stage.fetch()
tarball_stage.check()
except Exception:
continue
tarball_stage.cache_local()
return {
"tarball_stage": tarball_stage,
"specfile_stage": local_specfile_stage,
"signature_verified": False,
"signature_required": not currently_unsigned,
}
else:
ext = "json.sig" if try_signed else "json"
specfile_path = url_util.join(
fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_prefix
)
specfile_url = f"{specfile_path}.{ext}"
spackfile_url = url_util.join(fetch_url, BUILD_CACHE_RELATIVE_PATH, tarball)
local_specfile_stage = try_fetch(specfile_url)
if local_specfile_stage:
local_specfile_path = local_specfile_stage.save_filename
signature_verified = False
try:
_get_valid_spec_file(
local_specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION
)
except InvalidMetadataFile as e:
tty.warn(
f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
f"from {fetch_url} due to invalid metadata file: {e}"
)
local_specfile_stage.destroy()
continue
if try_signed and not currently_unsigned:
# If we found a signed specfile at the root, try to verify
# the signature immediately. We will not download the
# tarball if we could not verify the signature.
tried_to_verify_sigs.append(specfile_url)
signature_verified = try_verify(local_specfile_path)
if not signature_verified:
tty.warn(f"Failed to verify: {specfile_url}")
if currently_unsigned or signature_verified or not try_signed:
# We will download the tarball in one of three cases:
# 1. user asked for --no-check-signature
# 2. user didn't ask for --no-check-signature, but we
# found a spec.json.sig and verified the signature already
# 3. neither of the first two cases are true, but this file
# is *not* a signed json (not a spec.json.sig file). That
# means we already looked at all the mirrors and either didn't
# find any .sig files or couldn't verify any of them. But it
# is still possible to find an old style binary package where
# the signature is a detached .asc file in the outer archive
# of the tarball, and in that case, the only way to know is to
# download the tarball. This is a deprecated use case, so if
# something goes wrong during the extraction process (can't
# verify signature, checksum doesn't match) we will fail at
# that point instead of trying to download more tarballs from
# the remaining mirrors, looking for one we can use.
tarball_stage = try_fetch(spackfile_url)
if tarball_stage:
return {
"tarball_stage": tarball_stage,
"specfile_stage": local_specfile_stage,
"signature_verified": signature_verified,
"signature_required": not currently_unsigned,
}
local_specfile_stage.destroy()
# Falling through the nested loops means we exhaustively searched
# for all known kinds of spec files on all mirrors and did not find
@@ -1998,7 +1768,7 @@ def is_backup_file(file):
relocate.relocate_text(text_names, prefix_to_prefix_text)
def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
stagepath = os.path.dirname(filename)
spackfile_name = tarball_name(spec, ".spack")
spackfile_path = os.path.join(stagepath, spackfile_name)
@@ -2018,7 +1788,7 @@ def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool,
else:
raise ValueError("Cannot find spec file for {0}.".format(extract_to))
if signature_required:
if not unsigned:
if os.path.exists("%s.asc" % specfile_path):
suppress = config.get("config:suppress_gpg_warnings", False)
try:
@@ -2035,7 +1805,7 @@ def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool,
)
# compute the sha256 checksum of the tarball
local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
local_checksum = checksum_tarball(tarfile_path)
expected = remote_checksum["hash"]
# if the checksums don't match don't install
@@ -2067,7 +1837,7 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
m.linkname = m.linkname[result.end() :]
def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
"""
extract binary tarball for given package into install area
"""
@@ -2087,30 +1857,34 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
)
specfile_path = download_result["specfile_stage"].save_filename
spec_dict, layout_version = _get_valid_spec_file(
specfile_path, CURRENT_BUILD_CACHE_LAYOUT_VERSION
)
bchecksum = spec_dict["binary_cache_checksum"]
with open(specfile_path, "r") as inputfile:
content = inputfile.read()
if specfile_path.endswith(".json.sig"):
spec_dict = Spec.extract_json_from_clearsig(content)
else:
spec_dict = sjson.load(content)
bchecksum = spec_dict["binary_cache_checksum"]
filename = download_result["tarball_stage"].save_filename
signature_verified: bool = download_result["signature_verified"]
signature_required: bool = download_result["signature_required"]
signature_verified = download_result["signature_verified"]
tmpdir = None
if layout_version == 0:
if (
"buildcache_layout_version" not in spec_dict
or int(spec_dict["buildcache_layout_version"]) < 1
):
# Handle the older buildcache layout where the .spack file
# contains a spec json, maybe an .asc file (signature),
# and another tarball containing the actual install tree.
tmpdir = tempfile.mkdtemp()
try:
tarfile_path = _extract_inner_tarball(
spec, filename, tmpdir, signature_required, bchecksum
)
tarfile_path = _extract_inner_tarball(spec, filename, tmpdir, unsigned, bchecksum)
except Exception as e:
_delete_staged_downloads(download_result)
shutil.rmtree(tmpdir)
raise e
elif layout_version == 1:
else:
# Newer buildcache layout: the .spack file contains just
# in the install tree, the signature, if it exists, is
# wrapped around the spec.json at the root. If sig verify
@@ -2118,14 +1892,13 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
# the tarball.
tarfile_path = filename
if signature_required and not signature_verified:
if not unsigned and not signature_verified:
raise UnsignedPackageException(
"To install unsigned packages, use the --no-check-signature option, "
"or configure the mirror with signed: false."
"To install unsigned packages, use the --no-check-signature option."
)
# compute the sha256 checksum of the tarball
local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
local_checksum = checksum_tarball(tarfile_path)
expected = bchecksum["hash"]
# if the checksums don't match don't install
@@ -2135,6 +1908,7 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
raise NoChecksumException(
tarfile_path, size, contents, "sha256", expected, local_checksum
)
try:
with closing(tarfile.open(tarfile_path, "r")) as tar:
# Remove install prefix from tarfile to extract directly into spec.prefix
@@ -2234,7 +2008,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
# don't print long padded paths while extracting/relocating binaries
with spack.util.path.filter_padding():
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force)
extract_tarball(spec, download_result, unsigned, force)
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, spack.store.STORE.layout)
@@ -2265,10 +2039,10 @@ def try_direct_fetch(spec, mirrors=None):
for mirror in binary_mirrors:
buildcache_fetch_url_json = url_util.join(
mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_name
mirror.fetch_url, _build_cache_relative_path, specfile_name
)
buildcache_fetch_url_signed_json = url_util.join(
mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, signed_specfile_name
mirror.fetch_url, _build_cache_relative_path, signed_specfile_name
)
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
@@ -2330,7 +2104,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
tty.debug("No Spack mirrors are currently configured")
return {}
results = BINARY_INDEX.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
# The index may be out-of-date. If we aren't only considering indices, try
# to fetch directly since we know where the file should be.
@@ -2339,7 +2113,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
# We found a spec by the direct fetch approach, we might as well
# add it to our mapping.
if results:
BINARY_INDEX.update_spec(spec, results)
binary_index.update_spec(spec, results)
return results
@@ -2355,12 +2129,12 @@ def update_cache_and_get_specs():
Throws:
FetchCacheError
"""
BINARY_INDEX.update()
return BINARY_INDEX.get_all_built_specs()
binary_index.update()
return binary_index.get_all_built_specs()
def clear_spec_cache():
BINARY_INDEX.clear()
binary_index.clear()
def get_keys(install=False, trust=False, force=False, mirrors=None):
@@ -2373,7 +2147,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
for mirror in mirror_collection.values():
fetch_url = mirror.fetch_url
keys_url = url_util.join(
fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
fetch_url, _build_cache_relative_path, _build_cache_keys_relative_path
)
keys_index = url_util.join(keys_url, "index.json")
@@ -2438,7 +2212,7 @@ def push_keys(*mirrors, **kwargs):
for mirror in mirrors:
push_url = getattr(mirror, "push_url", mirror)
keys_url = url_util.join(
push_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
push_url, _build_cache_relative_path, _build_cache_keys_relative_path
)
keys_local = url_util.local_file_path(keys_url)
@@ -2576,11 +2350,11 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
)
if mirror_url:
mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
mirror_root = os.path.join(mirror_url, _build_cache_relative_path)
return _download_buildcache_entry(mirror_root, file_descriptions)
for mirror in spack.mirror.MirrorCollection(binary=True).values():
mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)
mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path)
if _download_buildcache_entry(mirror_root, file_descriptions):
return True
@@ -2671,7 +2445,7 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):
def get_remote_hash(self):
# Failure to fetch index.json.hash is not fatal
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
url_index_hash = url_util.join(self.url, _build_cache_relative_path, "index.json.hash")
try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
except urllib.error.URLError:
@@ -2683,7 +2457,7 @@ def get_remote_hash(self):
return None
return remote_hash.decode("utf-8")
def conditional_fetch(self) -> FetchIndexResult:
def conditional_fetch(self):
# Do an intermediate fetch for the hash
# and a conditional fetch for the contents
@@ -2692,17 +2466,17 @@ def conditional_fetch(self) -> FetchIndexResult:
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
# Otherwise, download index.json
url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
url_index = url_util.join(self.url, _build_cache_relative_path, "index.json")
try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
except urllib.error.URLError as e:
raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
raise FetchIndexError("Could not fetch index from {}".format(url_index), e)
try:
result = codecs.getreader("utf-8")(response).read()
except ValueError as e:
raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
return FetchCacheError("Remote index {} is invalid".format(url_index), e)
computed_hash = compute_hash(result)
@@ -2734,9 +2508,9 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
self.etag = etag
self.urlopen = urlopen
def conditional_fetch(self) -> FetchIndexResult:
def conditional_fetch(self):
# Just do a conditional fetch immediately
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
url = url_util.join(self.url, _build_cache_relative_path, "index.json")
headers = {
"User-Agent": web_util.SPACK_USER_AGENT,
"If-None-Match": '"{}"'.format(self.etag),
@@ -2765,59 +2539,3 @@ def conditional_fetch(self) -> FetchIndexResult:
data=result,
fresh=False,
)
class OCIIndexFetcher:
def __init__(self, url: str, local_hash, urlopen=None) -> None:
self.local_hash = local_hash
# Remove oci:// prefix
assert url.startswith("oci://")
self.ref = spack.oci.image.ImageReference.from_string(url[6:])
self.urlopen = urlopen or spack.oci.opener.urlopen
def conditional_fetch(self) -> FetchIndexResult:
"""Download an index from an OCI registry type mirror."""
url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
try:
response = self.urlopen(
urllib.request.Request(
url=url_manifest,
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
except urllib.error.URLError as e:
raise FetchIndexError(
"Could not fetch manifest from {}".format(url_manifest), e
) from e
try:
manifest = json.loads(response.read())
except Exception as e:
raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
# Get first blob hash, which should be the index.json
try:
index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
except Exception as e:
raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
# Fresh?
if index_digest.digest == self.local_hash:
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
# Otherwise fetch the blob / index.json
response = self.urlopen(
urllib.request.Request(
url=self.ref.blob_url(index_digest),
headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
)
)
result = codecs.getreader("utf-8")(response).read()
# Make sure the blob we download has the advertised hash
if compute_hash(result) != index_digest.digest:
raise FetchIndexError(f"Remote index {url_manifest} is invalid")
return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)

View File

@@ -16,7 +16,6 @@
import llnl.util.filesystem as fs
from llnl.util import tty
import spack.platforms
import spack.store
import spack.util.environment
import spack.util.executable
@@ -207,19 +206,16 @@ def _root_spec(spec_str: str) -> str:
"""Add a proper compiler and target to a spec used during bootstrapping.
Args:
spec_str: spec to be bootstrapped. Must be without compiler and target.
spec_str (str): spec to be bootstrapped. Must be without compiler and target.
"""
# Add a compiler requirement to the root spec.
platform = str(spack.platforms.host())
if platform == "darwin":
# Add a proper compiler hint to the root spec. We use GCC for
# everything but macOS and Windows.
if str(spack.platforms.host()) == "darwin":
spec_str += " %apple-clang"
elif platform == "windows":
# TODO (johnwparent): Remove version constraint when clingo patch is up
spec_str += " %msvc@:19.37"
elif platform == "linux":
elif str(spack.platforms.host()) == "windows":
spec_str += " %msvc"
else:
spec_str += " %gcc"
elif platform == "freebsd":
spec_str += " %clang"
target = archspec.cpu.host().family
spec_str += f" target={target}"

View File

@@ -143,9 +143,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
new_compilers = spack.compilers.find_new_compilers(
mixed_toolchain=sys.platform == "darwin"
)
new_compilers = spack.compilers.find_new_compilers()
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, init_config=False)

View File

@@ -214,7 +214,7 @@ def _install_and_test(
with spack.config.override(self.mirror_scope):
# This index is currently needed to get the compiler used to build some
# specs that we know by dag hash.
spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache()
spack.binary_distribution.binary_index.regenerate_spec_cache()
index = spack.binary_distribution.update_cache_and_get_specs()
if not index:
@@ -291,10 +291,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
# This is needed to help the old concretizer take the `setuptools` dependency
# only when bootstrapping from sources on Python 3.12
if spec_for_current_python() == "python@3.12":
concrete_spec.constrain("+force_setuptools")
if module == "clingo":
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
@@ -386,7 +382,7 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
exception_handler = GroupedExceptionHandler()
for current_config in bootstrapping_sources():
with exception_handler.forward(current_config["name"], Exception):
with exception_handler.forward(current_config["name"]):
source_is_enabled_or_raise(current_config)
current_bootstrapper = create_bootstrapper(current_config)
if current_bootstrapper.try_import(module, abstract_spec):
@@ -441,7 +437,7 @@ def ensure_executables_in_path_or_raise(
exception_handler = GroupedExceptionHandler()
for current_config in bootstrapping_sources():
with exception_handler.forward(current_config["name"], Exception):
with exception_handler.forward(current_config["name"]):
source_is_enabled_or_raise(current_config)
current_bootstrapper = create_bootstrapper(current_config)
if current_bootstrapper.try_search_path(executables, abstract_spec):

View File

@@ -19,6 +19,7 @@
import spack.tengine
import spack.util.cpus
import spack.util.executable
from spack.environment import depfile
from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path
@@ -85,9 +86,12 @@ def __init__(self) -> None:
super().__init__(self.environment_root())
def update_installations(self) -> None:
"""Update the installations of this environment."""
log_enabled = tty.is_debug() or tty.is_verbose()
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
"""Update the installations of this environment.
The update is done using a depfile on Linux and macOS, and using the ``install_all``
method of environments on Windows.
"""
with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
specs = self.concretize()
if specs:
colorized_specs = [
@@ -96,9 +100,11 @@ def update_installations(self) -> None:
]
tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
self.write(regenerate=False)
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
if sys.platform == "win32":
self.install_all()
self.write(regenerate=True)
else:
self._install_with_depfile()
self.write(regenerate=True)
def update_syspath_and_environ(self) -> None:
"""Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
@@ -116,6 +122,25 @@ def update_syspath_and_environ(self) -> None:
+ [str(x) for x in self.pythonpaths()]
)
def _install_with_depfile(self) -> None:
model = depfile.MakefileModel.from_env(self)
template = spack.tengine.make_environment().get_template(
os.path.join("depfile", "Makefile")
)
makefile = self.environment_root() / "Makefile"
makefile.write_text(template.render(model.to_dict()))
make = spack.util.executable.which("make")
kwargs = {}
if not tty.is_debug():
kwargs = {"output": os.devnull, "error": os.devnull}
make(
"-C",
str(self.environment_root()),
"-j",
str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
**kwargs,
)
def _write_spack_yaml_file(self) -> None:
tty.msg(
"[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"
@@ -136,7 +161,7 @@ def _write_spack_yaml_file(self) -> None:
def isort_root_spec() -> str:
"""Return the root spec used to bootstrap isort"""
return _root_spec("py-isort@5")
return _root_spec("py-isort@4.3.5:")
def mypy_root_spec() -> str:

View File

@@ -66,6 +66,7 @@ def _core_requirements() -> List[RequiredResponseType]:
_core_system_exes = {
"make": _missing("make", "required to build software from sources"),
"patch": _missing("patch", "required to patch source code before building"),
"bash": _missing("bash", "required for Spack compiler wrapper"),
"tar": _missing("tar", "required to manage code archives"),
"gzip": _missing("gzip", "required to compress/decompress code archives"),
"unzip": _missing("unzip", "required to compress/decompress code archives"),

View File

@@ -324,29 +324,19 @@ def set_compiler_environment_variables(pkg, env):
# ttyout, ttyerr, etc.
link_dir = spack.paths.build_env_path
# Set SPACK compiler variables so that our wrapper knows what to
# call. If there is no compiler configured then use a default
# wrapper which will emit an error if it is used.
# Set SPACK compiler variables so that our wrapper knows what to call
if compiler.cc:
env.set("SPACK_CC", compiler.cc)
env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
else:
env.set("CC", os.path.join(link_dir, "cc"))
if compiler.cxx:
env.set("SPACK_CXX", compiler.cxx)
env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
else:
env.set("CC", os.path.join(link_dir, "c++"))
if compiler.f77:
env.set("SPACK_F77", compiler.f77)
env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
else:
env.set("F77", os.path.join(link_dir, "f77"))
if compiler.fc:
env.set("SPACK_FC", compiler.fc)
env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
else:
env.set("FC", os.path.join(link_dir, "fc"))
# Set SPACK compiler rpath flags so that our wrapper knows what to use
env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
@@ -753,23 +743,28 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
# Platform specific setup goes before package specific setup. This is for setting
# defaults like MACOSX_DEPLOYMENT_TARGET on macOS.
platform = spack.platforms.by_name(pkg.spec.architecture.platform)
target = platform.target(pkg.spec.architecture.target)
platform.setup_platform_environment(pkg, env_mods)
tty.debug("setup_package: grabbing modifications from dependencies")
env_mods.extend(setup_context.get_env_modifications())
tty.debug("setup_package: collected all modifications from dependencies")
if context == Context.TEST:
# architecture specific setup
platform = spack.platforms.by_name(pkg.spec.architecture.platform)
target = platform.target(pkg.spec.architecture.target)
platform.setup_platform_environment(pkg, env_mods)
if context == Context.BUILD:
tty.debug("setup_package: setup build environment for root")
builder = spack.builder.create(pkg)
builder.setup_build_environment(env_mods)
if (not dirty) and (not env_mods.is_unset("CPATH")):
tty.debug(
"A dependency has updated CPATH, this may lead pkg-"
"config to assume that the package is part of the system"
" includes and omit it when invoked with '--cflags'."
)
elif context == Context.TEST:
env_mods.prepend_path("PATH", ".")
elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
tty.debug(
"A dependency has updated CPATH, this may lead pkg-config to assume that the package "
"is part of the system includes and omit it when invoked with '--cflags'."
)
# First apply the clean environment changes
env_base.apply_modifications()
@@ -958,11 +953,8 @@ def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
reversed(specs_with_type), lambda t: t[0].external
)
self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE
self.should_setup_run_env = (
UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
)
self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT
self.should_setup_build_env = UseMode.ROOT if context == Context.BUILD else UseMode(0)
if context == Context.RUN or context == Context.TEST:
self.should_be_runnable |= UseMode.ROOT
@@ -1002,9 +994,8 @@ def get_env_modifications(self) -> EnvironmentModifications:
- Updating PATH for packages that are required at runtime
- Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
tools can find Spack-built dependencies (when context=build)
- Running custom package environment modifications: setup_run_environment,
setup_dependent_run_environment, setup_build_environment,
setup_dependent_build_environment.
- Running custom package environment modifications (setup_run_environment,
setup_dependent_build_environment, setup_dependent_run_environment)
The (partial) order imposed on the specs is externals first, then topological
from leaf to root. That way externals cannot contribute search paths that would shadow
@@ -1017,27 +1008,19 @@ def get_env_modifications(self) -> EnvironmentModifications:
if self.should_setup_dependent_build_env & flag:
self._make_buildtime_detectable(dspec, env)
for root in self.specs: # there is only one root in build context
spack.builder.create(pkg).setup_dependent_build_environment(env, root)
if self.should_setup_build_env & flag:
spack.builder.create(pkg).setup_build_environment(env)
for spec in self.specs:
builder = spack.builder.create(pkg)
builder.setup_dependent_build_environment(env, spec)
if self.should_be_runnable & flag:
self._make_runnable(dspec, env)
if self.should_setup_run_env & flag:
run_env_mods = EnvironmentModifications()
for spec in dspec.dependents(deptype=dt.LINK | dt.RUN):
# TODO: remove setup_dependent_run_environment...
for spec in dspec.dependents(deptype=dt.RUN):
if id(spec) in self.nodes_in_subdag:
pkg.setup_dependent_run_environment(run_env_mods, spec)
pkg.setup_run_environment(run_env_mods)
if self.context == Context.BUILD:
# Don't let the runtime environment of compiler-like dependencies leak into
# the build environment
run_env_mods.drop("CC", "CXX", "F77", "FC")
env.extend(run_env_mods)
pkg.setup_dependent_run_environment(env, spec)
pkg.setup_run_environment(env)
return env
def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications):

View File

@@ -9,7 +9,6 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import spack.build_environment
import spack.builder
from .cmake import CMakeBuilder, CMakePackage
@@ -35,11 +34,6 @@ def cmake_cache_option(name, boolean_value, comment="", force=False):
return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)
def cmake_cache_filepath(name, value, comment=""):
"""Generate a string for a cmake cache variable of type FILEPATH"""
return 'set({0} "{1}" CACHE FILEPATH "{2}")\n'.format(name, value, comment)
class CachedCMakeBuilder(CMakeBuilder):
#: Phases of a Cached CMake package
#: Note: the initconfig phase is used for developer builds as a final phase to stop on
@@ -263,15 +257,6 @@ def initconfig_hardware_entries(self):
entries.append(
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
)
llvm_bin = spec["llvm-amdgpu"].prefix.bin
llvm_prefix = spec["llvm-amdgpu"].prefix
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
# others point to /<path>/rocm-<ver>/llvm
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
entries.append(
cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
)
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
arch_str = ";".join(archs)
@@ -286,29 +271,13 @@ def initconfig_hardware_entries(self):
def std_initconfig_entries(self):
cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
cmake_rpaths_path = ";".join(cmake_rpaths_env)
complete_rpath_list = cmake_rpaths_path
if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)
if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)
return [
"#------------------{0}".format("-" * 60),
"# !!!! This is a generated file, edit at own risk !!!!",
"#------------------{0}".format("-" * 60),
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
"#------------------{0}\n".format("-" * 60),
cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
]
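For reference, each cache-entry helper used above emits a single set(... CACHE ...) line. A minimal sketch, assuming cmake_cache_string and cmake_cache_path follow the same pattern as cmake_cache_filepath, whose definition appears earlier in this file:

def cmake_cache_filepath(name, value, comment=""):
    """Generate a string for a cmake cache variable of type FILEPATH"""
    return 'set({0} "{1}" CACHE FILEPATH "{2}")\n'.format(name, value, comment)

print(cmake_cache_filepath("CMAKE_HIP_COMPILER", "/opt/rocm/llvm/bin/clang++"), end="")
# set(CMAKE_HIP_COMPILER "/opt/rocm/llvm/bin/clang++" CACHE FILEPATH "")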

View File

@@ -1,89 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import llnl.util.filesystem as fs
import spack.builder
import spack.package_base
from spack.directives import build_system, depends_on
from spack.multimethod import when
from ._checks import BaseBuilder, execute_install_time_tests
class CargoPackage(spack.package_base.PackageBase):
"""Specialized class for packages built using a Makefiles."""
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "CargoPackage"
build_system("cargo")
with when("build_system=cargo"):
depends_on("rust", type="build")
@spack.builder.builder("cargo")
class CargoBuilder(BaseBuilder):
"""The Cargo builder encodes the most common way of building software with
a Rust Cargo.toml file. It has two phases that can be overridden, if need be:
1. :py:meth:`~.CargoBuilder.build`
2. :py:meth:`~.CargoBuilder.install`
For a finer tuning you may override:
+-----------------------------------------------+----------------------+
| **Method** | **Purpose** |
+===============================================+======================+
| :py:meth:`~.CargoBuilder.build_args` | Specify arguments |
| | to ``cargo install`` |
+-----------------------------------------------+----------------------+
| :py:meth:`~.CargoBuilder.check_args` | Specify arguments |
| | to ``cargo test`` |
+-----------------------------------------------+----------------------+
"""
phases = ("build", "install")
#: Callback names for install-time test
install_time_test_callbacks = ["check"]
@property
def build_directory(self):
"""Return the directory containing the main Cargo.toml."""
return self.pkg.stage.source_path
@property
def build_args(self):
"""Arguments for ``cargo build``."""
return []
@property
def check_args(self):
"""Argument for ``cargo test`` during check phase"""
return []
def build(self, pkg, spec, prefix):
"""Runs ``cargo install`` in the source directory"""
with fs.working_dir(self.build_directory):
inspect.getmodule(pkg).cargo(
"install", "--root", "out", "--path", ".", *self.build_args
)
def install(self, pkg, spec, prefix):
"""Copy build files into package prefix."""
with fs.working_dir(self.build_directory):
fs.install_tree("out", prefix)
spack.builder.run_after("install")(execute_install_time_tests)
def check(self):
"""Run "cargo test"."""
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).cargo("test", *self.check_args)
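To make the builder contract above concrete, here is a hypothetical package that would use it; the class name, URL, and checksum are placeholders, not a real Spack package:

from spack.package import *  # assumed to re-export CargoPackage once this builder lands

class ExampleCrate(CargoPackage):
    """Hypothetical Rust crate built via ``cargo install`` (illustrative only)."""

    homepage = "https://example.com/example-crate"
    url = "https://example.com/example-crate-1.0.0.tar.gz"

    version("1.0.0", sha256="0" * 64)  # placeholder checksum

    @property
    def build_args(self):
        # Forwarded to ``cargo install`` by CargoBuilder.build
        return ["--locked"]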

View File

@@ -136,12 +136,12 @@ def cuda_flags(arch_list):
conflicts("%gcc@11:", when="+cuda ^cuda@:11.4.0")
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.1")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -1,98 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import llnl.util.filesystem as fs
import spack.builder
import spack.package_base
from spack.directives import build_system, extends
from spack.multimethod import when
from ._checks import BaseBuilder, execute_install_time_tests
class GoPackage(spack.package_base.PackageBase):
"""Specialized class for packages built using the Go toolchain."""
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "GoPackage"
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "go"
build_system("go")
with when("build_system=go"):
# TODO: this seems like it should be depends_on, see
# setup_dependent_build_environment in go for why I kept it like this
extends("go@1.14:", type="build")
@spack.builder.builder("go")
class GoBuilder(BaseBuilder):
"""The Go builder encodes the most common way of building software with
a Go go.mod file. It has two phases that can be overridden, if need be:
1. :py:meth:`~.GoBuilder.build`
2. :py:meth:`~.GoBuilder.install`
For a finer tuning you may override:
+-----------------------------------------------+--------------------+
| **Method** | **Purpose** |
+===============================================+====================+
| :py:meth:`~.GoBuilder.build_args` | Specify arguments |
| | to ``go build`` |
+-----------------------------------------------+--------------------+
| :py:meth:`~.GoBuilder.check_args` | Specify arguments |
| | to ``go test`` |
+-----------------------------------------------+--------------------+
"""
phases = ("build", "install")
#: Callback names for install-time test
install_time_test_callbacks = ["check"]
def setup_build_environment(self, env):
env.set("GO111MODULE", "on")
env.set("GOTOOLCHAIN", "local")
@property
def build_directory(self):
"""Return the directory containing the main go.mod."""
return self.pkg.stage.source_path
@property
def build_args(self):
"""Arguments for ``go build``."""
# Pass -ldflags "-s -w" by default: -s omits the symbol table, -w omits DWARF debug info
return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
@property
def check_args(self):
"""Argument for ``go test`` during check phase"""
return []
def build(self, pkg, spec, prefix):
"""Runs ``go build`` in the source directory"""
with fs.working_dir(self.build_directory):
inspect.getmodule(pkg).go("build", *self.build_args)
def install(self, pkg, spec, prefix):
"""Install built binaries into prefix bin."""
with fs.working_dir(self.build_directory):
fs.mkdirp(prefix.bin)
fs.install(pkg.name, prefix.bin)
spack.builder.run_after("install")(execute_install_time_tests)
def check(self):
"""Run ``go test .`` in the source directory"""
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).go("test", *self.check_args)
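As with the Cargo builder, a hypothetical package using this class might look as follows; the name, URL, and checksum are placeholders, and the sketch assumes Spack's usual forwarding of package attributes to the builder:

from spack.package import *  # assumed to re-export GoPackage once this builder lands

class ExampleGoTool(GoPackage):
    """Hypothetical Go module built via ``go build`` (illustrative only)."""

    homepage = "https://example.com/example-go-tool"
    url = "https://example.com/example-go-tool-1.0.0.tar.gz"

    version("1.0.0", sha256="0" * 64)  # placeholder checksum

    @property
    def build_args(self):
        # Replaces the builder default; forwarded to ``go build``
        return ["-ldflags", "-s -w", "-o", self.name, "-tags", "netgo"]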

View File

@@ -7,12 +7,13 @@
import os
import platform
import shutil
from os.path import basename, isdir
from os.path import basename, dirname, isdir
from llnl.util.filesystem import HeaderList, find_libraries, join_path, mkdirp
from llnl.util.filesystem import find_headers, find_libraries, join_path
from llnl.util.link_tree import LinkTree
from spack.directives import conflicts, variant
from spack.package import mkdirp
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -55,21 +56,10 @@ def component_dir(self):
"""Subdirectory for this component in the install prefix."""
raise NotImplementedError
@property
def v2_layout_versions(self):
"""Version that implements the v2 directory layout."""
raise NotImplementedError
@property
def v2_layout(self):
"""Returns true if this version implements the v2 directory layout."""
return self.spec.satisfies(self.v2_layout_versions)
@property
def component_prefix(self):
"""Path to component <prefix>/<component>/<version>."""
v = self.spec.version.up_to(2) if self.v2_layout else self.spec.version
return self.prefix.join(self.component_dir).join(str(v))
return self.prefix.join(join_path(self.component_dir, self.spec.version))
@property
def env_script_args(self):
@@ -123,9 +113,8 @@ def install_component(self, installer_path):
shutil.rmtree("/var/intel/installercache", ignore_errors=True)
# Some installers have a bug and do not return an error code when failing
install_dir = self.component_prefix
if not isdir(install_dir):
raise RuntimeError("install failed to directory: {0}".format(install_dir))
if not isdir(join_path(self.prefix, self.component_dir)):
raise RuntimeError("install failed")
def setup_run_environment(self, env):
"""Adds environment variables to the generated module file.
@@ -140,7 +129,7 @@ def setup_run_environment(self, env):
if "~envmods" not in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
self.component_prefix.env.join("vars.sh"), *self.env_script_args
join_path(self.component_prefix, "env", "vars.sh"), *self.env_script_args
)
)
@@ -179,40 +168,16 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):
"""
def header_directories(self, dirs):
h = HeaderList([])
h.directories = dirs
return h
@property
def headers(self):
return self.header_directories(
[self.component_prefix.include, self.component_prefix.include.join(self.component_dir)]
)
include_path = join_path(self.component_prefix, "include")
return find_headers("*", include_path, recursive=True)
@property
def libs(self):
# for the v2 layout all libraries are at the top level; v1 sometimes puts them in intel64
return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)
class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
"""Base class for Intel oneAPI library packages with SDK components.
Contains some convenient default implementations for libraries
that expose functionality in sdk subdirectories.
Implement the method directly in the package if something
different is needed.
"""
@property
def headers(self):
return self.header_directories([self.component_prefix.sdk.include])
@property
def libs(self):
return find_libraries("*", self.component_prefix.sdk.lib64)
lib_path = join_path(self.component_prefix, "lib", "intel64")
lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
return find_libraries("*", root=lib_path, shared=True, recursive=True)
class IntelOneApiStaticLibraryList:
@@ -247,7 +212,3 @@ def link_flags(self):
@property
def ld_flags(self):
return "{0} {1}".format(self.search_flags, self.link_flags)
#: Tuple of Intel math libraries, exported to packages
INTEL_MATH_LIBRARIES = ("intel-mkl", "intel-oneapi-mkl", "intel-parallel-studio")
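The layout switch above hinges on v2_layout_versions, which each component package pins. A hypothetical component follows; the name and version boundary are illustrative:

from spack.build_systems.oneapi import IntelOneApiLibraryPackage

class IntelOneapiExample(IntelOneApiLibraryPackage):
    """Hypothetical oneAPI component (illustrative only)."""

    @property
    def component_dir(self):
        return "example"

    @property
    def v2_layout_versions(self):
        # Assumption: releases from 2024 on use <prefix>/example/<major.minor>
        return "@2024:"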

View File

@@ -6,14 +6,14 @@
import os
import re
import shutil
from typing import Iterable, List, Mapping, Optional
import stat
from typing import Optional
import archspec
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import HeaderList, LibraryList
import spack.builder
import spack.config
@@ -24,38 +24,19 @@
import spack.spec
import spack.store
from spack.directives import build_system, depends_on, extends, maintainers
from spack.error import NoHeadersError, NoLibrariesError
from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.install_test import test_part
from spack.spec import Spec
from spack.util.prefix import Prefix
from spack.util.executable import Executable
from spack.version import Version
from ._checks import BaseBuilder, execute_install_time_tests
def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
"""Iterable that yields KEY=VALUE paths through a dictionary.
Args:
dictionary: Possibly nested dictionary of arbitrary keys and values.
Yields:
A single path through the dictionary.
"""
for key, item in dictionary.items():
if isinstance(item, dict):
# Recursive case
for value in _flatten_dict(item):
yield f"{key}={value}"
else:
# Base case
yield f"{key}={item}"
class PythonExtension(spack.package_base.PackageBase):
maintainers("adamjstewart")
@property
def import_modules(self) -> Iterable[str]:
def import_modules(self):
"""Names of modules that the Python package provides.
These are used to test whether or not the installation succeeded.
@@ -70,7 +51,7 @@ def import_modules(self) -> Iterable[str]:
detected, this property can be overridden by the package.
Returns:
List of strings of module names.
list: list of strings of module names
"""
modules = []
pkg = self.spec["python"].package
@@ -107,14 +88,14 @@ def import_modules(self) -> Iterable[str]:
return modules
@property
def skip_modules(self) -> Iterable[str]:
def skip_modules(self):
"""Names of modules that should be skipped when running tests.
These are a subset of import_modules. If a module has submodules,
they are skipped as well (meaning a.b is skipped if a is contained).
Returns:
List of strings of module names.
list: list of strings of module names
"""
return []
@@ -190,12 +171,12 @@ def remove_files_from_view(self, view, merge_map):
view.remove_files(to_remove)
def test_imports(self) -> None:
def test_imports(self):
"""Attempts to import modules of the installed package."""
# Make sure we are importing the installed modules,
# not the ones in the source directory
python = inspect.getmodule(self).python # type: ignore[union-attr]
python = inspect.getmodule(self).python
for module in self.import_modules:
with test_part(
self,
@@ -320,27 +301,24 @@ class PythonPackage(PythonExtension):
py_namespace: Optional[str] = None
@lang.classproperty
def homepage(cls) -> Optional[str]: # type: ignore[override]
def homepage(cls):
if cls.pypi:
name = cls.pypi.split("/")[0]
return f"https://pypi.org/project/{name}/"
return None
return "https://pypi.org/project/" + name + "/"
@lang.classproperty
def url(cls) -> Optional[str]:
def url(cls):
if cls.pypi:
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
return None
return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
@lang.classproperty
def list_url(cls) -> Optional[str]: # type: ignore[override]
def list_url(cls):
if cls.pypi:
name = cls.pypi.split("/")[0]
return f"https://pypi.org/simple/{name}/"
return None
return "https://pypi.org/simple/" + name + "/"
@property
def headers(self) -> HeaderList:
def headers(self):
"""Discover header files in platlib."""
# Remove py- prefix in package name
@@ -358,7 +336,7 @@ def headers(self) -> HeaderList:
raise NoHeadersError(msg.format(self.spec.name, include, platlib))
@property
def libs(self) -> LibraryList:
def libs(self):
"""Discover libraries in platlib."""
# Remove py- prefix in package name
@@ -375,6 +353,51 @@ def libs(self) -> LibraryList:
raise NoLibrariesError(msg.format(self.spec.name, root))
def fixup_shebangs(path: str, old_interpreter: bytes, new_interpreter: bytes):
# Recurse into the install prefix and fixup shebangs
exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
dirs = [path]
hardlinks = set()
while dirs:
with os.scandir(dirs.pop()) as entries:
for entry in entries:
if entry.is_dir(follow_symlinks=False):
dirs.append(entry.path)
continue
# Only consider files, not symlinks
if not entry.is_file(follow_symlinks=False):
continue
lstat = entry.stat(follow_symlinks=False)
# Skip over files that are not executable
if not (lstat.st_mode & exe):
continue
# Don't modify hardlinks more than once
if lstat.st_nlink > 1:
key = (lstat.st_ino, lstat.st_dev)
if key in hardlinks:
continue
hardlinks.add(key)
# Finally replace shebangs if any.
with open(entry.path, "rb+") as f:
contents = f.read(2)
if contents != b"#!":
continue
contents += f.read()
if old_interpreter not in contents:
continue
f.seek(0)
f.write(contents.replace(old_interpreter, new_interpreter))
f.truncate()
@spack.builder.builder("python_pip")
class PythonPipBuilder(BaseBuilder):
phases = ("install",)
@@ -386,13 +409,13 @@ class PythonPipBuilder(BaseBuilder):
legacy_long_methods = ("install_options", "global_options", "config_settings")
#: Names associated with package attributes in the old build-system format
legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")
legacy_attributes = ("build_directory", "install_time_test_callbacks")
#: Callback names for install-time test
install_time_test_callbacks = ["test"]
@staticmethod
def std_args(cls) -> List[str]:
def std_args(cls):
return [
# Verbose
"-vvv",
@@ -417,7 +440,7 @@ def std_args(cls) -> List[str]:
]
@property
def build_directory(self) -> str:
def build_directory(self):
"""The root directory of the Python package.
This is usually the directory containing one of the following files:
@@ -428,76 +451,127 @@ def build_directory(self) -> str:
"""
return self.pkg.stage.source_path
def config_settings(self, spec: Spec, prefix: Prefix) -> Mapping[str, object]:
def config_settings(self, spec, prefix):
"""Configuration settings to be passed to the PEP 517 build backend.
Requires pip 22.1 or newer for keys that appear only a single time,
or pip 23.1 or newer if the same key appears multiple times.
Requires pip 22.1 or newer.
Args:
spec: Build spec.
prefix: Installation prefix.
spec (spack.spec.Spec): build spec
prefix (spack.util.prefix.Prefix): installation prefix
Returns:
Possibly nested dictionary of KEY, VALUE settings.
dict: dictionary of KEY, VALUE settings
"""
return {}
def install_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
def install_options(self, spec, prefix):
"""Extra arguments to be supplied to the setup.py install command.
Requires pip 23.0 or older.
Args:
spec: Build spec.
prefix: Installation prefix.
spec (spack.spec.Spec): build spec
prefix (spack.util.prefix.Prefix): installation prefix
Returns:
List of options.
list: list of options
"""
return []
def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
def global_options(self, spec, prefix):
"""Extra global options to be supplied to the setup.py call before the install
or bdist_wheel command.
Deprecated in pip 23.1.
Args:
spec: Build spec.
prefix: Installation prefix.
spec (spack.spec.Spec): build spec
prefix (spack.util.prefix.Prefix): installation prefix
Returns:
List of options.
list: list of options
"""
return []
def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
@property
def _build_venv_path(self):
"""Return the path to the virtual environment used for building when
python is external."""
return os.path.join(self.spec.package.stage.path, "build_env")
@property
def _build_venv_python(self) -> Executable:
"""Return the Python executable in the build virtual environment when
python is external."""
return Executable(os.path.join(self._build_venv_path, "bin", "python"))
def install(self, pkg, spec, prefix):
"""Install everything from build directory."""
python: Executable = spec["python"].command
# Since we invoke pip with --no-build-isolation, we have to make sure that pip cannot
# execute hooks from user and system site-packages.
if spec["python"].external:
# There are no environment variables to disable the system site-packages, so we use a
# virtual environment instead. The downside of this approach is that pip produces
# incorrect shebangs that refer to the virtual environment, which we have to fix up.
python("-m", "venv", "--without-pip", self._build_venv_path)
pip = self._build_venv_python
else:
# For a Spack managed Python, system site-packages is empty/unused by design, so it
# suffices to disable user site-packages, for which there is an environment variable.
pip = python
pip.add_default_env("PYTHONNOUSERSITE", "1")
pip.add_default_arg("-m")
pip.add_default_arg("pip")
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]
for key, value in self.config_settings(spec, prefix).items():
if spec["py-pip"].version < Version("22.1"):
raise SpecError(
"'{}' package uses 'config_settings' which is only supported by "
"pip 22.1+. Add the following line to the package to fix this:\n\n"
' depends_on("py-pip@22.1:", type="build")'.format(spec.name)
)
args.append("--config-settings={}={}".format(key, value))
for setting in _flatten_dict(self.config_settings(spec, prefix)):
args.append(f"--config-settings={setting}")
for option in self.install_options(spec, prefix):
args.append(f"--install-option={option}")
args.append("--install-option=" + option)
for option in self.global_options(spec, prefix):
args.append(f"--global-option={option}")
args.append("--global-option=" + option)
if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"):
args.append(pkg.stage.archive_file)
else:
args.append(".")
pip = spec["python"].command
# Hide user packages, since we don't have build isolation. This is
# necessary because pip / setuptools may run hooks from arbitrary
# packages during the build. There is no equivalent variable to hide
# system packages, so this is not reliable for external Python.
pip.add_default_env("PYTHONNOUSERSITE", "1")
pip.add_default_arg("-m")
pip.add_default_arg("pip")
with fs.working_dir(self.build_directory):
pip(*args)
@spack.builder.run_after("install")
def fixup_shebangs_pointing_to_build(self):
"""When installing a package using an external python, we use a temporary virtual
environment which improves build isolation. The downside is that pip produces shebangs
that point to the temporary virtual environment. This method fixes them up to point to the
underlying Python."""
# No need to fixup shebangs if no build venv was used. (this post install function also
# runs when install was overridden in another package, so check existence of the venv path)
if not os.path.exists(self._build_venv_path):
return
# Use sys.executable, since that's what pip uses.
interpreter = (
lambda python: python("-c", "import sys; print(sys.executable)", output=str)
.strip()
.encode("utf-8")
)
fixup_shebangs(
path=self.spec.prefix,
old_interpreter=interpreter(self._build_venv_python),
new_interpreter=interpreter(self.spec["python"].command),
)
spack.builder.run_after("install")(execute_install_time_tests)

View File

@@ -108,8 +108,6 @@ class ROCmPackage(PackageBase):
"gfx90a:xnack+",
"gfx90c",
"gfx940",
"gfx941",
"gfx942",
"gfx1010",
"gfx1011",
"gfx1012",
@@ -170,8 +168,6 @@ def hip_flags(amdgpu_target):
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack-")
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack+")
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx941")
depends_on("llvm-amdgpu@5.7.0:", when="amdgpu_target=gfx942")
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1013")
depends_on("llvm-amdgpu@3.8.0:", when="amdgpu_target=gfx1030")
depends_on("llvm-amdgpu@3.9.0:", when="amdgpu_target=gfx1031")

View File

@@ -25,7 +25,6 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.tty.color import cescape, colorize
import spack
import spack.binary_distribution as bindist
@@ -46,22 +45,7 @@
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import build_stamp as cdash_build_stamp
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
# "always",
"unknown_failure",
"script_failure",
"api_failure",
"stuck_or_timeout_failure",
"runner_system_failure",
"runner_unsupported",
"stale_schedule",
# "job_execution_timeout",
"archived_failure",
"unmet_prerequisites",
"scheduler_failure",
"data_integrity_failure",
]
JOB_RETRY_CONDITIONS = ["always"]
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
@@ -113,6 +97,15 @@ def _remove_reserved_tags(tags):
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
def _get_spec_string(spec):
format_elements = ["{name}{@version}", "{%compiler}"]
if spec.architecture:
format_elements.append(" {arch=architecture}")
return spec.format("".join(format_elements))
def _spec_deps_key(s):
return "{0}/{1}".format(s.name, s.dag_hash(7))
@@ -217,22 +210,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi
tty.msg("Staging summary ([x] means a job needs rebuilding):")
for stage_index, stage in enumerate(stages):
tty.msg(f" stage {stage_index} ({len(stage)} jobs):")
tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))
for job in sorted(stage, key=lambda j: (not rebuild_decisions[j].rebuild, j)):
for job in sorted(stage):
s = spec_labels[job]
rebuild = rebuild_decisions[job].rebuild
reason = rebuild_decisions[job].reason
reason_msg = f" ({reason})" if reason else ""
spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
if rebuild_decisions[job].rebuild:
status = colorize("@*g{[x]} ")
msg = f" {status}{s.cformat(spec_fmt)}{reason_msg}"
else:
msg = f"{s.format(spec_fmt)}{reason_msg}"
if rebuild_decisions[job].mirrors:
msg += f" [{', '.join(rebuild_decisions[job].mirrors)}]"
msg = colorize(f" @K - {cescape(msg)}@.")
tty.msg(msg)
reason_msg = " ({0})".format(reason) if reason else ""
tty.msg(
" [{1}] {0} -> {2}{3}".format(
job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
)
)
if rebuild_decisions[job].mirrors:
tty.msg(" found on the following mirrors:")
for murl in rebuild_decisions[job].mirrors:
tty.msg(" {0}".format(murl))
def _compute_spec_deps(spec_list):
@@ -939,7 +932,7 @@ def generate_gitlab_ci_yaml(
# Speed up staging by first fetching binary indices from all mirrors
try:
bindist.BINARY_INDEX.update()
bindist.binary_index.update()
except bindist.FetchCacheError as e:
tty.warn(e)
@@ -2265,13 +2258,13 @@ def build_name(self):
spec.architecture,
self.build_group,
)
tty.debug(
tty.verbose(
"Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
)
return build_name
build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
tty.debug("Using CDash build name ({0}) from the environment".format(build_name))
tty.verbose("Using CDash build name ({0}) from the environment".format(build_name))
return build_name
@property # type: ignore
@@ -2285,11 +2278,11 @@ def build_stamp(self):
Returns: (str) current CDash build stamp"""
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
if build_stamp:
tty.debug("Using build stamp ({0}) from the environment".format(build_stamp))
tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp))
return build_stamp
build_stamp = cdash_build_stamp(self.build_group, time.time())
tty.debug("Generated new build stamp ({0})".format(build_stamp))
tty.verbose("Generated new build stamp ({0})".format(build_stamp))
return build_stamp
@property # type: ignore
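For context, each line of the new staging summary above combines a rebuild marker, a formatted spec, an optional reason, and any mirrors. A minimal sketch of that rendering, with plain strings standing in for colorized specs:

def summary_line(spec_str, rebuild, reason=None, mirrors=()):
    status = "[x] " if rebuild else ""
    line = f"  {status}{spec_str}"
    if reason:
        line += f" ({reason})"
    if mirrors:
        line += f" [{', '.join(mirrors)}]"
    return line

print(summary_line("zlib@1.2.13/abcdefg", True, reason="hash mismatch"))
print(summary_line("cmake@3.27.7/1234567", False, mirrors=["s3://mirror"]))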

View File

@@ -6,8 +6,10 @@
import argparse
import os
import re
import shlex
import sys
from typing import List, Union
from textwrap import dedent
from typing import List, Match, Tuple
import llnl.string
import llnl.util.tty as tty
@@ -145,37 +147,89 @@ def get_command(cmd_name):
return getattr(get_module(cmd_name), pname)
def quote_kvp(string: str) -> str:
"""For strings like ``name=value`` or ``name==value``, quote and escape the value if needed.
class _UnquotedFlags:
"""Use a heuristic in `.extract()` to detect whether the user is trying to set
multiple flags like the Docker ENV instruction allows (e.g. 'cflags=-Os -pipe').
This is a compromise to respect quoting of key-value pairs on the CLI. The shell
strips quotes from quoted arguments, so we cannot know *exactly* how CLI arguments
were quoted. To compensate, we re-add quotes around anything starting with ``name=``
or ``name==``, and we assume the rest of the argument is the value. This covers the
common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
If the heuristic finds a match (which can be checked with `__bool__()`), a warning
message explaining how to quote multiple flags correctly can be generated with
`.report()`.
"""
match = re.match(spack.parser.SPLIT_KVP, string)
if not match:
return string
key, delim, value = match.groups()
return f"{key}{delim}{spack.parser.quote_if_needed(value)}"
flags_arg_pattern = re.compile(
r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
)
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
self._flag_pairs = all_unquoted_flag_pairs
def __bool__(self) -> bool:
return bool(self._flag_pairs)
@classmethod
def extract(cls, sargs: str) -> "_UnquotedFlags":
all_unquoted_flag_pairs: List[Tuple[Match[str], str]] = []
prev_flags_arg = None
for arg in shlex.split(sargs):
if prev_flags_arg is not None:
all_unquoted_flag_pairs.append((prev_flags_arg, arg))
prev_flags_arg = cls.flags_arg_pattern.match(arg)
return cls(all_unquoted_flag_pairs)
def report(self) -> str:
single_errors = [
"({0}) {1} {2} => {3}".format(
i + 1,
match.group(0),
next_arg,
'{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
)
for i, (match, next_arg) in enumerate(self._flag_pairs)
]
return dedent(
"""\
Some compiler or linker flags were provided without quoting their arguments,
which now causes spack to try to parse the *next* argument as a spec component
such as a variant instead of an additional compiler or linker flag. If the
intent was to set multiple flags, try quoting them together as described below.
Possible flag quotation errors (with the correctly-quoted version after the =>):
{0}"""
).format("\n".join(single_errors))
def parse_specs(
args: Union[str, List[str]], concretize: bool = False, tests: bool = False
) -> List[spack.spec.Spec]:
def parse_specs(args, **kwargs):
"""Convenience function for parsing arguments from specs. Handles common
exceptions and dies if there are errors.
"""
args = [args] if isinstance(args, str) else args
arg_string = " ".join([quote_kvp(arg) for arg in args])
concretize = kwargs.get("concretize", False)
normalize = kwargs.get("normalize", False)
tests = kwargs.get("tests", False)
specs = spack.parser.parse(arg_string)
for spec in specs:
if concretize:
spec.concretize(tests=tests)
return specs
sargs = args
if not isinstance(args, str):
sargs = " ".join(args)
unquoted_flags = _UnquotedFlags.extract(sargs)
try:
specs = spack.parser.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests) # implies normalize
elif normalize:
spec.normalize(tests=tests)
return specs
except spack.error.SpecError as e:
msg = e.message
if e.long_message:
msg += e.long_message
# Unquoted flags will be read as a variant or hash
if unquoted_flags and ("variant" in msg or "hash" in msg):
msg += "\n\n"
msg += unquoted_flags.report()
raise spack.error.SpackError(msg) from e
def matching_spec_from_env(spec):
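The quote_kvp heuristic above is easiest to see standalone. In this sketch, SPLIT_KVP and quote_if_needed are simplified stand-ins for the real definitions in spack.parser:

import re

SPLIT_KVP = re.compile(r"^(\w+)(==?)(.*)$")  # simplified stand-in

def quote_if_needed(value: str) -> str:
    # Simplified stand-in: quote only when the value contains whitespace
    return f"'{value}'" if " " in value else value

def quote_kvp(string: str) -> str:
    match = SPLIT_KVP.match(string)
    if not match:
        return string
    key, delim, value = match.groups()
    return f"{key}{delim}{quote_if_needed(value)}"

print(quote_kvp("cflags=-O2 -g"))  # cflags='-O2 -g'
print(quote_kvp("zlib@1.2.13"))    # unchanged: not a key=value pair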

View File

@@ -6,7 +6,7 @@
import llnl.util.tty as tty
import spack.cmd
from spack.cmd.common import arguments
import spack.cmd.common.arguments as arguments
description = "add a spec to an environment"
section = "environments"

View File

@@ -2,8 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import warnings
import llnl.util.tty as tty
import llnl.util.tty.colify
import llnl.util.tty.color as cl
@@ -54,10 +52,8 @@ def setup_parser(subparser):
def configs(parser, args):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
reports = spack.audit.run_group(args.subcommand)
_process_reports(reports)
reports = spack.audit.run_group(args.subcommand)
_process_reports(reports)
def packages(parser, args):

View File

@@ -15,13 +15,13 @@
import spack.bootstrap
import spack.bootstrap.config
import spack.bootstrap.core
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path
from spack.cmd.common import arguments
description = "manage bootstrap configuration"
section = "system"
@@ -68,8 +68,12 @@
def _add_scope_option(parser):
scopes = spack.config.scopes()
parser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
help="configuration scope to read/modify",
)
@@ -102,7 +106,7 @@ def setup_parser(subparser):
disable.add_argument("name", help="name of the source to be disabled", nargs="?", default=None)
reset = sp.add_parser("reset", help="reset bootstrapping configuration to Spack defaults")
arguments.add_common_arguments(reset, ["yes_to_all"])
spack.cmd.common.arguments.add_common_arguments(reset, ["yes_to_all"])
root = sp.add_parser("root", help="get/set the root bootstrap directory")
_add_scope_option(root)

View File

@@ -3,59 +3,36 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import copy
import glob
import hashlib
import json
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple
from typing import List
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.string import plural
from llnl.util.lang import elide_list
import spack.binary_distribution as bindist
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.mirror
import spack.oci.oci
import spack.oci.opener
import spack.relocate
import spack.repo
import spack.spec
import spack.stage
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.url as url_util
import spack.util.web as web_util
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import (
Digest,
ImageReference,
default_config,
default_index_tag,
default_manifest,
default_tag,
tag_is_spec,
)
from spack.oci.oci import (
copy_missing_layers_with_retry,
get_manifest_and_config_with_retry,
upload_blob_with_retry,
upload_manifest_with_retry,
)
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage
description = "create, download and install binary packages"
section = "packaging"
@@ -76,26 +53,12 @@ def setup_parser(subparser: argparse.ArgumentParser):
)
push_sign = push.add_mutually_exclusive_group(required=False)
push_sign.add_argument(
"--unsigned",
"-u",
action="store_false",
dest="signed",
default=None,
help="push unsigned buildcache tarballs",
)
push_sign.add_argument(
"--signed",
action="store_true",
dest="signed",
default=None,
help="push signed buildcache tarballs",
"--unsigned", "-u", action="store_true", help="push unsigned buildcache tarballs"
)
push_sign.add_argument(
"--key", "-k", metavar="key", type=str, default=None, help="key for signing"
)
push.add_argument(
"mirror", type=arguments.mirror_name_or_url, help="mirror name, path, or URL"
)
push.add_argument("mirror", type=str, help="mirror name, path, or URL")
push.add_argument(
"--update-index",
"--rebuild-index",
@@ -121,10 +84,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
action="store_true",
help="stop pushing on first failure (default is best effort)",
)
push.add_argument(
"--base-image", default=None, help="specify the base image for the buildcache. "
)
arguments.add_common_arguments(push, ["specs", "jobs"])
arguments.add_common_arguments(push, ["specs"])
push.set_defaults(func=push_fn)
install = subparsers.add_parser("install", help=install_fn.__doc__)
@@ -194,22 +154,23 @@ def setup_parser(subparser: argparse.ArgumentParser):
)
# used to construct scope arguments below
scopes = spack.config.scopes()
check.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check",
)
# Unfortunately there are 3 ways to do the same thing here:
check_specs = check.add_mutually_exclusive_group()
check_specs.add_argument(
check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
check_spec_or_specfile.add_argument(
"-s", "--spec", help="check single spec instead of release specs file"
)
check_specs.add_argument(
check_spec_or_specfile.add_argument(
"--spec-file",
help="check single spec from json or yaml file instead of release specs file",
)
arguments.add_common_arguments(check, ["specs"])
check.set_defaults(func=check_fn)
@@ -307,22 +268,7 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]:
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
def _format_spec(spec: Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
def _progress(i: int, total: int):
if total > 1:
digits = len(str(total))
return f"[{i+1:{digits}}/{total}] "
return ""
def _make_pool():
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
def push_fn(args):
def push_fn(args: argparse.Namespace):
"""create a binary package and push it to a mirror"""
if args.spec_file:
tty.warn(
@@ -335,38 +281,15 @@ def push_fn(args):
else:
specs = spack.cmd.require_active_env("buildcache push").all_specs()
mirror = arguments.mirror_name_or_url(args.mirror)
if args.allow_root:
tty.warn(
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
)
mirror: spack.mirror.Mirror = args.mirror
url = mirror.push_url
# Check if this is an OCI image.
try:
image_ref = spack.oci.oci.image_from_mirror(mirror)
except ValueError:
image_ref = None
push_url = mirror.push_url
# When neither --signed, --unsigned nor --key are specified, use the mirror's default.
if args.signed is None and not args.key:
unsigned = not mirror.signed
else:
unsigned = not (args.key or args.signed)
# For OCI images, we require dependencies to be pushed for now.
if image_ref:
if "dependencies" not in args.things_to_install:
tty.die("Dependencies must be pushed for OCI images.")
if not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
# This is a list of installed, non-external specs.
specs = bindist.specs_to_be_packaged(
specs,
root="package" in args.things_to_install,
@@ -376,47 +299,45 @@ def push_fn(args):
# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.
if len(specs) > 1:
tty.info(f"Selected {len(specs)} specs to push to {push_url}")
tty.info(f"Selected {len(specs)} specs to push to {url}")
skipped = []
failed = []
# TODO: unify this logic in the future.
if image_ref:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
skipped = _push_oci(args, image_ref, specs, tmpdir, pool)
else:
skipped = []
# tty printing
color = clr.get_color_when()
format_spec = lambda s: s.format("{name}{@version}{/hash:7}", color=color)
total_specs = len(specs)
digits = len(str(total_specs))
for i, spec in enumerate(specs):
try:
bindist.push_or_raise(
spec,
push_url,
bindist.PushOptions(
force=args.force,
unsigned=unsigned,
key=args.key,
regenerate_index=args.update_index,
),
)
for i, spec in enumerate(specs):
try:
bindist.push_or_raise(
spec,
url,
bindist.PushOptions(
force=args.force,
unsigned=args.unsigned,
key=args.key,
regenerate_index=args.update_index,
),
)
msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
if len(specs) == 1:
msg += f" to {push_url}"
tty.info(msg)
if total_specs > 1:
msg = f"[{i+1:{digits}}/{total_specs}] Pushed {format_spec(spec)}"
else:
msg = f"Pushed {format_spec(spec)} to {url}"
except bindist.NoOverwriteException:
skipped.append(_format_spec(spec))
tty.info(msg)
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(
e, (bindist.PickKeyException, bindist.NoKeyException)
):
raise
failed.append((_format_spec(spec), e))
except bindist.NoOverwriteException:
skipped.append(format_spec(spec))
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
raise
failed.append((format_spec(spec), e))
if skipped:
if len(specs) == 1:
@@ -443,341 +364,6 @@ def push_fn(args):
),
)
# Update the index if requested
# TODO: move the update-index logic out of bindist; it should run once after all
# specs are pushed, not once per spec.
if image_ref and len(skipped) < len(specs) and args.update_index:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
_update_index_oci(image_ref, tmpdir, pool)
def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
"""Get the spack tarball layer digests and size if it exists"""
try:
manifest, config = get_manifest_and_config_with_retry(image_ref)
return spack.oci.oci.Blob(
compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
size=manifest["layers"][-1]["size"],
)
except Exception:
return None
def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
# Create an oci.image.layer aka tarball of the package
compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
blob = spack.oci.oci.Blob(
Digest.from_sha256(compressed_tarfile_checksum),
Digest.from_sha256(tarfile_checksum),
os.path.getsize(filename),
)
# Upload the blob
upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)
# delete the file
os.unlink(filename)
return blob
def _retrieve_env_dict_from_config(config: dict) -> dict:
"""Retrieve the environment variables from the image config file.
Sets a default value for PATH if it is not present.
Args:
config (dict): The image config file.
Returns:
dict: The environment variables.
"""
env = {"PATH": "/bin:/usr/bin"}
if "Env" in config.get("config", {}):
for entry in config["config"]["Env"]:
key, value = entry.split("=", 1)
env[key] = value
return env
def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
name = spec.target.family.name
name_map = {"aarch64": "arm64", "x86_64": "amd64"}
return name_map.get(name, name)
def _put_manifest(
base_images: Dict[str, Tuple[dict, dict]],
checksums: Dict[str, spack.oci.oci.Blob],
spec: spack.spec.Spec,
image_ref: ImageReference,
tmpdir: str,
):
architecture = _archspec_to_gooarch(spec)
dependencies = list(
reversed(
list(
s
for s in spec.traverse(order="topo", deptype=("link", "run"), root=True)
if not s.external
)
)
)
base_manifest, base_config = base_images[architecture]
env = _retrieve_env_dict_from_config(base_config)
spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env)
# Create an oci.image.config file
config = copy.deepcopy(base_config)
# Add the diff ids of the dependencies
for s in dependencies:
config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
# Set the environment variables
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
# From the OCI v1.0 spec:
# > Any extra fields in the Image JSON struct are considered implementation
# > specific and MUST be ignored by any implementations which are unable to
# > interpret them.
# We use this to store the Spack spec, so we can use it to create an index.
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = 1
spec_dict["binary_cache_checksum"] = {
"hash_algorithm": "sha256",
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
}
config.update(spec_dict)
config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json")
with open(config_file, "w") as f:
json.dump(config, f, separators=(",", ":"))
config_file_checksum = Digest.from_sha256(
spack.util.crypto.checksum(hashlib.sha256, config_file)
)
# Upload the config file
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
oci_manifest = {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
"config": {
"mediaType": base_manifest["config"]["mediaType"],
"digest": str(config_file_checksum),
"size": os.path.getsize(config_file),
},
"layers": [
*(layer for layer in base_manifest["layers"]),
*(
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"digest": str(checksums[s.dag_hash()].compressed_digest),
"size": checksums[s.dag_hash()].size,
}
for s in dependencies
),
],
"annotations": {"org.opencontainers.image.description": spec.format()},
}
image_ref_for_spec = image_ref.with_tag(default_tag(spec))
# Finally upload the manifest
upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest)
# delete the config file
os.unlink(config_file)
return image_ref_for_spec
def _push_oci(
args,
image_ref: ImageReference,
installed_specs_with_deps: List[Spec],
tmpdir: str,
pool: multiprocessing.pool.Pool,
) -> List[str]:
"""Push specs to an OCI registry
Args:
args: The command line arguments.
image_ref: The image reference.
installed_specs_with_deps: The installed specs to push, excluding externals,
including deps, ordered from roots to leaves.
Returns:
List[str]: The list of skipped specs (already in the buildcache).
"""
# Reverse the order
installed_specs_with_deps = list(reversed(installed_specs_with_deps))
# The base image to use for the package. When not set, we use
# the OCI registry only for storage, and do not use any base image.
base_image_ref: Optional[ImageReference] = (
ImageReference.from_string(args.base_image) if args.base_image else None
)
# Spec dag hash -> blob
checksums: Dict[str, spack.oci.oci.Blob] = {}
# arch -> (manifest, config)
base_images: Dict[str, Tuple[dict, dict]] = {}
# Specs not uploaded because they already exist
skipped = []
if not args.force:
tty.info("Checking for existing specs in the buildcache")
to_be_uploaded = []
tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps)
available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
if maybe_blob is not None:
checksums[spec.dag_hash()] = maybe_blob
skipped.append(_format_spec(spec))
else:
to_be_uploaded.append(spec)
else:
to_be_uploaded = installed_specs_with_deps
if not to_be_uploaded:
return skipped
tty.info(
f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}"
)
# Upload blobs
new_blobs = pool.starmap(
_push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded)
)
# And update the spec to blob mapping
for spec, blob in zip(to_be_uploaded, new_blobs):
checksums[spec.dag_hash()] = blob
# Copy base image layers, probably fine to do sequentially.
for spec in to_be_uploaded:
architecture = _archspec_to_gooarch(spec)
# Get base image details, if we don't have them yet
if architecture in base_images:
continue
if base_image_ref is None:
base_images[architecture] = (default_manifest(), default_config(architecture, "linux"))
else:
base_images[architecture] = copy_missing_layers_with_retry(
base_image_ref, image_ref, architecture
)
# Upload manifests
tty.info("Uploading manifests")
pushed_image_ref = pool.starmap(
_put_manifest,
((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded),
)
# Print the image names of the top-level specs
for spec, ref in zip(to_be_uploaded, pushed_image_ref):
tty.info(f"Pushed {_format_spec(spec)} to {ref}")
return skipped
def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
# Don't allow recursion here, since Spack itself always uploads
# vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
_, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)
# Do very basic validation: if "spec" is a key in the config, it
# must be a Spec object too.
return config if "spec" in config else None
def _update_index_oci(
image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool
) -> None:
response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url()))
spack.oci.opener.ensure_status(response, 200)
tags = json.load(response)["tags"]
# Fetch all image config files in parallel
spec_dicts = pool.starmap(
_config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
)
# Populate the database
db_root_dir = os.path.join(tmpdir, "db_root")
db = bindist.BuildCacheDatabase(db_root_dir)
for spec_dict in spec_dicts:
spec = Spec.from_dict(spec_dict)
db.add(spec, directory_layout=None)
db.mark(spec, "in_buildcache", True)
# Create the index.json file
index_json_path = os.path.join(tmpdir, "index.json")
with open(index_json_path, "w") as f:
db._write_to_file(f)
# Create an empty config.json file
empty_config_json_path = os.path.join(tmpdir, "config.json")
with open(empty_config_json_path, "wb") as f:
f.write(b"{}")
# Upload the index.json file
index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)
# Upload the config.json file
empty_config_digest = Digest.from_sha256(
spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
)
upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)
# Push a manifest file that references the index.json file as a layer
# Notice that we push this as if it is an image, which it of course is not.
# When the ORAS spec becomes official, we can use that instead of a fake image.
# For now we just use the OCI image spec, so that we don't run into issues with
# automatic garbage collection of blobs that are not referenced by any image manifest.
oci_manifest = {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
# Config is just an empty {} file for now, and irrelevant
"config": {
"mediaType": "application/vnd.oci.image.config.v1+json",
"digest": str(empty_config_digest),
"size": os.path.getsize(empty_config_json_path),
},
# The buildcache index is the only layer; it is not actually a tarball, so we lie here.
"layers": [
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"digest": str(index_shasum),
"size": os.path.getsize(index_json_path),
}
],
}
upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
def install_fn(args):
"""install from a binary package"""
@@ -835,24 +421,15 @@ def check_fn(args: argparse.Namespace):
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
"""
if args.spec_file:
specs_arg = (
args.spec_file if os.path.sep in args.spec_file else os.path.join(".", args.spec_file)
)
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
f"Use `spack buildcache check {specs_arg}` instead."
"Use --spec instead."
)
elif args.spec:
specs_arg = args.spec
tty.warn(
"The flag `--spec` is deprecated and will be removed in Spack 0.23. "
f"Use `spack buildcache check {specs_arg}` instead."
)
else:
specs_arg = args.specs
if specs_arg:
specs = _matching_specs(spack.cmd.parse_specs(specs_arg))
specs = spack.cmd.parse_specs(args.spec or args.spec_file)
if specs:
specs = _matching_specs(specs)
else:
specs = spack.cmd.require_active_env("buildcache check").all_specs()
@@ -945,7 +522,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
local_path = os.path.join(tmpdir, os.path.basename(src_url))
try:
temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
temp_stage = Stage(src_url, path=os.path.dirname(local_path))
try:
temp_stage.create()
temp_stage.fetch()
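Stage here provides a scratch area for the download before the copy. A stdlib approximation of that create/fetch/move lifecycle (not Spack's Stage API):

import os
import shutil
import tempfile
import urllib.request

def copy_remote_file(src_url: str, dest_path: str) -> None:
    with tempfile.TemporaryDirectory() as tmpdir:   # "create" the stage
        local = os.path.join(tmpdir, os.path.basename(src_url))
        urllib.request.urlretrieve(src_url, local)  # "fetch" into it
        shutil.move(local, dest_path)               # move the result into place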
@@ -1039,20 +616,6 @@ def manifest_copy(manifest_file_list):
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
# Special case OCI images for now.
try:
image_ref = spack.oci.oci.image_from_mirror(mirror)
except ValueError:
image_ref = None
if image_ref:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
_update_index_oci(image_ref, tmpdir, pool)
return
# Otherwise, assume a normal mirror.
url = mirror.push_url
bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))
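The try/except above dispatches on mirror type. Assuming OCI mirrors are denoted by an oci:// scheme (an assumption for illustration; the hunk itself only shows the ValueError probe), the test reduces to:

from urllib.parse import urlparse

def is_oci_mirror(push_url: str) -> bool:
    # e.g. oci://ghcr.io/org/repo, as opposed to https://... or s3://...
    return urlparse(push_url).scheme == "oci"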


@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd
from spack.cmd.common import arguments
import spack.cmd.common.arguments as arguments
description = "change an existing spec in an environment"
section = "environments"


@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import re
import sys
@@ -20,6 +21,7 @@
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version
description = "checksum available versions of a package"
@@ -35,30 +37,30 @@ def setup_parser(subparser):
help="don't clean up staging area when command completes",
)
subparser.add_argument(
"--batch",
"-b",
"--batch",
action="store_true",
default=False,
help="don't ask which versions to checksum",
)
subparser.add_argument(
"--latest",
"-l",
"--latest",
action="store_true",
default=False,
help="checksum the latest available version",
)
subparser.add_argument(
"--preferred",
"-p",
"--preferred",
action="store_true",
default=False,
help="checksum the known Spack preferred version",
)
modes_parser = subparser.add_mutually_exclusive_group()
modes_parser.add_argument(
"--add-to-package",
"-a",
"--add-to-package",
action="store_true",
default=False,
help="add new versions to package",
@@ -66,26 +68,27 @@ def setup_parser(subparser):
modes_parser.add_argument(
"--verify", action="store_true", default=False, help="verify known package checksums"
)
subparser.add_argument("package", help="name or spec (e.g. `cmake` or `cmake@3.18`)")
arguments.add_common_arguments(subparser, ["package", "jobs"])
subparser.add_argument(
"versions",
nargs="*",
help="checksum these specific versions (if omitted, Spack searches for remote versions)",
)
arguments.add_common_arguments(subparser, ["jobs"])
subparser.epilog = (
"examples:\n"
" `spack checksum zlib@1.2` autodetects versions 1.2.0 to 1.2.13 from the remote\n"
" `spack checksum zlib 1.2.13` checksums exact version 1.2.13 directly without search\n"
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
)
def checksum(parser, args):
spec = spack.spec.Spec(args.package)
# Did the user pass 'package@version' string?
if len(args.versions) == 0 and "@" in args.package:
args.versions = [args.package.split("@")[1]]
args.package = args.package.split("@")[0]
# Make sure the user provided a package and not a URL
if not valid_fully_qualified_module_name(args.package):
tty.die("`spack checksum` accepts package names, not URLs.")
# Get the package we're going to generate checksums for
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
pkg = pkg_cls(spack.spec.Spec(args.package))
# Build a list of versions to checksum
versions = [Version(v) for v in args.versions]
# Define placeholder for remote versions.
@@ -149,10 +152,7 @@ def checksum(parser, args):
tty.die(f"Could not find any remote versions for {pkg.name}")
elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
filtered_url_dict = spack.stage.interactive_version_filter(
url_dict,
pkg.versions,
url_changes=url_changed_for_version,
initial_verion_filter=spec.versions,
url_dict, pkg.versions, url_changes=url_changed_for_version
)
if not filtered_url_dict:
exit(0)
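One detail about the option reordering earlier in this file: argparse derives dest from the first long option string wherever it appears, so writing ("--batch", "-b") instead of ("-b", "--batch") changes only the help output, not behavior. A quick check:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--batch", "-b", action="store_true")
assert parser.parse_args(["-b"]).batch is True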


@@ -16,7 +16,6 @@
import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev
import spack.environment.depfile
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util
@@ -607,9 +606,7 @@ def ci_rebuild(args):
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(
spack.environment.depfile.MakefileSpec(job_spec).safe_format(
"{name}-{version}-{hash}"
)
ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}")
),
],
spack_cmd + ["install"] + root_install_args,
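For context, the make target assembled above has the shape install-deps/<name>-<version>-<hash>; a toy rendering (safe_format itself additionally escapes characters that are unsafe in make target names):

spec = {"name": "zlib", "version": "1.2.13", "hash": "abcdef"}  # illustrative values
target = "install-deps/{name}-{version}-{hash}".format(**spec)
print(target)  # install-deps/zlib-1.2.13-abcdef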


@@ -12,13 +12,13 @@
import spack.bootstrap
import spack.caches
import spack.cmd.common.arguments as arguments
import spack.cmd.test
import spack.config
import spack.repo
import spack.stage
import spack.store
import spack.util.path
from spack.cmd.common import arguments
from spack.paths import lib_path, var_path
description = "remove temporary build files and/or downloaded archives"


@@ -796,9 +796,7 @@ def names(args: Namespace, out: IO) -> None:
commands = copy.copy(spack.cmd.all_commands())
if args.aliases:
aliases = spack.config.get("config:aliases")
if aliases:
commands.extend(aliases.keys())
commands.extend(spack.main.aliases.keys())
colify(commands, output=out)
@@ -814,10 +812,8 @@ def bash(args: Namespace, out: IO) -> None:
parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)
aliases_config = spack.config.get("config:aliases")
if aliases_config:
aliases = ";".join(f"{key}:{val}" for key, val in aliases_config.items())
out.write(f'SPACK_ALIASES="{aliases}"\n\n')
aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
out.write(f'SPACK_ALIASES="{aliases}"\n\n')
writer = BashCompletionWriter(parser.prog, out, args.aliases)
writer.write(parser)
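The alias serialization above, standalone: the config mapping is flattened into one key:value;key:value string that the shell completion script later splits. For example:

aliases = {"rm": "remove", "ls": "list"}  # stand-in for config:aliases
print('SPACK_ALIASES="{}"'.format(";".join(f"{k}:{v}" for k, v in aliases.items())))
# SPACK_ALIASES="rm:remove;ls:list"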


@@ -124,33 +124,6 @@ def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, deptype)
class ConfigScope(argparse.Action):
"""Pick the currently configured config scopes."""
def __init__(self, *args, **kwargs) -> None:
kwargs.setdefault("metavar", spack.config.SCOPES_METAVAR)
super().__init__(*args, **kwargs)
@property
def default(self):
return self._default() if callable(self._default) else self._default
@default.setter
def default(self, value):
self._default = value
@property
def choices(self):
return spack.config.scopes().keys()
@choices.setter
def choices(self, value):
pass
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)
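The removed ConfigScope action exploits the fact that argparse reads default and choices as plain attributes, so exposing them as properties defers their computation to parse time. A minimal self-contained sketch of the same trick:

import argparse

class LazyDefault(argparse.Action):
    """Action whose default is computed when parsing, not at parser construction."""

    @property
    def default(self):
        return self._default() if callable(self._default) else self._default

    @default.setter
    def default(self, value):
        self._default = value

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)

parser = argparse.ArgumentParser()
parser.add_argument("--scope", action=LazyDefault, default=lambda: "user")
assert parser.parse_args([]).scope == "user"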
def _cdash_reporter(namespace):
"""Helper function to create a CDash reporter. This function gets an early reference to the
argparse namespace under construction, so it can later use it to create the object.
@@ -384,11 +357,10 @@ def install_status():
"--install-status",
action="store_true",
default=True,
help=(
"show install status of packages\n"
"[+] installed [^] installed in an upstream\n"
" - not installed [-] missing dep of installed package\n"
),
help="show install status of packages\n\npackages can be: "
"installed [+], missing and needed by an installed package [-], "
"installed in an upstream instance [^], "
"or not installed (no annotation)",
)
@@ -571,7 +543,7 @@ def add_concretizer_args(subparser):
)
def add_connection_args(subparser, add_help):
def add_s3_connection_args(subparser, add_help):
subparser.add_argument(
"--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
)
@@ -587,8 +559,6 @@ def add_connection_args(subparser, add_help):
subparser.add_argument(
"--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
)
subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")
def use_buildcache(cli_arg_value):


@@ -1,30 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
from typing import List
import llnl.util.tty as tty
import spack.cmd
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
def confirm_action(specs: List[spack.spec.Spec], participle: str, noun: str):
"""Display the list of specs to be acted on and ask for confirmation.
Args:
        specs: specs to be acted on
participle: action expressed as a participle, e.g. "uninstalled"
noun: action expressed as a noun, e.g. "uninstallation"
"""
tty.msg(f"The following {len(specs)} packages will be {participle}:\n")
spack.cmd.display_specs(specs, **display_args)
print("")
answer = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not answer:
tty.msg(f"Aborting {noun}")
sys.exit(0)
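The deleted helper is essentially a confirm-or-exit prompt. A stdlib-only equivalent (plain print/input instead of Spack's tty):

import sys

def confirm_action(items, participle: str, noun: str) -> None:
    print(f"The following {len(items)} packages will be {participle}:\n")
    for item in items:
        print(f"    {item}")
    answer = input("\nDo you want to proceed? [y/N] ").strip().lower()
    if answer not in ("y", "yes"):
        print(f"Aborting {noun}")
        sys.exit(0)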


@@ -8,13 +8,13 @@
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.error
import spack.paths
import spack.spec
import spack.store
from spack import build_environment, traverse
from spack.cmd.common import arguments
from spack.context import Context
from spack.util.environment import dump_environment, pickle_environment


@@ -14,7 +14,6 @@
import spack.compilers
import spack.config
import spack.spec
from spack.cmd.common import arguments
description = "manage compilers"
section = "system"
@@ -24,30 +23,20 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")
scopes = spack.config.scopes()
# Find
find_parser = sp.add_parser(
"find",
aliases=["add"],
help="search the system for compilers to add to Spack configuration",
)
mixed_toolchain_group = find_parser.add_mutually_exclusive_group()
mixed_toolchain_group.add_argument(
"--mixed-toolchain",
action="store_true",
default=sys.platform == "darwin",
help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
)
mixed_toolchain_group.add_argument(
"--no-mixed-toolchain",
action="store_false",
dest="mixed_toolchain",
help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
)
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
find_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope("compilers"),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
@@ -58,15 +47,20 @@ def setup_parser(subparser):
)
remove_parser.add_argument("compiler_spec")
remove_parser.add_argument(
"--scope", action=arguments.ConfigScope, default=None, help="configuration scope to modify"
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=None,
help="configuration scope to modify",
)
# List
list_parser = sp.add_parser("list", help="list available compilers")
list_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_list_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)
@@ -75,8 +69,9 @@ def setup_parser(subparser):
info_parser.add_argument("compiler_spec")
info_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_list_scope(),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)
@@ -91,9 +86,7 @@ def compiler_find(args):
# Below scope=None because we want new compilers that don't appear
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(
paths, scope=None, mixed_toolchain=args.mixed_toolchain
)
new_compilers = spack.compilers.find_new_compilers(paths, scope=None)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
n = len(new_compilers)


@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.cmd.common import arguments
import spack.config
from spack.cmd.compiler import compiler_list
description = "list available compilers"
@@ -12,8 +12,13 @@
def setup_parser(subparser):
scopes = spack.config.scopes()
subparser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
help="configuration scope to read/modify",
)


@@ -10,6 +10,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import spack.cmd.common.arguments
import spack.config
import spack.environment as ev
import spack.repo
@@ -17,7 +18,6 @@
import spack.schema.packages
import spack.store
import spack.util.spack_yaml as syaml
from spack.cmd.common import arguments
from spack.util.editor import editor
description = "get and set configuration options"
@@ -26,9 +26,14 @@
def setup_parser(subparser):
scopes = spack.config.scopes()
# User can only choose one
subparser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
help="configuration scope to read/modify",
)
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="config_command")
@@ -96,13 +101,13 @@ def setup_parser(subparser):
setup_parser.add_parser = add_parser
update = sp.add_parser("update", help="update configuration files to the latest format")
arguments.add_common_arguments(update, ["yes_to_all"])
spack.cmd.common.arguments.add_common_arguments(update, ["yes_to_all"])
update.add_argument("section", help="section to update")
revert = sp.add_parser(
"revert", help="revert configuration files to their state before update"
)
arguments.add_common_arguments(revert, ["yes_to_all"])
spack.cmd.common.arguments.add_common_arguments(revert, ["yes_to_all"])
revert.add_argument("section", help="section to update")
@@ -402,9 +407,7 @@ def config_prefer_upstream(args):
pkgs = {}
for spec in pref_specs:
# Collect all the upstream compilers and versions for this package.
pkg = pkgs.get(spec.name, {"version": []})
all = pkgs.get("all", {"compiler": []})
pkgs["all"] = all
pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version.
@@ -415,8 +418,8 @@ def config_prefer_upstream(args):
pkg["version"].append(version)
compiler = str(spec.compiler)
if compiler not in all["compiler"]:
all["compiler"].append(compiler)
if compiler not in pkg["compiler"]:
pkg["compiler"].append(compiler)
# Get and list all the variants that differ from the default.
variants = []


@@ -64,7 +64,6 @@ class {class_name}({base_class_name}):
# maintainers("github_user1", "github_user2")
# FIXME: Add the SPDX identifier of the project's license below.
# See https://spdx.org/licenses/ for a list.
license("UNKNOWN")
{versions}
@@ -172,14 +171,6 @@ def configure_args(self):
return args"""
class CargoPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for cargo-based packages"""
base_class_name = "CargoPackage"
body_def = ""
class CMakePackageTemplate(PackageTemplate):
"""Provides appropriate overrides for CMake-based packages"""
@@ -194,14 +185,6 @@ def cmake_args(self):
return args"""
class GoPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for Go-module-based packages"""
base_class_name = "GoPackage"
body_def = ""
class LuaPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for LuaRocks-based packages"""
@@ -591,30 +574,28 @@ def __init__(self, name, *args, **kwargs):
templates = {
"autoreconf": AutoreconfPackageTemplate,
"autotools": AutotoolsPackageTemplate,
"bazel": BazelPackageTemplate,
"bundle": BundlePackageTemplate,
"cargo": CargoPackageTemplate,
"autoreconf": AutoreconfPackageTemplate,
"cmake": CMakePackageTemplate,
"generic": PackageTemplate,
"go": GoPackageTemplate,
"intel": IntelPackageTemplate,
"lua": LuaPackageTemplate,
"makefile": MakefilePackageTemplate,
"maven": MavenPackageTemplate,
"meson": MesonPackageTemplate,
"octave": OctavePackageTemplate,
"perlbuild": PerlbuildPackageTemplate,
"perlmake": PerlmakePackageTemplate,
"python": PythonPackageTemplate,
"bundle": BundlePackageTemplate,
"qmake": QMakePackageTemplate,
"maven": MavenPackageTemplate,
"scons": SconsPackageTemplate,
"waf": WafPackageTemplate,
"bazel": BazelPackageTemplate,
"python": PythonPackageTemplate,
"r": RPackageTemplate,
"racket": RacketPackageTemplate,
"perlmake": PerlmakePackageTemplate,
"perlbuild": PerlbuildPackageTemplate,
"octave": OctavePackageTemplate,
"ruby": RubyPackageTemplate,
"scons": SconsPackageTemplate,
"makefile": MakefilePackageTemplate,
"intel": IntelPackageTemplate,
"meson": MesonPackageTemplate,
"lua": LuaPackageTemplate,
"sip": SIPPackageTemplate,
"waf": WafPackageTemplate,
"generic": PackageTemplate,
}
@@ -697,8 +678,6 @@ def __call__(self, stage, url):
clues = [
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),


@@ -1,103 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import sys
from typing import List
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.confirmation as confirmation
import spack.environment as ev
import spack.spec
from spack.cmd.common import arguments
description = "remove specs from the concretized lockfile of an environment"
section = "environments"
level = "long"
# Arguments for display_specs when we find ambiguity
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
def setup_parser(subparser):
subparser.add_argument(
"--root", action="store_true", help="deconcretize only specific environment roots"
)
arguments.add_common_arguments(subparser, ["yes_to_all", "specs"])
subparser.add_argument(
"-a",
"--all",
action="store_true",
dest="all",
help="deconcretize ALL specs that match each supplied spec",
)
def get_deconcretize_list(
args: argparse.Namespace, specs: List[spack.spec.Spec], env: ev.Environment
) -> List[spack.spec.Spec]:
"""
Get list of environment roots to deconcretize
"""
env_specs = [s for _, s in env.concretized_specs()]
to_deconcretize = []
errors = []
for s in specs:
if args.root:
# find all roots matching given spec
to_deconc = [e for e in env_specs if e.satisfies(s)]
else:
# find all roots matching or depending on a matching spec
to_deconc = [e for e in env_specs if any(d.satisfies(s) for d in e.traverse())]
if len(to_deconc) < 1:
tty.warn(f"No matching specs to deconcretize for {s}")
elif len(to_deconc) > 1 and not args.all:
errors.append((s, to_deconc))
to_deconcretize.extend(to_deconc)
if errors:
for spec, matching in errors:
tty.error(f"{spec} matches multiple concrete specs:")
sys.stderr.write("\n")
spack.cmd.display_specs(matching, output=sys.stderr, **display_args)
sys.stderr.write("\n")
sys.stderr.flush()
tty.die("Use '--all' to deconcretize all matching specs, or be more specific")
return to_deconcretize
def deconcretize_specs(args, specs):
env = spack.cmd.require_active_env(cmd_name="deconcretize")
if args.specs:
deconcretize_list = get_deconcretize_list(args, specs, env)
else:
deconcretize_list = [s for _, s in env.concretized_specs()]
if not args.yes_to_all:
confirmation.confirm_action(deconcretize_list, "deconcretized", "deconcretization")
with env.write_transaction():
for spec in deconcretize_list:
env.deconcretize(spec)
env.write()
def deconcretize(parser, args):
if not args.specs and not args.all:
tty.die(
"deconcretize requires at least one spec argument.",
" Use `spack deconcretize --all` to deconcretize ALL specs.",
)
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
deconcretize_specs(args, specs)
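The root/dependency distinction in get_deconcretize_list, reduced to a runnable toy (Node stands in for a concrete spec; name matching stands in for satisfies):

from dataclasses import dataclass, field
from typing import List

@dataclass
class Node:
    name: str
    deps: List["Node"] = field(default_factory=list)

    def traverse(self):
        yield self
        for dep in self.deps:
            yield from dep.traverse()

zlib = Node("zlib")
curl = Node("curl", [zlib])

def matching_roots(roots, name, roots_only):
    if roots_only:  # --root: only roots that themselves match
        return [r for r in roots if r.name == name]
    # default: any root whose DAG contains a matching node
    return [r for r in roots if any(d.name == name for d in r.traverse())]

assert matching_roots([curl], "zlib", roots_only=True) == []
assert matching_roots([curl], "zlib", roots_only=False) == [curl]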


@@ -9,11 +9,11 @@
from llnl.util.tty.colify import colify
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.store
from spack.cmd.common import arguments
description = "show dependencies of a package"
section = "basic"


@@ -9,10 +9,10 @@
from llnl.util.tty.colify import colify
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments
description = "show packages that depend on another"
section = "basic"


@@ -20,9 +20,9 @@
from llnl.util.symlink import symlink
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
from spack.error import SpackError


@@ -9,9 +9,9 @@
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.repo
from spack.cmd.common import arguments
description = "developer build: build from code in current working directory"
section = "build"
@@ -99,7 +99,10 @@ def dev_build(self, args):
spec = specs[0]
if not spack.repo.PATH.exists(spec.name):
raise spack.repo.UnknownPackageError(spec.name)
tty.die(
"No package for '{0}' was found.".format(spec.name),
" Use `spack create` to create a new package",
)
if not spec.versions.concrete_range_as_version:
tty.die(


@@ -8,10 +8,10 @@
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.spec
import spack.util.path
import spack.version
from spack.cmd.common import arguments
from spack.error import SpackError
description = "add a spec to an environment's dev-build information"


@@ -10,11 +10,11 @@
from llnl.util.tty.color import cprint, get_color_when
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.solver.asp as asp
import spack.util.environment
import spack.util.spack_json as sjson
from spack.cmd.common import arguments
description = "compare two specs"
section = "basic"
@@ -200,8 +200,6 @@ def diff(parser, args):
specs = []
for spec in spack.cmd.parse_specs(args.specs):
# If the spec has a hash, check it before disambiguating
spec.replace_hash()
if spec.concrete:
specs.append(spec)
else:


@@ -43,7 +43,10 @@ def edit_package(name, repo_path, namespace):
if not os.access(path, os.R_OK):
tty.die("Insufficient permissions on '%s'!" % path)
else:
raise spack.repo.UnknownPackageError(spec.name)
tty.die(
"No package for '{0}' was found.".format(spec.name),
" Use `spack create` to create a new package",
)
editor(path)


@@ -5,7 +5,6 @@
import argparse
import os
import shlex
import shutil
import sys
import tempfile
@@ -20,6 +19,7 @@
import spack.cmd
import spack.cmd.common
import spack.cmd.common.arguments
import spack.cmd.common.arguments as arguments
import spack.cmd.install
import spack.cmd.modules
import spack.cmd.uninstall
@@ -30,7 +30,6 @@
import spack.schema.env
import spack.spec
import spack.tengine
from spack.cmd.common import arguments
from spack.util.environment import EnvironmentModifications
description = "manage virtual environments"
@@ -145,13 +144,10 @@ def create_temp_env_directory():
return tempfile.mkdtemp(prefix="spack-")
def _tty_info(msg):
    """tty.info-like function that prints the equivalent printf statement for eval."""
decorated = f'{colorize("@*b{==>}")} {msg}\n'
print(f"printf {shlex.quote(decorated)};")
def env_activate(args):
if not args.activate_env and not args.dir and not args.temp:
tty.die("spack env activate requires an environment name, directory, or --temp")
if not args.shell:
spack.cmd.common.shell_init_instructions(
"spack env activate", " eval `spack env activate {sh_arg} [...]`"
@@ -164,25 +160,12 @@ def env_activate(args):
env_name_or_dir = args.activate_env or args.dir
# When executing `spack env activate` without further arguments, activate
# the default environment. It's created when it doesn't exist yet.
if not env_name_or_dir and not args.temp:
short_name = "default"
if not ev.exists(short_name):
ev.create(short_name)
action = "Created and activated"
else:
action = "Activated"
env_path = ev.root(short_name)
_tty_info(f"{action} default environment in {env_path}")
# Temporary environment
elif args.temp:
if args.temp:
env = create_temp_env_directory()
env_path = os.path.abspath(env)
short_name = os.path.basename(env_path)
ev.create_in_dir(env).write(regenerate=False)
_tty_info(f"Created and activated temporary environment in {env_path}")
# Managed environment
elif ev.exists(env_name_or_dir) and not args.dir:
@@ -397,33 +380,28 @@ def env_remove(args):
and manifests embedded in repositories should be removed manually.
"""
read_envs = []
bad_envs = []
for env_name in args.rm_env:
try:
env = ev.read(env_name)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
bad_envs.append(env_name)
env = ev.read(env_name)
read_envs.append(env)
if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False)
envs = string.comma_and(args.rm_env)
answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
answer = tty.get_yes_or_no(
"Really remove %s %s?"
% (
string.plural(len(args.rm_env), "environment", show_n=False),
string.comma_and(args.rm_env),
),
default=False,
)
if not answer:
tty.die("Will not remove any environments")
for env in read_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")
env.destroy()
tty.msg(f"Successfully removed environment '{name}'")
tty.die("Environment %s can't be removed while activated." % env.name)
for bad_env_name in bad_envs:
shutil.rmtree(
spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
)
tty.msg(f"Successfully removed environment '{bad_env_name}'")
env.destroy()
tty.msg("Successfully removed environment '%s'" % env.name)
#
@@ -570,8 +548,8 @@ def env_update_setup_parser(subparser):
def env_update(args):
manifest_file = ev.manifest_file(args.update_env)
backup_file = manifest_file + ".bkp"
needs_update = not ev.is_latest_format(manifest_file)
if not needs_update:
tty.msg('No update needed for the environment "{0}"'.format(args.update_env))
return
@@ -689,31 +667,18 @@ def env_depfile(args):
# Currently only make is supported.
spack.cmd.require_active_env(cmd_name="env depfile")
env = ev.active_environment()
# What things do we build when running make? By default, we build the
# root specs. If specific specs are provided as input, we build those.
filter_specs = spack.cmd.parse_specs(args.specs) if args.specs else None
template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
model = depfile.MakefileModel.from_env(
env,
ev.active_environment(),
filter_specs=filter_specs,
pkg_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[0]),
dep_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[1]),
make_prefix=args.make_prefix,
jobserver=args.jobserver,
)
# Warn in case we're generating a depfile for an empty environment. We don't automatically
# concretize; the user should do that explicitly. Could be changed in the future if requested.
if model.empty:
if not env.user_specs:
tty.warn("no specs in the environment")
elif filter_specs is not None:
tty.warn("no concrete matching specs found in environment")
else:
tty.warn("environment is not concretized. Run `spack concretize` first")
makefile = template.render(model.to_dict())
# Finally write to stdout/file.


@@ -10,10 +10,10 @@
from llnl.util.tty.colify import colify
import spack.cmd as cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments
description = "list extensions for package"
section = "extensions"


@@ -14,12 +14,12 @@
import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
import spack.util.environment
from spack.cmd.common import arguments
description = "manage external packages in Spack configuration"
section = "config"
@@ -29,6 +29,8 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")
scopes = spack.config.scopes()
find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
find_parser.add_argument(
"--not-buildable",
@@ -46,14 +48,15 @@ def setup_parser(subparser):
)
find_parser.add_argument(
"--scope",
action=arguments.ConfigScope,
default=lambda: spack.config.default_modify_scope("packages"),
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope("packages"),
help="configuration scope to modify",
)
find_parser.add_argument(
"--all", action="store_true", help="search for all packages that Spack knows about"
)
arguments.add_common_arguments(find_parser, ["tags", "jobs"])
spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
find_parser.add_argument("packages", nargs=argparse.REMAINDER)
find_parser.epilog = (
'The search is by default on packages tagged with the "build-tools" or '


@@ -6,11 +6,11 @@
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.repo
import spack.traverse
from spack.cmd.common import arguments
description = "fetch archives for packages"
section = "build"


@@ -12,9 +12,9 @@
import spack.bootstrap
import spack.cmd as cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.repo
from spack.cmd.common import arguments
from spack.database import InstallStatuses
description = "list and search installed packages"


@@ -6,7 +6,6 @@
import llnl.util.tty as tty
import spack.cmd.common.arguments
import spack.cmd.common.confirmation
import spack.cmd.uninstall
import spack.environment as ev
import spack.store
@@ -42,6 +41,6 @@ def gc(parser, args):
return
if not args.yes_to_all:
spack.cmd.common.confirmation.confirm_action(specs, "uninstalled", "uninstallation")
spack.cmd.uninstall.confirm_removal(specs)
spack.cmd.uninstall.do_uninstall(specs, force=False)


@@ -7,11 +7,11 @@
import os
import spack.binary_distribution
import spack.cmd.common.arguments as arguments
import spack.mirror
import spack.paths
import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments
description = "handle GPG actions for spack"
section = "packaging"


@@ -5,10 +5,10 @@
from llnl.util import tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.store
from spack.cmd.common import arguments
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
description = "generate graphs of package dependency relationships"
@@ -61,7 +61,7 @@ def graph(parser, args):
args.dot = True
env = ev.active_environment()
if env:
specs = env.concrete_roots()
specs = env.all_specs()
else:
specs = spack.store.STORE.db.query()


@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
import textwrap
from itertools import zip_longest
@@ -11,13 +10,12 @@
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test
import spack.repo
import spack.spec
import spack.version
from spack.cmd.common import arguments
from spack.package_base import preferred_version
description = "get detailed information on a particular package"
@@ -55,7 +53,6 @@ def setup_parser(subparser):
("--tags", print_tags.__doc__),
("--tests", print_tests.__doc__),
("--virtuals", print_virtuals.__doc__),
("--variants-by-name", "list variants in strict name order; don't group by condition"),
]
for opt, help_comment in options:
subparser.add_argument(opt, action="store_true", help=help_comment)
@@ -80,10 +77,35 @@ def license(s):
class VariantFormatter:
def __init__(self, pkg):
self.variants = pkg.variants
def __init__(self, variants):
self.variants = variants
self.headers = ("Name [Default]", "When", "Allowed values", "Description")
# Formats
fmt_name = "{0} [{1}]"
# Initialize column widths with the length of the
# corresponding headers, as they cannot be shorter
# than that
self.column_widths = [len(x) for x in self.headers]
# Expand columns based on max line lengths
for k, e in variants.items():
v, w = e
candidate_max_widths = (
len(fmt_name.format(k, self.default(v))), # Name [Default]
len(str(w)),
len(v.allowed_values), # Allowed values
len(v.description), # Description
)
self.column_widths = (
max(self.column_widths[0], candidate_max_widths[0]),
max(self.column_widths[1], candidate_max_widths[1]),
max(self.column_widths[2], candidate_max_widths[2]),
max(self.column_widths[3], candidate_max_widths[3]),
)
# Don't let name or possible values be less than max widths
_, cols = tty.terminal_size()
max_name = min(self.column_widths[0], 30)
@@ -115,8 +137,6 @@ def default(self, v):
def lines(self):
if not self.variants:
yield " None"
return
else:
yield " " + self.fmt % self.headers
underline = tuple([w * "=" for w in self.column_widths])
@@ -139,7 +159,7 @@ def lines(self):
yield " " + self.fmt % t
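The width bookkeeping in VariantFormatter, in miniature: every column starts at its header's width and grows to fit the widest cell:

headers = ("Name [Default]", "When", "Allowed values", "Description")
rows = [("shared [on]", "--", "on, off", "Build shared libraries")]

widths = [max(len(header), *(len(row[i]) for row in rows))
          for i, header in enumerate(headers)]
fmt = "  ".join("{:%d}" % w for w in widths)
print(fmt.format(*headers))
print(fmt.format(*rows[0]))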
def print_dependencies(pkg, args):
def print_dependencies(pkg):
"""output build, link, and run package dependencies"""
for deptype in ("build", "link", "run"):
@@ -152,7 +172,7 @@ def print_dependencies(pkg, args):
color.cprint(" None")
def print_detectable(pkg, args):
def print_detectable(pkg):
"""output information on external detection"""
color.cprint("")
@@ -180,7 +200,7 @@ def print_detectable(pkg, args):
color.cprint(" False")
def print_maintainers(pkg, args):
def print_maintainers(pkg):
"""output package maintainers"""
if len(pkg.maintainers) > 0:
@@ -189,7 +209,7 @@ def print_maintainers(pkg, args):
color.cprint(section_title("Maintainers: ") + mnt)
def print_phases(pkg, args):
def print_phases(pkg):
"""output installation phases"""
if hasattr(pkg.builder, "phases") and pkg.builder.phases:
@@ -201,7 +221,7 @@ def print_phases(pkg, args):
color.cprint(phase_str)
def print_tags(pkg, args):
def print_tags(pkg):
"""output package tags"""
color.cprint("")
@@ -213,7 +233,7 @@ def print_tags(pkg, args):
color.cprint(" None")
def print_tests(pkg, args):
def print_tests(pkg):
"""output relevant build-time and stand-alone tests"""
# Some built-in base packages (e.g., Autotools) define callback (e.g.,
@@ -251,171 +271,18 @@ def print_tests(pkg, args):
color.cprint(" None")
def _fmt_value(v):
if v is None or isinstance(v, bool):
return str(v).lower()
else:
return str(v)
def _fmt_name_and_default(variant):
"""Print colorized name [default] for a variant."""
return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}")
def _fmt_when(when, indent):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}")
def _fmt_variant_description(variant, width, indent):
"""Format a variant's description, preserving explicit line breaks."""
return "\n".join(
textwrap.fill(
line, width=width, initial_indent=indent * " ", subsequent_indent=indent * " "
)
for line in variant.description.split("\n")
)
def _fmt_variant(variant, max_name_default_len, indent, when=None, out=None):
out = out or sys.stdout
_, cols = tty.terminal_size()
name_and_default = _fmt_name_and_default(variant)
name_default_len = color.clen(name_and_default)
values = variant.values
if not isinstance(variant.values, (tuple, list, spack.variant.DisjointSetsOfValues)):
values = [variant.values]
# put 'none' first, sort the rest by value
sorted_values = sorted(values, key=lambda v: (v != "none", v))
pad = 4 # min padding between 'name [default]' and values
value_indent = (indent + max_name_default_len + pad) * " " # left edge of values
# This preserves any formatting (i.e., newlines) from how the description was
# written in package.py, but still wraps long lines for small terminals.
# This allows some packages to provide detailed help on their variants (see, e.g., gasnet).
formatted_values = "\n".join(
textwrap.wrap(
f"{', '.join(_fmt_value(v) for v in sorted_values)}",
width=cols - 2,
initial_indent=value_indent,
subsequent_indent=value_indent,
)
)
formatted_values = formatted_values[indent + name_default_len + pad :]
# name [default] value1, value2, value3, ...
padding = pad * " "
color.cprint(f"{indent * ' '}{name_and_default}{padding}@c{{{formatted_values}}}", stream=out)
# when <spec>
description_indent = indent + 4
if when is not None and when != spack.spec.Spec():
out.write(_fmt_when(when, description_indent - 2))
out.write("\n")
# description, preserving explicit line breaks from the way it's written in the package file
out.write(_fmt_variant_description(variant, cols - 2, description_indent))
out.write("\n")
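The description handling above hinges on wrapping each source line separately, so explicit breaks written in package.py survive re-wrapping. In isolation:

import textwrap

def fill_preserving_breaks(text: str, width: int, indent: int) -> str:
    pad = indent * " "
    return "\n".join(
        textwrap.fill(line, width=width, initial_indent=pad, subsequent_indent=pad)
        for line in text.split("\n")
    )

print(fill_preserving_breaks("One explicit line.\nAnother, kept separate.", 30, 4))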
def _variants_by_name_when(pkg):
"""Adaptor to get variants keyed by { name: { when: { [Variant...] } }."""
# TODO: replace with pkg.variants_by_name(when=True) when unified directive dicts are merged.
variants = {}
for name, (variant, whens) in sorted(pkg.variants.items()):
for when in whens:
variants.setdefault(name, {}).setdefault(when, []).append(variant)
return variants
def _variants_by_when_name(pkg):
"""Adaptor to get variants keyed by { when: { name: Variant } }"""
# TODO: replace with pkg.variants when unified directive dicts are merged.
variants = {}
for name, (variant, whens) in pkg.variants.items():
for when in whens:
variants.setdefault(when, {})[name] = variant
return variants
def _print_variants_header(pkg):
def print_variants(pkg):
"""output variants"""
if not pkg.variants:
print(" None")
return
color.cprint("")
color.cprint(section_title("Variants:"))
variants_by_name = _variants_by_name_when(pkg)
# Calculate the max length of the "name [default]" part of the variant display
# This lets us know where to print variant values.
max_name_default_len = max(
color.clen(_fmt_name_and_default(variant))
for name, when_variants in variants_by_name.items()
for variants in when_variants.values()
for variant in variants
)
return max_name_default_len, variants_by_name
formatter = VariantFormatter(pkg.variants)
for line in formatter.lines:
color.cprint(color.cescape(line))
def _unconstrained_ver_first(item):
"""sort key that puts specs with open version ranges first"""
spec, _ = item
return (spack.version.any_version not in spec.versions, spec)
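The sort key above in plain form: False orders before True, so entries failing the predicate float to the front:

versions = ["1.0", ":", "2.0"]  # ":" standing in for an open version range
print(sorted(versions, key=lambda v: (v != ":", v)))
# [':', '1.0', '2.0'] -- the open range sorts first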
def print_variants_grouped_by_when(pkg):
max_name_default_len, _ = _print_variants_header(pkg)
indent = 4
variants = _variants_by_when_name(pkg)
for when, variants_by_name in sorted(variants.items(), key=_unconstrained_ver_first):
padded_values = max_name_default_len + 4
start_indent = indent
if when != spack.spec.Spec():
sys.stdout.write("\n")
sys.stdout.write(_fmt_when(when, indent))
sys.stdout.write("\n")
# indent names slightly inside 'when', but line up values
padded_values -= 2
start_indent += 2
for name, variant in sorted(variants_by_name.items()):
_fmt_variant(variant, padded_values, start_indent, None, out=sys.stdout)
def print_variants_by_name(pkg):
max_name_default_len, variants_by_name = _print_variants_header(pkg)
max_name_default_len += 4
indent = 4
for name, when_variants in variants_by_name.items():
for when, variants in sorted(when_variants.items(), key=_unconstrained_ver_first):
for variant in variants:
_fmt_variant(variant, max_name_default_len, indent, when, out=sys.stdout)
sys.stdout.write("\n")
def print_variants(pkg, args):
"""output variants"""
if args.variants_by_name:
print_variants_by_name(pkg)
else:
print_variants_grouped_by_when(pkg)
def print_versions(pkg, args):
def print_versions(pkg):
"""output versions"""
color.cprint("")
@@ -433,24 +300,18 @@ def print_versions(pkg, args):
pad = padder(pkg.versions, 4)
preferred = preferred_version(pkg)
url = ""
if pkg.has_code:
url = fs.for_package_version(pkg, preferred)
def get_url(version):
try:
return fs.for_package_version(pkg, version)
except spack.fetch_strategy.InvalidArgsError:
return "No URL"
url = get_url(preferred) if pkg.has_code else ""
line = version(" {0}".format(pad(preferred))) + color.cescape(url)
color.cwrite(line)
print()
color.cprint(line)
safe = []
deprecated = []
for v in reversed(sorted(pkg.versions)):
if pkg.has_code:
url = get_url(v)
url = fs.for_package_version(pkg, v)
if pkg.versions[v].get("deprecated", False):
deprecated.append((v, url))
else:
@@ -468,7 +329,7 @@ def get_url(version):
color.cprint(line)
def print_virtuals(pkg, args):
def print_virtuals(pkg):
"""output virtual packages"""
color.cprint("")
@@ -491,7 +352,7 @@ def print_virtuals(pkg, args):
color.cprint(" None")
def print_licenses(pkg, args):
def print_licenses(pkg):
"""Output the licenses of the project."""
color.cprint("")
@@ -523,8 +384,7 @@ def info(parser, args):
else:
color.cprint(" None")
if getattr(pkg, "homepage"):
color.cprint(section_title("Homepage: ") + pkg.homepage)
color.cprint(section_title("Homepage: ") + pkg.homepage)
# Now output optional information in expected order
sections = [
@@ -541,6 +401,6 @@ def info(parser, args):
]
for print_it, func in sections:
if print_it:
func(pkg, args)
func(pkg)
color.cprint("")


@@ -14,6 +14,7 @@
import spack.build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.fetch_strategy
@@ -22,7 +23,6 @@
import spack.report
import spack.spec
import spack.store
from spack.cmd.common import arguments
from spack.error import SpackError
from spack.installer import PackageInstaller
@@ -162,8 +162,8 @@ def setup_parser(subparser):
"--no-check-signature",
action="store_true",
dest="unsigned",
default=None,
help="do not check signatures of binary packages (override mirror config)",
default=False,
help="do not check signatures of binary packages",
)
subparser.add_argument(
"--show-log-on-error",


@@ -15,9 +15,9 @@
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.repo
from spack.cmd.common import arguments
from spack.version import VersionList
description = "list and search available packages"


@@ -8,12 +8,12 @@
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.find
import spack.environment as ev
import spack.store
import spack.user_environment as uenv
import spack.util.environment
from spack.cmd.common import arguments
description = "add package to the user environment"
section = "user environment"

Some files were not shown because too many files have changed in this diff.