Compare commits
3 Commits
bugfix/com
...
package-me
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a8a776b5b7 | ||
|
|
3b859363cb | ||
|
|
e3f3e3943c |
3
.github/workflows/bootstrap.yml
vendored
3
.github/workflows/bootstrap.yml
vendored
@@ -159,9 +159,6 @@ jobs:
|
||||
brew install cmake bison@2.7 tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
|
||||
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
|
||||
46
.github/workflows/build-containers.yml
vendored
46
.github/workflows/build-containers.yml
vendored
@@ -38,11 +38,12 @@ jobs:
|
||||
# Meaning of the various items in the matrix list
|
||||
# 0: Container name (e.g. ubuntu-bionic)
|
||||
# 1: Platforms to build for
|
||||
# 2: Base image (e.g. ubuntu:22.04)
|
||||
# 2: Base image (e.g. ubuntu:18.04)
|
||||
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
|
||||
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
|
||||
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
|
||||
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
|
||||
[ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
|
||||
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
|
||||
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
|
||||
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
|
||||
@@ -57,20 +58,18 @@ jobs:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
|
||||
|
||||
- uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
|
||||
id: docker_meta
|
||||
with:
|
||||
images: |
|
||||
ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
|
||||
${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
|
||||
tags: |
|
||||
type=schedule,pattern=nightly
|
||||
type=schedule,pattern=develop
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{major}}
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
container="${{ matrix.dockerfile[0] }}:latest"
|
||||
echo "container=${container}" >> $GITHUB_ENV
|
||||
echo "versioned=${container}" >> $GITHUB_ENV
|
||||
|
||||
# On a new release create a container with the same tag as the release.
|
||||
- name: Set Container Tag on Release
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
|
||||
echo "versioned=${versioned}" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate the Dockerfile
|
||||
env:
|
||||
@@ -93,13 +92,13 @@ jobs:
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
|
||||
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
|
||||
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
|
||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -107,18 +106,21 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
|
||||
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09
|
||||
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
tags: ${{ steps.docker_meta.outputs.tags }}
|
||||
labels: ${{ steps.docker_meta.outputs.labels }}
|
||||
tags: |
|
||||
spack/${{ env.container }}
|
||||
spack/${{ env.versioned }}
|
||||
ghcr.io/spack/${{ env.container }}
|
||||
ghcr.io/spack/${{ env.versioned }}
|
||||
|
||||
2
.github/workflows/style/requirements.txt
vendored
2
.github/workflows/style/requirements.txt
vendored
@@ -1,4 +1,4 @@
|
||||
black==23.10.1
|
||||
black==23.9.1
|
||||
clingo==5.6.2
|
||||
flake8==6.1.0
|
||||
isort==5.12.0
|
||||
|
||||
30
CHANGELOG.md
30
CHANGELOG.md
@@ -1,33 +1,3 @@
|
||||
# v0.20.3 (2023-10-31)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix a bug where `spack mirror set-url` would drop configured connection info (reverts #34210)
|
||||
- Fix a minor issue with package hash computation for Python 3.12 (#40328)
|
||||
|
||||
|
||||
# v0.20.2 (2023-10-03)
|
||||
|
||||
## Features in this release
|
||||
|
||||
Spack now supports Python 3.12 (#40155)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Improve escaping in Tcl module files (#38375)
|
||||
- Make repo cache work on repositories with zero mtime (#39214)
|
||||
- Ignore errors for newer, incompatible buildcache version (#40279)
|
||||
- Print an error when git is required, but missing (#40254)
|
||||
- Ensure missing build dependencies get installed when using `spack install --overwrite` (#40252)
|
||||
- Fix an issue where Spack freezes when the build process unexpectedly exits (#39015)
|
||||
- Fix a bug where installation failures cause an unrelated `NameError` to be thrown (#39017)
|
||||
- Fix an issue where Spack package versions would be incorrectly derived from git tags (#39414)
|
||||
- Fix a bug triggered when file locking fails internally (#39188)
|
||||
- Prevent "spack external find" to error out when a directory cannot be accessed (#38755)
|
||||
- Fix multiple performance regressions in environments (#38771)
|
||||
- Add more ignored modules to `pyproject.toml` for `mypy` (#38769)
|
||||
|
||||
|
||||
# v0.20.1 (2023-07-10)
|
||||
|
||||
## Spack Bugfixes
|
||||
|
||||
@@ -66,7 +66,7 @@ Resources:
|
||||
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
|
||||
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
|
||||
* [**Github Discussions**](https://github.com/spack/spack/discussions):
|
||||
not just for discussions, but also Q&A.
|
||||
not just for discussions, also Q&A.
|
||||
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
|
||||
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
|
||||
`@mention` us!
|
||||
|
||||
@@ -229,11 +229,3 @@ config:
|
||||
flags:
|
||||
# Whether to keep -Werror flags active in package builds.
|
||||
keep_werror: 'none'
|
||||
|
||||
# A mapping of aliases that can be used to define new commands. For instance,
|
||||
# `sp: spec -I` will define a new command `sp` that will execute `spec` with
|
||||
# the `-I` argument. Aliases cannot override existing commands.
|
||||
aliases:
|
||||
concretise: concretize
|
||||
containerise: containerize
|
||||
rm: remove
|
||||
|
||||
@@ -46,12 +46,10 @@ modules:
|
||||
tcl:
|
||||
all:
|
||||
autoload: direct
|
||||
hide_implicits: true
|
||||
|
||||
# Default configurations if lmod is enabled
|
||||
lmod:
|
||||
all:
|
||||
autoload: direct
|
||||
hide_implicits: true
|
||||
hierarchy:
|
||||
- mpi
|
||||
|
||||
@@ -1526,30 +1526,6 @@ any MPI implementation will do. If another package depends on
|
||||
error. Likewise, if you try to plug in some package that doesn't
|
||||
provide MPI, Spack will raise an error.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Explicit binding of virtual dependencies
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
There are packages that provide more than just one virtual dependency. When interacting with them, users
|
||||
might want to utilize just a subset of what they could provide, and use other providers for virtuals they
|
||||
need.
|
||||
|
||||
It is possible to be more explicit and tell Spack which dependency should provide which virtual, using a
|
||||
special syntax:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack spec strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas
|
||||
|
||||
Concretizing the spec above produces the following DAG:
|
||||
|
||||
.. figure:: images/strumpack_virtuals.svg
|
||||
:scale: 60 %
|
||||
:align: center
|
||||
|
||||
where ``intel-parallel-studio`` *could* provide ``mpi``, ``lapack``, and ``blas`` but is used only for the former. The ``lapack``
|
||||
and ``blas`` dependencies are satisfied by ``openblas``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Specifying Specs by Hash
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
@@ -155,182 +155,16 @@ List of popular build caches
|
||||
|
||||
* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_
|
||||
|
||||
|
||||
----------
|
||||
Relocation
|
||||
----------
|
||||
|
||||
When using buildcaches across different machines, it is likely that the install
|
||||
root will be different from the one used to build the binaries.
|
||||
|
||||
To address this issue, Spack automatically relocates all paths encoded in binaries
|
||||
and scripts to their new location upon install.
|
||||
|
||||
Note that there are some cases where this is not possible: if binaries are built in
|
||||
a relatively short path, and then installed to a longer path, there may not be enough
|
||||
space in the binary to encode the new path. In this case, Spack will fail to install
|
||||
the package from the build cache, and a source build is required.
|
||||
|
||||
To reduce the likelihood of this happening, it is highly recommended to add padding to
|
||||
the install root during the build, as specified in the :ref:`config <config-yaml>`
|
||||
section of the configuration:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
config:
|
||||
install_tree:
|
||||
root: /opt/spack
|
||||
padded_length: 128
|
||||
|
||||
|
||||
|
||||
-----------------------------------------
|
||||
OCI / Docker V2 registries as build cache
|
||||
-----------------------------------------
|
||||
|
||||
Spack can also use OCI or Docker V2 registries such as Dockerhub, Quay.io,
|
||||
Github Packages, GitLab Container Registry, JFrog Artifactory, and others
|
||||
as build caches. This is a convenient way to share binaries using public
|
||||
infrastructure, or to cache Spack built binaries in Github Actions and
|
||||
GitLab CI.
|
||||
|
||||
To get started, configure an OCI mirror using ``oci://`` as the scheme,
|
||||
and optionally specify a username and password (or personal access token):
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
|
||||
|
||||
Spack follows the naming conventions of Docker, with Dockerhub as the default
|
||||
registry. To use Dockerhub, you can omit the registry domain:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
|
||||
|
||||
From here, you can use the mirror as any other build cache:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack buildcache push my_registry <specs...> # push to the registry
|
||||
$ spack install <specs...> # install from the registry
|
||||
|
||||
A unique feature of buildcaches on top of OCI registries is that it's incredibly
|
||||
easy to generate get a runnable container image with the binaries installed. This
|
||||
is a great way to make applications available to users without requiring them to
|
||||
install Spack -- all you need is Docker, Podman or any other OCI-compatible container
|
||||
runtime.
|
||||
|
||||
To produce container images, all you need to do is add the ``--base-image`` flag
|
||||
when pushing to the build cache:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack buildcache push --base-image ubuntu:20.04 my_registry ninja
|
||||
Pushed to example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
|
||||
|
||||
$ docker run -it example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
|
||||
root@e4c2b6f6b3f4:/# ninja --version
|
||||
1.11.1
|
||||
|
||||
If ``--base-image`` is not specified, distroless images are produced. In practice,
|
||||
you won't be able to run these as containers, since they don't come with libc and
|
||||
other system dependencies. However, they are still compatible with tools like
|
||||
``skopeo``, ``podman``, and ``docker`` for pulling and pushing.
|
||||
|
||||
.. note::
|
||||
The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
|
||||
``max depth exceeded`` error may be produced when pulling the image. There
|
||||
are `alternative drivers <https://docs.docker.com/storage/storagedriver/>`_.
|
||||
|
||||
------------------------------------
|
||||
Spack build cache for GitHub Actions
|
||||
------------------------------------
|
||||
|
||||
To significantly speed up Spack in GitHub Actions, binaries can be cached in
|
||||
GitHub Packages. This service is an OCI registry that can be linked to a GitHub
|
||||
repository.
|
||||
|
||||
A typical workflow is to include a ``spack.yaml`` environment in your repository
|
||||
that specifies the packages to install, the target architecture, and the build
|
||||
cache to use under ``mirrors``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
specs:
|
||||
- python@3.11
|
||||
config:
|
||||
install_tree:
|
||||
root: /opt/spack
|
||||
padded_length: 128
|
||||
packages:
|
||||
all:
|
||||
require: target=x86_64_v2
|
||||
mirrors:
|
||||
local-buildcache: oci://ghcr.io/<organization>/<repository>
|
||||
|
||||
A GitHub action can then be used to install the packages and push them to the
|
||||
build cache:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
name: Install Spack packages
|
||||
|
||||
on: push
|
||||
|
||||
env:
|
||||
SPACK_COLOR: always
|
||||
|
||||
jobs:
|
||||
example:
|
||||
runs-on: ubuntu-22.04
|
||||
permissions:
|
||||
packages: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Checkout Spack
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: spack/spack
|
||||
path: spack
|
||||
|
||||
- name: Setup Spack
|
||||
run: echo "$PWD/spack/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Concretize
|
||||
run: spack -e . concretize
|
||||
|
||||
- name: Install
|
||||
run: spack -e . install --no-check-signature
|
||||
|
||||
- name: Run tests
|
||||
run: ./my_view/bin/python3 -c 'print("hello world")'
|
||||
|
||||
- name: Push to buildcache
|
||||
run: |
|
||||
spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache
|
||||
spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
The first time this action runs, it will build the packages from source and
|
||||
push them to the build cache. Subsequent runs will pull the binaries from the
|
||||
build cache. The concretizer will ensure that prebuilt binaries are favored
|
||||
over source builds.
|
||||
|
||||
The build cache entries appear in the GitHub Packages section of your repository,
|
||||
and contain instructions for pulling and running them with ``docker`` or ``podman``.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Using Spack's public build cache for GitHub Actions
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack offers a public build cache for GitHub Actions with a set of common packages,
|
||||
which lets you get started quickly. See the following resources for more information:
|
||||
|
||||
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_
|
||||
Initial build and later installation do not necessarily happen at the same
|
||||
location. Spack provides a relocation capability and corrects for RPATHs and
|
||||
non-relocatable scripts. However, many packages compile paths into binary
|
||||
artifacts directly. In such cases, the build instructions of this package would
|
||||
need to be adjusted for better re-locatability.
|
||||
|
||||
.. _cmd-spack-buildcache:
|
||||
|
||||
|
||||
@@ -526,52 +526,56 @@ Package Preferences
|
||||
In some cases package requirements can be too strong, and package
|
||||
preferences are the better option. Package preferences do not impose
|
||||
constraints on packages for particular versions or variants values,
|
||||
they rather only set defaults. The concretizer is free to change
|
||||
them if it must, due to other constraints, and also prefers reusing
|
||||
installed packages over building new ones that are a better match for
|
||||
preferences.
|
||||
they rather only set defaults -- the concretizer is free to change
|
||||
them if it must due to other constraints. Also note that package
|
||||
preferences are of lower priority than reuse of already installed
|
||||
packages.
|
||||
|
||||
Most package preferences (``compilers``, ``target`` and ``providers``)
|
||||
can only be set globally under the ``all`` section of ``packages.yaml``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
all:
|
||||
compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
|
||||
target: [x86_64_v3]
|
||||
providers:
|
||||
mpi: [mvapich2, mpich, openmpi]
|
||||
|
||||
These preferences override Spack's default and effectively reorder priorities
|
||||
when looking for the best compiler, target or virtual package provider. Each
|
||||
preference takes an ordered list of spec constraints, with earlier entries in
|
||||
the list being preferred over later entries.
|
||||
|
||||
In the example above all packages prefer to be compiled with ``gcc@12.2.0``,
|
||||
to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they
|
||||
depend on ``mpi``.
|
||||
|
||||
The ``variants`` and ``version`` preferences can be set under
|
||||
package specific sections of the ``packages.yaml`` file:
|
||||
Here's an example ``packages.yaml`` file that sets preferred packages:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
opencv:
|
||||
compiler: [gcc@4.9]
|
||||
variants: +debug
|
||||
gperftools:
|
||||
version: [2.2, 2.4, 2.3]
|
||||
all:
|
||||
compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
|
||||
target: [sandybridge]
|
||||
providers:
|
||||
mpi: [mvapich2, mpich, openmpi]
|
||||
|
||||
In this case, the preference for ``opencv`` is to build with debug options, while
|
||||
``gperftools`` prefers version 2.2 over 2.4.
|
||||
At a high level, this example is specifying how packages are preferably
|
||||
concretized. The opencv package should prefer using GCC 4.9 and
|
||||
be built with debug options. The gperftools package should prefer version
|
||||
2.2 over 2.4. Every package on the system should prefer mvapich2 for
|
||||
its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
|
||||
These options are used to fill in implicit defaults. Any of them can be overwritten
|
||||
on the command line if explicitly requested.
|
||||
|
||||
Any preference can be overwritten on the command line if explicitly requested.
|
||||
Package preferences accept the follow keys or components under
|
||||
the specific package (or ``all``) section: ``compiler``, ``variants``,
|
||||
``version``, ``providers``, and ``target``. Each component has an
|
||||
ordered list of spec ``constraints``, with earlier entries in the
|
||||
list being preferred over later entries.
|
||||
|
||||
Preferences cannot overcome explicit constraints, as they only set a preferred
|
||||
ordering among homogeneous attribute values. Going back to the example, if
|
||||
``gperftools@2.3:`` was requested, then Spack will install version 2.4
|
||||
since the most preferred version 2.2 is prohibited by the version constraint.
|
||||
Sometimes a package installation may have constraints that forbid
|
||||
the first concretization rule, in which case Spack will use the first
|
||||
legal concretization rule. Going back to the example, if a user
|
||||
requests gperftools 2.3 or later, then Spack will install version 2.4
|
||||
as the 2.4 version of gperftools is preferred over 2.3.
|
||||
|
||||
An explicit concretization rule in the preferred section will always
|
||||
take preference over unlisted concretizations. In the above example,
|
||||
xlc isn't listed in the compiler list. Every listed compiler from
|
||||
gcc to pgi will thus be preferred over the xlc compiler.
|
||||
|
||||
The syntax for the ``provider`` section differs slightly from other
|
||||
concretization rules. A provider lists a value that packages may
|
||||
``depends_on`` (e.g, MPI) and a list of rules for fulfilling that
|
||||
dependency.
|
||||
|
||||
.. _package_permissions:
|
||||
|
||||
|
||||
@@ -204,7 +204,6 @@ def setup(sphinx):
|
||||
("py:class", "clingo.Control"),
|
||||
("py:class", "six.moves.urllib.parse.ParseResult"),
|
||||
("py:class", "TextIO"),
|
||||
("py:class", "hashlib._Hash"),
|
||||
# Spack classes that are private and we don't want to expose
|
||||
("py:class", "spack.provider_index._IndexBase"),
|
||||
("py:class", "spack.repo._PrependFileLoader"),
|
||||
|
||||
@@ -304,17 +304,3 @@ To work properly, this requires your terminal to reset its title after
|
||||
Spack has finished its work, otherwise Spack's status information will
|
||||
remain in the terminal's title indefinitely. Most terminals should already
|
||||
be set up this way and clear Spack's status information.
|
||||
|
||||
-----------
|
||||
``aliases``
|
||||
-----------
|
||||
|
||||
Aliases can be used to define new Spack commands. They can be either shortcuts
|
||||
for longer commands or include specific arguments for convenience. For instance,
|
||||
if users want to use ``spack install``'s ``-v`` argument all the time, they can
|
||||
create a new alias called ``inst`` that will always call ``install -v``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
aliases:
|
||||
inst: install -v
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 296 KiB |
@@ -1,534 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><!-- Generated by graphviz version 2.40.1 (20161225.0304)
|
||||
--><!-- Title: G Pages: 1 --><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="3044pt" height="1683pt" viewBox="0.00 0.00 3043.65 1682.80">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1678.8)">
|
||||
<title>G</title>
|
||||
<polygon fill="#ffffff" stroke="transparent" points="-4,4 -4,-1678.8 3039.6456,-1678.8 3039.6456,4 -4,4"/>
|
||||
<!-- hkcrbrtf2qex6rvzuok5tzdrbam55pdn -->
|
||||
<g id="node1" class="node">
|
||||
<title>hkcrbrtf2qex6rvzuok5tzdrbam55pdn</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2407.965,-1198.3002C2407.965,-1198.3002 1948.1742,-1198.3002 1948.1742,-1198.3002 1942.1742,-1198.3002 1936.1742,-1192.3002 1936.1742,-1186.3002 1936.1742,-1186.3002 1936.1742,-1123.6998 1936.1742,-1123.6998 1936.1742,-1117.6998 1942.1742,-1111.6998 1948.1742,-1111.6998 1948.1742,-1111.6998 2407.965,-1111.6998 2407.965,-1111.6998 2413.965,-1111.6998 2419.965,-1117.6998 2419.965,-1123.6998 2419.965,-1123.6998 2419.965,-1186.3002 2419.965,-1186.3002 2419.965,-1192.3002 2413.965,-1198.3002 2407.965,-1198.3002"/>
|
||||
<text text-anchor="middle" x="2178.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">netlib-scalapack@2.2.0%gcc@9.4.0/hkcrbrt</text>
|
||||
</g>
|
||||
<!-- o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="node8" class="node">
|
||||
<title>o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M901.2032,-1039.5002C901.2032,-1039.5002 486.936,-1039.5002 486.936,-1039.5002 480.936,-1039.5002 474.936,-1033.5002 474.936,-1027.5002 474.936,-1027.5002 474.936,-964.8998 474.936,-964.8998 474.936,-958.8998 480.936,-952.8998 486.936,-952.8998 486.936,-952.8998 901.2032,-952.8998 901.2032,-952.8998 907.2032,-952.8998 913.2032,-958.8998 913.2032,-964.8998 913.2032,-964.8998 913.2032,-1027.5002 913.2032,-1027.5002 913.2032,-1033.5002 907.2032,-1039.5002 901.2032,-1039.5002"/>
|
||||
<text text-anchor="middle" x="694.0696" y="-989" font-family="Monaco" font-size="24.00" fill="#000000">openblas@0.3.21%gcc@9.4.0/o524geb</text>
|
||||
</g>
|
||||
<!-- hkcrbrtf2qex6rvzuok5tzdrbam55pdn->o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="edge10" class="edge">
|
||||
<title>hkcrbrtf2qex6rvzuok5tzdrbam55pdn->o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1936.1981,-1113.832C1933.0949,-1113.4088 1930.0059,-1112.9948 1926.9392,-1112.5915 1575.405,-1066.3348 1485.3504,-1074.0879 1131.9752,-1040.5955 1064.2267,-1034.1713 990.6114,-1026.9648 923.4066,-1020.2975"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1936.4684,-1111.8504C1933.3606,-1111.4265 1930.2716,-1111.0125 1927.2,-1110.6085 1575.2335,-1064.3422 1485.1789,-1072.0953 1132.164,-1038.6045 1064.4216,-1032.1808 990.8062,-1024.9744 923.604,-1018.3073"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="923.505,-1015.7853 913.2081,-1018.2801 922.8133,-1022.751 923.505,-1015.7853"/>
|
||||
<text text-anchor="middle" x="1368.79" y="-1067.6346" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
|
||||
</g>
|
||||
<!-- 2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
|
||||
<g id="node23" class="node">
|
||||
<title>2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2767.3081,-1039.5002C2767.3081,-1039.5002 2166.8311,-1039.5002 2166.8311,-1039.5002 2160.8311,-1039.5002 2154.8311,-1033.5002 2154.8311,-1027.5002 2154.8311,-1027.5002 2154.8311,-964.8998 2154.8311,-964.8998 2154.8311,-958.8998 2160.8311,-952.8998 2166.8311,-952.8998 2166.8311,-952.8998 2767.3081,-952.8998 2767.3081,-952.8998 2773.3081,-952.8998 2779.3081,-958.8998 2779.3081,-964.8998 2779.3081,-964.8998 2779.3081,-1027.5002 2779.3081,-1027.5002 2779.3081,-1033.5002 2773.3081,-1039.5002 2767.3081,-1039.5002"/>
|
||||
<text text-anchor="middle" x="2467.0696" y="-989" font-family="Monaco" font-size="24.00" fill="#000000">intel-parallel-studio@cluster.2020.4%gcc@9.4.0/2w3nq3n</text>
|
||||
</g>
|
||||
<!-- hkcrbrtf2qex6rvzuok5tzdrbam55pdn->2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
|
||||
<g id="edge29" class="edge">
|
||||
<title>hkcrbrtf2qex6rvzuok5tzdrbam55pdn->2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2256.5586,-1110.7308C2294.3103,-1089.9869 2339.6329,-1065.083 2378.4976,-1043.7276"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2257.5217,-1112.4836C2295.2735,-1091.7397 2340.5961,-1066.8358 2379.4607,-1045.4804"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2381.116,-1047.4235 2388.1946,-1039.5403 2377.745,-1041.2886 2381.116,-1047.4235"/>
|
||||
<text text-anchor="middle" x="2286.6606" y="-1079.8414" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
|
||||
</g>
|
||||
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="node27" class="node">
|
||||
<title>gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1539.1928,-1039.5002C1539.1928,-1039.5002 1152.9464,-1039.5002 1152.9464,-1039.5002 1146.9464,-1039.5002 1140.9464,-1033.5002 1140.9464,-1027.5002 1140.9464,-1027.5002 1140.9464,-964.8998 1140.9464,-964.8998 1140.9464,-958.8998 1146.9464,-952.8998 1152.9464,-952.8998 1152.9464,-952.8998 1539.1928,-952.8998 1539.1928,-952.8998 1545.1928,-952.8998 1551.1928,-958.8998 1551.1928,-964.8998 1551.1928,-964.8998 1551.1928,-1027.5002 1551.1928,-1027.5002 1551.1928,-1033.5002 1545.1928,-1039.5002 1539.1928,-1039.5002"/>
|
||||
<text text-anchor="middle" x="1346.0696" y="-989" font-family="Monaco" font-size="24.00" fill="#000000">cmake@3.25.1%gcc@9.4.0/gguve5i</text>
|
||||
</g>
|
||||
<!-- hkcrbrtf2qex6rvzuok5tzdrbam55pdn->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge17" class="edge">
|
||||
<title>hkcrbrtf2qex6rvzuok5tzdrbam55pdn->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1950.9968,-1111.6597C1829.5529,-1088.4802 1680.8338,-1060.0949 1561.2457,-1037.2697"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1561.7091,-1033.795 1551.2303,-1035.3581 1560.3967,-1040.6709 1561.7091,-1033.795"/>
|
||||
</g>
|
||||
<!-- i4avrindvhcamhurzbfdaggbj2zgsrrh -->
|
||||
<g id="node2" class="node">
|
||||
<title>i4avrindvhcamhurzbfdaggbj2zgsrrh</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1536.3649,-86.7002C1536.3649,-86.7002 1155.7743,-86.7002 1155.7743,-86.7002 1149.7743,-86.7002 1143.7743,-80.7002 1143.7743,-74.7002 1143.7743,-74.7002 1143.7743,-12.0998 1143.7743,-12.0998 1143.7743,-6.0998 1149.7743,-.0998 1155.7743,-.0998 1155.7743,-.0998 1536.3649,-.0998 1536.3649,-.0998 1542.3649,-.0998 1548.3649,-6.0998 1548.3649,-12.0998 1548.3649,-12.0998 1548.3649,-74.7002 1548.3649,-74.7002 1548.3649,-80.7002 1542.3649,-86.7002 1536.3649,-86.7002"/>
|
||||
<text text-anchor="middle" x="1346.0696" y="-36.2" font-family="Monaco" font-size="24.00" fill="#000000">pkgconf@1.8.0%gcc@9.4.0/i4avrin</text>
|
||||
</g>
|
||||
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl -->
|
||||
<g id="node3" class="node">
|
||||
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M849.3673,-721.9002C849.3673,-721.9002 480.7719,-721.9002 480.7719,-721.9002 474.7719,-721.9002 468.7719,-715.9002 468.7719,-709.9002 468.7719,-709.9002 468.7719,-647.2998 468.7719,-647.2998 468.7719,-641.2998 474.7719,-635.2998 480.7719,-635.2998 480.7719,-635.2998 849.3673,-635.2998 849.3673,-635.2998 855.3673,-635.2998 861.3673,-641.2998 861.3673,-647.2998 861.3673,-647.2998 861.3673,-709.9002 861.3673,-709.9002 861.3673,-715.9002 855.3673,-721.9002 849.3673,-721.9002"/>
|
||||
<text text-anchor="middle" x="665.0696" y="-671.4" font-family="Monaco" font-size="24.00" fill="#000000">perl@5.36.0%gcc@9.4.0/ywrpvv2</text>
|
||||
</g>
|
||||
<!-- h3ujmb3ts4kxxxv77knh2knuystuerbx -->
|
||||
<g id="node7" class="node">
|
||||
<title>h3ujmb3ts4kxxxv77knh2knuystuerbx</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M392.4016,-563.1002C392.4016,-563.1002 19.7376,-563.1002 19.7376,-563.1002 13.7376,-563.1002 7.7376,-557.1002 7.7376,-551.1002 7.7376,-551.1002 7.7376,-488.4998 7.7376,-488.4998 7.7376,-482.4998 13.7376,-476.4998 19.7376,-476.4998 19.7376,-476.4998 392.4016,-476.4998 392.4016,-476.4998 398.4016,-476.4998 404.4016,-482.4998 404.4016,-488.4998 404.4016,-488.4998 404.4016,-551.1002 404.4016,-551.1002 404.4016,-557.1002 398.4016,-563.1002 392.4016,-563.1002"/>
|
||||
<text text-anchor="middle" x="206.0696" y="-512.6" font-family="Monaco" font-size="24.00" fill="#000000">bzip2@1.0.8%gcc@9.4.0/h3ujmb3</text>
|
||||
</g>
|
||||
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl->h3ujmb3ts4kxxxv77knh2knuystuerbx -->
|
||||
<g id="edge9" class="edge">
|
||||
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl->h3ujmb3ts4kxxxv77knh2knuystuerbx</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M539.3189,-636.1522C477.7157,-614.8394 403.4197,-589.1353 340.5959,-567.4002"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M539.9728,-634.2622C478.3696,-612.9494 404.0736,-587.2452 341.2498,-565.5101"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="341.9365,-563.1023 331.3417,-563.1403 339.6478,-569.7176 341.9365,-563.1023"/>
|
||||
</g>
|
||||
<!-- uabgssx6lsgrevwbttslldnr5nzguprj -->
|
||||
<g id="node19" class="node">
|
||||
<title>uabgssx6lsgrevwbttslldnr5nzguprj</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1298.2296,-563.1002C1298.2296,-563.1002 937.9096,-563.1002 937.9096,-563.1002 931.9096,-563.1002 925.9096,-557.1002 925.9096,-551.1002 925.9096,-551.1002 925.9096,-488.4998 925.9096,-488.4998 925.9096,-482.4998 931.9096,-476.4998 937.9096,-476.4998 937.9096,-476.4998 1298.2296,-476.4998 1298.2296,-476.4998 1304.2296,-476.4998 1310.2296,-482.4998 1310.2296,-488.4998 1310.2296,-488.4998 1310.2296,-551.1002 1310.2296,-551.1002 1310.2296,-557.1002 1304.2296,-563.1002 1298.2296,-563.1002"/>
|
||||
<text text-anchor="middle" x="1118.0696" y="-512.6" font-family="Monaco" font-size="24.00" fill="#000000">gdbm@1.23%gcc@9.4.0/uabgssx</text>
|
||||
</g>
|
||||
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl->uabgssx6lsgrevwbttslldnr5nzguprj -->
|
||||
<g id="edge44" class="edge">
|
||||
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl->uabgssx6lsgrevwbttslldnr5nzguprj</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M788.523,-634.2635C849.3209,-612.9507 922.6457,-587.2465 984.6483,-565.5114"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M789.1847,-636.1509C849.9825,-614.8381 923.3073,-589.1339 985.3099,-567.3988"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="986.1559,-569.7515 994.435,-563.1403 983.8402,-563.1456 986.1559,-569.7515"/>
|
||||
</g>
|
||||
<!-- gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb -->
|
||||
<g id="node20" class="node">
|
||||
<title>gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M896.1744,-563.1002C896.1744,-563.1002 433.9648,-563.1002 433.9648,-563.1002 427.9648,-563.1002 421.9648,-557.1002 421.9648,-551.1002 421.9648,-551.1002 421.9648,-488.4998 421.9648,-488.4998 421.9648,-482.4998 427.9648,-476.4998 433.9648,-476.4998 433.9648,-476.4998 896.1744,-476.4998 896.1744,-476.4998 902.1744,-476.4998 908.1744,-482.4998 908.1744,-488.4998 908.1744,-488.4998 908.1744,-551.1002 908.1744,-551.1002 908.1744,-557.1002 902.1744,-563.1002 896.1744,-563.1002"/>
|
||||
<text text-anchor="middle" x="665.0696" y="-512.6" font-family="Monaco" font-size="24.00" fill="#000000">berkeley-db@18.1.40%gcc@9.4.0/gkw4dg2</text>
|
||||
</g>
|
||||
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl->gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb -->
|
||||
<g id="edge23" class="edge">
|
||||
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl->gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M664.0696,-635.2072C664.0696,-616.1263 664.0696,-593.5257 664.0696,-573.4046"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M666.0696,-635.2072C666.0696,-616.1263 666.0696,-593.5257 666.0696,-573.4046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="668.5697,-573.1403 665.0696,-563.1403 661.5697,-573.1404 668.5697,-573.1403"/>
|
||||
</g>
|
||||
<!-- nizxi5u5bbrzhzwfy2qb7hatlhuswlrz -->
|
||||
<g id="node24" class="node">
|
||||
<title>nizxi5u5bbrzhzwfy2qb7hatlhuswlrz</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2195.2248,-563.1002C2195.2248,-563.1002 1840.9144,-563.1002 1840.9144,-563.1002 1834.9144,-563.1002 1828.9144,-557.1002 1828.9144,-551.1002 1828.9144,-551.1002 1828.9144,-488.4998 1828.9144,-488.4998 1828.9144,-482.4998 1834.9144,-476.4998 1840.9144,-476.4998 1840.9144,-476.4998 2195.2248,-476.4998 2195.2248,-476.4998 2201.2248,-476.4998 2207.2248,-482.4998 2207.2248,-488.4998 2207.2248,-488.4998 2207.2248,-551.1002 2207.2248,-551.1002 2207.2248,-557.1002 2201.2248,-563.1002 2195.2248,-563.1002"/>
|
||||
<text text-anchor="middle" x="2018.0696" y="-512.6" font-family="Monaco" font-size="24.00" fill="#000000">zlib@1.2.13%gcc@9.4.0/nizxi5u</text>
|
||||
</g>
|
||||
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl->nizxi5u5bbrzhzwfy2qb7hatlhuswlrz -->
|
||||
<g id="edge4" class="edge">
|
||||
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl->nizxi5u5bbrzhzwfy2qb7hatlhuswlrz</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M861.3292,-654.5584C1116.9929,-624.5514 1561.4447,-572.3867 1818.5758,-542.2075"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M861.5624,-656.5447C1117.2261,-626.5378 1561.6778,-574.373 1818.8089,-544.1939"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1819.373,-546.6449 1828.8968,-542.003 1818.5569,-539.6926 1819.373,-546.6449"/>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id -->
|
||||
<g id="node4" class="node">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2383.212,-1674.7002C2383.212,-1674.7002 1972.9272,-1674.7002 1972.9272,-1674.7002 1966.9272,-1674.7002 1960.9272,-1668.7002 1960.9272,-1662.7002 1960.9272,-1662.7002 1960.9272,-1600.0998 1960.9272,-1600.0998 1960.9272,-1594.0998 1966.9272,-1588.0998 1972.9272,-1588.0998 1972.9272,-1588.0998 2383.212,-1588.0998 2383.212,-1588.0998 2389.212,-1588.0998 2395.212,-1594.0998 2395.212,-1600.0998 2395.212,-1600.0998 2395.212,-1662.7002 2395.212,-1662.7002 2395.212,-1668.7002 2389.212,-1674.7002 2383.212,-1674.7002"/>
|
||||
<text text-anchor="middle" x="2178.0696" y="-1624.2" font-family="Monaco" font-size="24.00" fill="#000000">strumpack@7.0.1%gcc@9.4.0/idvshq5</text>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->hkcrbrtf2qex6rvzuok5tzdrbam55pdn -->
|
||||
<g id="edge33" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->hkcrbrtf2qex6rvzuok5tzdrbam55pdn</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2177.0696,-1587.8598C2177.0696,-1500.5185 2177.0696,-1304.1624 2177.0696,-1208.8885"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2179.0696,-1587.8598C2179.0696,-1500.5185 2179.0696,-1304.1624 2179.0696,-1208.8885"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2181.5697,-1208.611 2178.0696,-1198.611 2174.5697,-1208.611 2181.5697,-1208.611"/>
|
||||
<text text-anchor="middle" x="2125.9224" y="-1397.5399" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=scalapack</text>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="edge8" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1960.6199,-1629.1097C1600.5855,-1621.4505 897.1143,-1596.5054 662.748,-1516.9469 459.8544,-1447.9506 281.1117,-1289.236 401.2427,-1111.0377 418.213,-1086.3492 472.759,-1062.01 530.3793,-1041.9698"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1960.6625,-1627.1101C1600.6564,-1619.4517 897.1852,-1594.5067 663.3912,-1515.0531 461.1823,-1446.4551 282.4397,-1287.7405 402.8965,-1112.1623 419.028,-1088.1757 473.574,-1063.8364 531.0362,-1043.8589"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="532.0142,-1046.1665 540.3395,-1039.6137 529.7449,-1039.5445 532.0142,-1046.1665"/>
|
||||
<text text-anchor="middle" x="1175.5163" y="-1600.8866" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
|
||||
</g>
|
||||
<!-- imopnxjmv7cwzyiecdw2saq42qvpnauh -->
|
||||
<g id="node12" class="node">
|
||||
<title>imopnxjmv7cwzyiecdw2saq42qvpnauh</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M3003.3872,-1357.1002C3003.3872,-1357.1002 2606.752,-1357.1002 2606.752,-1357.1002 2600.752,-1357.1002 2594.752,-1351.1002 2594.752,-1345.1002 2594.752,-1345.1002 2594.752,-1282.4998 2594.752,-1282.4998 2594.752,-1276.4998 2600.752,-1270.4998 2606.752,-1270.4998 2606.752,-1270.4998 3003.3872,-1270.4998 3003.3872,-1270.4998 3009.3872,-1270.4998 3015.3872,-1276.4998 3015.3872,-1282.4998 3015.3872,-1282.4998 3015.3872,-1345.1002 3015.3872,-1345.1002 3015.3872,-1351.1002 3009.3872,-1357.1002 3003.3872,-1357.1002"/>
|
||||
<text text-anchor="middle" x="2805.0696" y="-1306.6" font-family="Monaco" font-size="24.00" fill="#000000">parmetis@4.0.3%gcc@9.4.0/imopnxj</text>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->imopnxjmv7cwzyiecdw2saq42qvpnauh -->
|
||||
<g id="edge51" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->imopnxjmv7cwzyiecdw2saq42qvpnauh</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2393.6993,-1587.0809C2455.3565,-1569.7539 2521.1771,-1546.2699 2577.5864,-1515.1245 2649.1588,-1475.6656 2717.4141,-1409.6691 2759.9512,-1363.9364"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2394.2404,-1589.0062C2456.0286,-1571.6376 2521.8491,-1548.1536 2578.5528,-1516.8755 2650.5491,-1477.1034 2718.8043,-1411.107 2761.4156,-1365.2986"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2763.3454,-1366.8938 2767.5512,-1357.1695 2758.1992,-1362.1485 2763.3454,-1366.8938"/>
|
||||
</g>
|
||||
<!-- ern66gyp6qmhmpod4jaynxx4weoberfm -->
|
||||
<g id="node13" class="node">
|
||||
<title>ern66gyp6qmhmpod4jaynxx4weoberfm</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2928.3784,-1198.3002C2928.3784,-1198.3002 2563.7608,-1198.3002 2563.7608,-1198.3002 2557.7608,-1198.3002 2551.7608,-1192.3002 2551.7608,-1186.3002 2551.7608,-1186.3002 2551.7608,-1123.6998 2551.7608,-1123.6998 2551.7608,-1117.6998 2557.7608,-1111.6998 2563.7608,-1111.6998 2563.7608,-1111.6998 2928.3784,-1111.6998 2928.3784,-1111.6998 2934.3784,-1111.6998 2940.3784,-1117.6998 2940.3784,-1123.6998 2940.3784,-1123.6998 2940.3784,-1186.3002 2940.3784,-1186.3002 2940.3784,-1192.3002 2934.3784,-1198.3002 2928.3784,-1198.3002"/>
|
||||
<text text-anchor="middle" x="2746.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">metis@5.1.0%gcc@9.4.0/ern66gy</text>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->ern66gyp6qmhmpod4jaynxx4weoberfm -->
|
||||
<g id="edge25" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->ern66gyp6qmhmpod4jaynxx4weoberfm</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2371.6269,-1587.103C2443.5875,-1567.249 2513.691,-1542.0963 2537.3223,-1515.3355 2611.3482,-1433.6645 2525.4748,-1364.8484 2585.2274,-1269.8608 2602.2478,-1243.3473 2627.3929,-1221.1402 2652.8797,-1203.3777"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2372.1589,-1589.0309C2444.2629,-1569.1315 2514.3664,-1543.9788 2538.8169,-1516.6645 2612.5989,-1432.1038 2526.7255,-1363.2878 2586.9118,-1270.9392 2603.5717,-1244.8464 2628.7168,-1222.6393 2654.0229,-1205.0188"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2655.7411,-1206.8749 2662.0621,-1198.3722 2651.8184,-1201.0773 2655.7411,-1206.8749"/>
|
||||
</g>
|
||||
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf -->
|
||||
<g id="node14" class="node">
|
||||
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1964.017,-1357.1002C1964.017,-1357.1002 1532.1222,-1357.1002 1532.1222,-1357.1002 1526.1222,-1357.1002 1520.1222,-1351.1002 1520.1222,-1345.1002 1520.1222,-1345.1002 1520.1222,-1282.4998 1520.1222,-1282.4998 1520.1222,-1276.4998 1526.1222,-1270.4998 1532.1222,-1270.4998 1532.1222,-1270.4998 1964.017,-1270.4998 1964.017,-1270.4998 1970.017,-1270.4998 1976.017,-1276.4998 1976.017,-1282.4998 1976.017,-1282.4998 1976.017,-1345.1002 1976.017,-1345.1002 1976.017,-1351.1002 1970.017,-1357.1002 1964.017,-1357.1002"/>
|
||||
<text text-anchor="middle" x="1748.0696" y="-1306.6" font-family="Monaco" font-size="24.00" fill="#000000">butterflypack@2.2.2%gcc@9.4.0/nqiyrxl</text>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->nqiyrxlid6tikfpvoqdpvsjt5drs2obf -->
|
||||
<g id="edge26" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->nqiyrxlid6tikfpvoqdpvsjt5drs2obf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2118.5874,-1588.7094C2039.1194,-1530.0139 1897.9154,-1425.72 1814.4793,-1364.0937"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2119.7757,-1587.1006C2040.3076,-1528.4052 1899.1036,-1424.1112 1815.6675,-1362.485"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1817.0581,-1360.404 1806.9348,-1357.2781 1812.8992,-1366.0347 1817.0581,-1360.404"/>
|
||||
</g>
|
||||
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu -->
|
||||
<g id="node16" class="node">
|
||||
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1106.2192,-1515.9002C1106.2192,-1515.9002 683.92,-1515.9002 683.92,-1515.9002 677.92,-1515.9002 671.92,-1509.9002 671.92,-1503.9002 671.92,-1503.9002 671.92,-1441.2998 671.92,-1441.2998 671.92,-1435.2998 677.92,-1429.2998 683.92,-1429.2998 683.92,-1429.2998 1106.2192,-1429.2998 1106.2192,-1429.2998 1112.2192,-1429.2998 1118.2192,-1435.2998 1118.2192,-1441.2998 1118.2192,-1441.2998 1118.2192,-1503.9002 1118.2192,-1503.9002 1118.2192,-1509.9002 1112.2192,-1515.9002 1106.2192,-1515.9002"/>
|
||||
<text text-anchor="middle" x="895.0696" y="-1465.4" font-family="Monaco" font-size="24.00" fill="#000000">slate@2022.07.00%gcc@9.4.0/4bu62ky</text>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->4bu62kyfuh4ikdkuyxfxjxanf7e7qopu -->
|
||||
<g id="edge5" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->4bu62kyfuh4ikdkuyxfxjxanf7e7qopu</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1960.6663,-1605.4991C1729.5518,-1576.8935 1365.2868,-1531.8075 1128.237,-1502.4673"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1960.912,-1603.5143C1729.7975,-1574.9086 1365.5325,-1529.8227 1128.4827,-1500.4825"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1128.5789,-1497.9754 1118.2247,-1500.2204 1127.719,-1504.9224 1128.5789,-1497.9754"/>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
|
||||
<g id="edge20" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2395.1113,-1591.5061C2621.5772,-1545.7968 2953.3457,-1462.5053 3023.2362,-1356.6473 3049.986,-1316.785 3021.2047,-1131.5143 3003.3326,-1112.2759 2971.8969,-1077.7826 2884.3944,-1052.6467 2789.1441,-1034.9179"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2395.507,-1593.4665C2622.0642,-1547.7366 2953.8327,-1464.4452 3024.903,-1357.7527 3051.9623,-1316.478 3023.181,-1131.2073 3004.8066,-1110.9241 2972.4491,-1075.8603 2884.9466,-1050.7244 2789.5102,-1032.9517"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2789.9449,-1030.4898 2779.4781,-1032.132 2788.6845,-1037.3754 2789.9449,-1030.4898"/>
|
||||
<text text-anchor="middle" x="2611.7445" y="-1537.8321" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
|
||||
</g>
|
||||
<!-- 7rzbmgoxhmm2jhellkgcjmn62uklf22x -->
|
||||
<g id="node25" class="node">
|
||||
<title>7rzbmgoxhmm2jhellkgcjmn62uklf22x</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1749.1952,-1515.9002C1749.1952,-1515.9002 1398.944,-1515.9002 1398.944,-1515.9002 1392.944,-1515.9002 1386.944,-1509.9002 1386.944,-1503.9002 1386.944,-1503.9002 1386.944,-1441.2998 1386.944,-1441.2998 1386.944,-1435.2998 1392.944,-1429.2998 1398.944,-1429.2998 1398.944,-1429.2998 1749.1952,-1429.2998 1749.1952,-1429.2998 1755.1952,-1429.2998 1761.1952,-1435.2998 1761.1952,-1441.2998 1761.1952,-1441.2998 1761.1952,-1503.9002 1761.1952,-1503.9002 1761.1952,-1509.9002 1755.1952,-1515.9002 1749.1952,-1515.9002"/>
|
||||
<text text-anchor="middle" x="1574.0696" y="-1465.4" font-family="Monaco" font-size="24.00" fill="#000000">zfp@0.5.5%gcc@9.4.0/7rzbmgo</text>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->7rzbmgoxhmm2jhellkgcjmn62uklf22x -->
|
||||
<g id="edge36" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->7rzbmgoxhmm2jhellkgcjmn62uklf22x</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2012.7697,-1588.9743C1930.7903,-1567.4208 1831.729,-1541.3762 1748.4742,-1519.4874"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2013.2782,-1587.0401C1931.2989,-1565.4866 1832.2376,-1539.442 1748.9827,-1517.5531"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1749.477,-1515.0982 1738.9157,-1515.9403 1747.697,-1521.8681 1749.477,-1515.0982"/>
|
||||
</g>
|
||||
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge3" class="edge">
|
||||
<title>idvshq5nqmygzd4uo62mdispwgxsw7id->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2229.2864,-1587.9836C2336.2076,-1492.3172 2562.5717,-1260.0833 2429.0696,-1111.6 2372.2327,-1048.3851 1860.8259,-1017.0375 1561.5401,-1003.9799"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1561.5673,-1000.4779 1551.4253,-1003.5421 1561.2645,-1007.4714 1561.5673,-1000.4779"/>
|
||||
</g>
|
||||
<!-- mujlx42xgttdc6u6rmiftsktpsrcmpbs -->
|
||||
<g id="node5" class="node">
|
||||
<title>mujlx42xgttdc6u6rmiftsktpsrcmpbs</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M912.4048,-1198.3002C912.4048,-1198.3002 475.7344,-1198.3002 475.7344,-1198.3002 469.7344,-1198.3002 463.7344,-1192.3002 463.7344,-1186.3002 463.7344,-1186.3002 463.7344,-1123.6998 463.7344,-1123.6998 463.7344,-1117.6998 469.7344,-1111.6998 475.7344,-1111.6998 475.7344,-1111.6998 912.4048,-1111.6998 912.4048,-1111.6998 918.4048,-1111.6998 924.4048,-1117.6998 924.4048,-1123.6998 924.4048,-1123.6998 924.4048,-1186.3002 924.4048,-1186.3002 924.4048,-1192.3002 918.4048,-1198.3002 912.4048,-1198.3002"/>
|
||||
<text text-anchor="middle" x="694.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">blaspp@2022.07.00%gcc@9.4.0/mujlx42</text>
|
||||
</g>
|
||||
<!-- mujlx42xgttdc6u6rmiftsktpsrcmpbs->o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="edge16" class="edge">
|
||||
<title>mujlx42xgttdc6u6rmiftsktpsrcmpbs->o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M693.0696,-1111.6072C693.0696,-1092.5263 693.0696,-1069.9257 693.0696,-1049.8046"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M695.0696,-1111.6072C695.0696,-1092.5263 695.0696,-1069.9257 695.0696,-1049.8046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="697.5697,-1049.5403 694.0696,-1039.5403 690.5697,-1049.5404 697.5697,-1049.5403"/>
|
||||
<text text-anchor="middle" x="657.8516" y="-1079.8482" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas</text>
|
||||
</g>
|
||||
<!-- mujlx42xgttdc6u6rmiftsktpsrcmpbs->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge28" class="edge">
|
||||
<title>mujlx42xgttdc6u6rmiftsktpsrcmpbs->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M872.2315,-1111.6072C960.9952,-1089.988 1068.311,-1063.8504 1158.3512,-1041.9204"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1159.2354,-1045.3074 1168.1232,-1039.5403 1157.5789,-1038.5062 1159.2354,-1045.3074"/>
|
||||
</g>
|
||||
<!-- htzjns66gmq6pjofohp26djmjnpbegho -->
|
||||
<g id="node6" class="node">
|
||||
<title>htzjns66gmq6pjofohp26djmjnpbegho</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M2663.3553,-880.7002C2663.3553,-880.7002 2270.7839,-880.7002 2270.7839,-880.7002 2264.7839,-880.7002 2258.7839,-874.7002 2258.7839,-868.7002 2258.7839,-868.7002 2258.7839,-806.0998 2258.7839,-806.0998 2258.7839,-800.0998 2264.7839,-794.0998 2270.7839,-794.0998 2270.7839,-794.0998 2663.3553,-794.0998 2663.3553,-794.0998 2669.3553,-794.0998 2675.3553,-800.0998 2675.3553,-806.0998 2675.3553,-806.0998 2675.3553,-868.7002 2675.3553,-868.7002 2675.3553,-874.7002 2669.3553,-880.7002 2663.3553,-880.7002"/>
|
||||
<text text-anchor="middle" x="2467.0696" y="-830.2" font-family="Monaco" font-size="24.00" fill="#000000">patchelf@0.16.1%gcc@9.4.0/htzjns6</text>
|
||||
</g>
|
||||
<!-- xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6 -->
|
||||
<g id="node15" class="node">
|
||||
<title>xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6</title>
|
||||
(Image diff: an SVG dependency graph of a Spack environment, rendered by Graphviz. Nodes include diffutils@3.8, libiconv@1.17, ncurses@6.4, readline@8.2, sed@4.8, openssl@1.1.1s, ca-certificates-mozilla@2023-01-10, arpack-ng@3.8.0, and lapackpp@2022.07.00, all built with gcc@9.4.0; edges carry virtual-dependency labels such as virtuals=mpi, virtuals=blas,lapack, virtuals=scalapack, virtuals=iconv, and virtuals=pkgconfig.)

Before Width: | Height: | Size: 58 KiB
@@ -519,11 +519,11 @@ inspections and customize them per-module-set.

   modules:
     prefix_inspections:
       ./bin:
       bin:
         - PATH
       ./man:
       man:
         - MANPATH
       ./:
       '':
         - CMAKE_PREFIX_PATH

Prefix inspections are only applied if the relative path inside the
@@ -579,7 +579,7 @@ the view.

     view_relative_modules:
       use_view: my_view
       prefix_inspections:
         ./bin:
         bin:
           - PATH
   view:
     my_view:
@@ -2352,7 +2352,7 @@ the following at the command line of a bash shell:

.. code-block:: console

   $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done
   $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 &; done

.. note::
@@ -2688,6 +2688,60 @@ appear in the package file (or in this case, in the list).

right version. If two packages depend on ``binutils`` patched *the
same* way, they can both use a single installation of ``binutils``.

.. _setup-dependent-environment:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Influence how dependents are built or run
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
or the
:meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
methods.
The Qt package, for instance, uses this call:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
   :pyobject: Qt.setup_dependent_build_environment
   :linenos:

to set the ``QTDIR`` environment variable so that packages
that depend on a particular Qt installation will find it.
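
Since the ``literalinclude`` above pulls its body from the package repository, here is a
minimal sketch of what such a method might look like (illustrative only, not the exact Qt
implementation):

.. code-block:: python

   def setup_dependent_build_environment(self, env, dependent_spec):
       # Make this particular Qt installation visible to the dependent's build.
       env.set("QTDIR", self.prefix)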

Another good example of how a dependency can influence
the build environment of dependents is the Python package:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_build_environment
   :linenos:

The method above ensures that any package that depends on Python
will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
variables set appropriately before starting the installation. To make things
even simpler, the ``python setup.py`` command is also inserted into the module
scope of dependents by overriding a third method called
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_package
   :linenos:

This allows most Python packages to have a very simple install procedure,
like the following:

.. code-block:: python

   def install(self, spec, prefix):
       setup_py("install", "--prefix={0}".format(prefix))

Finally, the Python package also takes care of the modifications to ``PYTHONPATH``
to allow dependencies to run correctly:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_run_environment
   :linenos:


.. _packaging_conflicts:
@@ -2832,70 +2886,6 @@ variant(s) are selected. This may be accomplished with conditional

   extends("python", when="+python")
   ...

.. _setup-environment:

--------------------------------------------
Runtime and build time environment variables
--------------------------------------------

Spack provides a few methods to help package authors set up the required environment variables for
their package. Environment variables typically depend on how the package is used: variables that
make sense during the build phase may not be needed at runtime, and vice versa. Further, sometimes
it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package (a short sketch of
the first two follows the list):

1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
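
As a rough sketch of the first two methods (the package name and variable are made up
for illustration):

.. code-block:: python

   class MyLib(Package):
       def setup_build_environment(self, env):
           # Needed only while building this package itself.
           env.set("MYLIB_BUILD_DEBUG", "1")

       def setup_run_environment(self, env):
           # Needed whenever the installed package is used.
           env.prepend_path("PATH", self.prefix.bin)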

The Qt package, for instance, uses this call:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
   :pyobject: Qt.setup_dependent_build_environment
   :linenos:

to set the ``QTDIR`` environment variable so that packages that depend on a particular Qt
installation will find it.

The following diagram will give you an idea when each of these methods is called in a build
context:

.. image:: images/setup_env.png
   :align: center

Notice that ``setup_dependent_run_environment`` can be called multiple times, once for each
dependent package, whereas ``setup_run_environment`` is called only once for the package itself.
This means that the former should only be used if the environment variables depend on the dependent
package, whereas the latter should be used if the environment variables depend only on the package
itself.
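
For example, a hypothetical dependency that keeps a separate plugin directory per dependent
would need the former, since the value changes with each dependent (the variable name and
layout below are invented for illustration):

.. code-block:: python

   def setup_dependent_run_environment(self, env, dependent_spec):
       # The value depends on the dependent, so setup_run_environment cannot express it.
       env.prepend_path("MYLIB_PLUGIN_PATH", dependent_spec.prefix.plugins)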

--------------------------------
Setting package module variables
--------------------------------

Apart from modifying environment variables of the dependent package, you can also define Python
variables to be used by the dependent. This is done by implementing
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
example of this can be found in the ``Python`` package:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_package
   :linenos:

This allows Python packages to directly use these variables:

.. code-block:: python

   def install(self, spec, prefix):
       ...
       install("script.py", python_platlib)

.. note::

   We recommend using ``setup_dependent_package`` sparingly, as it is not always clear where
   global variables are coming from when editing a ``package.py`` file.

-----
Views
-----
@@ -2974,33 +2964,6 @@ The ``provides("mpi")`` call tells Spack that the ``mpich`` package

can be used to satisfy the dependency of any package that
``depends_on("mpi")``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Providing multiple virtuals simultaneously
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Packages can provide more than one virtual dependency. Sometimes, due to implementation details,
there are subsets of those virtuals that need to be provided together by the same package.

A well-known example is ``openblas``, which provides both the ``lapack`` and ``blas`` API in a single ``libopenblas``
library. A package that needs ``lapack`` and ``blas`` must either use ``openblas`` to provide both, or not use
``openblas`` at all. It cannot pick one or the other.

To express this constraint in a package, the two virtual dependencies must be listed in the same ``provides`` directive:

.. code-block:: python

   provides('blas', 'lapack')

This makes it impossible to select ``openblas`` as a provider for one of the two
virtual dependencies and not for the other. If you try to, Spack will report an error:

.. code-block:: console

   $ spack spec netlib-scalapack ^[virtuals=lapack] openblas ^[virtuals=blas] atlas
   ==> Error: concretization failed for the following reasons:

   1. Package 'openblas' needs to provide both 'lapack' and 'blas' together, but provides only 'lapack'
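
Like other directives, ``provides`` accepts a ``when=`` constraint, so the grouping itself can
be made conditional; a hedged sketch with an invented version range:

.. code-block:: python

   # Hypothetical: provide both APIs together, but only from version 0.3 onwards.
   provides("blas", "lapack", when="@0.3:")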

^^^^^^^^^^^^^^^^^^^^
Versioned Interfaces
^^^^^^^^^^^^^^^^^^^^

@@ -3503,56 +3466,6 @@ is equivalent to:

Constraints from nested context managers are also combined together, but they are rarely
needed or recommended.
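
A minimal sketch of the nesting behavior (the package names are made up):

.. code-block:: python

   with when("+mpi"):
       with when("@2:"):
           depends_on("mpich")  # effectively when="@2: +mpi"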

.. _default_args:

------------------------
Common default arguments
------------------------

Similarly, if directives have a common set of default arguments, you can
group them together in a ``with default_args()`` block:

.. code-block:: python

   class PyExample(PythonPackage):

       with default_args(type=("build", "run")):
           depends_on("py-foo")
           depends_on("py-foo@2:", when="@2:")
           depends_on("py-bar")
           depends_on("py-bz")

The above is short for:

.. code-block:: python

   class PyExample(PythonPackage):

       depends_on("py-foo", type=("build", "run"))
       depends_on("py-foo@2:", when="@2:", type=("build", "run"))
       depends_on("py-bar", type=("build", "run"))
       depends_on("py-bz", type=("build", "run"))

.. note::

   The ``with when()`` context manager is composable, while ``with default_args()``
   merely overrides the default. For example:

   .. code-block:: python

      with default_args(when="+feature"):
          depends_on("foo")
          depends_on("bar")
          depends_on("baz", when="+baz")

   is equivalent to:

   .. code-block:: python

      depends_on("foo", when="+feature")
      depends_on("bar", when="+feature")
      depends_on("baz", when="+baz")  # Note: not when="+feature+baz"

.. _install-method:

------------------
@@ -3852,7 +3765,7 @@ Similarly, ``spack install example +feature build_system=autotools`` will pick

the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``.

Dependencies are always specified in the package class. When some dependencies
depend on the choice of the build system, it is possible to use when conditions as
usual:

.. code-block:: python

@@ -3870,7 +3783,7 @@ usual:

       depends_on("cmake@3.18:", when="@2.0:", type="build")
       depends_on("cmake@3:", type="build")

       # Specify extra build dependencies used only in the configure script
       with when("build_system=autotools"):
           depends_on("perl", type="build")
           depends_on("pkgconfig", type="build")
@@ -6918,58 +6831,25 @@ the adapter role is to "emulate" a method resolution order like the one represen

Specifying License Information
------------------------------

Most of the software in Spack is open source, and most open source software is released
under one or more `common open source licenses <https://opensource.org/licenses/>`_.
Specifying the license that a package is released under in a project's
`package.py` is good practice. To specify a license, find the `SPDX identifier
<https://spdx.org/licenses/>`_ for a project and then add it using the license
directive:
A significant portion of software that Spack packages is open source. Most open
source software is released under one or more common open source licenses.
Specifying the specific license that a package is released under in a project's
`package.py` is good practice. To specify a license, find the SPDX identifier for
a project and then add it using the license directive:

.. code-block:: python

   license("<SPDX Identifier HERE>")

For example, the SPDX ID for the Apache Software License, version 2.0 is ``Apache-2.0``,
so you'd write:

.. code-block:: python

   license("Apache-2.0")

Or, for a dual-licensed package like Spack, you would use an `SPDX Expression
<https://spdx.github.io/spdx-spec/v2-draft/SPDX-license-expressions/>`_ with both of its
licenses:

.. code-block:: python

   license("Apache-2.0 OR MIT")

Note that specifying a license without a when clause makes it apply to all
versions and variants of the package, which might not actually be the case.
For example, a project might have switched licenses at some point or have
certain build configurations that include files that are licensed differently.
Spack itself used to be under the ``LGPL-2.1`` license, until it was relicensed
in version ``0.12`` in 2018.

You can specify when a ``license()`` directive applies using a ``when=``
clause, just like other directives. For example, to specify that a specific
license identifier should only apply to versions up to ``0.11``, but another
license should apply for later versions, you could write:
To account for this, you can specify when licenses should be applied. For
example, to specify that a specific license identifier should only apply
to versions up to and including 1.5, you could write the following directive:

.. code-block:: python

   license("LGPL-2.1", when="@:0.11")
   license("Apache-2.0 OR MIT", when="@0.12:")
   license("...", when="@:1.5")

Note that unlike for most other directives, the ``when=`` constraints in the
``license()`` directive can't intersect. Spack needs to be able to resolve
exactly one license identifier expression for any given version. To specify
*multiple* licenses, use SPDX expressions and operators as above. The operators
you probably care most about are:

* ``OR``: user chooses one license to adhere to; and
* ``AND``: user has to adhere to all the licenses.

You may also care about `license exceptions
<https://spdx.org/licenses/exceptions-index.html>`_ that use the ``WITH`` operator,
e.g. ``Apache-2.0 WITH LLVM-exception``.
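
Combining the two, a hedged sketch with an invented version boundary:

.. code-block:: python

   # Hypothetical: the exception-bearing license only applies from 1.0 onwards.
   license("Apache-2.0 WITH LLVM-exception", when="@1.0:")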

@@ -6,8 +6,8 @@ python-levenshtein==0.23.0

docutils==0.18.1
pygments==2.16.1
urllib3==2.0.7
pytest==7.4.3
pytest==7.4.2
isort==5.12.0
black==23.10.1
black==23.9.1
flake8==6.1.0
mypy==1.6.1

2 lib/spack/external/__init__.py vendored

@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e)
* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)

astunparse
----------------

2 lib/spack/external/archspec/__init__.py vendored

@@ -1,2 +1,2 @@

"""Init file to avoid namespace packages"""
__version__ = "0.2.2"
__version__ = "0.2.1"

@@ -2318,26 +2318,6 @@

      ]
    }
  },
  "power10": {
    "from": ["power9"],
    "vendor": "IBM",
    "generation": 10,
    "features": [],
    "compilers": {
      "gcc": [
        {
          "versions": "11.1:",
          "flags": "-mcpu={name} -mtune={name}"
        }
      ],
      "clang": [
        {
          "versions": "11.0:",
          "flags": "-mcpu={name} -mtune={name}"
        }
      ]
    }
  },
  "ppc64le": {
    "from": [],
    "vendor": "generic",

@@ -2425,29 +2405,6 @@

      ]
    }
  },
  "power10le": {
    "from": ["power9le"],
    "vendor": "IBM",
    "generation": 10,
    "features": [],
    "compilers": {
      "gcc": [
        {
          "name": "power10",
          "versions": "11.1:",
          "flags": "-mcpu={name} -mtune={name}"
        }
      ],
      "clang": [
        {
          "versions": "11.0:",
          "family": "ppc64le",
          "name": "power10",
          "flags": "-mcpu={name} -mtune={name}"
        }
      ]
    }
  },
  "aarch64": {
    "from": [],
    "vendor": "generic",

@@ -2635,37 +2592,6 @@

      ]
    }
  },
  "armv9.0a": {
    "from": ["armv8.5a"],
    "vendor": "generic",
    "features": [],
    "compilers": {
      "gcc": [
        {
          "versions": "12:",
          "flags": "-march=armv9-a -mtune=generic"
        }
      ],
      "clang": [
        {
          "versions": "14:",
          "flags": "-march=armv9-a -mtune=generic"
        }
      ],
      "apple-clang": [
        {
          "versions": ":",
          "flags": "-march=armv9-a -mtune=generic"
        }
      ],
      "arm": [
        {
          "versions": ":",
          "flags": "-march=armv9-a -mtune=generic"
        }
      ]
    }
  },
  "thunderx2": {
    "from": ["armv8.1a"],
    "vendor": "Cavium",

@@ -2887,12 +2813,8 @@

      ],
      "arm" : [
        {
          "versions": "20:21.9",
          "versions": "20:",
          "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
        },
        {
          "versions": "22:",
          "flags" : "-mcpu=neoverse-n1"
        }
      ],
      "nvhpc" : [

@@ -3020,7 +2942,7 @@

        },
        {
          "versions": "22:",
          "flags" : "-mcpu=neoverse-v1"
          "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
        }
      ],
      "nvhpc" : [

@@ -3032,126 +2954,6 @@

      ]
    }
  },
"neoverse_v2": {
|
||||
"from": ["neoverse_n1", "armv9.0a"],
|
||||
"vendor": "ARM",
|
||||
"features": [
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"crc32",
|
||||
"atomics",
|
||||
"fphp",
|
||||
"asimdhp",
|
||||
"cpuid",
|
||||
"asimdrdm",
|
||||
"jscvt",
|
||||
"fcma",
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"sm3",
|
||||
"sm4",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"sve",
|
||||
"asimdfhm",
|
||||
"dit",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
"ssbs",
|
||||
"sb",
|
||||
"paca",
|
||||
"pacg",
|
||||
"dcpodp",
|
||||
"sve2",
|
||||
"sveaes",
|
||||
"svepmull",
|
||||
"svebitperm",
|
||||
"svesha3",
|
||||
"svesm4",
|
||||
"flagm2",
|
||||
"frint",
|
||||
"svei8mm",
|
||||
"svebf16",
|
||||
"i8mm",
|
||||
"bf16",
|
||||
"dgh",
|
||||
"bti"
|
||||
],
|
||||
"compilers" : {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "4.8:5.99",
|
||||
"flags": "-march=armv8-a"
|
||||
},
|
||||
{
|
||||
"versions": "6:6.99",
|
||||
"flags" : "-march=armv8.1-a"
|
||||
},
|
||||
{
|
||||
"versions": "7.0:7.99",
|
||||
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "8.0:8.99",
|
||||
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
|
||||
},
|
||||
{
|
||||
"versions": "9.0:9.99",
|
||||
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
|
||||
},
|
||||
{
|
||||
"versions": "10.0:11.99",
|
||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
|
||||
},
|
||||
{
|
||||
"versions": "12.0:12.99",
|
||||
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=neoverse-v2"
|
||||
}
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "9.0:10.99",
|
||||
"flags" : "-march=armv8.5-a+sve"
|
||||
},
|
||||
{
|
||||
"versions": "11.0:13.99",
|
||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
|
||||
},
|
||||
{
|
||||
"versions": "14.0:15.99",
|
||||
"flags" : "-march=armv9-a+i8mm+bf16"
|
||||
},
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"flags" : "-mcpu=neoverse-v2"
|
||||
}
|
||||
],
|
||||
"arm" : [
|
||||
{
|
||||
"versions": "23.04.0:",
|
||||
"flags" : "-mcpu=neoverse-v2"
|
||||
}
|
||||
],
|
||||
"nvhpc" : [
|
||||
{
|
||||
"versions": "23.3:",
|
||||
"name": "neoverse-v2",
|
||||
"flags": "-tp {name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"m1": {
|
||||
"from": ["armv8.4a"],
|
||||
"vendor": "Apple",
|
||||
|
||||

@@ -211,7 +211,6 @@ def info(message, *args, **kwargs):

            stream.write(line + "\n")
        else:
            stream.write(indent + _output_filter(str(arg)) + "\n")
    stream.flush()


def verbose(message, *args, **kwargs):

@@ -5,13 +5,11 @@

import codecs
import collections
import errno
import hashlib
import io
import itertools
import json
import os
import pathlib
import re
import shutil
import sys

@@ -25,7 +23,7 @@

import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Dict, List, NamedTuple, Optional, Set, Tuple
from typing import Dict, List, NamedTuple, Optional, Tuple, Union
from urllib.error import HTTPError, URLError

import llnl.util.filesystem as fsys

@@ -33,7 +31,6 @@

import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree

import spack.caches
import spack.cmd
import spack.config as config
import spack.database as spack_db

@@ -41,9 +38,6 @@

import spack.hooks
import spack.hooks.sbang
import spack.mirror
import spack.oci.image
import spack.oci.oci
import spack.oci.opener
import spack.platforms
import spack.relocate as relocate
import spack.repo

@@ -53,7 +47,6 @@

import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer

@@ -131,25 +124,25 @@ class BinaryCacheIndex:

    mean we should have paid the price to update the cache earlier?
    """

    def __init__(self, cache_root: Optional[str] = None):
        self._index_cache_root: str = cache_root or binary_index_location()
    def __init__(self, cache_root):
        self._index_cache_root = cache_root

        # the key associated with the serialized _local_index_cache
        self._index_contents_key = "contents.json"

        # a FileCache instance storing copies of remote binary cache indices
        self._index_file_cache: Optional[file_cache.FileCache] = None
        self._index_file_cache = None

        # stores a map of mirror URL to index hash and cache key (index path)
        self._local_index_cache: Optional[dict] = None
        self._local_index_cache = None

        # hashes of remote indices already ingested into the concrete spec
        # cache (_mirrors_for_spec)
        self._specs_already_associated: Set[str] = set()
        self._specs_already_associated = set()

        # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating
        # whether the fetch succeeded or not.
        self._last_fetch_times: Dict[str, float] = {}
        self._last_fetch_times = {}

        # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
        # entries indicating mirrors where that concrete spec can be found.

@@ -159,7 +152,7 @@ def __init__(self, cache_root: Optional[str] = None):

        # - the concrete spec itself, keyed by ``spec`` (including the
        # full hash, since the dag hash may match but we want to
        # use the updated source if available)
        self._mirrors_for_spec: Dict[str, dict] = {}
        self._mirrors_for_spec = {}

    def _init_local_index_cache(self):
        if not self._index_file_cache:

@@ -478,18 +471,14 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):

            FetchIndexError
        """
        # TODO: get rid of this request, handle 404 better
        scheme = urllib.parse.urlparse(mirror_url).scheme

        if scheme != "oci" and not web_util.url_exists(
        if not web_util.url_exists(
            url_util.join(mirror_url, _build_cache_relative_path, "index.json")
        ):
            return False

        if scheme == "oci":
            # TODO: Actually etag and OCI are not mutually exclusive...
            fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None))
        elif cache_entry.get("etag"):
            fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"])
        etag = cache_entry.get("etag", None)
        if etag:
            fetcher = EtagIndexFetcher(mirror_url, etag)
        else:
            fetcher = DefaultIndexFetcher(
                mirror_url, local_hash=cache_entry.get("index_hash", None)

@@ -530,8 +519,15 @@ def binary_index_location():

    return spack.util.path.canonicalize_path(cache_root)


#: Default binary cache index instance
BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex)  # type: ignore
def _binary_index():
    """Get the singleton store instance."""
    return BinaryCacheIndex(binary_index_location())


#: Singleton binary_index instance
binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
    _binary_index
)


class NoOverwriteException(spack.error.SpackError):

@@ -626,14 +622,21 @@ def build_cache_prefix(prefix):


def buildinfo_file_name(prefix):
    """Filename of the binary package meta-data file"""
    return os.path.join(prefix, ".spack", "binary_distribution")
    """
    Filename of the binary package meta-data file
    """
    return os.path.join(prefix, ".spack/binary_distribution")


def read_buildinfo_file(prefix):
    """Read buildinfo file"""
    with open(buildinfo_file_name(prefix), "r") as f:
        return syaml.load(f)
    """
    Read buildinfo file
    """
    filename = buildinfo_file_name(prefix)
    with open(filename, "r") as inputfile:
        content = inputfile.read()
        buildinfo = syaml.load(content)
    return buildinfo


class BuildManifestVisitor(BaseDirectoryVisitor):

@@ -816,6 +819,18 @@ def tarball_path_name(spec, ext):

    return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext))


def checksum_tarball(file):
    # calculate sha256 hash of tar file
    block_size = 65536
    hasher = hashlib.sha256()
    with open(file, "rb") as tfile:
        buf = tfile.read(block_size)
        while len(buf) > 0:
            hasher.update(buf)
            buf = tfile.read(block_size)
    return hasher.hexdigest()
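
# A hedged usage sketch (illustrative, not part of the diff): compare a downloaded
# tarball against the checksum recorded in its remote metadata before unpacking:
#
#     if checksum_tarball(local_tarball) != expected_sha256:
#         raise ValueError("binary cache tarball checksum mismatch")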


def select_signing_key(key=None):
    if key is None:
        keys = spack.util.gpg.signing_keys()
|
||||
@@ -1132,17 +1147,14 @@ def gzip_compressed_tarfile(path):
|
||||
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
|
||||
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
|
||||
# So we follow gzip.
|
||||
with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing(
|
||||
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum)
|
||||
) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile(
|
||||
name="", mode="w", fileobj=outer_checksum
|
||||
) as tar:
|
||||
yield tar, inner_checksum, outer_checksum
|
||||
with open(path, "wb") as fileobj, closing(
|
||||
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
|
||||
) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
|
||||
yield tar
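
# Sketch (standard library only, independent of this diff): pinning mtime=0
# and filename="" in the gzip header is what makes equal tar payloads produce
# bit-identical .tar.gz bytes, so their sha256 checksums are reproducible:
import gzip
import io


def _gzip_bytes(data: bytes) -> bytes:
    buf = io.BytesIO()
    with gzip.GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=buf) as gz:
        gz.write(data)
    return buf.getvalue()


assert _gzip_bytes(b"payload") == _gzip_bytes(b"payload")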


def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str:
    """Compute tarfile entry name as the relative path from the (system) root."""
    return _path(*_path(absolute_path).parts[1:]).as_posix()
def _tarinfo_name(p: str):
    return p.lstrip("/")
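
# Equivalence sketch for the two variants above: for plain POSIX paths both
# map an absolute path to a root-relative tar entry name (the pathlib form
# additionally normalizes separators):
import pathlib


def _name_via_pathlib(p: str) -> str:
    parts = pathlib.PurePath(p).parts[1:]
    return pathlib.PurePath(*parts).as_posix()


assert _name_via_pathlib("/opt/spack/bin/tool") == "/opt/spack/bin/tool".lstrip("/")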


def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
@@ -1222,88 +1234,8 @@ def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
        dir_stack.extend(reversed(new_dirs))  # we pop, so reverse to stay alphabetical


class ChecksumWriter(io.BufferedIOBase):
    """Checksum writer computes a checksum while writing to a file."""

    myfileobj = None

    def __init__(self, fileobj, algorithm=hashlib.sha256):
        self.fileobj = fileobj
        self.hasher = algorithm()
        self.length = 0

    def hexdigest(self):
        return self.hasher.hexdigest()

    def write(self, data):
        if isinstance(data, (bytes, bytearray)):
            length = len(data)
        else:
            data = memoryview(data)
            length = data.nbytes

        if length > 0:
            self.fileobj.write(data)
            self.hasher.update(data)

            self.length += length

        return length

    def read(self, size=-1):
        raise OSError(errno.EBADF, "read() on write-only object")

    def read1(self, size=-1):
        raise OSError(errno.EBADF, "read1() on write-only object")

    def peek(self, n):
        raise OSError(errno.EBADF, "peek() on write-only object")

    @property
    def closed(self):
        return self.fileobj is None

    def close(self):
        fileobj = self.fileobj
        if fileobj is None:
            return
        self.fileobj.close()
        self.fileobj = None

    def flush(self):
        self.fileobj.flush()

    def fileno(self):
        return self.fileobj.fileno()

    def rewind(self):
        raise OSError("Can't rewind while computing checksum")

    def readable(self):
        return False

    def writable(self):
        return True

    def seekable(self):
        return True

    def tell(self):
        return self.fileobj.tell()

    def seek(self, offset, whence=io.SEEK_SET):
        # In principle forward seek is possible with b"0" padding,
        # but this is not implemented.
        if offset == 0 and whence == io.SEEK_CUR:
            return
        raise OSError("Can't seek while computing checksum")

    def readline(self, size=-1):
        raise OSError(errno.EBADF, "readline() on write-only object")


def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
    with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum):
    with gzip_compressed_tarfile(tarfile_path) as tar:
        # Tarball the install prefix
        tarfile_of_spec_prefix(tar, binaries_dir)

@@ -1315,8 +1247,6 @@ def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
        tarinfo.mode = 0o644
        tar.addfile(tarinfo, io.BytesIO(bstring))

    return inner_checksum.hexdigest(), outer_checksum.hexdigest()


class PushOptions(NamedTuple):
    #: Overwrite existing tarball/metadata files in buildcache
@@ -1392,9 +1322,13 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
    # create info for later relocation and create tar
    buildinfo = get_buildinfo_dict(spec)

    checksum, _ = _do_create_tarball(tarfile_path, binaries_dir, buildinfo)
    _do_create_tarball(tarfile_path, binaries_dir, buildinfo)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # add sha256 checksum to spec.json

    with open(spec_file, "r") as inputfile:
        content = inputfile.read()
    if spec_file.endswith(".json"):
@@ -1437,21 +1371,10 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
    return None


class NotInstalledError(spack.error.SpackError):
    """Raised when a spec is not installed but picked to be packaged."""

    def __init__(self, specs: List[Spec]):
        super().__init__(
            "Cannot push non-installed packages",
            ", ".join(s.cformat("{name}{@version}{/hash:7}") for s in specs),
        )


def specs_to_be_packaged(
    specs: List[Spec], root: bool = True, dependencies: bool = True
) -> List[Spec]:
    """Return the list of nodes to be packaged, given a list of specs.
    Raises NotInstalledError if a spec is not installed but picked to be packaged.

    Args:
        specs: list of root specs to be processed
@@ -1459,35 +1382,19 @@ def specs_to_be_packaged(
        dependencies: include the dependencies of each
            spec in the nodes
    """

    if not root and not dependencies:
        return []
    elif dependencies:
        nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
    else:
        nodes = set(specs)

    # Filter packageable roots
    # Limit to installed non-externals.
    packageable = lambda n: not n.external and n.installed

    # Mass install check
    with spack.store.STORE.db.read_transaction():
        if root:
            # Error on uninstalled roots, when roots are requested
            uninstalled_roots = list(s for s in specs if not s.installed)
            if uninstalled_roots:
                raise NotInstalledError(uninstalled_roots)
            roots = specs
        else:
            roots = []

        if dependencies:
            # Error on uninstalled deps, when deps are requested
            deps = list(
                traverse.traverse_nodes(
                    specs, deptype="all", order="breadth", root=False, key=traverse.by_dag_hash
                )
            )
            uninstalled_deps = list(s for s in deps if not s.installed)
            if uninstalled_deps:
                raise NotInstalledError(uninstalled_deps)
        else:
            deps = []

    return [s for s in itertools.chain(roots, deps) if not s.external]
    return list(filter(packageable, nodes))
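
# Usage sketch (hypothetical list of concrete specs): with root=True and
# dependencies=True this returns the whole install DAG minus externals, and
# raises NotInstalledError if any selected node is missing from the store:
def _example_select_for_push(roots: List[Spec]) -> List[Spec]:
    return specs_to_be_packaged(roots, root=True, dependencies=True)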


def push(spec: Spec, mirror_url: str, options: PushOptions):
@@ -1595,6 +1502,8 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
    tarball = tarball_path_name(spec, ".spack")
    specfile_prefix = tarball_name(spec, ".spec")

    mirrors_to_try = []

    # Note on try_first and try_next:
    # mirrors_for_spec most likely came from spack caching remote
    # mirror indices locally and adding their specs to a local data
@@ -1607,116 +1516,63 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
    try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
    try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]

    mirrors = try_first + try_next
    for url in try_first + try_next:
        mirrors_to_try.append(
            {
                "specfile": url_util.join(url, _build_cache_relative_path, specfile_prefix),
                "spackfile": url_util.join(url, _build_cache_relative_path, tarball),
            }
        )

    tried_to_verify_sigs = []

    # Assumes we care more about finding a spec file by preferred ext
    # than by mirror priority. This can be made less complicated as
    # we remove support for deprecated spec formats and buildcache layouts.
    for try_signed in (True, False):
        for mirror in mirrors:
            # If it's an OCI index, do things differently, since we cannot compose URLs.
            parsed = urllib.parse.urlparse(mirror)
    for ext in ["json.sig", "json"]:
        for mirror_to_try in mirrors_to_try:
            specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
            spackfile_url = mirror_to_try["spackfile"]
            local_specfile_stage = try_fetch(specfile_url)
            if local_specfile_stage:
                local_specfile_path = local_specfile_stage.save_filename
                signature_verified = False

            # TODO: refactor this to some "nice" place.
            if parsed.scheme == "oci":
                ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag(
                    spack.oci.image.default_tag(spec)
                )
                if ext.endswith(".sig") and not unsigned:
                    # If we found a signed specfile at the root, try to verify
                    # the signature immediately. We will not download the
                    # tarball if we could not verify the signature.
                    tried_to_verify_sigs.append(specfile_url)
                    signature_verified = try_verify(local_specfile_path)
                    if not signature_verified:
                        tty.warn("Failed to verify: {0}".format(specfile_url))

                # Fetch the manifest
                try:
                    response = spack.oci.opener.urlopen(
                        urllib.request.Request(
                            url=ref.manifest_url(),
                            headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                        )
                    )
                except Exception:
                    continue
                if unsigned or signature_verified or not ext.endswith(".sig"):
                    # We will download the tarball in one of three cases:
                    #     1. user asked for --no-check-signature
                    #     2. user didn't ask for --no-check-signature, but we
                    #        found a spec.json.sig and verified the signature already
                    #     3. neither of the first two cases are true, but this file
                    #        is *not* a signed json (not a spec.json.sig file). That
                    #        means we already looked at all the mirrors and either didn't
                    #        find any .sig files or couldn't verify any of them. But it
                    #        is still possible to find an old style binary package where
                    #        the signature is a detached .asc file in the outer archive
                    #        of the tarball, and in that case, the only way to know is to
                    #        download the tarball. This is a deprecated use case, so if
                    #        something goes wrong during the extraction process (can't
                    #        verify signature, checksum doesn't match) we will fail at
                    #        that point instead of trying to download more tarballs from
                    #        the remaining mirrors, looking for one we can use.
                    tarball_stage = try_fetch(spackfile_url)
                    if tarball_stage:
                        return {
                            "tarball_stage": tarball_stage,
                            "specfile_stage": local_specfile_stage,
                            "signature_verified": signature_verified,
                        }

                # Download the config = spec.json and the relevant tarball
                try:
                    manifest = json.loads(response.read())
                    spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
                    tarball_digest = spack.oci.image.Digest.from_string(
                        manifest["layers"][-1]["digest"]
                    )
                except Exception:
                    continue

                with spack.oci.oci.make_stage(
                    ref.blob_url(spec_digest), spec_digest, keep=True
                ) as local_specfile_stage:
                    try:
                        local_specfile_stage.fetch()
                        local_specfile_stage.check()
                    except Exception:
                        continue
                    local_specfile_stage.cache_local()

                with spack.oci.oci.make_stage(
                    ref.blob_url(tarball_digest), tarball_digest, keep=True
                ) as tarball_stage:
                    try:
                        tarball_stage.fetch()
                        tarball_stage.check()
                    except Exception:
                        continue
                    tarball_stage.cache_local()

                return {
                    "tarball_stage": tarball_stage,
                    "specfile_stage": local_specfile_stage,
                    "signature_verified": False,
                }

            else:
                ext = "json.sig" if try_signed else "json"
                specfile_path = url_util.join(mirror, _build_cache_relative_path, specfile_prefix)
                specfile_url = f"{specfile_path}.{ext}"
                spackfile_url = url_util.join(mirror, _build_cache_relative_path, tarball)
                local_specfile_stage = try_fetch(specfile_url)
                if local_specfile_stage:
                    local_specfile_path = local_specfile_stage.save_filename
                    signature_verified = False

                    if try_signed and not unsigned:
                        # If we found a signed specfile at the root, try to verify
                        # the signature immediately. We will not download the
                        # tarball if we could not verify the signature.
                        tried_to_verify_sigs.append(specfile_url)
                        signature_verified = try_verify(local_specfile_path)
                        if not signature_verified:
                            tty.warn("Failed to verify: {0}".format(specfile_url))

                    if unsigned or signature_verified or not try_signed:
                        # We will download the tarball in one of three cases:
                        #     1. user asked for --no-check-signature
                        #     2. user didn't ask for --no-check-signature, but we
                        #        found a spec.json.sig and verified the signature already
                        #     3. neither of the first two cases are true, but this file
                        #        is *not* a signed json (not a spec.json.sig file). That
                        #        means we already looked at all the mirrors and either didn't
                        #        find any .sig files or couldn't verify any of them. But it
                        #        is still possible to find an old style binary package where
                        #        the signature is a detached .asc file in the outer archive
                        #        of the tarball, and in that case, the only way to know is to
                        #        download the tarball. This is a deprecated use case, so if
                        #        something goes wrong during the extraction process (can't
                        #        verify signature, checksum doesn't match) we will fail at
                        #        that point instead of trying to download more tarballs from
                        #        the remaining mirrors, looking for one we can use.
                        tarball_stage = try_fetch(spackfile_url)
                        if tarball_stage:
                            return {
                                "tarball_stage": tarball_stage,
                                "specfile_stage": local_specfile_stage,
                                "signature_verified": signature_verified,
                            }

                    local_specfile_stage.destroy()
                local_specfile_stage.destroy()

    # Falling through the nested loops means we exhaustively searched
    # for all known kinds of spec files on all mirrors and did not find
@@ -1949,7 +1805,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
    )

    # compute the sha256 checksum of the tarball
    local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
    local_checksum = checksum_tarball(tarfile_path)
    expected = remote_checksum["hash"]

    # if the checksums don't match don't install
@@ -2010,7 +1866,6 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
        spec_dict = sjson.load(content)

    bchecksum = spec_dict["binary_cache_checksum"]

    filename = download_result["tarball_stage"].save_filename
    signature_verified = download_result["signature_verified"]
    tmpdir = None
@@ -2043,7 +1898,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
    )

    # compute the sha256 checksum of the tarball
    local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
    local_checksum = checksum_tarball(tarfile_path)
    expected = bchecksum["hash"]

    # if the checksums don't match don't install
@@ -2249,7 +2104,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
        tty.debug("No Spack mirrors are currently configured")
        return {}

    results = BINARY_INDEX.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
    results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)

    # The index may be out-of-date. If we aren't only considering indices, try
    # to fetch directly since we know where the file should be.
@@ -2258,7 +2113,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
    # We found a spec by the direct fetch approach, we might as well
    # add it to our mapping.
    if results:
        BINARY_INDEX.update_spec(spec, results)
        binary_index.update_spec(spec, results)

    return results

@@ -2274,12 +2129,12 @@ def update_cache_and_get_specs():
    Throws:
        FetchCacheError
    """
    BINARY_INDEX.update()
    return BINARY_INDEX.get_all_built_specs()
    binary_index.update()
    return binary_index.get_all_built_specs()


def clear_spec_cache():
    BINARY_INDEX.clear()
    binary_index.clear()


def get_keys(install=False, trust=False, force=False, mirrors=None):
@@ -2602,7 +2457,7 @@ def get_remote_hash(self):
            return None
        return remote_hash.decode("utf-8")

    def conditional_fetch(self) -> FetchIndexResult:
    def conditional_fetch(self):
        # Do an intermediate fetch for the hash
        # and a conditional fetch for the contents

@@ -2616,12 +2471,12 @@ def conditional_fetch(self) -> FetchIndexResult:
        try:
            response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
        except urllib.error.URLError as e:
            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
            raise FetchIndexError("Could not fetch index from {}".format(url_index), e)

        try:
            result = codecs.getreader("utf-8")(response).read()
        except ValueError as e:
            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
            return FetchCacheError("Remote index {} is invalid".format(url_index), e)

        computed_hash = compute_hash(result)

@@ -2653,7 +2508,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
        self.etag = etag
        self.urlopen = urlopen

    def conditional_fetch(self) -> FetchIndexResult:
    def conditional_fetch(self):
        # Just do a conditional fetch immediately
        url = url_util.join(self.url, _build_cache_relative_path, "index.json")
        headers = {
@@ -2684,59 +2539,3 @@ def conditional_fetch(self) -> FetchIndexResult:
            data=result,
            fresh=False,
        )


class OCIIndexFetcher:
    def __init__(self, url: str, local_hash, urlopen=None) -> None:
        self.local_hash = local_hash

        # Remove oci:// prefix
        assert url.startswith("oci://")
        self.ref = spack.oci.image.ImageReference.from_string(url[6:])
        self.urlopen = urlopen or spack.oci.opener.urlopen

    def conditional_fetch(self) -> FetchIndexResult:
        """Download an index from an OCI registry type mirror."""
        url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
        try:
            response = self.urlopen(
                urllib.request.Request(
                    url=url_manifest,
                    headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                )
            )
        except urllib.error.URLError as e:
            raise FetchIndexError(
                "Could not fetch manifest from {}".format(url_manifest), e
            ) from e

        try:
            manifest = json.loads(response.read())
        except Exception as e:
            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e

        # Get first blob hash, which should be the index.json
        try:
            index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
        except Exception as e:
            raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e

        # Fresh?
        if index_digest.digest == self.local_hash:
            return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

        # Otherwise fetch the blob / index.json
        response = self.urlopen(
            urllib.request.Request(
                url=self.ref.blob_url(index_digest),
                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
            )
        )

        result = codecs.getreader("utf-8")(response).read()

        # Make sure the blob we download has the advertised hash
        if compute_hash(result) != index_digest.digest:
            raise FetchIndexError(f"Remote index {url_manifest} is invalid")

        return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)

@@ -143,9 +143,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.compilers_for_arch(arch):
        new_compilers = spack.compilers.find_new_compilers(
            mixed_toolchain=sys.platform == "darwin"
        )
        new_compilers = spack.compilers.find_new_compilers()
        if new_compilers:
            spack.compilers.add_compilers_to_config(new_compilers, init_config=False)


@@ -214,7 +214,7 @@ def _install_and_test(
        with spack.config.override(self.mirror_scope):
            # This index is currently needed to get the compiler used to build some
            # specs that we know by dag hash.
            spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache()
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()

            if not index:
@@ -291,10 +291,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
            # This is needed to help the old concretizer take the `setuptools` dependency
            # only when bootstrapping from sources on Python 3.12
            if spec_for_current_python() == "python@3.12":
                concrete_spec.constrain("+force_setuptools")

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme

@@ -752,13 +752,19 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
        target = platform.target(pkg.spec.architecture.target)
        platform.setup_platform_environment(pkg, env_mods)

    if context == Context.TEST:
    if context == Context.BUILD:
        tty.debug("setup_package: setup build environment for root")
        builder = spack.builder.create(pkg)
        builder.setup_build_environment(env_mods)

        if (not dirty) and (not env_mods.is_unset("CPATH")):
            tty.debug(
                "A dependency has updated CPATH, this may lead pkg-"
                "config to assume that the package is part of the system"
                " includes and omit it when invoked with '--cflags'."
            )
    elif context == Context.TEST:
        env_mods.prepend_path("PATH", ".")
    elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
        tty.debug(
            "A dependency has updated CPATH, this may lead pkg-config to assume that the package "
            "is part of the system includes and omit it when invoked with '--cflags'."
        )

    # First apply the clean environment changes
    env_base.apply_modifications()
@@ -947,11 +953,8 @@ def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
            reversed(specs_with_type), lambda t: t[0].external
        )
        self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE
        self.should_setup_run_env = (
            UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
        )
        self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
        self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT
        self.should_setup_build_env = UseMode.ROOT if context == Context.BUILD else UseMode(0)

        if context == Context.RUN or context == Context.TEST:
            self.should_be_runnable |= UseMode.ROOT
@@ -991,9 +994,8 @@ def get_env_modifications(self) -> EnvironmentModifications:
        - Updating PATH for packages that are required at runtime
        - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
          tools can find Spack-built dependencies (when context=build)
        - Running custom package environment modifications: setup_run_environment,
          setup_dependent_run_environment, setup_build_environment,
          setup_dependent_build_environment.
        - Running custom package environment modifications (setup_run_environment,
          setup_dependent_build_environment, setup_dependent_run_environment)

        The (partial) order imposed on the specs is externals first, then topological
        from leaf to root. That way externals cannot contribute search paths that would shadow
@@ -1006,27 +1008,19 @@ def get_env_modifications(self) -> EnvironmentModifications:
            if self.should_setup_dependent_build_env & flag:
                self._make_buildtime_detectable(dspec, env)

                for root in self.specs:  # there is only one root in build context
                    spack.builder.create(pkg).setup_dependent_build_environment(env, root)

            if self.should_setup_build_env & flag:
                spack.builder.create(pkg).setup_build_environment(env)
                for spec in self.specs:
                    builder = spack.builder.create(pkg)
                    builder.setup_dependent_build_environment(env, spec)

            if self.should_be_runnable & flag:
                self._make_runnable(dspec, env)

            if self.should_setup_run_env & flag:
                run_env_mods = EnvironmentModifications()
                for spec in dspec.dependents(deptype=dt.LINK | dt.RUN):
                # TODO: remove setup_dependent_run_environment...
                for spec in dspec.dependents(deptype=dt.RUN):
                    if id(spec) in self.nodes_in_subdag:
                        pkg.setup_dependent_run_environment(run_env_mods, spec)
                pkg.setup_run_environment(run_env_mods)
                if self.context == Context.BUILD:
                    # Don't let the runtime environment of compiler-like dependencies leak
                    # into the build env
                    run_env_mods.drop("CC", "CXX", "F77", "FC")
                env.extend(run_env_mods)

                        pkg.setup_dependent_run_environment(env, spec)
                pkg.setup_run_environment(env)
        return env

    def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications):

@@ -6,6 +6,7 @@
import os
import re
import shutil
import stat
from typing import Optional

import archspec
@@ -23,29 +24,14 @@
import spack.spec
import spack.store
from spack.directives import build_system, depends_on, extends, maintainers
from spack.error import NoHeadersError, NoLibrariesError
from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.install_test import test_part
from spack.util.executable import Executable
from spack.version import Version

from ._checks import BaseBuilder, execute_install_time_tests


def _flatten_dict(dictionary):
    """Iterable that yields KEY=VALUE paths through a dictionary.

    Args:
        dictionary: Possibly nested dictionary of arbitrary keys and values.

    Yields:
        A single path through the dictionary.
    """
    for key, item in dictionary.items():
        if isinstance(item, dict):
            # Recursive case
            for value in _flatten_dict(item):
                yield f"{key}={value}"
        else:
            # Base case
            yield f"{key}={item}"


class PythonExtension(spack.package_base.PackageBase):
    maintainers("adamjstewart")

@@ -367,6 +353,51 @@ def libs(self):
        raise NoLibrariesError(msg.format(self.spec.name, root))


def fixup_shebangs(path: str, old_interpreter: bytes, new_interpreter: bytes):
    # Recurse into the install prefix and fixup shebangs
    exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    dirs = [path]
    hardlinks = set()

    while dirs:
        with os.scandir(dirs.pop()) as entries:
            for entry in entries:
                if entry.is_dir(follow_symlinks=False):
                    dirs.append(entry.path)
                    continue

                # Only consider files, not symlinks
                if not entry.is_file(follow_symlinks=False):
                    continue

                lstat = entry.stat(follow_symlinks=False)

                # Skip over files that are not executable
                if not (lstat.st_mode & exe):
                    continue

                # Don't modify hardlinks more than once
                if lstat.st_nlink > 1:
                    key = (lstat.st_ino, lstat.st_dev)
                    if key in hardlinks:
                        continue
                    hardlinks.add(key)

                # Finally replace shebangs if any.
                with open(entry.path, "rb+") as f:
                    contents = f.read(2)
                    if contents != b"#!":
                        continue
                    contents += f.read()

                    if old_interpreter not in contents:
                        continue

                    f.seek(0)
                    f.write(contents.replace(old_interpreter, new_interpreter))
                    f.truncate()
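
# Usage sketch with hypothetical paths: rewrite executables whose shebang
# still points at the throwaway build venv so they invoke the real
# interpreter instead (both interpreter arguments are paths as bytes):
#
#   fixup_shebangs(
#       "/opt/spack/prefix",                    # hypothetical install prefix
#       b"/tmp/stage/build_env/bin/python",     # venv interpreter
#       b"/usr/bin/python3.11",                 # underlying interpreter
#   )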


@spack.builder.builder("python_pip")
class PythonPipBuilder(BaseBuilder):
    phases = ("install",)
@@ -378,7 +409,7 @@ class PythonPipBuilder(BaseBuilder):
    legacy_long_methods = ("install_options", "global_options", "config_settings")

    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")
    legacy_attributes = ("build_directory", "install_time_test_callbacks")

    #: Callback names for install-time test
    install_time_test_callbacks = ["test"]
@@ -423,15 +454,14 @@ def build_directory(self):
    def config_settings(self, spec, prefix):
        """Configuration settings to be passed to the PEP 517 build backend.

        Requires pip 22.1 or newer for keys that appear only a single time,
        or pip 23.1 or newer if the same key appears multiple times.
        Requires pip 22.1 or newer.

        Args:
            spec (spack.spec.Spec): build spec
            prefix (spack.util.prefix.Prefix): installation prefix

        Returns:
            dict: Possibly nested dictionary of KEY, VALUE settings
            dict: dictionary of KEY, VALUE settings
        """
        return {}
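
    # A hypothetical override in a package subclass, matching the nested-dict
    # form described above (package and option names are illustrative only):
    #
    #     def config_settings(self, spec, prefix):
    #         return {"builddir": "build", "setup-args": {"-Dsystem-libs": "false"}}
    #
    # _flatten_dict turns this into --config-settings=builddir=build and
    # --config-settings=setup-args=-Dsystem-libs=false on the pip command line.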

@@ -464,32 +494,84 @@ def global_options(self, spec, prefix):
        """
        return []

    @property
    def _build_venv_path(self):
        """Return the path to the virtual environment used for building when
        python is external."""
        return os.path.join(self.spec.package.stage.path, "build_env")

    @property
    def _build_venv_python(self) -> Executable:
        """Return the Python executable in the build virtual environment when
        python is external."""
        return Executable(os.path.join(self._build_venv_path, "bin", "python"))

    def install(self, pkg, spec, prefix):
        """Install everything from build directory."""
        python: Executable = spec["python"].command
        # Since we invoke pip with --no-build-isolation, we have to make sure that pip cannot
        # execute hooks from user and system site-packages.
        if spec["python"].external:
            # There are no environment variables to disable the system site-packages, so we use a
            # virtual environment instead. The downside of this approach is that pip produces
            # incorrect shebangs that refer to the virtual environment, which we have to fix up.
            python("-m", "venv", "--without-pip", self._build_venv_path)
            pip = self._build_venv_python
        else:
            # For a Spack managed Python, system site-packages is empty/unused by design, so it
            # suffices to disable user site-packages, for which there is an environment variable.
            pip = python
            pip.add_default_env("PYTHONNOUSERSITE", "1")
        pip.add_default_arg("-m")
        pip.add_default_arg("pip")

        args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
        args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]

        for key, value in self.config_settings(spec, prefix).items():
            if spec["py-pip"].version < Version("22.1"):
                raise SpecError(
                    "'{}' package uses 'config_settings' which is only supported by "
                    "pip 22.1+. Add the following line to the package to fix this:\n\n"
                    '    depends_on("py-pip@22.1:", type="build")'.format(spec.name)
                )

            args.append("--config-settings={}={}".format(key, value))

        for setting in _flatten_dict(self.config_settings(spec, prefix)):
            args.append(f"--config-settings={setting}")
        for option in self.install_options(spec, prefix):
            args.append(f"--install-option={option}")
            args.append("--install-option=" + option)
        for option in self.global_options(spec, prefix):
            args.append(f"--global-option={option}")
            args.append("--global-option=" + option)

        if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"):
            args.append(pkg.stage.archive_file)
        else:
            args.append(".")

        pip = spec["python"].command
        # Hide user packages, since we don't have build isolation. This is
        # necessary because pip / setuptools may run hooks from arbitrary
        # packages during the build. There is no equivalent variable to hide
        # system packages, so this is not reliable for external Python.
        pip.add_default_env("PYTHONNOUSERSITE", "1")
        pip.add_default_arg("-m")
        pip.add_default_arg("pip")
        with fs.working_dir(self.build_directory):
            pip(*args)

    @spack.builder.run_after("install")
    def fixup_shebangs_pointing_to_build(self):
        """When installing a package using an external python, we use a temporary virtual
        environment which improves build isolation. The downside is that pip produces shebangs
        that point to the temporary virtual environment. This method fixes them up to point to the
        underlying Python."""
        # No need to fixup shebangs if no build venv was used. (this post install function also
        # runs when install was overridden in another package, so check existence of the venv path)
        if not os.path.exists(self._build_venv_path):
            return

        # Use sys.executable, since that's what pip uses.
        interpreter = (
            lambda python: python("-c", "import sys; print(sys.executable)", output=str)
            .strip()
            .encode("utf-8")
        )

        fixup_shebangs(
            path=self.spec.prefix,
            old_interpreter=interpreter(self._build_venv_python),
            new_interpreter=interpreter(self.spec["python"].command),
        )

    spack.builder.run_after("install")(execute_install_time_tests)

@@ -25,7 +25,6 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.tty.color import cescape, colorize

import spack
import spack.binary_distribution as bindist
@@ -46,22 +45,7 @@
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import build_stamp as cdash_build_stamp

# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
    # "always",
    "unknown_failure",
    "script_failure",
    "api_failure",
    "stuck_or_timeout_failure",
    "runner_system_failure",
    "runner_unsupported",
    "stale_schedule",
    # "job_execution_timeout",
    "archived_failure",
    "unmet_prerequisites",
    "scheduler_failure",
    "data_integrity_failure",
]
JOB_RETRY_CONDITIONS = ["always"]

TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
@@ -113,6 +97,15 @@ def _remove_reserved_tags(tags):
    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


def _get_spec_string(spec):
    format_elements = ["{name}{@version}", "{%compiler}"]

    if spec.architecture:
        format_elements.append(" {arch=architecture}")

    return spec.format("".join(format_elements))


def _spec_deps_key(s):
    return "{0}/{1}".format(s.name, s.dag_hash(7))

@@ -217,22 +210,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi

    tty.msg("Staging summary ([x] means a job needs rebuilding):")
    for stage_index, stage in enumerate(stages):
        tty.msg(f"  stage {stage_index} ({len(stage)} jobs):")
        tty.msg("  stage {0} ({1} jobs):".format(stage_index, len(stage)))

        for job in sorted(stage, key=lambda j: (not rebuild_decisions[j].rebuild, j)):
        for job in sorted(stage):
            s = spec_labels[job]
            rebuild = rebuild_decisions[job].rebuild
            reason = rebuild_decisions[job].reason
            reason_msg = f" ({reason})" if reason else ""
            spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
            if rebuild_decisions[job].rebuild:
                status = colorize("@*g{[x]}  ")
                msg = f"  {status}{s.cformat(spec_fmt)}{reason_msg}"
            else:
                msg = f"{s.format(spec_fmt)}{reason_msg}"
                if rebuild_decisions[job].mirrors:
                    msg += f" [{', '.join(rebuild_decisions[job].mirrors)}]"
                msg = colorize(f"  @K -   {cescape(msg)}@.")
            tty.msg(msg)
            reason_msg = " ({0})".format(reason) if reason else ""
            tty.msg(
                "      [{1}] {0} -> {2}{3}".format(
                    job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
                )
            )
            if rebuild_decisions[job].mirrors:
                tty.msg("          found on the following mirrors:")
                for murl in rebuild_decisions[job].mirrors:
                    tty.msg("            {0}".format(murl))


def _compute_spec_deps(spec_list):
@@ -939,7 +932,7 @@ def generate_gitlab_ci_yaml(

    # Speed up staging by first fetching binary indices from all mirrors
    try:
        bindist.BINARY_INDEX.update()
        bindist.binary_index.update()
    except bindist.FetchCacheError as e:
        tty.warn(e)

@@ -2265,13 +2258,13 @@ def build_name(self):
                spec.architecture,
                self.build_group,
            )
            tty.debug(
            tty.verbose(
                "Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
            )
            return build_name

        build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
        tty.debug("Using CDash build name ({0}) from the environment".format(build_name))
        tty.verbose("Using CDash build name ({0}) from the environment".format(build_name))
        return build_name

    @property  # type: ignore
@@ -2285,11 +2278,11 @@ def build_stamp(self):
        Returns: (str) current CDash build stamp"""
        build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
        if build_stamp:
            tty.debug("Using build stamp ({0}) from the environment".format(build_stamp))
            tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp))
            return build_stamp

        build_stamp = cdash_build_stamp(self.build_group, time.time())
        tty.debug("Generated new build stamp ({0})".format(build_stamp))
        tty.verbose("Generated new build stamp ({0})".format(build_stamp))
        return build_stamp

    @property  # type: ignore

@@ -3,19 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import copy
import glob
import hashlib
import json
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple
from typing import List

import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.string import plural
from llnl.util.lang import elide_list

@@ -25,37 +22,17 @@
import spack.config
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.mirror
import spack.oci.oci
import spack.oci.opener
import spack.relocate
import spack.repo
import spack.spec
import spack.stage
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.url as url_util
import spack.util.web as web_util
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.oci.image import (
    Digest,
    ImageReference,
    default_config,
    default_index_tag,
    default_manifest,
    default_tag,
    tag_is_spec,
)
from spack.oci.oci import (
    copy_missing_layers_with_retry,
    get_manifest_and_config_with_retry,
    upload_blob_with_retry,
    upload_manifest_with_retry,
)
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage

description = "create, download and install binary packages"
section = "packaging"
@@ -81,9 +58,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
    push_sign.add_argument(
        "--key", "-k", metavar="key", type=str, default=None, help="key for signing"
    )
    push.add_argument(
        "mirror", type=arguments.mirror_name_or_url, help="mirror name, path, or URL"
    )
    push.add_argument("mirror", type=str, help="mirror name, path, or URL")
    push.add_argument(
        "--update-index",
        "--rebuild-index",
@@ -109,10 +84,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
        action="store_true",
        help="stop pushing on first failure (default is best effort)",
    )
    push.add_argument(
        "--base-image", default=None, help="specify the base image for the buildcache. "
    )
    arguments.add_common_arguments(push, ["specs", "jobs"])
    arguments.add_common_arguments(push, ["specs"])
    push.set_defaults(func=push_fn)

    install = subparsers.add_parser("install", help=install_fn.__doc__)
@@ -296,22 +268,7 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]:
    return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]


def _format_spec(spec: Spec) -> str:
    return spec.cformat("{name}{@version}{/hash:7}")


def _progress(i: int, total: int):
    if total > 1:
        digits = len(str(total))
        return f"[{i+1:{digits}}/{total}] "
    return ""


def _make_pool():
    return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))


def push_fn(args):
def push_fn(args: argparse.Namespace):
    """create a binary package and push it to a mirror"""
    if args.spec_file:
        tty.warn(
@@ -324,80 +281,63 @@ def push_fn(args):
    else:
        specs = spack.cmd.require_active_env("buildcache push").all_specs()

    mirror = arguments.mirror_name_or_url(args.mirror)

    if args.allow_root:
        tty.warn(
            "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
        )

    # Check if this is an OCI image.
    try:
        image_ref = spack.oci.oci.image_from_mirror(args.mirror)
    except ValueError:
        image_ref = None
    url = mirror.push_url

    # For OCI images, we require dependencies to be pushed for now.
    if image_ref:
        if "dependencies" not in args.things_to_install:
            tty.die("Dependencies must be pushed for OCI images.")
        if not args.unsigned:
            tty.warn(
                "Code signing is currently not supported for OCI images. "
                "Use --unsigned to silence this warning."
            )

    # This is a list of installed, non-external specs.
    specs = bindist.specs_to_be_packaged(
        specs,
        root="package" in args.things_to_install,
        dependencies="dependencies" in args.things_to_install,
    )

    url = args.mirror.push_url

    # When pushing multiple specs, print the url once ahead of time, as well as how
    # many specs are being pushed.
    if len(specs) > 1:
        tty.info(f"Selected {len(specs)} specs to push to {url}")

    skipped = []
    failed = []

    # TODO: unify this logic in the future.
    if image_ref:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            skipped = _push_oci(args, image_ref, specs, tmpdir, pool)
    else:
    skipped = []
    # tty printing
    color = clr.get_color_when()
    format_spec = lambda s: s.format("{name}{@version}{/hash:7}", color=color)
    total_specs = len(specs)
    digits = len(str(total_specs))

        for i, spec in enumerate(specs):
            try:
                bindist.push_or_raise(
                    spec,
                    url,
                    bindist.PushOptions(
                        force=args.force,
                        unsigned=args.unsigned,
                        key=args.key,
                        regenerate_index=args.update_index,
                    ),
                )
    for i, spec in enumerate(specs):
        try:
            bindist.push_or_raise(
                spec,
                url,
                bindist.PushOptions(
                    force=args.force,
                    unsigned=args.unsigned,
                    key=args.key,
                    regenerate_index=args.update_index,
                ),
            )

                msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
                if len(specs) == 1:
                    msg += f" to {url}"
                tty.info(msg)
            if total_specs > 1:
                msg = f"[{i+1:{digits}}/{total_specs}] Pushed {format_spec(spec)}"
            else:
                msg = f"Pushed {format_spec(spec)} to {url}"

            except bindist.NoOverwriteException:
                skipped.append(_format_spec(spec))
            tty.info(msg)

            # Catch any other exception unless the fail fast option is set
            except Exception as e:
                if args.fail_fast or isinstance(
                    e, (bindist.PickKeyException, bindist.NoKeyException)
                ):
                    raise
                failed.append((_format_spec(spec), e))
        except bindist.NoOverwriteException:
            skipped.append(format_spec(spec))

        # Catch any other exception unless the fail fast option is set
        except Exception as e:
            if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
                raise
            failed.append((format_spec(spec), e))

    if skipped:
        if len(specs) == 1:
@@ -424,341 +364,6 @@ def push_fn(args):
            ),
        )

    # Update the index if requested
    # TODO: move the update-index logic out of bindist; it should run once after
    # all specs are pushed, not once per spec.
    if image_ref and len(skipped) < len(specs) and args.update_index:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            _update_index_oci(image_ref, tmpdir, pool)


def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
    """Get the spack tarball layer digests and size, if they exist"""
    try:
        manifest, config = get_manifest_and_config_with_retry(image_ref)

        return spack.oci.oci.Blob(
            compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
            uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
            size=manifest["layers"][-1]["size"],
        )
    except Exception:
        return None


def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
    filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")

    # Create an oci.image.layer aka tarball of the package
    compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)

    blob = spack.oci.oci.Blob(
        Digest.from_sha256(compressed_tarfile_checksum),
        Digest.from_sha256(tarfile_checksum),
        os.path.getsize(filename),
    )

    # Upload the blob
    upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)

    # delete the file
    os.unlink(filename)

    return blob


def _retrieve_env_dict_from_config(config: dict) -> dict:
    """Retrieve the environment variables from the image config file.
    Sets a default value for PATH if it is not present.

    Args:
        config (dict): The image config file.

    Returns:
        dict: The environment variables.
    """
    env = {"PATH": "/bin:/usr/bin"}

    if "Env" in config.get("config", {}):
        for entry in config["config"]["Env"]:
            key, value = entry.split("=", 1)
            env[key] = value
    return env
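
# Behavior sketch: "Env" entries are split on the first "=" only, and PATH
# falls back to a default when the image config has no environment at all:
assert _retrieve_env_dict_from_config({})["PATH"] == "/bin:/usr/bin"
assert _retrieve_env_dict_from_config({"config": {"Env": ["LANG=C.UTF-8"]}})["LANG"] == "C.UTF-8"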


def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
    name = spec.target.family.name
    name_map = {"aarch64": "arm64", "x86_64": "amd64"}
    return name_map.get(name, name)
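
# Mapping sketch, restated without needing a Spec: archspec family names are
# translated to Go/OCI GOARCH names; anything else passes through unchanged:
def _gooarch_name(family_name: str) -> str:
    return {"aarch64": "arm64", "x86_64": "amd64"}.get(family_name, family_name)


assert _gooarch_name("x86_64") == "amd64" and _gooarch_name("ppc64le") == "ppc64le"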
|
||||
|
||||
|
||||
def _put_manifest(
|
||||
base_images: Dict[str, Tuple[dict, dict]],
|
||||
checksums: Dict[str, spack.oci.oci.Blob],
|
||||
spec: spack.spec.Spec,
|
||||
image_ref: ImageReference,
|
||||
tmpdir: str,
|
||||
):
|
||||
architecture = _archspec_to_gooarch(spec)
|
||||
|
||||
dependencies = list(
|
||||
reversed(
|
||||
list(
|
||||
s
|
||||
for s in spec.traverse(order="topo", deptype=("link", "run"), root=True)
|
||||
if not s.external
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
base_manifest, base_config = base_images[architecture]
|
||||
env = _retrieve_env_dict_from_config(base_config)
|
||||
|
||||
spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env)
|
||||
|
||||
# Create an oci.image.config file
|
||||
config = copy.deepcopy(base_config)
|
||||
|
||||
# Add the diff ids of the dependencies
|
||||
for s in dependencies:
|
||||
config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
|
||||
|
||||
# Set the environment variables
|
||||
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
|
||||
|
||||
# From the OCI v1.0 spec:
|
||||
# > Any extra fields in the Image JSON struct are considered implementation
|
||||
# > specific and MUST be ignored by any implementations which are unable to
|
||||
# > interpret them.
|
||||
# We use this to store the Spack spec, so we can use it to create an index.
|
||||
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
||||
spec_dict["buildcache_layout_version"] = 1
|
||||
spec_dict["binary_cache_checksum"] = {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
|
||||
}
|
||||
config.update(spec_dict)
|
||||
|
||||
config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json")
|
||||
|
||||
with open(config_file, "w") as f:
|
||||
json.dump(config, f, separators=(",", ":"))
|
||||
|
||||
config_file_checksum = Digest.from_sha256(
|
||||
spack.util.crypto.checksum(hashlib.sha256, config_file)
|
||||
)
|
||||
|
||||
# Upload the config file
|
||||
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
|
||||
|
||||
oci_manifest = {
|
||||
"mediaType": "application/vnd.oci.image.manifest.v1+json",
|
||||
"schemaVersion": 2,
|
||||
"config": {
|
||||
"mediaType": base_manifest["config"]["mediaType"],
|
||||
"digest": str(config_file_checksum),
|
||||
"size": os.path.getsize(config_file),
|
||||
},
|
||||
"layers": [
|
||||
*(layer for layer in base_manifest["layers"]),
|
||||
*(
|
||||
{
|
||||
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
|
||||
"digest": str(checksums[s.dag_hash()].compressed_digest),
|
||||
"size": checksums[s.dag_hash()].size,
|
||||
}
|
||||
for s in dependencies
|
||||
),
|
||||
],
|
||||
"annotations": {"org.opencontainers.image.description": spec.format()},
|
||||
}
|
||||
|
||||
image_ref_for_spec = image_ref.with_tag(default_tag(spec))
|
||||
|
||||
# Finally upload the manifest
|
||||
    upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest)

    # delete the config file
    os.unlink(config_file)

    return image_ref_for_spec


def _push_oci(
    args,
    image_ref: ImageReference,
    installed_specs_with_deps: List[Spec],
    tmpdir: str,
    pool: multiprocessing.pool.Pool,
) -> List[str]:
    """Push specs to an OCI registry

    Args:
        args: The command line arguments.
        image_ref: The image reference.
        installed_specs_with_deps: The installed specs to push, excluding externals,
            including deps, ordered from roots to leaves.

    Returns:
        List[str]: The list of skipped specs (already in the buildcache).
    """

    # Reverse the order
    installed_specs_with_deps = list(reversed(installed_specs_with_deps))

    # The base image to use for the package. When not set, we use
    # the OCI registry only for storage, and do not use any base image.
    base_image_ref: Optional[ImageReference] = (
        ImageReference.from_string(args.base_image) if args.base_image else None
    )

    # Spec dag hash -> blob
    checksums: Dict[str, spack.oci.oci.Blob] = {}

    # arch -> (manifest, config)
    base_images: Dict[str, Tuple[dict, dict]] = {}

    # Specs not uploaded because they already exist
    skipped = []

    if not args.force:
        tty.info("Checking for existing specs in the buildcache")
        to_be_uploaded = []

        tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps)
        available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)

        for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
            if maybe_blob is not None:
                checksums[spec.dag_hash()] = maybe_blob
                skipped.append(_format_spec(spec))
            else:
                to_be_uploaded.append(spec)
    else:
        to_be_uploaded = installed_specs_with_deps

    if not to_be_uploaded:
        return skipped

    tty.info(
        f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}"
    )

    # Upload blobs
    new_blobs = pool.starmap(
        _push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded)
    )

    # And update the spec to blob mapping
    for spec, blob in zip(to_be_uploaded, new_blobs):
        checksums[spec.dag_hash()] = blob

    # Copy base image layers, probably fine to do sequentially.
    for spec in to_be_uploaded:
        architecture = _archspec_to_gooarch(spec)
        # Get base image details, if we don't have them yet
        if architecture in base_images:
            continue
        if base_image_ref is None:
            base_images[architecture] = (default_manifest(), default_config(architecture, "linux"))
        else:
            base_images[architecture] = copy_missing_layers_with_retry(
                base_image_ref, image_ref, architecture
            )

    # Upload manifests
    tty.info("Uploading manifests")
    pushed_image_ref = pool.starmap(
        _put_manifest,
        ((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded),
    )

    # Print the image names of the top-level specs
    for spec, ref in zip(to_be_uploaded, pushed_image_ref):
        tty.info(f"Pushed {_format_spec(spec)} to {ref}")

    return skipped


def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
    # Don't allow recursion here, since Spack itself always uploads
    # vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
    _, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)

    # Do very basic validation: if "spec" is a key in the config, it
    # must be a Spec object too.
    return config if "spec" in config else None


def _update_index_oci(
    image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool
) -> None:
    response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url()))
    spack.oci.opener.ensure_status(response, 200)
    tags = json.load(response)["tags"]

    # Fetch all image config files in parallel
    spec_dicts = pool.starmap(
        _config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
    )

    # Populate the database
    db_root_dir = os.path.join(tmpdir, "db_root")
    db = bindist.BuildCacheDatabase(db_root_dir)

    for spec_dict in spec_dicts:
        spec = Spec.from_dict(spec_dict)
        db.add(spec, directory_layout=None)
        db.mark(spec, "in_buildcache", True)

    # Create the index.json file
    index_json_path = os.path.join(tmpdir, "index.json")
    with open(index_json_path, "w") as f:
        db._write_to_file(f)

    # Create an empty config.json file
    empty_config_json_path = os.path.join(tmpdir, "config.json")
    with open(empty_config_json_path, "wb") as f:
        f.write(b"{}")

    # Upload the index.json file
    index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
    upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)

    # Upload the config.json file
    empty_config_digest = Digest.from_sha256(
        spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
    )
    upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)

    # Push a manifest file that references the index.json file as a layer
    # Notice that we push this as if it is an image, which it of course is not.
    # When the ORAS spec becomes official, we can use that instead of a fake image.
    # For now we just use the OCI image spec, so that we don't run into issues with
    # automatic garbage collection of blobs that are not referenced by any image manifest.
    oci_manifest = {
        "mediaType": "application/vnd.oci.image.manifest.v1+json",
        "schemaVersion": 2,
        # Config is just an empty {} file for now, and irrelevant
        "config": {
            "mediaType": "application/vnd.oci.image.config.v1+json",
            "digest": str(empty_config_digest),
            "size": os.path.getsize(empty_config_json_path),
        },
        # The buildcache index is the only layer, and is not a tarball, we lie here.
        "layers": [
            {
                "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
                "digest": str(index_shasum),
                "size": os.path.getsize(index_json_path),
            }
        ],
    }

    upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)

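A note on consumption, a sketch that is not part of this changeset: the manifest pushed above behaves like any other image manifest, so a client can locate the index blob through the tag. The snippet below reuses helpers already imported in this file; treat `Digest.from_string` parsing a `sha256:...` string as an assumption.

    # resolve the manifest at the index tag and locate its single layer
    manifest, _ = get_manifest_and_config_with_retry(
        image_ref.with_tag(default_index_tag), default_index_tag, recurse=0
    )
    index_digest = Digest.from_string(manifest["layers"][0]["digest"])
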
def install_fn(args):
    """install from a binary package"""

@@ -917,7 +522,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
        local_path = os.path.join(tmpdir, os.path.basename(src_url))

        try:
            temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
            temp_stage = Stage(src_url, path=os.path.dirname(local_path))
            try:
                temp_stage.create()
                temp_stage.fetch()

@@ -1011,20 +616,6 @@ def manifest_copy(manifest_file_list):


def update_index(mirror: spack.mirror.Mirror, update_keys=False):
    # Special case OCI images for now.
    try:
        image_ref = spack.oci.oci.image_from_mirror(mirror)
    except ValueError:
        image_ref = None

    if image_ref:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            _update_index_oci(image_ref, tmpdir, pool)
        return

    # Otherwise, assume a normal mirror.
    url = mirror.push_url

    bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))

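For context, not from this changeset: `update_index` is the function behind the `spack buildcache update-index <mirror>` command, and the OCI branch above is taken whenever the configured mirror URL is an `oci://` reference (since `image_from_mirror` raises `ValueError` for anything else), so refreshing an OCI buildcache index needs no separate invocation.
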
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import re
import sys

@@ -20,6 +21,7 @@
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version

description = "checksum available versions of a package"

@@ -35,30 +37,30 @@ def setup_parser(subparser):
        help="don't clean up staging area when command completes",
    )
    subparser.add_argument(
        "--batch",
        "-b",
        "--batch",
        action="store_true",
        default=False,
        help="don't ask which versions to checksum",
    )
    subparser.add_argument(
        "--latest",
        "-l",
        "--latest",
        action="store_true",
        default=False,
        help="checksum the latest available version",
    )
    subparser.add_argument(
        "--preferred",
        "-p",
        "--preferred",
        action="store_true",
        default=False,
        help="checksum the known Spack preferred version",
    )
    modes_parser = subparser.add_mutually_exclusive_group()
    modes_parser.add_argument(
        "--add-to-package",
        "-a",
        "--add-to-package",
        action="store_true",
        default=False,
        help="add new versions to package",

@@ -66,26 +68,27 @@ def setup_parser(subparser):
    modes_parser.add_argument(
        "--verify", action="store_true", default=False, help="verify known package checksums"
    )
    subparser.add_argument("package", help="name or spec (e.g. `cmake` or `cmake@3.18`)")
    arguments.add_common_arguments(subparser, ["package", "jobs"])
    subparser.add_argument(
        "versions",
        nargs="*",
        help="checksum these specific versions (if omitted, Spack searches for remote versions)",
    )
    arguments.add_common_arguments(subparser, ["jobs"])
    subparser.epilog = (
        "examples:\n"
        "  `spack checksum zlib@1.2` autodetects versions 1.2.0 to 1.2.13 from the remote\n"
        "  `spack checksum zlib 1.2.13` checksums exact version 1.2.13 directly without search\n"
        "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
    )


def checksum(parser, args):
    spec = spack.spec.Spec(args.package)
    # Did the user pass 'package@version' string?
    if len(args.versions) == 0 and "@" in args.package:
        args.versions = [args.package.split("@")[1]]
        args.package = args.package.split("@")[0]

    # Make sure the user provided a package and not a URL
    if not valid_fully_qualified_module_name(args.package):
        tty.die("`spack checksum` accepts package names, not URLs.")

    # Get the package we're going to generate checksums for
    pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
    pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
    pkg = pkg_cls(spack.spec.Spec(args.package))

    # Build a list of versions to checksum
    versions = [Version(v) for v in args.versions]

    # Define placeholder for remote versions.

@@ -149,10 +152,7 @@ def checksum(parser, args):
        tty.die(f"Could not find any remote versions for {pkg.name}")
    elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
        filtered_url_dict = spack.stage.interactive_version_filter(
            url_dict,
            pkg.versions,
            url_changes=url_changed_for_version,
            initial_verion_filter=spec.versions,
            url_dict, pkg.versions, url_changes=url_changed_for_version
        )
        if not filtered_url_dict:
            exit(0)

@@ -796,9 +796,7 @@ def names(args: Namespace, out: IO) -> None:
    commands = copy.copy(spack.cmd.all_commands())

    if args.aliases:
        aliases = spack.config.get("config:aliases")
        if aliases:
            commands.extend(aliases.keys())
        commands.extend(spack.main.aliases.keys())

    colify(commands, output=out)

@@ -814,10 +812,8 @@ def bash(args: Namespace, out: IO) -> None:
    parser = spack.main.make_argument_parser()
    spack.main.add_all_commands(parser)

    aliases_config = spack.config.get("config:aliases")
    if aliases_config:
        aliases = ";".join(f"{key}:{val}" for key, val in aliases_config.items())
        out.write(f'SPACK_ALIASES="{aliases}"\n\n')
    aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
    out.write(f'SPACK_ALIASES="{aliases}"\n\n')

    writer = BashCompletionWriter(parser.prog, out, args.aliases)
    writer.write(parser)

@@ -543,7 +543,7 @@ def add_concretizer_args(subparser):
    )


def add_connection_args(subparser, add_help):
def add_s3_connection_args(subparser, add_help):
    subparser.add_argument(
        "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
    )

@@ -559,8 +559,6 @@ def add_connection_args(subparser, add_help):
    subparser.add_argument(
        "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
    )
    subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
    subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")


def use_buildcache(cli_arg_value):

@@ -31,19 +31,6 @@ def setup_parser(subparser):
        aliases=["add"],
        help="search the system for compilers to add to Spack configuration",
    )
    mixed_toolchain_group = find_parser.add_mutually_exclusive_group()
    mixed_toolchain_group.add_argument(
        "--mixed-toolchain",
        action="store_true",
        default=sys.platform == "darwin",
        help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
    )
    mixed_toolchain_group.add_argument(
        "--no-mixed-toolchain",
        action="store_false",
        dest="mixed_toolchain",
        help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
    )
    find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
    find_parser.add_argument(
        "--scope",

@@ -99,9 +86,7 @@ def compiler_find(args):

    # Below scope=None because we want new compilers that don't appear
    # in any other configuration.
    new_compilers = spack.compilers.find_new_compilers(
        paths, scope=None, mixed_toolchain=args.mixed_toolchain
    )
    new_compilers = spack.compilers.find_new_compilers(paths, scope=None)
    if new_compilers:
        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
        n = len(new_compilers)

@@ -407,9 +407,7 @@ def config_prefer_upstream(args):
    pkgs = {}
    for spec in pref_specs:
        # Collect all the upstream compilers and versions for this package.
        pkg = pkgs.get(spec.name, {"version": []})
        all = pkgs.get("all", {"compiler": []})
        pkgs["all"] = all
        pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
        pkgs[spec.name] = pkg

        # We have no existing variant if this is our first added version.

@@ -420,8 +418,8 @@ def config_prefer_upstream(args):
            pkg["version"].append(version)

        compiler = str(spec.compiler)
        if compiler not in all["compiler"]:
            all["compiler"].append(compiler)
        if compiler not in pkg["compiler"]:
            pkg["compiler"].append(compiler)

        # Get and list all the variants that differ from the default.
        variants = []

@@ -64,7 +64,6 @@ class {class_name}({base_class_name}):
    # maintainers("github_user1", "github_user2")

    # FIXME: Add the SPDX identifier of the project's license below.
    # See https://spdx.org/licenses/ for a list.
    license("UNKNOWN")

    {versions}

@@ -99,7 +99,10 @@ def dev_build(self, args):

    spec = specs[0]
    if not spack.repo.PATH.exists(spec.name):
        raise spack.repo.UnknownPackageError(spec.name)
        tty.die(
            "No package for '{0}' was found.".format(spec.name),
            "  Use `spack create` to create a new package",
        )

    if not spec.versions.concrete_range_as_version:
        tty.die(

@@ -43,7 +43,10 @@ def edit_package(name, repo_path, namespace):
        if not os.access(path, os.R_OK):
            tty.die("Insufficient permissions on '%s'!" % path)
    else:
        raise spack.repo.UnknownPackageError(spec.name)
        tty.die(
            "No package for '{0}' was found.".format(spec.name),
            "  Use `spack create` to create a new package",
        )

    editor(path)


@@ -5,7 +5,6 @@

import argparse
import os
import shlex
import shutil
import sys
import tempfile
@@ -145,13 +144,10 @@ def create_temp_env_directory():
    return tempfile.mkdtemp(prefix="spack-")


def _tty_info(msg):
    """tty.info like function that prints the equivalent printf statement for eval."""
    decorated = f'{colorize("@*b{==>}")} {msg}\n'
    print(f"printf {shlex.quote(decorated)};")


def env_activate(args):
    if not args.activate_env and not args.dir and not args.temp:
        tty.die("spack env activate requires an environment name, directory, or --temp")

    if not args.shell:
        spack.cmd.common.shell_init_instructions(
            "spack env activate", "    eval `spack env activate {sh_arg} [...]`"
@@ -164,25 +160,12 @@ def env_activate(args):

    env_name_or_dir = args.activate_env or args.dir

    # When executing `spack env activate` without further arguments, activate
    # the default environment. It's created when it doesn't exist yet.
    if not env_name_or_dir and not args.temp:
        short_name = "default"
        if not ev.exists(short_name):
            ev.create(short_name)
            action = "Created and activated"
        else:
            action = "Activated"
        env_path = ev.root(short_name)
        _tty_info(f"{action} default environment in {env_path}")

    # Temporary environment
    elif args.temp:
    if args.temp:
        env = create_temp_env_directory()
        env_path = os.path.abspath(env)
        short_name = os.path.basename(env_path)
        ev.create_in_dir(env).write(regenerate=False)
        _tty_info(f"Created and activated temporary environment in {env_path}")

    # Managed environment
    elif ev.exists(env_name_or_dir) and not args.dir:
@@ -397,33 +380,28 @@ def env_remove(args):
    and manifests embedded in repositories should be removed manually.
    """
    read_envs = []
    bad_envs = []
    for env_name in args.rm_env:
        try:
            env = ev.read(env_name)
            read_envs.append(env)
        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
            bad_envs.append(env_name)
        env = ev.read(env_name)
        read_envs.append(env)

    if not args.yes_to_all:
        environments = string.plural(len(args.rm_env), "environment", show_n=False)
        envs = string.comma_and(args.rm_env)
        answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
        answer = tty.get_yes_or_no(
            "Really remove %s %s?"
            % (
                string.plural(len(args.rm_env), "environment", show_n=False),
                string.comma_and(args.rm_env),
            ),
            default=False,
        )
        if not answer:
            tty.die("Will not remove any environments")

    for env in read_envs:
        name = env.name
        if env.active:
            tty.die(f"Environment {name} can't be removed while activated.")
        env.destroy()
        tty.msg(f"Successfully removed environment '{name}'")
            tty.die("Environment %s can't be removed while activated." % env.name)

    for bad_env_name in bad_envs:
        shutil.rmtree(
            spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
        )
        tty.msg(f"Successfully removed environment '{bad_env_name}'")
        env.destroy()
        tty.msg("Successfully removed environment '%s'" % env.name)


#
@@ -570,8 +548,8 @@ def env_update_setup_parser(subparser):
def env_update(args):
    manifest_file = ev.manifest_file(args.update_env)
    backup_file = manifest_file + ".bkp"

    needs_update = not ev.is_latest_format(manifest_file)

    if not needs_update:
        tty.msg('No update needed for the environment "{0}"'.format(args.update_env))
        return
@@ -689,31 +667,18 @@ def env_depfile(args):
    # Currently only make is supported.
    spack.cmd.require_active_env(cmd_name="env depfile")

    env = ev.active_environment()

    # What things do we build when running make? By default, we build the
    # root specs. If specific specs are provided as input, we build those.
    filter_specs = spack.cmd.parse_specs(args.specs) if args.specs else None
    template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
    model = depfile.MakefileModel.from_env(
        env,
        ev.active_environment(),
        filter_specs=filter_specs,
        pkg_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[0]),
        dep_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[1]),
        make_prefix=args.make_prefix,
        jobserver=args.jobserver,
    )

    # Warn in case we're generating a depfile for an empty environment. We don't automatically
    # concretize; the user should do that explicitly. Could be changed in the future if requested.
    if model.empty:
    if not env.user_specs:
        tty.warn("no specs in the environment")
    elif filter_specs is not None:
        tty.warn("no concrete matching specs found in environment")
    else:
        tty.warn("environment is not concretized. Run `spack concretize` first")

    makefile = template.render(model.to_dict())

    # Finally write to stdout/file.

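A usage note, not from this changeset: the rendered Makefile is typically generated with `spack -e <env> env depfile -o Makefile` and then driven by `make -jN`; the jobserver support toggled above is what lets those make jobs share their parallelism budget with the Spack processes they spawn.
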
@@ -111,7 +111,7 @@ def setup_parser(subparser):
            "and source use `--type binary --type source` (default)"
        ),
    )
    arguments.add_connection_args(add_parser, False)
    arguments.add_s3_connection_args(add_parser, False)
    # Remove
    remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
    remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")

@@ -141,7 +141,7 @@ def setup_parser(subparser):
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
    arguments.add_connection_args(set_url_parser, False)
    arguments.add_s3_connection_args(set_url_parser, False)

    # Set
    set_parser = sp.add_parser("set", help=mirror_set.__doc__)

@@ -170,7 +170,7 @@ def setup_parser(subparser):
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
    arguments.add_connection_args(set_parser, False)
    arguments.add_s3_connection_args(set_parser, False)

    # List
    list_parser = sp.add_parser("list", help=mirror_list.__doc__)

@@ -192,8 +192,6 @@ def mirror_add(args):
        or args.s3_profile
        or args.s3_endpoint_url
        or args.type
        or args.oci_username
        or args.oci_password
    ):
        connection = {"url": args.url}
        if args.s3_access_key_id and args.s3_access_key_secret:

@@ -204,8 +202,6 @@ def mirror_add(args):
            connection["profile"] = args.s3_profile
        if args.s3_endpoint_url:
            connection["endpoint_url"] = args.s3_endpoint_url
        if args.oci_username and args.oci_password:
            connection["access_pair"] = [args.oci_username, args.oci_password]
        if args.type:
            connection["binary"] = "binary" in args.type
            connection["source"] = "source" in args.type

@@ -239,8 +235,6 @@ def _configure_mirror(args):
        changes["profile"] = args.s3_profile
    if args.s3_endpoint_url:
        changes["endpoint_url"] = args.s3_endpoint_url
    if args.oci_username and args.oci_password:
        changes["access_pair"] = [args.oci_username, args.oci_password]

    # argparse cannot distinguish between --binary and --no-binary when same dest :(
    # notice that set-url does not have these args, so getattr

@@ -23,7 +23,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.21"
tutorial_branch = "releases/v0.20"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")


@@ -10,7 +10,7 @@
import itertools
import multiprocessing.pool
import os
from typing import Dict, List, Optional, Tuple
from typing import Dict, List

import archspec.cpu

@@ -21,7 +21,6 @@
import spack.compiler
import spack.config
import spack.error
import spack.operating_systems
import spack.paths
import spack.platforms
import spack.spec
@@ -224,16 +223,13 @@ def all_compiler_specs(scope=None, init_config=True):
    ]


def find_compilers(
    path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
) -> List["spack.compiler.Compiler"]:
def find_compilers(path_hints=None):
    """Return the list of compilers found in the paths given as arguments.

    Args:
        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
            environment variable will be used if the value is None
        mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
            a certain language
        path_hints (list or None): list of path hints where to look for.
            A sensible default based on the ``PATH`` environment variable
            will be used if the value is None
    """
    if path_hints is None:
        path_hints = get_path("PATH")
@@ -254,7 +250,7 @@ def find_compilers(
    finally:
        tp.close()

    def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
    def valid_version(item):
        value, error = item
        if error is None:
            return True
@@ -266,37 +262,25 @@ def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
            pass
        return False

    def remove_errors(
        item: Tuple[Optional[DetectVersionArgs], Optional[str]]
    ) -> DetectVersionArgs:
    def remove_errors(item):
        value, _ = item
        assert value is not None
        return value

    return make_compiler_list(
        [remove_errors(detected) for detected in detected_versions if valid_version(detected)],
        mixed_toolchain=mixed_toolchain,
    )
    return make_compiler_list(map(remove_errors, filter(valid_version, detected_versions)))


def find_new_compilers(
    path_hints: Optional[List[str]] = None,
    scope: Optional[str] = None,
    *,
    mixed_toolchain: bool = False,
):
def find_new_compilers(path_hints=None, scope=None):
    """Same as ``find_compilers`` but return only the compilers that are not
    already in compilers.yaml.

    Args:
        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
            environment variable will be used if the value is None
        scope: scope to look for a compiler. If None consider the merged configuration.
        mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
            a certain language
        path_hints (list or None): list of path hints where to look for.
            A sensible default based on the ``PATH`` environment variable
            will be used if the value is None
        scope (str): scope to look for a compiler. If None consider the
            merged configuration.
    """
    compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)

    compilers = find_compilers(path_hints)
    return select_new_compilers(compilers, scope)


@@ -654,9 +638,7 @@ def all_compiler_types():
    )


def arguments_to_detect_version_fn(
    operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
) -> List[DetectVersionArgs]:
def arguments_to_detect_version_fn(operating_system, paths):
    """Returns a list of DetectVersionArgs tuples to be used in a
    corresponding function to detect compiler versions.

@@ -664,7 +646,8 @@ def arguments_to_detect_version_fn(
    function by providing a method called with the same name.

    Args:
        operating_system: the operating system on which we are looking for compilers
        operating_system (spack.operating_systems.OperatingSystem): the operating system
            on which we are looking for compilers
        paths: paths to search for compilers

    Returns:
@@ -673,10 +656,10 @@ def arguments_to_detect_version_fn(
        compilers in this OS.
    """

    def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
        command_arguments: List[DetectVersionArgs] = []
    def _default(search_paths):
        command_arguments = []
        files_to_be_tested = fs.files_in(*search_paths)
        for compiler_name in supported_compilers_for_host_platform():
        for compiler_name in spack.compilers.supported_compilers_for_host_platform():
            compiler_cls = class_for_compiler_name(compiler_name)

            for language in ("cc", "cxx", "f77", "fc"):
@@ -701,9 +684,7 @@ def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
    return fn(paths)


def detect_version(
    detect_version_args: DetectVersionArgs,
) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
def detect_version(detect_version_args):
    """Computes the version of a compiler and adds it to the information
    passed as input.

@@ -712,7 +693,8 @@ def detect_version(
    needs to be checked by the code dispatching the calls.

    Args:
        detect_version_args: information on the compiler for which we should detect the version.
        detect_version_args (DetectVersionArgs): information on the
            compiler for which we should detect the version.

    Returns:
        A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
@@ -728,7 +710,7 @@ def _default(fn_args):
        path = fn_args.path

        # Get compiler names and the callback to detect their versions
        callback = getattr(compiler_cls, f"{language}_version")
        callback = getattr(compiler_cls, "{0}_version".format(language))

        try:
            version = callback(path)
@@ -754,15 +736,13 @@ def _default(fn_args):
    return fn(detect_version_args)


def make_compiler_list(
    detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
) -> List["spack.compiler.Compiler"]:
def make_compiler_list(detected_versions):
    """Process a list of detected versions and turn them into a list of
    compiler specs.

    Args:
        detected_versions: list of DetectVersionArgs containing a valid version
        mixed_toolchain: allow mixing compilers from different toolchains if language is missing
        detected_versions (list): list of DetectVersionArgs containing a
            valid version

    Returns:
        list: list of Compiler objects
@@ -771,7 +751,7 @@ def make_compiler_list(
    sorted_compilers = sorted(detected_versions, key=group_fn)

    # Gather items in a dictionary by the id, name variation and language
    compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
    compilers_d = {}
    for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
        compiler_id, name_variation, language = sort_key
        by_compiler_id = compilers_d.setdefault(compiler_id, {})
@@ -780,7 +760,7 @@ def make_compiler_list(

    def _default_make_compilers(cmp_id, paths):
        operating_system, compiler_name, version = cmp_id
        compiler_cls = class_for_compiler_name(compiler_name)
        compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
        spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
        paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
        # TODO: johnwparent - revisit the following line as per discussion at:
@@ -802,14 +782,13 @@ def _default_make_compilers(cmp_id, paths):
            getattr(variation, "suffix", None),
        )

    # Flatten to a list of compiler id, primary variation and compiler dictionary
    flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
    compilers = []
    for compiler_id, by_compiler_id in compilers_d.items():
        ordered = sorted(by_compiler_id, key=sort_fn)
        selected_variation = ordered[0]
        selected = by_compiler_id[selected_variation]

        # Fill any missing parts from subsequent entries (without mixing toolchains)
        # fill any missing parts from subsequent entries
        for lang in ["cxx", "f77", "fc"]:
            if lang not in selected:
                next_lang = next(
@@ -818,63 +797,14 @@ def _default_make_compilers(cmp_id, paths):
                if next_lang:
                    selected[lang] = next_lang

        flat_compilers.append((compiler_id, selected_variation, selected))
        operating_system, _, _ = compiler_id
        make_compilers = getattr(operating_system, "make_compilers", _default_make_compilers)

    # Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
    if mixed_toolchain:
        make_mixed_toolchain(flat_compilers)

    # Finally, create the compiler list
    compilers = []
    for compiler_id, _, compiler in flat_compilers:
        make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
        compilers.extend(make_compilers(compiler_id, compiler))
        compilers.extend(make_compilers(compiler_id, selected))

    return compilers


def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
    """Add missing compilers across toolchains when they are missing for a particular language.
    This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
    fortran compiler (no flang)."""

    # First collect the clangs that are missing a fortran compiler
    clangs_without_flang = [
        (id, variation, compiler)
        for id, variation, compiler in compilers
        if id.compiler_name in ("clang", "apple-clang")
        and "f77" not in compiler
        and "fc" not in compiler
    ]
    if not clangs_without_flang:
        return

    # Filter on GCCs with fortran compiler
    gccs_with_fortran = [
        (id, variation, compiler)
        for id, variation, compiler in compilers
        if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
    ]

    # Sort these GCCs by "best variation" (no prefix / suffix first)
    gccs_with_fortran.sort(
        key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
    )

    # Attach the optimal GCC fortran compiler to the clangs that don't have one
    for clang_id, _, clang_compiler in clangs_without_flang:
        gcc_compiler = next(
            (gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
        )

        if not gcc_compiler:
            continue

        # Update the fc / f77 entries
        clang_compiler["f77"] = gcc_compiler["f77"]
        clang_compiler["fc"] = gcc_compiler["fc"]

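To make the effect concrete, an illustrative sketch with made-up paths, not taken from this changeset:

    # On macOS, an apple-clang entry that lacked Fortran may be completed like this:
    # before: {"cc": "/usr/bin/clang", "cxx": "/usr/bin/clang++"}
    # after:  {"cc": "/usr/bin/clang", "cxx": "/usr/bin/clang++",
    #          "f77": "/usr/local/bin/gfortran", "fc": "/usr/local/bin/gfortran"}
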
def is_mixed_toolchain(compiler):
    """Returns True if the current compiler is a mixed toolchain,
    False otherwise.

@@ -5,6 +5,7 @@

import os
import re
import sys

import llnl.util.lang

@@ -113,6 +114,17 @@ def extract_version_from_output(cls, output):
                return ".".join(match.groups())
        return "unknown"

    @classmethod
    def fc_version(cls, fortran_compiler):
        if sys.platform == "darwin":
            return cls.default_version("clang")

        return cls.default_version(fortran_compiler)

    @classmethod
    def f77_version(cls, f77):
        return cls.fc_version(f77)

    @property
    def stdcxx_libs(self):
        return ("-lstdc++",)

@@ -5,6 +5,7 @@

import os
import re
import sys

import llnl.util.lang

@@ -38,10 +39,10 @@ class Clang(Compiler):
    cxx_names = ["clang++"]

    # Subclasses use possible names of Fortran 77 compiler
    f77_names = ["flang"]
    f77_names = ["flang", "gfortran", "xlf_r"]

    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ["flang"]
    fc_names = ["flang", "gfortran", "xlf90_r"]

    version_argument = "--version"

@@ -181,3 +182,16 @@ def extract_version_from_output(cls, output):
        if match:
            ver = match.group(match.lastindex)
        return ver

    @classmethod
    def fc_version(cls, fc):
        # We could map from gcc/gfortran version to clang version, but on macOS
        # we normally mix any version of gfortran with any version of clang.
        if sys.platform == "darwin":
            return cls.default_version("clang")
        else:
            return cls.default_version(fc)

    @classmethod
    def f77_version(cls, f77):
        return cls.fc_version(f77)

@@ -69,7 +69,6 @@
SECTION_SCHEMAS = {
    "compilers": spack.schema.compilers.schema,
    "concretizer": spack.schema.concretizer.schema,
    "definitions": spack.schema.definitions.schema,
    "mirrors": spack.schema.mirrors.schema,
    "repos": spack.schema.repos.schema,
    "packages": spack.schema.packages.schema,
@@ -995,7 +994,6 @@ def read_config_file(filename, schema=None):
            key = next(iter(data))
            schema = _ALL_SCHEMAS[key]
        validate(data, schema)

        return data

    except StopIteration:

@@ -1522,18 +1522,14 @@ def _query(
        # TODO: like installed and known that can be queried? Or are
        # TODO: these really special cases that only belong here?

        if query_spec is not any:
            if not isinstance(query_spec, spack.spec.Spec):
                query_spec = spack.spec.Spec(query_spec)

            # Just look up concrete specs with hashes; no fancy search.
            if query_spec.concrete:
                # TODO: handling of hashes restriction is not particularly elegant.
                hash_key = query_spec.dag_hash()
                if hash_key in self._data and (not hashes or hash_key in hashes):
                    return [self._data[hash_key].spec]
                else:
                    return []
        # Just look up concrete specs with hashes; no fancy search.
        if isinstance(query_spec, spack.spec.Spec) and query_spec.concrete:
            # TODO: handling of hashes restriction is not particularly elegant.
            hash_key = query_spec.dag_hash()
            if hash_key in self._data and (not hashes or hash_key in hashes):
                return [self._data[hash_key].spec]
            else:
                return []

        # Abstract specs require more work -- currently we test
        # against everything.
@@ -1541,9 +1537,6 @@ def _query(
        start_date = start_date or datetime.datetime.min
        end_date = end_date or datetime.datetime.max

        # save specs whose name doesn't match for last, to avoid a virtual check
        deferred = []

        for key, rec in self._data.items():
            if hashes is not None and rec.spec.dag_hash() not in hashes:
                continue
@@ -1568,26 +1561,8 @@ def _query(
            if not (start_date < inst_date < end_date):
                continue

            if query_spec is any:
            if query_spec is any or rec.spec.satisfies(query_spec):
                results.append(rec.spec)
                continue

            # check anon specs and exact name matches first
            if not query_spec.name or rec.spec.name == query_spec.name:
                if rec.spec.satisfies(query_spec):
                    results.append(rec.spec)

            # save potential virtual matches for later, but not if we already found a match
            elif not results:
                deferred.append(rec.spec)

        # Checking for virtuals is expensive, so we save it for last and only if needed.
        # If we get here, we didn't find anything in the DB that matched by name.
        # If we did find something, the query spec can't be virtual b/c we matched an actual
        # package installation, so skip the virtual check entirely. If we *didn't* find anything,
        # check all the deferred specs *if* the query is virtual.
        if not results and query_spec is not any and deferred and query_spec.virtual:
            results = [spec for spec in deferred if spec.satisfies(query_spec)]

        return results

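A word on the reasoning behind the deferral above, added here for clarity rather than taken from the changeset: `satisfies()` against a virtual spec has to consult the provider index, which is costly, while matching by concrete name is cheap. By collecting name mismatches in `deferred` and only walking them after the full pass found no direct match, the expensive virtual check runs at most once, and only for queries that can actually be virtual.
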
@@ -269,7 +269,7 @@ def find_windows_compiler_root_paths() -> List[str]:

        At the moment simply returns location of VS install paths from VSWhere
        But should be extended to include more information as relevant"""
        return list(winOs.WindowsOs().vs_install_paths)
        return list(winOs.WindowsOs.vs_install_paths)

    @staticmethod
    def find_windows_compiler_cmake_paths() -> List[str]:

@@ -15,12 +15,9 @@
from typing import Dict, List, Optional, Set, Tuple

import llnl.util.filesystem
import llnl.util.lang
import llnl.util.tty

import spack.util.elf as elf_utils
import spack.util.environment
import spack.util.environment as environment
import spack.util.ld_so_conf

from .common import (
@@ -42,29 +39,15 @@
DETECTION_TIMEOUT = 120


def common_windows_package_paths(pkg_cls=None) -> List[str]:
    """Get the paths for common package installation location on Windows
    that are outside the PATH
    Returns [] on unix
    """
    if sys.platform != "win32":
        return []
def common_windows_package_paths() -> List[str]:
    paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
    paths.extend(find_win32_additional_install_paths())
    paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
    paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
    paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
    if pkg_cls:
        paths.extend(compute_windows_user_path_for_package(pkg_cls))
        paths.extend(compute_windows_program_path_for_package(pkg_cls))
    return paths


def file_identifier(path):
    s = os.stat(path)
    return (s.st_dev, s.st_ino)


def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
    """Get the paths of all executables available from the current PATH.

@@ -79,44 +62,18 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
        path_hints: list of paths to be searched. If None the list will be
            constructed based on the PATH environment variable.
    """
    if sys.platform == "win32":
        path_hints.extend(common_windows_package_paths())
    search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
    return path_to_dict(search_paths)


def get_elf_compat(path):
    """For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
    it is host-compatible."""
    # On platforms supporting ELF, we try to be a bit smarter when it comes to shared
    # libraries, by dropping those that are not host compatible.
    with open(path, "rb") as f:
        elf = elf_utils.parse_elf(f, only_header=True)
        return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)


def accept_elf(path, host_compat):
    """Accept an ELF file if the header matches the given compat triplet,
    obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g. a
    static library, or some arbitrary file), fall back to is_readable_file."""
    # Fast path: assume libraries at least have .so in their basename.
    # Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
    if ".so" not in os.path.basename(path):
        return llnl.util.filesystem.is_readable_file(path)
    try:
        return host_compat == get_elf_compat(path)
    except (OSError, elf_utils.ElfParsingError):
        return llnl.util.filesystem.is_readable_file(path)

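For intuition, not from this changeset: on a typical x86_64 Linux host the triplet returned by `get_elf_compat` would be

    # (is_64_bit, is_little_endian, e_machine); e_machine 62 is EM_X86_64
    (True, True, 62)

so 32-bit or foreign-architecture shared libraries are rejected from a single header read, without loading them.
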
def libraries_in_ld_and_system_library_path(
    path_hints: Optional[List[str]] = None,
) -> Dict[str, str]:
    """Get the paths of all libraries available from ``path_hints`` or the
    following defaults:

    - Environment variables (Linux: ``LD_LIBRARY_PATH``, Darwin: ``DYLD_LIBRARY_PATH``,
      and ``DYLD_FALLBACK_LIBRARY_PATH``)
    - Dynamic linker default paths (glibc: ld.so.conf, musl: ld-musl-<arch>.path)
    - Default system library paths.
    """Get the paths of all libraries available from LD_LIBRARY_PATH,
    LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
    standard system library paths.

    For convenience, this is constructed as a dictionary where the keys are
    the library paths and the values are the names of the libraries
@@ -130,71 +87,31 @@ def libraries_in_ld_and_system_library_path(
        constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
        DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
        variables as well as the standard system library paths.
        path_hints (list): list of paths to be searched. If ``None``, the default
            system paths are used.
    """
    if path_hints:
        search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
    else:
        search_paths = []

        # Environment variables
        if sys.platform == "darwin":
            search_paths.extend(environment.get_path("DYLD_LIBRARY_PATH"))
            search_paths.extend(environment.get_path("DYLD_FALLBACK_LIBRARY_PATH"))
        elif sys.platform.startswith("linux"):
            search_paths.extend(environment.get_path("LD_LIBRARY_PATH"))

        # Dynamic linker paths
        search_paths.extend(spack.util.ld_so_conf.host_dynamic_linker_search_paths())

        # Drop redundant paths
        search_paths = list(filter(os.path.isdir, search_paths))

    # Make sure we don't doubly list /usr/lib and /lib etc
    search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))

    try:
        host_compat = get_elf_compat(sys.executable)
        accept = lambda path: accept_elf(path, host_compat)
    except (OSError, elf_utils.ElfParsingError):
        accept = llnl.util.filesystem.is_readable_file

    path_to_lib = {}
    # Reverse order of search directories so that a lib in the first
    # search path entry overrides later entries
    for search_path in reversed(search_paths):
        for lib in os.listdir(search_path):
            lib_path = os.path.join(search_path, lib)
            if accept(lib_path):
                path_to_lib[lib_path] = lib
    return path_to_lib


def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[str, str]:
    """Get the paths of all libraries available from the system PATH paths.

    For more details, see `libraries_in_ld_and_system_library_path` regarding
    return type and contents.

    Args:
        path_hints: list of paths to be searched. If None the list will be
            constructed based on the set of PATH environment
            variables as well as the standard system library paths.
    """
    search_hints = (
        path_hints if path_hints is not None else spack.util.environment.get_path("PATH")
    path_hints = (
        path_hints
        or spack.util.environment.get_path("LD_LIBRARY_PATH")
        + spack.util.environment.get_path("DYLD_LIBRARY_PATH")
        + spack.util.environment.get_path("DYLD_FALLBACK_LIBRARY_PATH")
        + spack.util.ld_so_conf.host_dynamic_linker_search_paths()
    )
    search_paths = llnl.util.filesystem.search_paths_for_libraries(*search_hints)
    search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
    return path_to_dict(search_paths)


def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
    path_hints.extend(spack.util.environment.get_path("PATH"))
    search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
    # on Windows, some libraries (.dlls) are found in the bin directory or sometimes
    # at the search root. Add both of those options to the search scheme
    search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*search_hints))
    if path_hints is None:
        # if no user provided path was given, add defaults to the search
        search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
        # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
        # location, so we handle that case specifically.
        search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
    search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*path_hints))
    search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
    search_paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
    search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
    search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
    # SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
    # location, so we handle that case specifically.
    search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
    return path_to_dict(search_paths)


@@ -208,8 +125,19 @@ def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
class Finder:
    """Inspects the file-system looking for packages. Guesses places where to look using PATH."""

    def default_path_hints(self) -> List[str]:
        return []
    def path_hints(
        self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
    ) -> List[str]:
        """Returns the list of paths to be searched.

        Args:
            pkg: package being detected
            initial_guess: initial list of paths from caller
        """
        result = initial_guess or []
        result.extend(compute_windows_user_path_for_package(pkg))
        result.extend(compute_windows_program_path_for_package(pkg))
        return result

    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        """Returns the list of patterns used to match candidate files.
@@ -317,8 +245,6 @@ def find(
        Args:
            pkg_name: package being detected
            initial_guess: initial list of paths to search from the caller
                if None, default paths are searched. If this
                is an empty list, nothing will be searched.
        """
        import spack.repo

@@ -326,18 +252,13 @@ def find(
        patterns = self.search_patterns(pkg=pkg_cls)
        if not patterns:
            return []
        if initial_guess is None:
            initial_guess = self.default_path_hints()
            initial_guess.extend(common_windows_package_paths(pkg_cls))
        candidates = self.candidate_files(patterns=patterns, paths=initial_guess)
        path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
        candidates = self.candidate_files(patterns=patterns, paths=path_hints)
        result = self.detect_specs(pkg=pkg_cls, paths=candidates)
        return result


class ExecutablesFinder(Finder):
    def default_path_hints(self) -> List[str]:
        return spack.util.environment.get_path("PATH")

    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        result = []
        if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
@@ -377,7 +298,7 @@ def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
        libraries_by_path = (
            libraries_in_ld_and_system_library_path(path_hints=paths)
            if sys.platform != "win32"
            else libraries_in_windows_paths(path_hints=paths)
            else libraries_in_windows_paths(paths)
        )
        patterns = [re.compile(x) for x in patterns]
        result = []
@@ -413,16 +334,21 @@ def by_path(
    # TODO: Packages should be able to define both .libraries and .executables in the future
    # TODO: determine_spec_details should get all relevant libraries and executables in one call
    executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()

    executables_path_guess = (
        spack.util.environment.get_path("PATH") if path_hints is None else path_hints
    )
    libraries_path_guess = [] if path_hints is None else path_hints
    detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}

    result = collections.defaultdict(list)
    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
        for pkg in packages_to_search:
            executable_future = executor.submit(
                executables_finder.find, pkg_name=pkg, initial_guess=path_hints
                executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
            )
            library_future = executor.submit(
                libraries_finder.find, pkg_name=pkg, initial_guess=path_hints
                libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
            )
            detected_specs_by_package[pkg] = executable_future, library_future

@@ -433,13 +359,9 @@ def by_path(
            if detected:
                _, unqualified_name = spack.repo.partition_package_name(pkg_name)
                result[unqualified_name].extend(detected)
        except concurrent.futures.TimeoutError:
        except Exception:
            llnl.util.tty.debug(
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
            )
        except Exception as e:
            llnl.util.tty.debug(
                f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occurred {e}"
            )

    return result

@@ -137,7 +137,6 @@ class DirectiveMeta(type):
    _directive_dict_names: Set[str] = set()
    _directives_to_be_executed: List[str] = []
    _when_constraints_from_context: List[str] = []
    _default_args: List[dict] = []

    def __new__(cls, name, bases, attr_dict):
        # Initialize the attribute containing the list of directives
@@ -200,16 +199,6 @@ def pop_from_context():
        """Pop the last constraint from the context"""
        return DirectiveMeta._when_constraints_from_context.pop()

    @staticmethod
    def push_default_args(default_args):
        """Push default arguments"""
        DirectiveMeta._default_args.append(default_args)

    @staticmethod
    def pop_default_args():
        """Pop default arguments"""
        return DirectiveMeta._default_args.pop()

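A hedged sketch of what this stack enables in package recipes, not part of this changeset; it assumes `default_args` is the context manager that calls the push/pop helpers above and that it is exported by `spack.package`:

    from spack.package import *

    class Mypackage(Package):
        # every directive in this block gets type="build" merged into its kwargs
        with default_args(type="build"):
            depends_on("cmake")
            depends_on("ninja")
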
@staticmethod
|
||||
def directive(dicts=None):
|
||||
"""Decorator for Spack directives.
|
||||
@@ -270,13 +259,7 @@ def _decorator(decorated_function):
|
||||
directive_names.append(decorated_function.__name__)
|
||||
|
||||
@functools.wraps(decorated_function)
|
||||
def _wrapper(*args, **_kwargs):
|
||||
# First merge default args with kwargs
|
||||
kwargs = dict()
|
||||
for default_args in DirectiveMeta._default_args:
|
||||
kwargs.update(default_args)
|
||||
kwargs.update(_kwargs)
|
||||
|
||||
def _wrapper(*args, **kwargs):
|
||||
# Inject when arguments from the context
|
||||
if DirectiveMeta._when_constraints_from_context:
|
||||
# Check that directives not yet supporting the when= argument
|
||||
@@ -590,21 +573,17 @@ def _execute_extends(pkg):
|
||||
return _execute_extends
|
||||
|
||||
|
||||
@directive(dicts=("provided", "provided_together"))
|
||||
def provides(*specs, when: Optional[str] = None):
|
||||
"""Allows packages to provide a virtual dependency.
|
||||
|
||||
If a package provides "mpi", other packages can declare that they depend on "mpi",
|
||||
and spack can use the providing package to satisfy the dependency.
|
||||
|
||||
Args:
|
||||
*specs: virtual specs provided by this package
|
||||
when: condition when this provides clause needs to be considered
|
||||
@directive("provided")
|
||||
def provides(*specs, **kwargs):
|
||||
"""Allows packages to provide a virtual dependency. If a package provides
|
||||
'mpi', other packages can declare that they depend on "mpi", and spack
|
||||
can use the providing package to satisfy the dependency.
|
||||
"""
|
||||
|
||||
def _execute_provides(pkg):
|
||||
import spack.parser # Avoid circular dependency
|
||||
|
||||
when = kwargs.get("when")
|
||||
when_spec = make_when_spec(when)
|
||||
if not when_spec:
|
||||
return
|
||||
@@ -612,18 +591,15 @@ def _execute_provides(pkg):
|
||||
# ``when`` specs for ``provides()`` need a name, as they are used
|
||||
# to build the ProviderIndex.
|
||||
when_spec.name = pkg.name
|
||||
spec_objs = [spack.spec.Spec(x) for x in specs]
|
||||
spec_names = [x.name for x in spec_objs]
|
||||
if len(spec_names) > 1:
|
||||
pkg.provided_together.setdefault(when_spec, []).append(set(spec_names))
|
||||
|
||||
for provided_spec in spec_objs:
|
||||
if pkg.name == provided_spec.name:
|
||||
raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)
|
||||
for string in specs:
|
||||
for provided_spec in spack.parser.parse(string):
|
||||
if pkg.name == provided_spec.name:
|
||||
raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)
|
||||
|
||||
if provided_spec not in pkg.provided:
|
||||
pkg.provided[provided_spec] = set()
|
||||
pkg.provided[provided_spec].add(when_spec)
|
||||
if provided_spec not in pkg.provided:
|
||||
pkg.provided[provided_spec] = set()
|
||||
pkg.provided[provided_spec].add(when_spec)
|
||||
|
||||
return _execute_provides
|
||||
|
||||
|
||||
@@ -339,7 +339,6 @@
|
||||
from .environment import (
|
||||
TOP_LEVEL_KEY,
|
||||
Environment,
|
||||
SpackEnvironmentConfigError,
|
||||
SpackEnvironmentError,
|
||||
SpackEnvironmentViewError,
|
||||
activate,
|
||||
@@ -373,7 +372,6 @@
|
||||
__all__ = [
|
||||
"TOP_LEVEL_KEY",
|
||||
"Environment",
|
||||
"SpackEnvironmentConfigError",
|
||||
"SpackEnvironmentError",
|
||||
"SpackEnvironmentViewError",
|
||||
"activate",
|
||||
|
@@ -232,10 +232,6 @@ def to_dict(self):
            "pkg_ids": " ".join(self.all_pkg_identifiers),
        }

    @property
    def empty(self):
        return len(self.roots) == 0

    @staticmethod
    def from_env(
        env: ev.Environment,
@@ -258,10 +254,15 @@ def from_env(
            jobserver: when enabled, make will invoke Spack with jobserver support. For
                dry-run this should be disabled.
        """
        roots = env.all_matching_specs(*filter_specs) if filter_specs else env.concrete_roots()
        # If no specs are provided as a filter, build all the specs in the environment.
        if filter_specs:
            entrypoints = [env.matching_spec(s) for s in filter_specs]
        else:
            entrypoints = [s for _, s in env.concretized_specs()]

        visitor = DepfileSpecVisitor(pkg_buildcache, dep_buildcache)
        traverse.traverse_breadth_first_with_visitor(
            roots, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash())
            entrypoints, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash())
        )

        return MakefileModel(env, roots, visitor.adjacency_list, make_prefix, jobserver)
        return MakefileModel(env, entrypoints, visitor.adjacency_list, make_prefix, jobserver)

@@ -330,21 +330,16 @@ def create_in_dir(
    if with_view is None and keep_relative:
        return Environment(manifest_dir)

    try:
        manifest = EnvironmentManifestFile(manifest_dir)
    manifest = EnvironmentManifestFile(manifest_dir)

        if with_view is not None:
            manifest.set_default_view(with_view)
    if with_view is not None:
        manifest.set_default_view(with_view)

        if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
            init_file = pathlib.Path(init_file)
            manifest.absolutify_dev_paths(init_file.parent)
    if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
        init_file = pathlib.Path(init_file)
        manifest.absolutify_dev_paths(init_file.parent)

        manifest.flush()

    except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
        shutil.rmtree(manifest_dir)
        raise e
    manifest.flush()

    return Environment(manifest_dir)

@@ -396,13 +391,7 @@ def all_environments():

def _read_yaml(str_or_file):
    """Read YAML from a file for round-trip parsing."""
    try:
        data = syaml.load_config(str_or_file)
    except syaml.SpackYAMLError as e:
        raise SpackEnvironmentConfigError(
            f"Invalid environment configuration detected: {e.message}"
        )

    data = syaml.load_config(str_or_file)
    filename = getattr(str_or_file, "name", None)
    default_data = spack.config.validate(data, spack.schema.env.schema, filename)
    return data, default_data
@@ -787,18 +776,10 @@ def _re_read(self):
        """Reinitialize the environment object."""
        self.clear(re_read=True)
        self.manifest = EnvironmentManifestFile(self.path)
        self._read(re_read=True)
        self._read()

    def _read(self, re_read=False):
        # If the manifest has included files, then some of the information
        # (e.g., definitions) MAY be in those files. So we need to ensure
        # the config is populated with any associated spec lists in order
        # to fully construct the manifest state.
        includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
        if includes and not re_read:
            prepare_config_scope(self)

        self._construct_state_from_manifest(re_read)
    def _read(self):
        self._construct_state_from_manifest()

        if os.path.exists(self.lock_path):
            with open(self.lock_path) as f:
@@ -812,30 +793,21 @@ def write_transaction(self):
        """Get a write lock context manager for use in a `with` block."""
        return lk.WriteTransaction(self.txlock, acquire=self._re_read)

    def _process_definition(self, item):
        """Process a single spec definition item."""
        entry = copy.deepcopy(item)
        when = _eval_conditional(entry.pop("when", "True"))
        assert len(entry) == 1
        if when:
            name, spec_list = next(iter(entry.items()))
            user_specs = SpecList(name, spec_list, self.spec_lists.copy())
            if name in self.spec_lists:
                self.spec_lists[name].extend(user_specs)
            else:
                self.spec_lists[name] = user_specs

    def _construct_state_from_manifest(self, re_read=False):
    def _construct_state_from_manifest(self):
        """Read manifest file and set up user specs."""
        self.spec_lists = collections.OrderedDict()

        if not re_read:
            for item in spack.config.get("definitions", []):
                self._process_definition(item)

        env_configuration = self.manifest[TOP_LEVEL_KEY]
        for item in env_configuration.get("definitions", []):
            self._process_definition(item)
            entry = copy.deepcopy(item)
            when = _eval_conditional(entry.pop("when", "True"))
            assert len(entry) == 1
            if when:
                name, spec_list = next(iter(entry.items()))
                user_specs = SpecList(name, spec_list, self.spec_lists.copy())
                if name in self.spec_lists:
                    self.spec_lists[name].extend(user_specs)
                else:
                    self.spec_lists[name] = user_specs

        spec_list = env_configuration.get(user_speclist_name, [])
        user_specs = SpecList(
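A minimal, self-contained sketch of the conditional-definition handling above; the definitions list and the trivial stand-in for _eval_conditional are invented for illustration:

    definitions = [
        {"compilers": ["gcc@12", "clang@16"]},
        {"mpis": ["mpich"], "when": 'platform == "linux"'},
    ]

    def eval_conditional(expr, platform="linux"):
        # Stand-in for Spack's _eval_conditional, which evaluates the
        # "when:" string in a restricted namespace.
        return eval(expr, {}, {"platform": platform})

    spec_lists = {}
    for item in definitions:
        entry = dict(item)
        when = entry.pop("when", "True")
        assert len(entry) == 1
        if eval_conditional(when):
            name, specs = next(iter(entry.items()))
            spec_lists.setdefault(name, []).extend(specs)
    # spec_lists == {"compilers": ["gcc@12", "clang@16"], "mpis": ["mpich"]}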
@@ -880,9 +852,7 @@ def clear(self, re_read=False):
            yaml, and need to be maintained when re-reading an existing
            environment.
        """
        self.spec_lists = collections.OrderedDict()
        self.spec_lists[user_speclist_name] = SpecList()

        self.spec_lists = {user_speclist_name: SpecList()}  # specs from yaml
        self.dev_specs = {}  # dev-build specs from yaml
        self.concretized_user_specs = []  # user specs from last concretize
        self.concretized_order = []  # roots of last concretize, in order
@@ -1031,8 +1001,7 @@ def included_config_scopes(self):

            elif include_url.scheme:
                raise ValueError(
                    f"Unsupported URL scheme ({include_url.scheme}) for "
                    f"environment include: {config_path}"
                    "Unsupported URL scheme for environment include: {}".format(config_path)
                )

            # treat relative paths as relative to the environment
@@ -1094,10 +1063,8 @@ def update_stale_references(self, from_list=None):
            from_list = next(iter(self.spec_lists.keys()))
        index = list(self.spec_lists.keys()).index(from_list)

        # spec_lists is an OrderedDict to ensure lists read from the manifest
        # are maintained in order, hence, all list entries after the modified
        # list may refer to the modified list requiring stale references to be
        # updated.
        # spec_lists is an OrderedDict, all list entries after the modified
        # list may refer to the modified list. Update stale references
        for i, (name, speclist) in enumerate(
            list(self.spec_lists.items())[index + 1 :], index + 1
        ):
@@ -1195,7 +1162,7 @@ def change_existing_spec(
    def remove(self, query_spec, list_name=user_speclist_name, force=False):
        """Remove specs from an environment that match a query_spec"""
        err_msg_header = (
            f"Cannot remove '{query_spec}' from '{list_name}' definition "
            f"cannot remove {query_spec} from '{list_name}' definition "
            f"in {self.manifest.manifest_file}"
        )
        query_spec = Spec(query_spec)
@@ -1226,10 +1193,11 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
                    list_to_change.remove(spec)
                    self.update_stale_references(list_name)
                    new_specs = set(self.user_specs)
                except spack.spec_list.SpecListError as e:
                except spack.spec_list.SpecListError:
                    # define new specs list
                    new_specs = set(self.user_specs)
                    msg = str(e)
                    msg = f"Spec '{spec}' is part of a spec matrix and "
                    msg += f"cannot be removed from list '{list_to_change}'."
                    if force:
                        msg += " It will be removed from the concrete specs."
                        # Mock new specs, so we can remove this spec from concrete spec lists
@@ -1516,7 +1484,7 @@ def _concretize_separately(self, tests=False):
        for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
            if uspec not in old_concretized_user_specs:
                root_specs.append(uspec)
                args.append((i, [str(x) for x in uspec_constraints], tests))
                args.append((i, uspec_constraints, tests))
                i += 1

        # Ensure we don't try to bootstrap clingo in parallel
@@ -1552,19 +1520,12 @@ def _concretize_separately(self, tests=False):
            batch = []
            for j, (i, concrete, duration) in enumerate(
                spack.util.parallel.imap_unordered(
                    _concretize_task,
                    args,
                    processes=num_procs,
                    debug=tty.is_debug(),
                    maxtaskperchild=1,
                    _concretize_task, args, processes=num_procs, debug=tty.is_debug()
                )
            ):
                batch.append((i, concrete))
                percentage = (j + 1) / len(args) * 100
                tty.verbose(
                    f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
                    f"{root_specs[i].colored_str}"
                )
                tty.verbose(f"{duration:6.1f}s [{percentage:3.0f}%] {root_specs[i]}")
                sys.stdout.flush()

            # Add specs in original order
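The progress bookkeeping above is the standard pattern for unordered parallel maps. A self-contained sketch with the standard library (spack.util.parallel wraps the same idea; note the stdlib spells the keyword maxtasksperchild):

    import multiprocessing
    import time

    def concretize_task(args):
        index, constraint = args
        start = time.time()
        concrete = f"concrete({constraint})"  # stand-in for real concretization
        return index, concrete, time.time() - start

    if __name__ == "__main__":
        args = [(i, f"pkg{i}") for i in range(8)]
        with multiprocessing.Pool(processes=4, maxtasksperchild=1) as pool:
            for j, (i, concrete, duration) in enumerate(
                pool.imap_unordered(concretize_task, args)
            ):
                percentage = (j + 1) / len(args) * 100
                print(f"{duration:6.1f}s [{percentage:3.0f}%] {concrete}")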
@@ -2094,7 +2055,7 @@ def matching_spec(self, spec):

    def removed_specs(self):
        """Tuples of (user spec, concrete spec) for all specs that will be
        removed on next concretize."""
        removed on next concretize."""
        needed = set()
        for s, c in self.concretized_specs():
            if s in self.user_specs:
@@ -2439,7 +2400,6 @@ def _concretize_from_constraints(spec_constraints, tests=False):

def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
    index, spec_constraints, tests = packed_arguments
    spec_constraints = [Spec(x) for x in spec_constraints]
    with tty.SuppressOutput(msg_enabled=False):
        start = time.time()
        spec = _concretize_from_constraints(spec_constraints, tests)
@@ -2753,7 +2713,7 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
        self.changed = True

    def add_definition(self, user_spec: str, list_name: str) -> None:
        """Appends a user spec to the first active definition matching the name passed as argument.
        """Appends a user spec to the first active definition matching the name passed as argument.

        Args:
            user_spec: user spec to be appended
@@ -2966,7 +2926,3 @@ class SpackEnvironmentError(spack.error.SpackError):

class SpackEnvironmentViewError(SpackEnvironmentError):
    """Class for errors regarding view generation."""


class SpackEnvironmentConfigError(SpackEnvironmentError):
    """Class for Spack environment-specific configuration errors."""

@@ -5,7 +5,6 @@
"""Service functions and classes to implement the hooks
for Spack's command extensions.
"""
import difflib
import importlib
import os
import re
@@ -177,19 +176,10 @@ class CommandNotFoundError(spack.error.SpackError):
    """

    def __init__(self, cmd_name):
        msg = (
        super().__init__(
            "{0} is not a recognized Spack command or extension command;"
            " check with `spack commands`.".format(cmd_name)
        )
        long_msg = None

        similar = difflib.get_close_matches(cmd_name, spack.cmd.all_commands())

        if 1 <= len(similar) <= 5:
            long_msg = "\nDid you mean one of the following commands?\n  "
            long_msg += "\n  ".join(similar)

        super().__init__(msg, long_msg)


class ExtensionNamingError(spack.error.SpackError):

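The "did you mean" suggestions come straight from difflib; a standalone sketch with a made-up command list:

    import difflib

    all_commands = ["install", "uninstall", "info", "find", "external"]
    similar = difflib.get_close_matches("instal", all_commands)
    if 1 <= len(similar) <= 5:
        print("Did you mean one of the following commands?")
        print("\n".join(f"  {s}" for s in similar))
    # prints: install, uninstall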
@@ -28,7 +28,6 @@
import os.path
import re
import shutil
import urllib.error
import urllib.parse
from typing import List, Optional

@@ -42,7 +41,6 @@

import spack.config
import spack.error
import spack.oci.opener
import spack.url
import spack.util.crypto as crypto
import spack.util.git
@@ -539,34 +537,6 @@ def fetch(self):
            tty.msg("Using cached archive: {0}".format(path))


class OCIRegistryFetchStrategy(URLFetchStrategy):
    def __init__(self, url=None, checksum=None, **kwargs):
        super().__init__(url, checksum, **kwargs)

        self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)

    @_needs_stage
    def fetch(self):
        file = self.stage.save_filename
        tty.msg(f"Fetching {self.url}")

        try:
            response = self._urlopen(self.url)
        except urllib.error.URLError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
            if os.path.lexists(file):
                os.remove(file)
            raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e

        if os.path.lexists(file):
            os.remove(file)

        with open(file, "wb") as f:
            shutil.copyfileobj(response, f)


class VCSFetchStrategy(FetchStrategy):
    """Superclass for version control system fetch strategies.

@@ -773,7 +743,8 @@ def git(self):
        # Disable advice for a quieter fetch
        # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
        if self.git_version >= spack.version.Version("1.7.2"):
            self._git.add_default_arg("-c", "advice.detachedHead=false")
            self._git.add_default_arg("-c")
            self._git.add_default_arg("advice.detachedHead=false")

        # If the user asked for insecure fetching, make that work
        # with git as well.

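Whichever way the default arguments are registered, the resulting invocation is the same: git is called with a one-off configuration override. A sketch with plain subprocess (the repository path is hypothetical):

    import subprocess

    repo_dir = "/tmp/example-repo"  # hypothetical existing clone
    default_args = ["-c", "advice.detachedHead=false"]
    subprocess.run(
        ["git", *default_args, "-C", repo_dir, "checkout", "v1.0"], check=True
    )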
@@ -528,15 +528,10 @@ def node_entry(self, node):

    def edge_entry(self, edge):
        colormap = {"build": "dodgerblue", "link": "crimson", "run": "goldenrod"}
        label = ""
        if edge.virtuals:
            label = f" xlabel=\"virtuals={','.join(edge.virtuals)}\""
        return (
            edge.parent.dag_hash(),
            edge.spec.dag_hash(),
            f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\""
            + label
            + "]",
            f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\"]",
        )


@@ -3,22 +3,17 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from typing import Optional, Set

from llnl.util import tty

import spack.config
import spack.modules
import spack.spec


def _for_each_enabled(
    spec: spack.spec.Spec, method_name: str, explicit: Optional[bool] = None
) -> None:
def _for_each_enabled(spec, method_name, explicit=None):
    """Calls a method for each enabled module"""
    set_names: Set[str] = set(spack.config.get("modules", {}).keys())
    set_names = set(spack.config.get("modules", {}).keys())
    for name in set_names:
        enabled = spack.config.get(f"modules:{name}:enable")
        enabled = spack.config.get("modules:%s:enable" % name)
        if not enabled:
            tty.debug("NO MODULE WRITTEN: list of enabled module files is empty")
            continue
@@ -33,7 +28,7 @@ def _for_each_enabled(
            tty.warn(msg.format(method_name, str(e)))


def post_install(spec, explicit: bool):
def post_install(spec, explicit):
    import spack.environment as ev  # break import cycle

    if ev.active_environment():

@@ -16,13 +16,11 @@
import os.path
import pstats
import re
import shlex
import signal
import subprocess as sp
import sys
import traceback
import warnings
from typing import List, Tuple

import archspec.cpu

@@ -51,6 +49,9 @@
#: names of profile statistics
stat_names = pstats.Stats.sort_arg_dict_default

#: top-level aliases for Spack commands
aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}

#: help levels in order of detail (i.e., number of commands shown)
levels = ["short", "long"]

@@ -358,10 +359,7 @@ def add_command(self, cmd_name):
        module = spack.cmd.get_module(cmd_name)

        # build a list of aliases
        alias_list = []
        aliases = spack.config.get("config:aliases")
        if aliases:
            alias_list = [k for k, v in aliases.items() if shlex.split(v)[0] == cmd_name]
            alias_list = [k for k, v in aliases.items() if v == cmd_name]

        subparser = self.subparsers.add_parser(
            cmd_name,
@@ -672,6 +670,7 @@ def __init__(self, command_name, subprocess=False):
            Windows, where it is always False.
        """
        self.parser = make_argument_parser()
        self.command = self.parser.add_command(command_name)
        self.command_name = command_name
        # TODO: figure out how to support this on windows
        self.subprocess = subprocess if sys.platform != "win32" else False
@@ -703,14 +702,13 @@ def __call__(self, *argv, **kwargs):

        if self.subprocess:
            p = sp.Popen(
                [spack.paths.spack_script] + prepend + [self.command_name] + list(argv),
                [spack.paths.spack_script, self.command_name] + prepend + list(argv),
                stdout=sp.PIPE,
                stderr=sp.STDOUT,
            )
            out, self.returncode = p.communicate()
            out = out.decode()
        else:
            command = self.parser.add_command(self.command_name)
            args, unknown = self.parser.parse_known_args(
                prepend + [self.command_name] + list(argv)
            )
@@ -718,7 +716,7 @@ def __call__(self, *argv, **kwargs):
            out = io.StringIO()
            try:
                with log_output(out, echo=True):
                    self.returncode = _invoke_command(command, self.parser, args, unknown)
                    self.returncode = _invoke_command(self.command, self.parser, args, unknown)

            except SystemExit as e:
                self.returncode = e.code
@@ -872,46 +870,6 @@ def restore_macos_dyld_vars():
        os.environ[dyld_var] = os.environ[stored_var_name]


def resolve_alias(cmd_name: str, cmd: List[str]) -> Tuple[str, List[str]]:
    """Resolves aliases in the given command.

    Args:
        cmd_name: command name.
        cmd: command line arguments.

    Returns:
        new command name and arguments.
    """
    all_commands = spack.cmd.all_commands()
    aliases = spack.config.get("config:aliases")

    if aliases:
        for key, value in aliases.items():
            if " " in key:
                tty.warn(
                    f"Alias '{key}' (mapping to '{value}') contains a space"
                    ", which is not supported."
                )
            if key in all_commands:
                tty.warn(
                    f"Alias '{key}' (mapping to '{value}') attempts to override"
                    " built-in command."
                )

    if cmd_name not in all_commands:
        alias = None

        if aliases:
            alias = aliases.get(cmd_name)

        if alias is not None:
            alias_parts = shlex.split(alias)
            cmd_name = alias_parts[0]
            cmd = alias_parts + cmd[1:]

    return cmd_name, cmd

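A worked example of the resolution above, with a hypothetical alias table; note how shlex.split lets an alias expand to a command plus flags:

    import shlex

    aliases = {"rm": "remove", "inst": "install --verbose"}
    cmd = ["inst", "zlib"]
    cmd_name = cmd[0]
    alias = aliases.get(cmd_name)
    if alias is not None:
        alias_parts = shlex.split(alias)
        cmd_name = alias_parts[0]
        cmd = alias_parts + cmd[1:]
    # cmd_name == "install", cmd == ["install", "--verbose", "zlib"]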

def _main(argv=None):
    """Logic for the main entry point for the Spack command.

@@ -1004,7 +962,7 @@ def _main(argv=None):

    # Try to load the particular command the caller asked for.
    cmd_name = args.command[0]
    cmd_name, args.command = resolve_alias(cmd_name, args.command)
    cmd_name = aliases.get(cmd_name, cmd_name)

    # set up a bootstrap context, if asked.
    # bootstrap context needs to include parsing the command, b/c things
@@ -1016,10 +974,10 @@ def _main(argv=None):
    bootstrap_context = bootstrap.ensure_bootstrap_configuration()

    with bootstrap_context:
        return finish_parse_and_run(parser, cmd_name, args.command, env_format_error)
        return finish_parse_and_run(parser, cmd_name, env_format_error)


def finish_parse_and_run(parser, cmd_name, cmd, env_format_error):
def finish_parse_and_run(parser, cmd_name, env_format_error):
    """Finish parsing after we know the command to run."""
    # add the found command to the parser and re-run then re-parse
    command = parser.add_command(cmd_name)

@@ -18,7 +18,7 @@
import sys
import traceback
import urllib.parse
from typing import List, Optional, Union
from typing import Optional, Union

import llnl.url
import llnl.util.tty as tty
@@ -27,18 +27,18 @@
import spack.caches
import spack.config
import spack.error
import spack.fetch_strategy
import spack.fetch_strategy as fs
import spack.mirror
import spack.oci.image
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.version
from spack.util.spack_yaml import syaml_dict
from spack.version import VersionList

#: What schemes do we support
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs", "oci")
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs")


def _url_or_path_to_url(url_or_path: str) -> str:
@@ -230,12 +230,12 @@ def _get_value(self, attribute: str, direction: str):
        value = self._data.get(direction, {})

        # Return top-level entry if only a URL was set.
        if isinstance(value, str) or attribute not in value:
            return self._data.get(attribute)
        if isinstance(value, str):
            return self._data.get(attribute, None)

        return value[attribute]
        return self._data.get(direction, {}).get(attribute, None)

    def get_url(self, direction: str) -> str:
    def get_url(self, direction: str):
        if direction not in ("fetch", "push"):
            raise ValueError(f"direction must be either 'fetch' or 'push', not {direction}")

@@ -255,21 +255,18 @@ def get_url(self, direction: str) -> str:
        elif "url" in info:
            url = info["url"]

        if not url:
            raise ValueError(f"Mirror {self.name} has no URL configured")
        return _url_or_path_to_url(url) if url else None

        return _url_or_path_to_url(url)

    def get_access_token(self, direction: str) -> Optional[str]:
    def get_access_token(self, direction: str):
        return self._get_value("access_token", direction)

    def get_access_pair(self, direction: str) -> Optional[List]:
    def get_access_pair(self, direction: str):
        return self._get_value("access_pair", direction)

    def get_profile(self, direction: str) -> Optional[str]:
    def get_profile(self, direction: str):
        return self._get_value("profile", direction)

    def get_endpoint_url(self, direction: str) -> Optional[str]:
    def get_endpoint_url(self, direction: str):
        return self._get_value("endpoint_url", direction)


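All the get_* accessors above funnel through _get_value, which prefers a per-direction entry and falls back to the top level. A sketch of that precedence on hand-written mirror data (values hypothetical):

    data = {
        "url": "s3://spack-public",
        "fetch": {"url": "https://mirror.example.com", "access_token": "FETCH-TOKEN"},
    }

    def get_value(attribute, direction):
        value = data.get(direction, {})
        # A per-direction entry wins; otherwise fall back to the top level.
        if isinstance(value, str) or attribute not in value:
            return data.get(attribute)
        return value[attribute]

    assert get_value("url", "fetch") == "https://mirror.example.com"
    assert get_value("url", "push") == "s3://spack-public"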
@@ -333,7 +330,7 @@ def from_json(stream, name=None):
            raise sjson.SpackJSONError("error parsing JSON mirror collection:", str(e)) from e

    def to_dict(self, recursive=False):
        return syaml.syaml_dict(
        return syaml_dict(
            sorted(
                ((k, (v.to_dict() if recursive else v)) for (k, v) in self._mirrors.items()),
                key=operator.itemgetter(0),
@@ -375,7 +372,7 @@ def __len__(self):


def _determine_extension(fetcher):
    if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
    if isinstance(fetcher, fs.URLFetchStrategy):
        if fetcher.expand_archive:
            # If we fetch with a URLFetchStrategy, use URL's archive type
            ext = llnl.url.determine_url_file_extension(fetcher.url)
@@ -440,19 +437,6 @@ def __iter__(self):
        yield self.cosmetic_path


class OCIImageLayout:
    """Follow the OCI Image Layout Specification to archive blobs

    Paths are of the form `blobs/<algorithm>/<digest>`
    """

    def __init__(self, digest: spack.oci.image.Digest) -> None:
        self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest)

    def __iter__(self):
        yield self.storage_path


def mirror_archive_paths(fetcher, per_package_ref, spec=None):
    """Returns a ``MirrorReference`` object which keeps track of the relative
    storage path of the resource associated with the specified ``fetcher``."""
@@ -498,7 +482,7 @@ def get_all_versions(specs):

        for version in pkg_cls.versions:
            version_spec = spack.spec.Spec(pkg_cls.name)
            version_spec.versions = spack.version.VersionList([version])
            version_spec.versions = VersionList([version])
            version_specs.append(version_spec)

    return version_specs
@@ -537,7 +521,7 @@ def get_matching_versions(specs, num_versions=1):
            # Generate only versions that satisfy the spec.
            if spec.concrete or v.intersects(spec.versions):
                s = spack.spec.Spec(pkg.name)
                s.versions = spack.version.VersionList([v])
                s.versions = VersionList([v])
                s.variants = spec.variants.copy()
                # This is needed to avoid hanging references during the
                # concretization phase
@@ -607,14 +591,14 @@ def add(mirror: Mirror, scope=None):
    """Add a named mirror in the given scope"""
    mirrors = spack.config.get("mirrors", scope=scope)
    if not mirrors:
        mirrors = syaml.syaml_dict()
        mirrors = syaml_dict()

    if mirror.name in mirrors:
        tty.die("Mirror with name {} already exists.".format(mirror.name))

    items = [(n, u) for n, u in mirrors.items()]
    items.insert(0, (mirror.name, mirror.to_dict()))
    mirrors = syaml.syaml_dict(items)
    mirrors = syaml_dict(items)
    spack.config.set("mirrors", mirrors, scope=scope)


@@ -622,7 +606,7 @@ def remove(name, scope):
    """Remove the named mirror in the given scope"""
    mirrors = spack.config.get("mirrors", scope=scope)
    if not mirrors:
        mirrors = syaml.syaml_dict()
        mirrors = syaml_dict()

    if name not in mirrors:
        tty.die("No mirror with name %s" % name)

@@ -7,15 +7,10 @@
include Tcl non-hierarchical modules, Lua hierarchical modules, and others.
"""

from typing import Dict, Type

from .common import BaseModuleFileWriter, disable_modules
from .common import disable_modules
from .lmod import LmodModulefileWriter
from .tcl import TclModulefileWriter

__all__ = ["TclModulefileWriter", "LmodModulefileWriter", "disable_modules"]

module_types: Dict[str, Type[BaseModuleFileWriter]] = {
    "tcl": TclModulefileWriter,
    "lmod": LmodModulefileWriter,
}
module_types = {"tcl": TclModulefileWriter, "lmod": LmodModulefileWriter}

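For context, the registry maps the module system name used in the configuration to its writer class; usage is roughly as follows (a sketch, assuming spec is a concrete installed Spec):

    writer_cls = module_types["lmod"]     # or "tcl"
    writer = writer_cls(spec, "default")  # "default" is the module set name
    writer.write(overwrite=True)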
@@ -35,7 +35,7 @@
import os.path
import re
import string
from typing import List, Optional
from typing import Optional

import llnl.util.filesystem
import llnl.util.tty as tty
@@ -50,7 +50,6 @@
import spack.projections as proj
import spack.repo
import spack.schema.environment
import spack.spec
import spack.store
import spack.tengine as tengine
import spack.util.environment
@@ -396,14 +395,16 @@ class BaseConfiguration:

    default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}

    def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None:
    def __init__(self, spec, module_set_name, explicit=None):
        # Module where type(self) is defined
        m = inspect.getmodule(self)
        assert m is not None  # make mypy happy
        self.module = m
        self.module = inspect.getmodule(self)
        # Spec for which we want to generate a module file
        self.spec = spec
        self.name = module_set_name
        # Software installation has been explicitly asked (get this information from
        # db when querying an existing module, like during a refresh or rm operations)
        if explicit is None:
            explicit = spec._installed_explicitly()
        self.explicit = explicit
        # Dictionary of configuration options that should be applied
        # to the spec
@@ -457,11 +458,7 @@ def suffixes(self):
            if constraint in self.spec:
                suffixes.append(suffix)
        suffixes = list(dedupe(suffixes))
        # For hidden modules we can always add a fixed length hash as suffix, since it guards
        # against file name clashes, and the module is not exposed to the user anyways.
        if self.hidden:
            suffixes.append(self.spec.dag_hash(length=7))
        elif self.hash:
        if self.hash:
            suffixes.append(self.hash)
        return suffixes

@@ -494,6 +491,10 @@ def excluded(self):
        exclude_rules = conf.get("exclude", [])
        exclude_matches = [x for x in exclude_rules if spec.satisfies(x)]

        # Should I exclude the module because it's implicit?
        exclude_implicits = conf.get("exclude_implicits", None)
        excluded_as_implicit = exclude_implicits and not self.explicit

        def debug_info(line_header, match_list):
            if match_list:
                msg = "\t{0} : {1}".format(line_header, spec.cshort_spec)
@@ -504,28 +505,16 @@ def debug_info(line_header, match_list):
        debug_info("INCLUDE", include_matches)
        debug_info("EXCLUDE", exclude_matches)

        if not include_matches and exclude_matches:
        if excluded_as_implicit:
            msg = "\tEXCLUDED_AS_IMPLICIT : {0}".format(spec.cshort_spec)
            tty.debug(msg)

        is_excluded = exclude_matches or excluded_as_implicit
        if not include_matches and is_excluded:
            return True

        return False

    @property
    def hidden(self):
        """Returns True if the module has been hidden, False otherwise."""

        # A few variables for convenience of writing the method
        spec = self.spec
        conf = self.module.configuration(self.name)

        hidden_as_implicit = not self.explicit and conf.get(
            "hide_implicits", conf.get("exclude_implicits", False)
        )

        if hidden_as_implicit:
            tty.debug(f"\tHIDDEN_AS_IMPLICIT : {spec.cshort_spec}")

        return hidden_as_implicit

    @property
    def context(self):
        return self.conf.get("context", {})
@@ -554,7 +543,8 @@ def exclude_env_vars(self):
    def _create_list_for(self, what):
        include = []
        for item in self.conf[what]:
            if not self.module.make_configuration(item, self.name).excluded:
            conf = type(self)(item, self.name)
            if not conf.excluded:
                include.append(item)
        return include

@@ -733,9 +723,7 @@ def environment_modifications(self):
        # for that to work, globals have to be set on the package modules, and the
        # whole chain of setup_dependent_package has to be followed from leaf to spec.
        # So: just run it here, but don't collect env mods.
        spack.build_environment.SetupContext(
            spec, context=Context.RUN
        ).set_all_package_py_globals()
        spack.build_environment.SetupContext(context=Context.RUN).set_all_package_py_globals()

        # Then run setup_dependent_run_environment before setup_run_environment.
        for dep in spec.dependencies(deptype=("link", "run")):
@@ -828,7 +816,8 @@ def autoload(self):
    def _create_module_list_of(self, what):
        m = self.conf.module
        name = self.conf.name
        return [m.make_layout(x, name).use_name for x in getattr(self.conf, what)]
        explicit = self.conf.explicit
        return [m.make_layout(x, name, explicit).use_name for x in getattr(self.conf, what)]

    @tengine.context_property
    def verbose(self):
@@ -837,19 +826,12 @@ def verbose(self):


class BaseModuleFileWriter:
    default_template: str
    hide_cmd_format: str
    modulerc_header: List[str]

    def __init__(
        self, spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
    ) -> None:
    def __init__(self, spec, module_set_name, explicit=None):
        self.spec = spec

        # This class is meant to be derived. Get the module of the
        # actual writer.
        self.module = inspect.getmodule(self)
        assert self.module is not None  # make mypy happy
        m = self.module

        # Create the triplet of configuration/layout/context
@@ -867,26 +849,6 @@ def __init__(
            name = type(self).__name__
            raise DefaultTemplateNotDefined(msg.format(name))

        # Check if format for module hide command has been defined,
        # throw if not found
        try:
            self.hide_cmd_format
        except AttributeError:
            msg = "'{0}' object has no attribute 'hide_cmd_format'\n"
            msg += "Did you forget to define it in the class?"
            name = type(self).__name__
            raise HideCmdFormatNotDefined(msg.format(name))

        # Check if modulerc header content has been defined,
        # throw if not found
        try:
            self.modulerc_header
        except AttributeError:
            msg = "'{0}' object has no attribute 'modulerc_header'\n"
            msg += "Did you forget to define it in the class?"
            name = type(self).__name__
            raise ModulercHeaderNotDefined(msg.format(name))

    def _get_template(self):
        """Gets the template that will be rendered for this spec."""
        # Get templates and put them in the order of importance:
@@ -981,9 +943,6 @@ def write(self, overwrite=False):
        # Symlink defaults if needed
        self.update_module_defaults()

        # record module hiddenness if implicit
        self.update_module_hiddenness()

    def update_module_defaults(self):
        if any(self.spec.satisfies(default) for default in self.conf.defaults):
            # This spec matches a default, it needs to be symlinked to default
@@ -994,60 +953,6 @@ def update_module_defaults(self):
            os.symlink(self.layout.filename, default_tmp)
            os.rename(default_tmp, default_path)

    def update_module_hiddenness(self, remove=False):
        """Update modulerc file corresponding to module to add or remove
        command that hides module depending on its hidden state.

        Args:
            remove (bool): if True, hiddenness information for module is
                removed from modulerc.
        """
        modulerc_path = self.layout.modulerc
        hide_module_cmd = self.hide_cmd_format % self.layout.use_name
        hidden = self.conf.hidden and not remove
        modulerc_exists = os.path.exists(modulerc_path)
        updated = False

        if modulerc_exists:
            # retrieve modulerc content
            with open(modulerc_path, "r") as f:
                content = f.readlines()
                content = "".join(content).split("\n")
                # remove last empty item if any
                if len(content[-1]) == 0:
                    del content[-1]
            already_hidden = hide_module_cmd in content

            # remove hide command if module not hidden
            if already_hidden and not hidden:
                content.remove(hide_module_cmd)
                updated = True

            # add hide command if module is hidden
            elif not already_hidden and hidden:
                if len(content) == 0:
                    content = self.modulerc_header.copy()
                content.append(hide_module_cmd)
                updated = True
        else:
            content = self.modulerc_header.copy()
            if hidden:
                content.append(hide_module_cmd)
                updated = True

        # no modulerc file change if no content update
        if updated:
            is_empty = content == self.modulerc_header or len(content) == 0
            # remove existing modulerc if empty
            if modulerc_exists and is_empty:
                os.remove(modulerc_path)
            # create or update modulerc
            elif content != self.modulerc_header:
                # ensure file ends with a newline character
                content.append("")
                with open(modulerc_path, "w") as f:
                    f.write("\n".join(content))

    def remove(self):
        """Deletes the module file."""
        mod_file = self.layout.filename
@@ -1055,7 +960,6 @@ def remove(self):
        try:
            os.remove(mod_file)  # Remove the module file
            self.remove_module_defaults()  # Remove default targeting module file
            self.update_module_hiddenness(remove=True)  # Remove hide cmd in modulerc
            os.removedirs(
                os.path.dirname(mod_file)
            )  # Remove all the empty directories from the leaf up
@@ -1099,17 +1003,5 @@ class DefaultTemplateNotDefined(AttributeError, ModulesError):
    """


class HideCmdFormatNotDefined(AttributeError, ModulesError):
    """Raised if the attribute 'hide_cmd_format' has not been specified
    in the derived classes.
    """


class ModulercHeaderNotDefined(AttributeError, ModulesError):
    """Raised if the attribute 'modulerc_header' has not been specified
    in the derived classes.
    """


class ModulesTemplateNotFoundError(ModulesError, RuntimeError):
    """Raised if the template for a module file was not found."""

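The hide command written to the modulerc file is simply hide_cmd_format interpolated with the module's use name; for example (use name hypothetical):

    use_name = "gcc/12.3.0-abcdefg"
    lmod_line = 'hide_version("%s")' % use_name
    tcl_line = "module-hide --soft --hidden-loaded %s" % use_name
    # lmod_line == 'hide_version("gcc/12.3.0-abcdefg")'
    # tcl_line == 'module-hide --soft --hidden-loaded gcc/12.3.0-abcdefg'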
@@ -6,7 +6,8 @@
import collections
import itertools
import os.path
from typing import Dict, List, Optional, Tuple
import posixpath
from typing import Any, Dict, List

import llnl.util.filesystem as fs
import llnl.util.lang as lang
@@ -23,19 +24,18 @@


#: lmod specific part of the configuration
def configuration(module_set_name: str) -> dict:
    return spack.config.get(f"modules:{module_set_name}:lmod", {})
def configuration(module_set_name):
    config_path = "modules:%s:lmod" % module_set_name
    config = spack.config.get(config_path, {})
    return config


# Caches the configuration {spec_hash: configuration}
configuration_registry: Dict[Tuple[str, str, bool], BaseConfiguration] = {}
configuration_registry: Dict[str, Any] = {}


def make_configuration(
    spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
) -> BaseConfiguration:
def make_configuration(spec, module_set_name, explicit):
    """Returns the lmod configuration for spec"""
    explicit = bool(spec._installed_explicitly()) if explicit is None else explicit
    key = (spec.dag_hash(), module_set_name, explicit)
    try:
        return configuration_registry[key]
@@ -45,18 +45,16 @@ def make_configuration(
    )


def make_layout(
    spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
) -> BaseFileLayout:
def make_layout(spec, module_set_name, explicit):
    """Returns the layout information for spec"""
    return LmodFileLayout(make_configuration(spec, module_set_name, explicit))
    conf = make_configuration(spec, module_set_name, explicit)
    return LmodFileLayout(conf)


def make_context(
    spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
) -> BaseContext:
def make_context(spec, module_set_name, explicit):
    """Returns the context information for spec"""
    return LmodContext(make_configuration(spec, module_set_name, explicit))
    conf = make_configuration(spec, module_set_name, explicit)
    return LmodContext(conf)


def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
@@ -99,7 +97,10 @@ def guess_core_compilers(name, store=False) -> List[spack.spec.CompilerSpec]:
class LmodConfiguration(BaseConfiguration):
    """Configuration class for lmod module files."""

    default_projections = {"all": "{name}/{version}"}
    # Note: Posixpath is used here as well as below as opposed to
    # os.path.join due to spack.spec.Spec.format
    # requiring forward slash path separators at this stage
    default_projections = {"all": posixpath.join("{name}", "{version}")}

    @property
    def core_compilers(self) -> List[spack.spec.CompilerSpec]:
@@ -231,13 +232,6 @@ def missing(self):
        """Returns the list of tokens that are not available."""
        return [x for x in self.hierarchy_tokens if x not in self.available]

    @property
    def hidden(self):
        # Never hide a module that opens a hierarchy
        if any(self.spec.package.provides(x) for x in self.hierarchy_tokens):
            return False
        return super().hidden


class LmodFileLayout(BaseFileLayout):
    """File layout for lmod module files."""
@@ -273,16 +267,12 @@ def filename(self):
        hierarchy_name = os.path.join(*parts)

        # Compute the absolute path
        return os.path.join(
        fullname = os.path.join(
            self.arch_dirname,  # root for lmod files on this architecture
            hierarchy_name,  # relative path
            f"{self.use_name}.{self.extension}",  # file name
            ".".join([self.use_name, self.extension]),  # file name
        )

    @property
    def modulerc(self):
        """Returns the modulerc file associated with current module file"""
        return os.path.join(os.path.dirname(self.filename), f".modulerc.{self.extension}")
        return fullname

    def token_to_path(self, name, value):
        """Transforms a hierarchy token into the corresponding path part.
@@ -315,7 +305,9 @@ def path_part_fmt(token):
            # we need to append a hash to the version to distinguish
            # among flavors of the same library (e.g. openblas~openmp vs.
            # openblas+openmp)
            return f"{path_part_fmt(token=value)}-{value.dag_hash(length=7)}"
            path = path_part_fmt(token=value)
            path = "-".join([path, value.dag_hash(length=7)])
            return path

    @property
    def available_path_parts(self):
@@ -327,7 +319,8 @@ def available_path_parts(self):
        # List of services that are part of the hierarchy
        hierarchy = self.conf.hierarchy_tokens
        # Tokenize each part that is both in the hierarchy and available
        return [self.token_to_path(x, available[x]) for x in hierarchy if x in available]
        parts = [self.token_to_path(x, available[x]) for x in hierarchy if x in available]
        return parts

    @property
    @lang.memoized
@@ -445,7 +438,7 @@ def missing(self):
    @lang.memoized
    def unlocked_paths(self):
        """Returns the list of paths that are unlocked unconditionally."""
        layout = make_layout(self.spec, self.conf.name)
        layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
        return [os.path.join(*parts) for parts in layout.unlocked_paths[None]]

    @tengine.context_property
@@ -453,7 +446,7 @@ def conditionally_unlocked_paths(self):
        """Returns the list of paths that are unlocked conditionally.
        Each item in the list is a tuple with the structure (condition, path).
        """
        layout = make_layout(self.spec, self.conf.name)
        layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
        value = []
        conditional_paths = layout.unlocked_paths
        conditional_paths.pop(None)
@@ -475,11 +468,7 @@ def manipulate_path(token):
class LmodModulefileWriter(BaseModuleFileWriter):
    """Writer class for lmod module files."""

    default_template = "modules/modulefile.lua"

    modulerc_header = []

    hide_cmd_format = 'hide_version("%s")'
    default_template = posixpath.join("modules", "modulefile.lua")


class CoreCompilersNotFoundError(spack.error.SpackError, KeyError):

@@ -6,30 +6,28 @@
"""This module implements the classes necessary to generate Tcl
non-hierarchical modules.
"""
import os.path
from typing import Dict, Optional, Tuple
import posixpath
from typing import Any, Dict

import spack.config
import spack.spec
import spack.tengine as tengine

from .common import BaseConfiguration, BaseContext, BaseFileLayout, BaseModuleFileWriter


#: Tcl specific part of the configuration
def configuration(module_set_name: str) -> dict:
    return spack.config.get(f"modules:{module_set_name}:tcl", {})
def configuration(module_set_name):
    config_path = "modules:%s:tcl" % module_set_name
    config = spack.config.get(config_path, {})
    return config


# Caches the configuration {spec_hash: configuration}
configuration_registry: Dict[Tuple[str, str, bool], BaseConfiguration] = {}
configuration_registry: Dict[str, Any] = {}


def make_configuration(
    spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
) -> BaseConfiguration:
def make_configuration(spec, module_set_name, explicit):
    """Returns the tcl configuration for spec"""
    explicit = bool(spec._installed_explicitly()) if explicit is None else explicit
    key = (spec.dag_hash(), module_set_name, explicit)
    try:
        return configuration_registry[key]
@@ -39,18 +37,16 @@ def make_configuration(
    )


def make_layout(
    spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
) -> BaseFileLayout:
def make_layout(spec, module_set_name, explicit):
    """Returns the layout information for spec"""
    return TclFileLayout(make_configuration(spec, module_set_name, explicit))
    conf = make_configuration(spec, module_set_name, explicit)
    return TclFileLayout(conf)


def make_context(
    spec: spack.spec.Spec, module_set_name: str, explicit: Optional[bool] = None
) -> BaseContext:
def make_context(spec, module_set_name, explicit):
    """Returns the context information for spec"""
    return TclContext(make_configuration(spec, module_set_name, explicit))
    conf = make_configuration(spec, module_set_name, explicit)
    return TclContext(conf)


class TclConfiguration(BaseConfiguration):
@@ -60,11 +56,6 @@ class TclConfiguration(BaseConfiguration):
class TclFileLayout(BaseFileLayout):
    """File layout for tcl module files."""

    @property
    def modulerc(self):
        """Returns the modulerc file associated with current module file"""
        return os.path.join(os.path.dirname(self.filename), ".modulerc")


class TclContext(BaseContext):
    """Context class for tcl module files."""
@@ -78,8 +69,7 @@ def prerequisites(self):
class TclModulefileWriter(BaseModuleFileWriter):
    """Writer class for tcl module files."""

    default_template = "modules/modulefile.tcl"

    modulerc_header = ["#%Module4.7"]

    hide_cmd_format = "module-hide --soft --hidden-loaded %s"
    # Note: Posixpath is used here as opposed to
    # os.path.join due to spack.spec.Spec.format
    # requiring forward slash path separators at this stage
    default_template = posixpath.join("modules", "modulefile.tcl")

@@ -26,7 +26,6 @@
"""
import functools
import inspect
from contextlib import contextmanager

from llnl.util.lang import caller_locals

@@ -272,13 +271,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
        spack.directives.DirectiveMeta.pop_from_context()


@contextmanager
def default_args(**kwargs):
    spack.directives.DirectiveMeta.push_default_args(kwargs)
    yield
    spack.directives.DirectiveMeta.pop_default_args()


class MultiMethodError(spack.error.SpackError):
    """Superclass for multimethod dispatch errors"""

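The removed context manager is meant for package recipes that repeat the same directive keyword arguments; a sketch of the intended usage (package contents hypothetical):

    # Hypothetical fragment of a package.py recipe.
    with default_args(type="build"):
        depends_on("cmake@3.20:")
        depends_on("ninja")
    # Both dependencies get type="build" unless a directive overrides it.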
@@ -1,4 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,235 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import re
import urllib.parse
from typing import Optional, Union

import spack.spec

# notice: Docker is more strict (no uppercase allowed). We parse image names *with* uppercase
# and normalize, so: example.com/Organization/Name -> example.com/organization/name. Tags are
# case sensitive though.
alphanumeric_with_uppercase = r"[a-zA-Z0-9]+"
separator = r"(?:[._]|__|[-]+)"
localhost = r"localhost"
domainNameComponent = r"(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])"
optionalPort = r"(?::[0-9]+)?"
tag = r"[\w][\w.-]{0,127}"
digestPat = r"[A-Za-z][A-Za-z0-9]*(?:[-_+.][A-Za-z][A-Za-z0-9]*)*[:][0-9a-fA-F]{32,}"
ipv6address = r"\[(?:[a-fA-F0-9:]+)\]"

# domain name
domainName = rf"{domainNameComponent}(?:\.{domainNameComponent})*"
host = rf"(?:{domainName}|{ipv6address})"
domainAndPort = rf"{host}{optionalPort}"

# image name
pathComponent = rf"{alphanumeric_with_uppercase}(?:{separator}{alphanumeric_with_uppercase})*"
remoteName = rf"{pathComponent}(?:\/{pathComponent})*"
namePat = rf"(?:{domainAndPort}\/)?{remoteName}"

# Regex for a full image reference, with 3 groups: name, tag, digest
referencePat = re.compile(rf"^({namePat})(?::({tag}))?(?:@({digestPat}))?$")

# Regex for splitting the name into domain and path components
anchoredNameRegexp = re.compile(rf"^(?:({domainAndPort})\/)?({remoteName})$")


def ensure_sha256_checksum(oci_blob: str):
    """Validate that the reference is of the format sha256:<checksum>
    Return the checksum if valid, raise ValueError otherwise."""
    if ":" not in oci_blob:
        raise ValueError(f"Invalid OCI blob format: {oci_blob}")
    alg, checksum = oci_blob.split(":", 1)
    if alg != "sha256":
        raise ValueError(f"Unsupported OCI blob checksum algorithm: {alg}")
    if len(checksum) != 64:
        raise ValueError(f"Invalid OCI blob checksum length: {len(checksum)}")
    return checksum


class Digest:
    """Represents a digest in the format <algorithm>:<digest>.
    Currently only supports sha256 digests."""

    __slots__ = ["algorithm", "digest"]

    def __init__(self, *, algorithm: str, digest: str) -> None:
        self.algorithm = algorithm
        self.digest = digest

    def __eq__(self, __value: object) -> bool:
        if not isinstance(__value, Digest):
            return NotImplemented
        return self.algorithm == __value.algorithm and self.digest == __value.digest

    @classmethod
    def from_string(cls, string: str) -> "Digest":
        return cls(algorithm="sha256", digest=ensure_sha256_checksum(string))

    @classmethod
    def from_sha256(cls, digest: str) -> "Digest":
        return cls(algorithm="sha256", digest=digest)

    def __str__(self) -> str:
        return f"{self.algorithm}:{self.digest}"

class ImageReference:
    """A parsed image of the form domain/name:tag[@digest].
    The digest is optional, and domain and tag are automatically
    filled out with defaults when parsed from string."""

    __slots__ = ["domain", "name", "tag", "digest"]

    def __init__(
        self, *, domain: str, name: str, tag: str = "latest", digest: Optional[Digest] = None
    ):
        self.domain = domain
        self.name = name
        self.tag = tag
        self.digest = digest

    @classmethod
    def from_string(cls, string) -> "ImageReference":
        match = referencePat.match(string)
        if not match:
            raise ValueError(f"Invalid image reference: {string}")

        image, tag, digest = match.groups()

        assert isinstance(image, str)
        assert isinstance(tag, (str, type(None)))
        assert isinstance(digest, (str, type(None)))

        match = anchoredNameRegexp.match(image)

        # This can never happen, since the regex is implied
        # by the regex above. It's just here to make mypy happy.
        assert match, f"Invalid image reference: {string}"

        domain, name = match.groups()

        assert isinstance(domain, (str, type(None)))
        assert isinstance(name, str)

        # Fill out defaults like docker would do...
        # Based on github.com/distribution/distribution: allow short names like "ubuntu"
        # and "user/repo" to be interpreted as "library/ubuntu" and "user/repo:latest
        # Not sure if Spack should follow Docker, but it's what people expect...
        if not domain:
            domain = "index.docker.io"
            name = f"library/{name}"
        elif (
            "." not in domain
            and ":" not in domain
            and domain != "localhost"
            and domain == domain.lower()
        ):
            name = f"{domain}/{name}"
            domain = "index.docker.io"

        # Lowercase the image name. This is enforced by Docker, although the OCI spec isn't clear?
        # We do this anyways, cause for example in Github Actions the <organization>/<repository>
        # part can have uppercase, and may be interpolated when specifying the relevant OCI image.
        name = name.lower()

        if not tag:
            tag = "latest"

        # sha256 is currently the only algorithm that
        # we implement, even though the spec allows for more
        if isinstance(digest, str):
            digest = Digest.from_string(digest)

        return cls(domain=domain, name=name, tag=tag, digest=digest)

    def manifest_url(self) -> str:
        digest_or_tag = self.digest or self.tag
        return f"https://{self.domain}/v2/{self.name}/manifests/{digest_or_tag}"

    def blob_url(self, digest: Union[str, Digest]) -> str:
        if isinstance(digest, str):
            digest = Digest.from_string(digest)
        return f"https://{self.domain}/v2/{self.name}/blobs/{digest}"

    def with_digest(self, digest: Union[str, Digest]) -> "ImageReference":
        if isinstance(digest, str):
            digest = Digest.from_string(digest)
        return ImageReference(domain=self.domain, name=self.name, tag=self.tag, digest=digest)

    def with_tag(self, tag: str) -> "ImageReference":
        return ImageReference(domain=self.domain, name=self.name, tag=tag, digest=self.digest)

    def uploads_url(self, digest: Optional[Digest] = None) -> str:
        url = f"https://{self.domain}/v2/{self.name}/blobs/uploads/"
        if digest:
            url += f"?digest={digest}"
        return url

    def tags_url(self) -> str:
        return f"https://{self.domain}/v2/{self.name}/tags/list"

    def endpoint(self, path: str = "") -> str:
        return urllib.parse.urljoin(f"https://{self.domain}/v2/", path)

    def __str__(self) -> str:
        s = f"{self.domain}/{self.name}"
        if self.tag:
            s += f":{self.tag}"
        if self.digest:
            s += f"@{self.digest}"
        return s

    def __eq__(self, __value: object) -> bool:
        if not isinstance(__value, ImageReference):
            return NotImplemented
        return (
            self.domain == __value.domain
            and self.name == __value.name
            and self.tag == __value.tag
            and self.digest == __value.digest
        )

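The docker-style defaulting in from_string is easiest to see on concrete inputs; the expected values below follow from the code above (the image names are examples):

    ref = ImageReference.from_string("ubuntu")
    assert str(ref) == "index.docker.io/library/ubuntu:latest"

    ref = ImageReference.from_string("ghcr.io/Spack/Ubuntu:22.04")
    assert str(ref) == "ghcr.io/spack/ubuntu:22.04"  # name lowercased
    assert ref.manifest_url() == "https://ghcr.io/v2/spack/ubuntu/manifests/22.04"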
def _ensure_valid_tag(tag: str) -> str:
    """Ensure a tag is valid for an OCI registry."""
    sanitized = re.sub(r"[^\w.-]", "_", tag)
    if len(sanitized) > 128:
        return sanitized[:64] + sanitized[-64:]
    return sanitized


def default_tag(spec: "spack.spec.Spec") -> str:
    """Return a valid, default image tag for a spec."""
    return _ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")


#: Default OCI index tag
default_index_tag = "index.spack"


def tag_is_spec(tag: str) -> bool:
    """Check if a tag is likely a Spec"""
    return tag.endswith(".spack") and tag != default_index_tag


def default_config(architecture: str, os: str):
    return {
        "architecture": architecture,
        "os": os,
        "rootfs": {"type": "layers", "diff_ids": []},
        "config": {"Env": []},
    }


def default_manifest():
    return {
        "mediaType": "application/vnd.oci.image.manifest.v1+json",
        "schemaVersion": 2,
        "config": {"mediaType": "application/vnd.oci.image.config.v1+json"},
        "layers": [],
    }
||||
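To make the normalization and tagging helpers above concrete, here is a small usage sketch (illustrative only, not part of the diff; it assumes the classes above are importable):

# Hypothetical usage of ImageReference and _ensure_valid_tag:
ref = ImageReference.from_string("ubuntu:22.04")
assert str(ref) == "index.docker.io/library/ubuntu:22.04"  # Docker-style defaults filled in
assert ref.manifest_url() == "https://index.docker.io/v2/library/ubuntu/manifests/22.04"
assert _ensure_valid_tag("foo@bar") == "foo_bar"  # '@' is not a valid OCI tag character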
@@ -1,381 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import hashlib
import json
import os
import time
import urllib.error
import urllib.parse
import urllib.request
from http.client import HTTPResponse
from typing import NamedTuple, Tuple
from urllib.request import Request

import llnl.util.tty as tty

import spack.binary_distribution
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.oci.opener
import spack.repo
import spack.spec
import spack.stage
import spack.traverse
import spack.util.crypto

from .image import Digest, ImageReference


class Blob(NamedTuple):
    compressed_digest: Digest
    uncompressed_digest: Digest
    size: int


def create_tarball(spec: spack.spec.Spec, tarfile_path):
    buildinfo = spack.binary_distribution.get_buildinfo_dict(spec)
    return spack.binary_distribution._do_create_tarball(tarfile_path, spec.prefix, buildinfo)


def _log_upload_progress(digest: Digest, size: int, elapsed: float):
    elapsed = max(elapsed, 0.001)  # guard against division by zero
    tty.info(f"Uploaded {digest} ({elapsed:.2f}s, {size / elapsed / 1024 / 1024:.2f} MB/s)")


def with_query_param(url: str, param: str, value: str) -> str:
    """Add a query parameter to a URL

    Args:
        url: The URL to add the parameter to.
        param: The parameter name.
        value: The parameter value.

    Returns:
        The URL with the parameter added.
    """
    parsed = urllib.parse.urlparse(url)
    query = urllib.parse.parse_qs(parsed.query)
    if param in query:
        query[param].append(value)
    else:
        query[param] = [value]
    return urllib.parse.urlunparse(
        parsed._replace(query=urllib.parse.urlencode(query, doseq=True))
    )
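A quick sanity check of with_query_param (illustrative; the URL is made up). Note that urlencode percent-escapes the ':' in the digest:

url = with_query_param("https://example.com/upload?state=1", "digest", "sha256:abc")
assert url == "https://example.com/upload?state=1&digest=sha256%3Aabc"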
def upload_blob(
    ref: ImageReference,
    file: str,
    digest: Digest,
    force: bool = False,
    small_file_size: int = 0,
    _urlopen: spack.oci.opener.MaybeOpen = None,
) -> bool:
    """Uploads a blob to an OCI registry

    We only do monolithic uploads, even though it's very simple to do chunked.
    Observed problems with chunked uploads:
    (1) it's slow, many sequential requests, (2) some registries set an *unknown*
    max chunk size, and the spec doesn't say how to obtain it

    Args:
        ref: The image reference.
        file: The file to upload.
        digest: The digest of the file.
        force: Whether to force upload the blob, even if it already exists.
        small_file_size: For files at most this size, attempt
            to do a single POST request instead of POST + PUT.
            Some registries do not support single requests, and others
            do not specify what size they support in single POST.
            For now this feature is disabled by default (0KB)

    Returns:
        True if the blob was uploaded, False if it already existed.
    """
    _urlopen = _urlopen or spack.oci.opener.urlopen

    # Test if the blob already exists; if so, early exit.
    if not force and blob_exists(ref, digest, _urlopen):
        return False

    start = time.time()

    with open(file, "rb") as f:
        file_size = os.fstat(f.fileno()).st_size

        # For small blobs, do a single POST request.
        # The spec says that registries MAY support this
        if file_size <= small_file_size:
            request = Request(
                url=ref.uploads_url(digest),
                method="POST",
                data=f,
                headers={
                    "Content-Type": "application/octet-stream",
                    "Content-Length": str(file_size),
                },
            )
        else:
            request = Request(
                url=ref.uploads_url(), method="POST", headers={"Content-Length": "0"}
            )

        response = _urlopen(request)

        # Created the blob in one go.
        if response.status == 201:
            _log_upload_progress(digest, file_size, time.time() - start)
            return True

        # Otherwise, do another PUT request.
        spack.oci.opener.ensure_status(response, 202)
        assert "Location" in response.headers

        # Can be absolute or relative, joining handles both
        upload_url = with_query_param(
            ref.endpoint(response.headers["Location"]), "digest", str(digest)
        )
        f.seek(0)

        response = _urlopen(
            Request(
                url=upload_url,
                method="PUT",
                data=f,
                headers={
                    "Content-Type": "application/octet-stream",
                    "Content-Length": str(file_size),
                },
            )
        )

        spack.oci.opener.ensure_status(response, 201)

        # Print elapsed time and MB/s
        _log_upload_progress(digest, file_size, time.time() - start)
        return True
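A typical push built on this primitive uploads the blob first, then a manifest referencing it by digest. A rough sketch (the file path, digest, and manifest contents are made up; the retry-wrapped variants used here are defined further below):

# upload_blob_with_retry(ref, file="/tmp/blob.tar.gz", digest=Digest.from_sha256("..."))
# upload_manifest_with_retry(ref, oci_manifest={...})  # layers reference the blob digest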
def upload_manifest(
    ref: ImageReference,
    oci_manifest: dict,
    tag: bool = True,
    _urlopen: spack.oci.opener.MaybeOpen = None,
):
    """Uploads a manifest/index to a registry

    Args:
        ref: The image reference.
        oci_manifest: The OCI manifest or index.
        tag: When true, use the tag, otherwise use the digest;
            this is relevant for multi-arch images, where the
            tag is an index, referencing the manifests by digest.

    Returns:
        The digest and size of the uploaded manifest.
    """
    _urlopen = _urlopen or spack.oci.opener.urlopen

    data = json.dumps(oci_manifest, separators=(",", ":")).encode()
    digest = Digest.from_sha256(hashlib.sha256(data).hexdigest())
    size = len(data)

    if not tag:
        ref = ref.with_digest(digest)

    response = _urlopen(
        Request(
            url=ref.manifest_url(),
            method="PUT",
            data=data,
            headers={"Content-Type": oci_manifest["mediaType"]},
        )
    )

    spack.oci.opener.ensure_status(response, 201)
    return digest, size


def image_from_mirror(mirror: spack.mirror.Mirror) -> ImageReference:
    """Given an OCI based mirror, extract the URL and image name from it"""
    url = mirror.push_url
    if not url.startswith("oci://"):
        raise ValueError(f"Mirror {mirror} is not an OCI mirror")
    return ImageReference.from_string(url[6:])


def blob_exists(
    ref: ImageReference, digest: Digest, _urlopen: spack.oci.opener.MaybeOpen = None
) -> bool:
    """Checks if a blob exists in an OCI registry"""
    try:
        _urlopen = _urlopen or spack.oci.opener.urlopen
        response = _urlopen(Request(url=ref.blob_url(digest), method="HEAD"))
        return response.status == 200
    except urllib.error.HTTPError as e:
        if e.getcode() == 404:
            return False
        raise


def copy_missing_layers(
    src: ImageReference,
    dst: ImageReference,
    architecture: str,
    _urlopen: spack.oci.opener.MaybeOpen = None,
) -> Tuple[dict, dict]:
    """Copy image layers from src to dst for the given architecture.

    Args:
        src: The source image reference.
        dst: The destination image reference.
        architecture: The architecture (when referencing an index)

    Returns:
        Tuple of manifest and config of the base image.
    """
    _urlopen = _urlopen or spack.oci.opener.urlopen
    manifest, config = get_manifest_and_config(src, architecture, _urlopen=_urlopen)

    # Get layer digests
    digests = [Digest.from_string(layer["digest"]) for layer in manifest["layers"]]

    # Filter out digests that don't exist in the registry
    missing_digests = [
        digest for digest in digests if not blob_exists(dst, digest, _urlopen=_urlopen)
    ]

    if not missing_digests:
        return manifest, config

    # Pull missing blobs, push them to the registry
    with spack.stage.StageComposite.from_iterable(
        make_stage(url=src.blob_url(digest), digest=digest, _urlopen=_urlopen)
        for digest in missing_digests
    ) as stages:
        stages.fetch()
        stages.check()
        stages.cache_local()

        for stage, digest in zip(stages, missing_digests):
            # No need to check existence again, force=True.
            upload_blob(
                dst, file=stage.save_filename, force=True, digest=digest, _urlopen=_urlopen
            )

    return manifest, config


#: OCI manifest content types (including docker type)
manifest_content_type = [
    "application/vnd.oci.image.manifest.v1+json",
    "application/vnd.docker.distribution.manifest.v2+json",
]

#: OCI index content types (including docker type)
index_content_type = [
    "application/vnd.oci.image.index.v1+json",
    "application/vnd.docker.distribution.manifest.list.v2+json",
]

#: All OCI manifest / index content types
all_content_type = manifest_content_type + index_content_type


def get_manifest_and_config(
    ref: ImageReference,
    architecture="amd64",
    recurse=3,
    _urlopen: spack.oci.opener.MaybeOpen = None,
) -> Tuple[dict, dict]:
    """Recursively fetch manifest and config for a given image reference
    with a given architecture.

    Args:
        ref: The image reference.
        architecture: The architecture (when referencing an index)
        recurse: How many levels of index to recurse into.

    Returns:
        A tuple of (manifest, config)"""

    _urlopen = _urlopen or spack.oci.opener.urlopen

    # Get the manifest
    response: HTTPResponse = _urlopen(
        Request(url=ref.manifest_url(), headers={"Accept": ", ".join(all_content_type)})
    )

    # Recurse when we find an index
    if response.headers["Content-Type"] in index_content_type:
        if recurse == 0:
            raise Exception("Maximum recursion depth reached while fetching OCI manifest")

        index = json.load(response)
        manifest_meta = next(
            manifest
            for manifest in index["manifests"]
            if manifest["platform"]["architecture"] == architecture
        )

        return get_manifest_and_config(
            ref.with_digest(manifest_meta["digest"]),
            architecture=architecture,
            recurse=recurse - 1,
            _urlopen=_urlopen,
        )

    # Otherwise, require a manifest
    if response.headers["Content-Type"] not in manifest_content_type:
        raise Exception(f"Unknown content type {response.headers['Content-Type']}")

    manifest = json.load(response)

    # Download, verify and cache the config file
    config_digest = Digest.from_string(manifest["config"]["digest"])
    with make_stage(ref.blob_url(config_digest), config_digest, _urlopen=_urlopen) as stage:
        stage.fetch()
        stage.check()
        stage.cache_local()
        with open(stage.save_filename, "rb") as f:
            config = json.load(f)

    return manifest, config
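For example, fetching the arm64 manifest behind a multi-arch tag could look like the sketch below (the image name is hypothetical); the top-level index is resolved transparently by the recursion above:

# manifest, config = get_manifest_and_config(
#     ImageReference.from_string("ghcr.io/example/image:latest"), architecture="arm64"
# )
# print([layer["digest"] for layer in manifest["layers"]])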
#: Same as upload_manifest, but with retry wrapper
upload_manifest_with_retry = spack.oci.opener.default_retry(upload_manifest)

#: Same as upload_blob, but with retry wrapper
upload_blob_with_retry = spack.oci.opener.default_retry(upload_blob)

#: Same as get_manifest_and_config, but with retry wrapper
get_manifest_and_config_with_retry = spack.oci.opener.default_retry(get_manifest_and_config)

#: Same as copy_missing_layers, but with retry wrapper
copy_missing_layers_with_retry = spack.oci.opener.default_retry(copy_missing_layers)


def make_stage(
    url: str, digest: Digest, keep: bool = False, _urlopen: spack.oci.opener.MaybeOpen = None
) -> spack.stage.Stage:
    _urlopen = _urlopen or spack.oci.opener.urlopen
    fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy(
        url, checksum=digest.digest, _urlopen=_urlopen
    )
    # Use blobs/<alg>/<encoded> as the cache path, which follows
    # the OCI Image Layout Specification. What's missing though,
    # is the `oci-layout` and `index.json` files, which are
    # required by the spec.
    return spack.stage.Stage(
        fetch_strategy,
        mirror_paths=spack.mirror.OCIImageLayout(digest),
        name=digest.digest,
        keep=keep,
    )
@@ -1,442 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""All the logic for OCI fetching and authentication"""

import base64
import json
import re
import time
import urllib.error
import urllib.parse
import urllib.request
from enum import Enum, auto
from http.client import HTTPResponse
from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Tuple
from urllib.request import Request

import llnl.util.lang

import spack.config
import spack.mirror
import spack.parser
import spack.repo
import spack.util.web

from .image import ImageReference


def _urlopen():
    opener = create_opener()

    def dispatch_open(fullurl, data=None, timeout=None):
        timeout = timeout or spack.config.get("config:connect_timeout", 10)
        return opener.open(fullurl, data, timeout)

    return dispatch_open


OpenType = Callable[..., HTTPResponse]
MaybeOpen = Optional[OpenType]

#: Opener that automatically uses OCI authentication based on mirror config
urlopen: OpenType = llnl.util.lang.Singleton(_urlopen)


SP = r" "
OWS = r"[ \t]*"
BWS = OWS
HTAB = r"\t"
VCHAR = r"\x21-\x7E"
tchar = r"[!#$%&'*+\-.^_`|~0-9A-Za-z]"
token = rf"{tchar}+"
obs_text = r"\x80-\xFF"
qdtext = rf"[{HTAB}{SP}\x21\x23-\x5B\x5D-\x7E{obs_text}]"
quoted_pair = rf"\\([{HTAB}{SP}{VCHAR}{obs_text}])"
quoted_string = rf'"(?:({qdtext}*)|{quoted_pair})*"'


class TokenType(spack.parser.TokenBase):
    AUTH_PARAM = rf"({token}){BWS}={BWS}({token}|{quoted_string})"
    # TOKEN68 = r"([A-Za-z0-9\-._~+/]+=*)"  # todo... support this?
    TOKEN = rf"{tchar}+"
    EQUALS = rf"{BWS}={BWS}"
    COMMA = rf"{OWS},{OWS}"
    SPACE = r" +"
    EOF = r"$"
    ANY = r"."


TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]

ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))


class State(Enum):
    CHALLENGE = auto()
    AUTH_PARAM_LIST_START = auto()
    AUTH_PARAM = auto()
    NEXT_IN_LIST = auto()
    AUTH_PARAM_OR_SCHEME = auto()


def tokenize(input: str):
    scanner = ALL_TOKENS.scanner(input)  # type: ignore[attr-defined]

    for match in iter(scanner.match, None):  # type: ignore[var-annotated]
        yield spack.parser.Token(
            TokenType.__members__[match.lastgroup],  # type: ignore[attr-defined]
            match.group(),  # type: ignore[attr-defined]
            match.start(),  # type: ignore[attr-defined]
            match.end(),  # type: ignore[attr-defined]
        )


class Challenge:
    __slots__ = ["scheme", "params"]

    def __init__(
        self, scheme: Optional[str] = None, params: Optional[List[Tuple[str, str]]] = None
    ) -> None:
        self.scheme = scheme or ""
        self.params = params or []

    def __repr__(self) -> str:
        return f"Challenge({self.scheme}, {self.params})"

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, Challenge)
            and self.scheme == other.scheme
            and self.params == other.params
        )


def parse_www_authenticate(input: str):
    """Very basic parsing of WWW-Authenticate headers (RFC 7235 section 4.1).
    Note: this omits token68 support."""

    # auth-scheme = token
    # auth-param  = token BWS "=" BWS ( token / quoted-string )
    # challenge   = auth-scheme [ 1*SP ( token68 / #auth-param ) ]
    # WWW-Authenticate = 1#challenge

    challenges: List[Challenge] = []

    _unquote = re.compile(quoted_pair).sub
    unquote = lambda s: _unquote(r"\1", s[1:-1])

    mode: State = State.CHALLENGE
    tokens = tokenize(input)

    current_challenge = Challenge()

    def extract_auth_param(input: str) -> Tuple[str, str]:
        key, value = input.split("=", 1)
        key = key.rstrip()
        value = value.lstrip()
        if value.startswith('"'):
            value = unquote(value)
        return key, value

    while True:
        token: spack.parser.Token = next(tokens)

        if mode == State.CHALLENGE:
            if token.kind == TokenType.EOF:
                raise ValueError(token)
            elif token.kind == TokenType.TOKEN:
                current_challenge.scheme = token.value
                mode = State.AUTH_PARAM_LIST_START
            else:
                raise ValueError(token)

        elif mode == State.AUTH_PARAM_LIST_START:
            if token.kind == TokenType.EOF:
                challenges.append(current_challenge)
                break
            elif token.kind == TokenType.COMMA:
                # Challenge without param list, followed by another challenge.
                challenges.append(current_challenge)
                current_challenge = Challenge()
                mode = State.CHALLENGE
            elif token.kind == TokenType.SPACE:
                # A space means it must be followed by a param list
                mode = State.AUTH_PARAM
            else:
                raise ValueError(token)

        elif mode == State.AUTH_PARAM:
            if token.kind == TokenType.EOF:
                raise ValueError(token)
            elif token.kind == TokenType.AUTH_PARAM:
                key, value = extract_auth_param(token.value)
                current_challenge.params.append((key, value))
                mode = State.NEXT_IN_LIST
            else:
                raise ValueError(token)

        elif mode == State.NEXT_IN_LIST:
            if token.kind == TokenType.EOF:
                challenges.append(current_challenge)
                break
            elif token.kind == TokenType.COMMA:
                mode = State.AUTH_PARAM_OR_SCHEME
            else:
                raise ValueError(token)

        elif mode == State.AUTH_PARAM_OR_SCHEME:
            if token.kind == TokenType.EOF:
                raise ValueError(token)
            elif token.kind == TokenType.TOKEN:
                challenges.append(current_challenge)
                current_challenge = Challenge(token.value)
                mode = State.AUTH_PARAM_LIST_START
            elif token.kind == TokenType.AUTH_PARAM:
                key, value = extract_auth_param(token.value)
                current_challenge.params.append((key, value))
                mode = State.NEXT_IN_LIST

    return challenges


class RealmServiceScope(NamedTuple):
    realm: str
    service: str
    scope: str


class UsernamePassword(NamedTuple):
    username: str
    password: str


def get_bearer_challenge(challenges: List[Challenge]) -> Optional[RealmServiceScope]:
    # Find a challenge that we can handle (currently only Bearer)
    challenge = next((c for c in challenges if c.scheme == "Bearer"), None)

    if challenge is None:
        return None

    # Get realm / service / scope from the challenge
    realm = next((v for k, v in challenge.params if k == "realm"), None)
    service = next((v for k, v in challenge.params if k == "service"), None)
    scope = next((v for k, v in challenge.params if k == "scope"), None)

    if realm is None or service is None or scope is None:
        return None

    return RealmServiceScope(realm, service, scope)
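As an illustration of the two helpers above, a Docker-Hub-style challenge header parses into a single Bearer challenge, from which realm, service and scope are extracted (the header value below is our own example, not taken from the diff):

header = (
    'Bearer realm="https://auth.docker.io/token",'
    'service="registry.docker.io",scope="repository:library/ubuntu:pull"'
)
challenge = get_bearer_challenge(parse_www_authenticate(header))
assert challenge == RealmServiceScope(
    "https://auth.docker.io/token", "registry.docker.io", "repository:library/ubuntu:pull"
)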
class OCIAuthHandler(urllib.request.BaseHandler):
    def __init__(self, credentials_provider: Callable[[str], Optional[UsernamePassword]]):
        """
        Args:
            credentials_provider: A function that takes a domain and may return a UsernamePassword.
        """
        self.credentials_provider = credentials_provider

        # Cached bearer tokens for a given domain.
        self.cached_tokens: Dict[str, str] = {}

    def obtain_bearer_token(self, registry: str, challenge: RealmServiceScope, timeout) -> str:
        # See https://docs.docker.com/registry/spec/auth/token/

        query = urllib.parse.urlencode(
            {"service": challenge.service, "scope": challenge.scope, "client_id": "spack"}
        )

        parsed = urllib.parse.urlparse(challenge.realm)._replace(
            query=query, fragment="", params=""
        )

        # Don't send credentials over insecure transport.
        if parsed.scheme != "https":
            raise ValueError(
                f"Cannot login to {registry} over insecure {parsed.scheme} connection"
            )

        request = Request(urllib.parse.urlunparse(parsed))

        # I guess we shouldn't cache this, since we don't know
        # the context in which it's used (may depend on config)
        pair = self.credentials_provider(registry)

        if pair is not None:
            encoded = base64.b64encode(f"{pair.username}:{pair.password}".encode("utf-8")).decode(
                "utf-8"
            )
            request.add_unredirected_header("Authorization", f"Basic {encoded}")

        # Do a GET request.
        response = self.parent.open(request, timeout=timeout)

        # Read the response and parse the JSON
        response_json = json.load(response)

        # Get the token from the response
        token = response_json["token"]

        # Remember the last obtained token for this registry.
        # Note: we should probably take into account realm, service and scope
        # so we can store multiple tokens for the same registry.
        self.cached_tokens[registry] = token

        return token

    def https_request(self, req: Request):
        # Eagerly add the bearer token to the request if no
        # auth header is set yet, to avoid 401s in multiple
        # requests to the same registry.

        # Use has_header, not .headers, since there are two
        # types of headers (redirected and unredirected)
        if req.has_header("Authorization"):
            return req

        parsed = urllib.parse.urlparse(req.full_url)
        token = self.cached_tokens.get(parsed.netloc)

        if not token:
            return req

        req.add_unredirected_header("Authorization", f"Bearer {token}")
        return req

    def http_error_401(self, req: Request, fp, code, msg, headers):
        # Login failed; avoid infinite recursion where we go back and
        # forth between auth server and registry
        if hasattr(req, "login_attempted"):
            raise urllib.error.HTTPError(
                req.full_url, code, f"Failed to login to {req.full_url}: {msg}", headers, fp
            )

        # On 401 Unauthorized, parse the WWW-Authenticate header
        # to determine what authentication is required
        if "WWW-Authenticate" not in headers:
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                "Cannot login to registry, missing WWW-Authenticate header",
                headers,
                fp,
            )

        header_value = headers["WWW-Authenticate"]

        try:
            challenge = get_bearer_challenge(parse_www_authenticate(header_value))
        except ValueError as e:
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, malformed WWW-Authenticate header: {header_value}",
                headers,
                fp,
            ) from e

        # If there is no bearer challenge, we can't handle it
        if not challenge:
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, unsupported authentication scheme: {header_value}",
                headers,
                fp,
            )

        # Get the token from the auth handler
        try:
            token = self.obtain_bearer_token(
                registry=urllib.parse.urlparse(req.get_full_url()).netloc,
                challenge=challenge,
                timeout=req.timeout,
            )
        except ValueError as e:
            raise urllib.error.HTTPError(
                req.full_url,
                code,
                f"Cannot login to registry, failed to obtain bearer token: {e}",
                headers,
                fp,
            ) from e

        # Add the token to the request
        req.add_unredirected_header("Authorization", f"Bearer {token}")
        setattr(req, "login_attempted", True)

        return self.parent.open(req, timeout=req.timeout)


def credentials_from_mirrors(
    domain: str, *, mirrors: Optional[Iterable[spack.mirror.Mirror]] = None
) -> Optional[UsernamePassword]:
    """Filter out OCI registry credentials from a list of mirrors."""

    mirrors = mirrors or spack.mirror.MirrorCollection().values()

    for mirror in mirrors:
        # Prefer push credentials over fetch. Unlikely that those are different,
        # but our config format allows it.
        for direction in ("push", "fetch"):
            pair = mirror.get_access_pair(direction)
            if pair is None:
                continue
            url = mirror.get_url(direction)
            if not url.startswith("oci://"):
                continue
            try:
                parsed = ImageReference.from_string(url[6:])
            except ValueError:
                continue
            if parsed.domain == domain:
                return UsernamePassword(*pair)
    return None


def create_opener():
    """Create an opener that can handle OCI authentication."""
    opener = urllib.request.OpenerDirector()
    for handler in [
        urllib.request.UnknownHandler(),
        urllib.request.HTTPSHandler(),
        spack.util.web.SpackHTTPDefaultErrorHandler(),
        urllib.request.HTTPRedirectHandler(),
        urllib.request.HTTPErrorProcessor(),
        OCIAuthHandler(credentials_from_mirrors),
    ]:
        opener.add_handler(handler)
    return opener


def ensure_status(response: HTTPResponse, status: int):
    """Raise an error if the response status is not the expected one."""
    if response.status == status:
        return

    raise urllib.error.HTTPError(
        response.geturl(), response.status, response.reason, response.info(), None
    )


def default_retry(f, retries: int = 3, sleep=None):
    sleep = sleep or time.sleep

    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except urllib.error.HTTPError as e:
                # Retry on internal server errors and rate limit errors.
                # Potentially this could take into account the Retry-After header
                # if registries support it
                if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
                    # Exponential backoff
                    sleep(2**i)
                    continue
                raise

    return wrapper
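Usage sketch for default_retry (illustrative): a 429 or 5xx response is retried with exponential backoff (1s, then 2s, ...); any other error is raised immediately.

# fetch = default_retry(lambda: urlopen(Request(url=ref.manifest_url())))
# response = fetch()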
@@ -5,12 +5,10 @@
import glob
import os
import pathlib
import platform
import subprocess

from spack.error import SpackError
from spack.util import windows_registry as winreg
from spack.version import Version

from ._operating_system import OperatingSystem
@@ -33,6 +31,43 @@ class WindowsOs(OperatingSystem):
    10.
    """

    # Find MSVC directories using vswhere
    comp_search_paths = []
    vs_install_paths = []
    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
    if root:
        try:
            extra_args = {"encoding": "mbcs", "errors": "strict"}
            paths = subprocess.check_output(  # type: ignore[call-overload] # novermin
                [
                    os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
                    "-prerelease",
                    "-requires",
                    "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
                    "-property",
                    "installationPath",
                    "-products",
                    "*",
                ],
                **extra_args,
            ).strip()
            vs_install_paths = paths.split("\n")
            msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths]
            for p in msvc_paths:
                comp_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
            if os.getenv("ONEAPI_ROOT"):
                comp_search_paths.extend(
                    glob.glob(
                        os.path.join(
                            str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin"
                        )
                    )
                )
        except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
            pass
    if comp_search_paths:
        compiler_search_paths = comp_search_paths

    def __init__(self):
        plat_ver = windows_version()
        if plat_ver < Version("10"):
@@ -41,71 +76,3 @@ def __init__(self):
    def __str__(self):
        return self.name

    @property
    def vs_install_paths(self):
        vs_install_paths = []
        root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
        if root:
            try:
                extra_args = {"encoding": "mbcs", "errors": "strict"}
                paths = subprocess.check_output(  # type: ignore[call-overload] # novermin
                    [
                        os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
                        "-prerelease",
                        "-requires",
                        "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
                        "-property",
                        "installationPath",
                        "-products",
                        "*",
                    ],
                    **extra_args,
                ).strip()
                vs_install_paths = paths.split("\n")
            except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
                pass
        return vs_install_paths

    @property
    def msvc_paths(self):
        return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths]

    @property
    def compiler_search_paths(self):
        # First strategy: Find MSVC directories using vswhere
        _compiler_search_paths = []
        for p in self.msvc_paths:
            _compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
        if os.getenv("ONEAPI_ROOT"):
            _compiler_search_paths.extend(
                glob.glob(
                    os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
                )
            )
        # Second strategy: Find MSVC via the registry
        msft = winreg.WindowsRegistryView(
            "SOFTWARE\\WOW6432Node\\Microsoft", winreg.HKEY.HKEY_LOCAL_MACHINE
        )
        vs_entries = msft.find_subkeys(r"VisualStudio_.*")
        vs_paths = []

        def clean_vs_path(path):
            path = path.split(",")[0].lstrip("@")
            return str((pathlib.Path(path).parent / "..\\..").resolve())

        for entry in vs_entries:
            try:
                val = entry.get_subkey("Capabilities").get_value("ApplicationDescription").value
                vs_paths.append(clean_vs_path(val))
            except FileNotFoundError as e:
                if hasattr(e, "winerror"):
                    if e.winerror == 2:
                        pass
                    else:
                        raise
                else:
                    raise

        _compiler_search_paths.extend(vs_paths)
        return _compiler_search_paths
@@ -85,7 +85,7 @@
    UpstreamPackageError,
)
from spack.mixins import filter_compiler_wrappers
from spack.multimethod import default_args, when
from spack.multimethod import when
from spack.package_base import (
    DependencyConflictError,
    build_system_flags,
@@ -6,7 +6,7 @@

Here is the EBNF grammar for a spec::

    spec = [name] [node_options] { ^[edge_properties] node } |
    spec = [name] [node_options] { ^ node } |
           [name] [node_options] hash |
           filename
@@ -14,8 +14,7 @@
           [name] [node_options] hash |
           filename

    node_options    = [@(version_list|version_pair)] [%compiler] { variant }
    edge_properties = [ { bool_variant | key_value } ]
    node_options    = [@(version_list|version_pair)] [%compiler] { variant }

    hash     = / id
    filename = (.|/|[a-zA-Z0-9-_]*/)([a-zA-Z0-9-_./]*)(.json|.yaml)
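For illustration (examples of ours, not part of the diff), specs matched by the new grammar include:

#   hdf5@1.14 %gcc@12 +mpi           -- name, version, compiler and a variant
#   hdf5 ^[virtuals=mpi] openmpi     -- edge_properties on a dependency edge
#   callpath ^/abc123                -- dependency referenced by hash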
@@ -65,9 +64,9 @@

from llnl.util.tty import color

import spack.deptypes
import spack.error
import spack.spec
import spack.variant
import spack.version

IS_WINDOWS = sys.platform == "win32"
@@ -98,9 +97,9 @@
VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
QUOTED_VALUE = r"[\"']+(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\\s]+)[\"']+"

VERSION = r"=?(?:[a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
VERSION_RANGE = rf"(?:(?:{VERSION})?:(?:{VERSION}(?!\s*=))?)"
VERSION_LIST = rf"(?:{VERSION_RANGE}|{VERSION})(?:\s*,\s*(?:{VERSION_RANGE}|{VERSION}))*"
VERSION = r"=?([a-zA-Z0-9_][a-zA-Z_0-9\-\.]*\b)"
VERSION_RANGE = rf"({VERSION}\s*:\s*{VERSION}(?!\s*=)|:\s*{VERSION}(?!\s*=)|{VERSION}\s*:|:)"
VERSION_LIST = rf"({VERSION_RANGE}|{VERSION})(\s*[,]\s*({VERSION_RANGE}|{VERSION}))*"


class TokenBase(enum.Enum):
@@ -128,8 +127,6 @@ class TokenType(TokenBase):
    """

    # Dependency
    START_EDGE_PROPERTIES = r"(?:\^\[)"
    END_EDGE_PROPERTIES = r"(?:\])"
    DEPENDENCY = r"(?:\^)"
    # Version
    VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))"
@@ -167,7 +164,7 @@ class Token:
    __slots__ = "kind", "value", "start", "end"

    def __init__(
        self, kind: TokenBase, value: str, start: Optional[int] = None, end: Optional[int] = None
        self, kind: TokenType, value: str, start: Optional[int] = None, end: Optional[int] = None
    ):
        self.kind = kind
        self.value = value
@@ -267,8 +264,8 @@ def tokens(self) -> List[Token]:
        return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))

    def next_spec(
        self, initial_spec: Optional["spack.spec.Spec"] = None
    ) -> Optional["spack.spec.Spec"]:
        self, initial_spec: Optional[spack.spec.Spec] = None
    ) -> Optional[spack.spec.Spec]:
        """Return the next spec parsed from text.

        Args:
@@ -284,15 +281,16 @@ def next_spec(
        initial_spec = initial_spec or spack.spec.Spec()
        root_spec = SpecNodeParser(self.ctx).parse(initial_spec)
        while True:
            if self.ctx.accept(TokenType.START_EDGE_PROPERTIES):
                edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
                edge_properties.setdefault("depflag", 0)
                edge_properties.setdefault("virtuals", ())
                dependency = self._parse_node(root_spec)
                root_spec._add_dependency(dependency, **edge_properties)
            if self.ctx.accept(TokenType.DEPENDENCY):
                dependency = SpecNodeParser(self.ctx).parse()

                if dependency is None:
                    msg = (
                        "this dependency sigil needs to be followed by a package name "
                        "or a node attribute (version, variant, etc.)"
                    )
                    raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)

            elif self.ctx.accept(TokenType.DEPENDENCY):
                dependency = self._parse_node(root_spec)
                root_spec._add_dependency(dependency, depflag=0, virtuals=())

            else:
@@ -300,19 +298,7 @@ def next_spec(

        return root_spec

    def _parse_node(self, root_spec):
        dependency = SpecNodeParser(self.ctx).parse()
        if dependency is None:
            msg = (
                "the dependency sigil and any optional edge attributes must be followed by a "
                "package name or a node attribute (version, variant, etc.)"
            )
            raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
        if root_spec.concrete:
            raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
        return dependency

    def all_specs(self) -> List["spack.spec.Spec"]:
    def all_specs(self) -> List[spack.spec.Spec]:
        """Return all the specs that remain to be parsed"""
        return list(iter(self.next_spec, None))
@@ -327,9 +313,7 @@ def __init__(self, ctx):
        self.has_compiler = False
        self.has_version = False

    def parse(
        self, initial_spec: Optional["spack.spec.Spec"] = None
    ) -> Optional["spack.spec.Spec"]:
    def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
        """Parse a single spec node from a stream of tokens

        Args:
@@ -430,7 +414,7 @@ class FileParser:
    def __init__(self, ctx):
        self.ctx = ctx

    def parse(self, initial_spec: "spack.spec.Spec") -> "spack.spec.Spec":
    def parse(self, initial_spec: spack.spec.Spec) -> spack.spec.Spec:
        """Parse a spec tree from a specfile.

        Args:
@@ -453,42 +437,7 @@ def parse(self, initial_spec: "spack.spec.Spec") -> "spack.spec.Spec":
        return initial_spec


class EdgeAttributeParser:
    __slots__ = "ctx", "literal_str"

    def __init__(self, ctx, literal_str):
        self.ctx = ctx
        self.literal_str = literal_str

    def parse(self):
        attributes = {}
        while True:
            if self.ctx.accept(TokenType.KEY_VALUE_PAIR):
                name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                name = name.strip("'\" ")
                value = value.strip("'\" ").split(",")
                attributes[name] = value
                if name not in ("deptypes", "virtuals"):
                    msg = (
                        "the only edge attributes that are currently accepted "
                        'are "deptypes" and "virtuals"'
                    )
                    raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
            # TODO: Add code to accept bool variants here as soon as use variants are implemented
            elif self.ctx.accept(TokenType.END_EDGE_PROPERTIES):
                break
            else:
                msg = "unexpected token in edge attributes"
                raise SpecParsingError(msg, self.ctx.next_token, self.literal_str)

        # Turn deptypes=... into the depflag representation
        if "deptypes" in attributes:
            deptype_string = attributes.pop("deptypes")
            attributes["depflag"] = spack.deptypes.canonicalize(deptype_string)
        return attributes


def parse(text: str) -> List["spack.spec.Spec"]:
def parse(text: str) -> List[spack.spec.Spec]:
    """Parse text into a list of specs

    Args:
@@ -501,8 +450,8 @@ def parse(text: str) -> List["spack.spec.Spec"]:


def parse_one_or_raise(
    text: str, initial_spec: Optional["spack.spec.Spec"] = None
) -> "spack.spec.Spec":
    text: str, initial_spec: Optional[spack.spec.Spec] = None
) -> spack.spec.Spec:
    """Parse exactly one spec from text and return it, or raise

    Args:
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes and functions to manage providers of virtual dependencies"""
import itertools
from typing import Dict, List, Optional, Set

import spack.error
@@ -10,6 +11,33 @@
import spack.util.spack_json as sjson


def _cross_provider_maps(lmap, rmap):
    """Return a dictionary that combines constraint requests from both inputs.

    Args:
        lmap: main provider map
        rmap: provider map with additional constraints
    """
    # TODO: this is pretty darned nasty, and inefficient, but there
    # TODO: are not that many vdeps in most specs.
    result = {}
    for lspec, rspec in itertools.product(lmap, rmap):
        try:
            constrained = lspec.constrained(rspec)
        except spack.error.UnsatisfiableSpecError:
            continue

        # lp and rp are left and right provider specs.
        for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]):
            if lp_spec.name == rp_spec.name:
                try:
                    const = lp_spec.constrained(rp_spec, deps=False)
                    result.setdefault(constrained, set()).add(const)
                except spack.error.UnsatisfiableSpecError:
                    continue
    return result


class _IndexBase:
    #: This is a dict of dicts used for finding providers of particular
    #: virtual dependencies. The dict of dicts looks like:
@@ -53,6 +81,29 @@ def providers_for(self, virtual_spec):
    def __contains__(self, name):
        return name in self.providers

    def satisfies(self, other):
        """Determine if the providers of virtual specs are compatible.

        Args:
            other: another provider index

        Returns:
            True if the providers are compatible, False otherwise.
        """
        common = set(self.providers) & set(other.providers)
        if not common:
            return True

        # This ensures that some provider in other COULD satisfy the
        # vpkg constraints on self.
        result = {}
        for name in common:
            crossed = _cross_provider_maps(self.providers[name], other.providers[name])
            if crossed:
                result[name] = crossed

        return all(c in result for c in common)

    def __eq__(self, other):
        return self.providers == other.providers
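A rough sketch of how satisfies() is meant to be used (hypothetical indices; constructor arguments elided):

# lhs, rhs = ProviderIndex(...), ProviderIndex(...)
# if lhs.satisfies(rhs):
#     # every virtual that both indices provide has at least one pair of
#     # providers whose constraints can be merged
#     ...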
@@ -6,7 +6,6 @@
import abc
import collections.abc
import contextlib
import difflib
import errno
import functools
import importlib
@@ -1517,18 +1516,7 @@ def __init__(self, name, repo=None):
            long_msg = "Did you mean to specify a filename with './{0}'?"
            long_msg = long_msg.format(name)
        else:
            long_msg = "Use 'spack create' to create a new package."

            if not repo:
                repo = spack.repo.PATH

            # We need to compare the base package name
            pkg_name = name.rsplit(".", 1)[-1]
            similar = difflib.get_close_matches(pkg_name, repo.all_package_names())

            if 1 <= len(similar) <= 5:
                long_msg += "\n\nDid you mean one of the following packages?\n  "
                long_msg += "\n  ".join(similar)
            long_msg = "You may need to run 'spack clean -m'."

        super().__init__(msg, long_msg)
        self.name = name
@@ -62,25 +62,3 @@ def _deprecated_properties(validator, deprecated, instance, schema):

Validator = llnl.util.lang.Singleton(_make_validator)

spec_list_schema = {
    "type": "array",
    "default": [],
    "items": {
        "anyOf": [
            {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "matrix": {
                        "type": "array",
                        "items": {"type": "array", "items": {"type": "string"}},
                    },
                    "exclude": {"type": "array", "items": {"type": "string"}},
                },
            },
            {"type": "string"},
            {"type": "null"},
        ]
    },
}
@@ -92,7 +92,6 @@
        "url_fetch_method": {"type": "string", "enum": ["urllib", "curl"]},
        "additional_external_search_paths": {"type": "array", "items": {"type": "string"}},
        "binary_index_ttl": {"type": "integer", "minimum": 0},
        "aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}},
    },
    "deprecatedProperties": {
        "properties": ["terminal_title"],
@@ -1,34 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for definitions

.. literalinclude:: _spack_root/lib/spack/spack/schema/definitions.py
   :lines: 13-
"""

import spack.schema

#: Properties for inclusion in other schemas
properties = {
    "definitions": {
        "type": "array",
        "default": [],
        "items": {
            "type": "object",
            "properties": {"when": {"type": "string"}},
            "patternProperties": {r"^(?!when$)\w*": spack.schema.spec_list_schema},
        },
    }
}

#: Full schema with metadata
schema = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Spack definitions configuration file schema",
    "type": "object",
    "additionalProperties": False,
    "properties": properties,
}
@@ -12,11 +12,34 @@

import spack.schema.gitlab_ci  # DEPRECATED
import spack.schema.merged
import spack.schema.packages
import spack.schema.projections

#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"

spec_list_schema = {
    "type": "array",
    "default": [],
    "items": {
        "anyOf": [
            {
                "type": "object",
                "additionalProperties": False,
                "properties": {
                    "matrix": {
                        "type": "array",
                        "items": {"type": "array", "items": {"type": "string"}},
                    },
                    "exclude": {"type": "array", "items": {"type": "string"}},
                },
            },
            {"type": "string"},
            {"type": "null"},
        ]
    },
}

projections_scheme = spack.schema.projections.properties["projections"]

schema = {
@@ -52,7 +75,16 @@
            }
        },
    },
    "specs": spack.schema.spec_list_schema,
    "definitions": {
        "type": "array",
        "default": [],
        "items": {
            "type": "object",
            "properties": {"when": {"type": "string"}},
            "patternProperties": {r"^(?!when$)\w*": spec_list_schema},
        },
    },
    "specs": spec_list_schema,
    "view": {
        "anyOf": [
            {"type": "boolean"},
@@ -17,7 +17,6 @@
import spack.schema.concretizer
import spack.schema.config
import spack.schema.container
import spack.schema.definitions
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
@@ -33,7 +32,6 @@
    spack.schema.config.properties,
    spack.schema.container.properties,
    spack.schema.ci.properties,
    spack.schema.definitions.properties,
    spack.schema.mirrors.properties,
    spack.schema.modules.properties,
    spack.schema.packages.properties,
@@ -17,7 +17,7 @@
#: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT
#: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE
spec_regex = (
    r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|hide|"
    r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|"
    r"whitelist|blacklist|"  # DEPRECATED: remove in 0.20.
    r"include|exclude|"  # use these more inclusive/consistent options
    r"projections|naming_scheme|core_compilers|all)(^\w[\w-]*)"
@@ -89,7 +89,6 @@
    "exclude": array_of_strings,
    "exclude_implicits": {"type": "boolean", "default": False},
    "defaults": array_of_strings,
    "hide_implicits": {"type": "boolean", "default": False},
    "naming_scheme": {"type": "string"},  # Can we be more specific here?
    "projections": projections_scheme,
    "all": module_file_configuration,
@@ -188,52 +187,3 @@
    "additionalProperties": False,
    "properties": properties,
}


# Deprecated keys and their replacements
old_to_new_key = {"exclude_implicits": "hide_implicits"}


def update_keys(data, key_translations):
    """Change blacklist/whitelist to exclude/include.

    Arguments:
        data (dict): data from a valid modules configuration.
        key_translations (dict): a dictionary of keys to translate to
            their respective values.

    Return:
        (bool) whether anything was changed in data
    """
    changed = False

    if isinstance(data, dict):
        keys = list(data.keys())
        for key in keys:
            value = data[key]

            translation = key_translations.get(key)
            if translation:
                data[translation] = data.pop(key)
                changed = True

            changed |= update_keys(value, key_translations)

    elif isinstance(data, list):
        for elt in data:
            changed |= update_keys(elt, key_translations)

    return changed


def update(data):
    """Update the data in place to remove deprecated properties.

    Args:
        data (dict): dictionary to be updated

    Returns:
        True if data was changed, False otherwise
    """
    # Translate blacklist/whitelist to exclude/include
    return update_keys(data, old_to_new_key)
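For instance (an illustrative fragment of ours, not from the diff), a modules configuration using the deprecated key is rewritten in place:

data = {"tcl": {"all": {"exclude_implicits": True}}}
assert update(data)
assert data == {"tcl": {"all": {"hide_implicits": True}}}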
@@ -8,66 +8,6 @@
   :lines: 13-
"""

permissions = {
    "type": "object",
    "additionalProperties": False,
    "properties": {
        "read": {"type": "string", "enum": ["user", "group", "world"]},
        "write": {"type": "string", "enum": ["user", "group", "world"]},
        "group": {"type": "string"},
    },
}

variants = {"oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]}

requirements = {
    "oneOf": [
        # 'require' can be a list of requirement_groups.
        # Each requirement group is a list of one or more
        # specs. Either at least one or exactly one spec
        # in the group must be satisfied (depending on
        # whether you use "any_of" or "one_of",
        # respectively)
        {
            "type": "array",
            "items": {
                "oneOf": [
                    {
                        "type": "object",
                        "additionalProperties": False,
                        "properties": {
                            "one_of": {"type": "array", "items": {"type": "string"}},
                            "any_of": {"type": "array", "items": {"type": "string"}},
                            "spec": {"type": "string"},
                            "message": {"type": "string"},
                            "when": {"type": "string"},
                        },
                    },
                    {"type": "string"},
                ]
            },
        },
        # Shorthand for a single requirement group with
        # one member
        {"type": "string"},
    ]
}

permissions = {
    "type": "object",
    "additionalProperties": False,
    "properties": {
        "read": {"type": "string", "enum": ["user", "group", "world"]},
        "write": {"type": "string", "enum": ["user", "group", "world"]},
        "group": {"type": "string"},
    },
}

package_attributes = {
    "type": "object",
    "additionalProperties": False,
    "patternProperties": {r"\w+": {}},
}

#: Properties for inclusion in other schemas
properties = {
@@ -75,14 +15,57 @@
    "type": "object",
    "default": {},
    "additionalProperties": False,
    "properties": {
        "all": {  # package name
    "patternProperties": {
        r"\w[\w-]*": {  # package name
            "type": "object",
            "default": {},
            "additionalProperties": False,
            "properties": {
                "require": requirements,
                "version": {},  # Here only to warn users on ignored properties
                "require": {
                    "oneOf": [
                        # 'require' can be a list of requirement_groups.
                        # Each requirement group is a list of one or more
                        # specs. Either at least one or exactly one spec
                        # in the group must be satisfied (depending on
                        # whether you use "any_of" or "one_of",
                        # respectively)
                        {
                            "type": "array",
                            "items": {
                                "oneOf": [
                                    {
                                        "type": "object",
                                        "additionalProperties": False,
                                        "properties": {
                                            "one_of": {
                                                "type": "array",
                                                "items": {"type": "string"},
                                            },
                                            "any_of": {
                                                "type": "array",
                                                "items": {"type": "string"},
                                            },
                                            "spec": {"type": "string"},
                                            "message": {"type": "string"},
                                            "when": {"type": "string"},
                                        },
                                    },
                                    {"type": "string"},
                                ]
                            },
                        },
                        # Shorthand for a single requirement group with
                        # one member
                        {"type": "string"},
                    ]
                },
                "version": {
                    "type": "array",
                    "default": [],
                    # version strings (type should be string, number is still possible
                    # but deprecated. this is to avoid issues with e.g. 3.10 -> 3.1)
                    "items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
                },
                "target": {
                    "type": "array",
                    "default": [],
@@ -95,10 +78,22 @@
                    "items": {"type": "string"},
                },  # compiler specs
                "buildable": {"type": "boolean", "default": True},
                "permissions": permissions,
                "permissions": {
                    "type": "object",
                    "additionalProperties": False,
                    "properties": {
                        "read": {"type": "string", "enum": ["user", "group", "world"]},
                        "write": {"type": "string", "enum": ["user", "group", "world"]},
                        "group": {"type": "string"},
                    },
                },
                # If 'get_full_repo' is promoted to a Package-level
                # attribute, it could be useful to set it here
                "package_attributes": package_attributes,
                "package_attributes": {
                    "type": "object",
                    "additionalProperties": False,
                    "patternProperties": {r"\w+": {}},
                },
                "providers": {
                    "type": "object",
                    "default": {},
@@ -111,40 +106,12 @@
                    }
                },
            },
            "variants": variants,
        },
        "deprecatedProperties": {
            "properties": ["version"],
            "message": "setting version preferences in the 'all' section of packages.yaml "
            "is deprecated and will be removed in v0.22\n\n\tThese preferences "
            "will be ignored by Spack. You can set them only in package specific sections "
            "of the same file.\n",
            "error": False,
        },
    }
},
"patternProperties": {
    r"(?!^all$)(^\w[\w-]*)": {  # package name
        "type": "object",
        "default": {},
        "additionalProperties": False,
        "properties": {
            "require": requirements,
            "version": {
                "type": "array",
                "default": [],
                # version strings
                "items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
            "variants": {
                "oneOf": [
                    {"type": "string"},
                    {"type": "array", "items": {"type": "string"}},
                ]
            },
            "target": {},  # Here only to warn users on ignored properties
            "compiler": {},  # Here only to warn users on ignored properties
            "buildable": {"type": "boolean", "default": True},
            "permissions": permissions,
            # If 'get_full_repo' is promoted to a Package-level
            # attribute, it could be useful to set it here
            "package_attributes": package_attributes,
            "providers": {},  # Here only to warn users on ignored properties
            "variants": variants,
            "externals": {
                "type": "array",
                "items": {
@@ -160,14 +127,6 @@
|
||||
},
|
||||
},
|
||||
},
|
||||
"deprecatedProperties": {
|
||||
"properties": ["target", "compiler", "providers"],
|
||||
"message": "setting compiler, target or provider preferences in a package "
|
||||
"specific section of packages.yaml is deprecated, and will be removed in "
|
||||
"v0.22.\n\n\tThese preferences will be ignored by Spack. You "
|
||||
"can set them only in the 'all' section of the same file.\n",
|
||||
"error": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
}
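Editor's note: to see what the requirement-group shape above accepts, here is a minimal sketch (not part of the diff, and not Spack's own test code) that validates packages.yaml-style "require" data with the third-party jsonschema package. The pared-down schema below only mirrors the structure defined above; the sample specs are invented.

import jsonschema

# Hypothetical, simplified mirror of the requirement-group schema shown above.
requirement_group = {
    "type": "object",
    "additionalProperties": False,
    "properties": {
        "one_of": {"type": "array", "items": {"type": "string"}},
        "any_of": {"type": "array", "items": {"type": "string"}},
        "spec": {"type": "string"},
        "message": {"type": "string"},
        "when": {"type": "string"},
    },
}
requirements = {
    "oneOf": [
        # A list of requirement groups, each either an object or a plain spec string...
        {"type": "array", "items": {"oneOf": [requirement_group, {"type": "string"}]}},
        # ...or the shorthand: a single requirement group with one member.
        {"type": "string"},
    ]
}

# One group where exactly one of the two specs must be satisfied:
jsonschema.validate(
    [{"one_of": ["mpich+shared", "openmpi"], "message": "pick exactly one MPI"}],
    requirements,
)
# The string shorthand is also accepted:
jsonschema.validate("%gcc", requirements)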
@@ -13,7 +13,7 @@
import re
import types
import warnings
from typing import Callable, Dict, List, NamedTuple, Optional, Sequence, Set, Tuple, Union
from typing import Dict, List, NamedTuple, Optional, Sequence, Tuple, Union

import archspec.cpu

@@ -338,13 +338,6 @@ def __getattr__(self, name):

fn = AspFunctionBuilder()

TransformFunction = Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]


def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunction]:
"""Transformation that removes all "node" and "virtual_node" from the input list of facts."""
return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))


def _create_counter(specs, tests):
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
@@ -692,7 +685,7 @@ def extract_args(model, predicate_name):
class ErrorHandler:
def __init__(self, model):
self.model = model
self.full_model = None
self.error_args = extract_args(model, "error")

def multiple_values_error(self, attribute, pkg):
return f'Cannot select a single "{attribute}" for package "{pkg}"'
@@ -700,48 +693,6 @@ def multiple_values_error(self, attribute, pkg):
def no_value_error(self, attribute, pkg):
return f'Cannot select a single "{attribute}" for package "{pkg}"'

def _get_cause_tree(
self,
cause: Tuple[str, str],
conditions: Dict[str, str],
condition_causes: List[Tuple[Tuple[str, str], Tuple[str, str]]],
seen: Set,
indent: str = " ",
) -> List[str]:
"""
Implementation of the recursion for self.get_cause_tree. Much of this operates on
(condition_id, set_id) tuples, where the second element means that the condition
represented by the first held in the condition set represented by the second.
"""
seen = set(seen) | set(cause)
parents = [c for e, c in condition_causes if e == cause and c not in seen]
local = "required because %s " % conditions[cause[0]]

return [indent + local] + [
c
for parent in parents
for c in self._get_cause_tree(
parent, conditions, condition_causes, seen, indent=indent + " "
)
]

def get_cause_tree(self, cause: Tuple[str, str]) -> List[str]:
"""
Get the cause tree associated with the given cause.

Arguments:
cause: The root cause of the tree (final condition)

Returns:
A list of strings describing the causes, formatted to display tree structure.
"""
conditions: Dict[str, str] = dict(extract_args(self.full_model, "condition_reason"))
condition_causes: List[Tuple[Tuple[str, str], Tuple[str, str]]] = list(
((Effect, EID), (Cause, CID))
for Effect, EID, Cause, CID in extract_args(self.full_model, "condition_cause")
)
return self._get_cause_tree(cause, conditions, condition_causes, set())
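Editor's note: the (condition_id, set_id) recursion above can be reproduced standalone on toy data. Everything in this sketch is invented (the condition IDs, messages, and graph); it is only meant to show how the parent lookup and the growing seen-set produce an indented cause tree.

def cause_tree(cause, conditions, condition_causes, seen, indent="  "):
    # Same shape as _get_cause_tree, but a free function over plain dicts/lists.
    seen = set(seen) | {cause}
    parents = [c for e, c in condition_causes if e == cause and c not in seen]
    lines = [indent + "required because %s" % conditions[cause[0]]]
    for parent in parents:
        lines += cause_tree(parent, conditions, condition_causes, seen, indent + "  ")
    return lines

conditions = {"c1": "zlib requested from CLI", "c2": "cmake depends on zlib"}
condition_causes = [(("c2", "root"), ("c1", "root"))]  # c2 held because c1 held
print("\n".join(cause_tree(("c2", "root"), conditions, condition_causes, set())))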
def handle_error(self, msg, *args):
"""Handle an error state derived by the solver."""
if msg == "multiple_values_error":
@@ -750,31 +701,14 @@ def handle_error(self, msg, *args):
if msg == "no_value_error":
return self.no_value_error(*args)

try:
idx = args.index("startcauses")
except ValueError:
msg_args = args
causes = []
else:
msg_args = args[:idx]
cause_args = args[idx + 1 :]
cause_args_conditions = cause_args[::2]
cause_args_ids = cause_args[1::2]
causes = list(zip(cause_args_conditions, cause_args_ids))

msg = msg.format(*msg_args)

# For variant formatting, we sometimes have to construct specs
# to format values properly. Find/replace all occurrences of
# Spec(...) with the string representation of the spec mentioned
msg = msg.format(*args)
specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
for spec_str in specs_to_construct:
msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))

for cause in set(causes):
for c in self.get_cause_tree(cause):
msg += f"\n{c}"

return msg
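Editor's note: the "startcauses" marker convention above is easy to miss, so here is a self-contained sketch with made-up arguments. Everything before the marker formats the message; everything after it arrives as flat (condition, condition_set) pairs.

args = ("zlib", "foo=bar", "startcauses", "c1", "root", "c2", "root")
idx = args.index("startcauses")
msg_args = args[:idx]           # arguments for msg.format(...)
cause_args = args[idx + 1 :]    # flat pair stream after the marker
causes = list(zip(cause_args[::2], cause_args[1::2]))
assert msg_args == ("zlib", "foo=bar")
assert causes == [("c1", "root"), ("c2", "root")]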
def message(self, errors) -> str:
@@ -786,31 +720,11 @@ def message(self, errors) -> str:
return "\n".join([header] + messages)

def raise_if_errors(self):
initial_error_args = extract_args(self.model, "error")
if not initial_error_args:
if not self.error_args:
return

error_causation = clingo.Control()

parent_dir = pathlib.Path(__file__).parent
errors_lp = parent_dir / "error_messages.lp"

def on_model(model):
self.full_model = model.symbols(shown=True, terms=True)

with error_causation.backend() as backend:
for atom in self.model:
atom_id = backend.add_atom(atom)
backend.add_rule([atom_id], [], choice=False)

error_causation.load(str(errors_lp))
error_causation.ground([("base", []), ("error_messages", [])])
_ = error_causation.solve(on_model=on_model)

# No choices so there will be only one model
error_args = extract_args(self.full_model, "error")
errors = sorted(
[(int(priority), msg, args) for priority, msg, *args in error_args], reverse=True
[(int(priority), msg, args) for priority, msg, *args in self.error_args], reverse=True
)
msg = self.message(errors)
raise UnsatisfiableSpecError(msg)
@@ -948,12 +862,6 @@ def visit(node):
self.control.load(os.path.join(parent_dir, "display.lp"))
if not setup.concretize_everything:
self.control.load(os.path.join(parent_dir, "when_possible.lp"))

for spec in specs:
if self._compiler_flags_has_propagation(spec.compiler_flags):
self.control.load(os.path.join(parent_dir, "propagation.lp"))
break

timer.stop("load")

# Grounding is the first step in the solve -- it turns our facts
@@ -981,6 +889,14 @@ def on_model(model):

timer.start("solve")
solve_result = self.control.solve(**solve_kwargs)

if solve_result.satisfiable and self._model_has_cycles(models):
tty.debug(f"cycles detected, falling back to slower algorithm [specs={specs}]")
self.control.load(os.path.join(parent_dir, "cycle_detection.lp"))
self.control.ground([("no_cycle", [])])
models.clear()
solve_result = self.control.solve(**solve_kwargs)

timer.stop("solve")

# once done, construct the solve result
@@ -1017,7 +933,7 @@ def on_model(model):
if sym.name not in ("attr", "error", "opt_criterion"):
tty.debug(
"UNKNOWN SYMBOL: %s(%s)"
% (sym.name, ", ".join([str(s) for s in intermediate_repr(sym.arguments)]))
% (sym.name, ", ".join(intermediate_repr(sym.arguments)))
)

elif cores:
@@ -1034,11 +950,25 @@ def on_model(model):

return result, timer, self.control.statistics

def _compiler_flags_has_propagation(self, flags):
for _, flag_vals in flags.items():
if any(val.propagate for val in flag_vals):
return True
return False

def _model_has_cycles(self, models):
"""Returns true if the best model has cycles in it"""
cycle_detection = clingo.Control()
parent_dir = pathlib.Path(__file__).parent
lp_file = parent_dir / "cycle_detection.lp"

min_cost, best_model = min(models)
with cycle_detection.backend() as backend:
for atom in best_model:
if atom.name == "attr" and str(atom.arguments[0]) == '"depends_on"':
symbol = fn.depends_on(atom.arguments[1], atom.arguments[2])
atom_id = backend.add_atom(symbol.symbol())
backend.add_rule([atom_id], [], choice=False)

cycle_detection.load(str(lp_file))
cycle_detection.ground([("base", []), ("no_cycle", [])])
cycle_result = cycle_detection.solve()

return cycle_result.unsatisfiable
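Editor's note: a standalone sketch of the same idea, using the clingo Python API directly. The inline program below is only a stand-in for Spack's cycle_detection.lp, and the dependency facts are invented; it reports a cycle exactly when the acyclicity check is unsatisfiable, which is what _model_has_cycles relies on.

import clingo

ctl = clingo.Control()
ctl.add("base", [], """
depends_on("a", "b"). depends_on("b", "c"). depends_on("c", "a").
path(A, B) :- depends_on(A, B).
path(A, C) :- path(A, B), depends_on(B, C).
:- path(A, A).  % forbid cycles: any self-reaching path kills the model
""")
ctl.ground([("base", [])])
print("cycle detected:", ctl.solve().unsatisfiable)  # True for this graph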
class ConcreteSpecsByHash(collections.abc.Mapping):
@@ -1215,7 +1145,7 @@ def conflict_rules(self, pkg):
default_msg = "{0}: '{1}' conflicts with '{2}'"
no_constraint_msg = "{0}: conflicts with '{1}'"
for trigger, constraints in pkg.conflicts.items():
trigger_msg = f"conflict is triggered when {str(trigger)}"
trigger_msg = "conflict trigger %s" % str(trigger)
trigger_spec = spack.spec.Spec(trigger)
trigger_id = self.condition(
trigger_spec, name=trigger_spec.name or pkg.name, msg=trigger_msg
@@ -1227,11 +1157,7 @@ def conflict_rules(self, pkg):
conflict_msg = no_constraint_msg.format(pkg.name, trigger)
else:
conflict_msg = default_msg.format(pkg.name, trigger, constraint)

spec_for_msg = (
spack.spec.Spec(pkg.name) if constraint == spack.spec.Spec() else constraint
)
constraint_msg = f"conflict applies to spec {str(spec_for_msg)}"
constraint_msg = "conflict constraint %s" % str(constraint)
constraint_id = self.condition(constraint, name=pkg.name, msg=constraint_msg)
self.gen.fact(
fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, conflict_msg))
@@ -1270,9 +1196,32 @@ def compiler_facts(self):
matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec))

for weight, (compiler_id, cspec) in enumerate(matches):
f = fn.compiler_weight(compiler_id, weight)
f = fn.default_compiler_preference(compiler_id, weight)
self.gen.fact(f)

def package_compiler_defaults(self, pkg):
"""Facts about packages' compiler prefs."""

packages = spack.config.get("packages")
pkg_prefs = packages.get(pkg.name)
if not pkg_prefs or "compiler" not in pkg_prefs:
return

compiler_list = self.possible_compilers
compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
ppk = spack.package_prefs.PackagePrefs(pkg.name, "compiler", all=False)
matches = sorted(compiler_list, key=lambda x: ppk(x.spec))

for i, compiler in enumerate(reversed(matches)):
self.gen.fact(
fn.pkg_fact(
pkg.name,
fn.node_compiler_preference(
compiler.spec.name, compiler.spec.version, -i * 100
),
)
)

def package_requirement_rules(self, pkg):
rules = self.requirement_rules_from_package_py(pkg)
rules.extend(self.requirement_rules_from_packages_yaml(pkg))
@@ -1364,6 +1313,9 @@ def pkg_rules(self, pkg, tests):
# conflicts
self.conflict_rules(pkg)

# default compilers for this package
self.package_compiler_defaults(pkg)

# virtuals
self.package_provider_rules(pkg)

@@ -1387,7 +1339,7 @@ def trigger_rules(self):
self.gen.h2("Trigger conditions")
for name in self._trigger_cache:
cache = self._trigger_cache[name]
for (spec_str, _), (trigger_id, requirements) in cache.items():
for spec_str, (trigger_id, requirements) in cache.items():
self.gen.fact(fn.pkg_fact(name, fn.trigger_id(trigger_id)))
self.gen.fact(fn.pkg_fact(name, fn.trigger_msg(spec_str)))
for predicate in requirements:
@@ -1400,7 +1352,7 @@ def effect_rules(self):
self.gen.h2("Imposed requirements")
for name in self._effect_cache:
cache = self._effect_cache[name]
for (spec_str, _), (effect_id, requirements) in cache.items():
for spec_str, (effect_id, requirements) in cache.items():
self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
self.gen.fact(fn.pkg_fact(name, fn.effect_msg(spec_str)))
for predicate in requirements:
@@ -1499,26 +1451,18 @@ def variant_rules(self, pkg):

self.gen.newline()

def condition(
self,
required_spec: spack.spec.Spec,
imposed_spec: Optional[spack.spec.Spec] = None,
name: Optional[str] = None,
msg: Optional[str] = None,
transform_required: Optional[TransformFunction] = None,
transform_imposed: Optional[TransformFunction] = remove_node,
):
def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=False):
"""Generate facts for a dependency or virtual provider condition.

Arguments:
required_spec: the constraints that trigger this condition
imposed_spec: the constraints that are imposed when this condition is triggered
name: name for `required_spec` (required if required_spec is anonymous, ignored if not)
msg: description of the condition
transform_required: transformation applied to facts from the required spec. Defaults
to leaving facts as they are.
transform_imposed: transformation applied to facts from the imposed spec. Defaults
to removing "node" and "virtual_node" facts.
required_spec (spack.spec.Spec): the spec that triggers this condition
imposed_spec (spack.spec.Spec or None): the spec with constraints that
are imposed when this condition is triggered
name (str or None): name for `required_spec` (required if
required_spec is anonymous, ignored if not)
msg (str or None): description of the condition
node (bool): if False, does not emit "node" or "virtual_node" requirements
from the imposed spec
Returns:
int: id of the condition created by this function
"""
@@ -1536,14 +1480,10 @@ def condition(

cache = self._trigger_cache[named_cond.name]

named_cond_key = (str(named_cond), transform_required)
named_cond_key = str(named_cond)
if named_cond_key not in cache:
trigger_id = next(self._trigger_id_counter)
requirements = self.spec_clauses(named_cond, body=True, required_from=name)

if transform_required:
requirements = transform_required(named_cond, requirements)

cache[named_cond_key] = (trigger_id, requirements)
trigger_id, requirements = cache[named_cond_key]
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
@@ -1552,14 +1492,14 @@ def condition(
return condition_id

cache = self._effect_cache[named_cond.name]
imposed_spec_key = (str(imposed_spec), transform_imposed)
imposed_spec_key = str(imposed_spec)
if imposed_spec_key not in cache:
effect_id = next(self._effect_id_counter)
requirements = self.spec_clauses(imposed_spec, body=False, required_from=name)

if transform_imposed:
requirements = transform_imposed(imposed_spec, requirements)

if not node:
requirements = list(
filter(lambda x: x.args[0] not in ("node", "virtual_node"), requirements)
)
cache[imposed_spec_key] = (effect_id, requirements)
effect_id, requirements = cache[imposed_spec_key]
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
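Editor's note on why both cache keys gain a second element here: two conditions over the same spec string but with different transforms must not be deduplicated into one trigger or effect ID. Python functions are hashable, so the (string, callable) pair works directly as a dict key. A tiny illustration with invented data:

def keep(spec, facts):
    # trivial transform standing in for track_dependencies etc.
    return facts

cache = {}
cache[("pkg@1.0", None)] = (1, ["..."])          # transform-free entry
cache[("pkg@1.0", keep)] = (2, ["...", "node"])  # same spec, different transform
# The two entries coexist instead of clobbering each other:
assert cache[("pkg@1.0", None)][0] != cache[("pkg@1.0", keep)][0]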
@@ -1590,17 +1530,6 @@ def package_provider_rules(self, pkg):
)
self.gen.newline()

for when, sets_of_virtuals in pkg.provided_together.items():
condition_id = self.condition(
when, name=pkg.name, msg="Virtuals are provided together"
)
for set_id, virtuals_together in enumerate(sets_of_virtuals):
for name in virtuals_together:
self.gen.fact(
fn.pkg_fact(pkg.name, fn.provided_together(condition_id, set_id, name))
)
self.gen.newline()

def package_dependencies_rules(self, pkg):
"""Translate 'depends_on' directives into ASP logic."""
for _, conditions in sorted(pkg.dependencies.items()):
@@ -1619,33 +1548,22 @@ def package_dependencies_rules(self, pkg):
if not depflag:
continue

msg = f"{pkg.name} depends on {dep.spec}"
msg = "%s depends on %s" % (pkg.name, dep.spec.name)
if cond != spack.spec.Spec():
msg += f" when {cond}"
msg += " when %s" % cond
else:
pass

def track_dependencies(input_spec, requirements):
return requirements + [fn.attr("track_dependencies", input_spec.name)]

def dependency_holds(input_spec, requirements):
return remove_node(input_spec, requirements) + [
fn.attr(
"dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
)
for t in dt.ALL_FLAGS
if t & depflag
]

self.condition(
cond,
dep.spec,
name=pkg.name,
msg=msg,
transform_required=track_dependencies,
transform_imposed=dependency_holds,
condition_id = self.condition(cond, dep.spec, pkg.name, msg)
self.gen.fact(
fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name))
)

for t in dt.ALL_FLAGS:
if t & depflag:
# there is a declared dependency of type t
self.gen.fact(fn.dependency_type(condition_id, dt.flag_to_string(t)))

self.gen.newline()
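Editor's note: the two closures above are list-to-list transforms plugged into condition(). A simplified sketch with plain tuples standing in for AspFunction objects; the names mirror the real ones, but nothing here is Spack API.

def remove_node(facts):
    # drop "node"/"virtual_node" facts, as the real remove_node transform does
    return [f for f in facts if f[0] not in ("node", "virtual_node")]

def track_dependencies(facts, dep_name):
    # applied to the *required* side: add a tracking attribute
    return facts + [("attr", "track_dependencies", dep_name)]

def dependency_holds(facts, pkg, dep_name, types):
    # applied to the *imposed* side: strip nodes, add one fact per dep type
    return remove_node(facts) + [
        ("attr", "dependency_holds", pkg, dep_name, t) for t in types
    ]

facts = [("node", "zlib"), ("version", "zlib", "1.3")]
print(track_dependencies(facts, "zlib"))
print(dependency_holds(facts, "cmake", "zlib", ["build", "link"]))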
def virtual_preferences(self, pkg_name, func):
@@ -1659,7 +1577,6 @@ def virtual_preferences(self, pkg_name, func):
for i, provider in enumerate(providers):
provider_name = spack.spec.Spec(provider).name
func(vspec, provider_name, i)
self.gen.newline()

def provider_defaults(self):
self.gen.h2("Default virtual providers")
@@ -1740,17 +1657,8 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
when_spec = spack.spec.Spec(pkg_name)

try:
# With virtual we want to emit "node" and "virtual_node" in imposed specs
transform: Optional[TransformFunction] = remove_node
if virtual:
transform = None

member_id = self.condition(
required_spec=when_spec,
imposed_spec=spec,
name=pkg_name,
transform_imposed=transform,
msg=f"{spec_str} is a requirement for package {pkg_name}",
required_spec=when_spec, imposed_spec=spec, name=pkg_name, node=virtual
)
except Exception as e:
# Do not raise if the rule comes from the 'all' subsection, since usability
@@ -1813,16 +1721,8 @@ def external_packages(self):
# Declare external conditions with a local index into packages.yaml
for local_idx, spec in enumerate(external_specs):
msg = "%s available as external when satisfying %s" % (spec.name, spec)

def external_imposition(input_spec, _):
return [fn.attr("external_conditions_hold", input_spec.name, local_idx)]

self.condition(
spec,
spack.spec.Spec(spec.name),
msg=msg,
transform_imposed=external_imposition,
)
condition_id = self.condition(spec, msg=msg)
self.gen.fact(fn.pkg_fact(pkg_name, fn.possible_external(condition_id, local_idx)))
self.possible_versions[spec.name].add(spec.version)
self.gen.newline()

@@ -1852,8 +1752,8 @@ def preferred_variants(self, pkg_name):
fn.variant_default_value_from_packages_yaml(pkg_name, variant.name, value)
)

def target_preferences(self):
key_fn = spack.package_prefs.PackagePrefs("all", "target")
def target_preferences(self, pkg_name):
key_fn = spack.package_prefs.PackagePrefs(pkg_name, "target")

if not self.target_specs_cache:
self.target_specs_cache = [
@@ -1863,25 +1763,17 @@ def target_preferences(self):

package_targets = self.target_specs_cache[:]
package_targets.sort(key=key_fn)

offset = 0
best_default = self.default_targets[0][1]
for i, preferred in enumerate(package_targets):
self.gen.fact(fn.target_weight(str(preferred.architecture.target), i))

def flag_defaults(self):
self.gen.h2("Compiler flag defaults")

# types of flags that can be on specs
for flag in spack.spec.FlagMap.valid_compiler_flags():
self.gen.fact(fn.flag_type(flag))
self.gen.newline()

# flags from compilers.yaml
compilers = all_compilers_in_config()
for compiler in compilers:
for name, flags in compiler.flags.items():
for flag in flags:
self.gen.fact(
fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
)
if str(preferred.architecture.target) == best_default and i != 0:
offset = 100
self.gen.fact(
fn.pkg_fact(
pkg_name, fn.target_weight(str(preferred.architecture.target), i + offset)
)
)
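Editor's note: the mechanics of the new per-package target weighting are easier to see on toy data. Targets are enumerated in package-preference order; once the globally best default target is encountered anywhere past position 0, that weight and all subsequent ones are bumped by 100. The targets below are invented.

def target_weights(targets_by_pref, best_default):
    offset, weights = 0, {}
    for i, target in enumerate(targets_by_pref):
        if target == best_default and i != 0:
            offset = 100  # same switch as in the loop above
        weights[target] = i + offset
    return weights

print(target_weights(["zen2", "x86_64", "zen3"], best_default="x86_64"))
# {'zen2': 0, 'x86_64': 101, 'zen3': 102}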
def spec_clauses(self, *args, **kwargs):
"""Wrap a call to `_spec_clauses()` into a try/except block that
@@ -1933,8 +1825,8 @@ class Head:
node_compiler_version = fn.attr("node_compiler_version_set")
node_flag = fn.attr("node_flag_set")
node_flag_source = fn.attr("node_flag_source")
node_flag_propagation_candidate = fn.attr("node_flag_propagation_candidate")
variant_propagation_candidate = fn.attr("variant_propagation_candidate")
node_flag_propagate = fn.attr("node_flag_propagate")
variant_propagate = fn.attr("variant_propagate")

class Body:
node = fn.attr("node")
@@ -1947,8 +1839,8 @@ class Body:
node_compiler_version = fn.attr("node_compiler_version")
node_flag = fn.attr("node_flag")
node_flag_source = fn.attr("node_flag_source")
node_flag_propagation_candidate = fn.attr("node_flag_propagation_candidate")
variant_propagation_candidate = fn.attr("variant_propagation_candidate")
node_flag_propagate = fn.attr("node_flag_propagate")
variant_propagate = fn.attr("variant_propagate")

f = Body if body else Head

@@ -1997,9 +1889,7 @@ class Body:
clauses.append(f.variant_value(spec.name, vname, value))

if variant.propagate:
clauses.append(
f.variant_propagation_candidate(spec.name, vname, value, spec.name)
)
clauses.append(f.variant_propagate(spec.name, vname, value, spec.name))

# Tell the concretizer that this is a possible value for the
# variant, to account for things like int/str values where we
@@ -2032,9 +1922,7 @@ class Body:
clauses.append(f.node_flag(spec.name, flag_type, flag))
clauses.append(f.node_flag_source(spec.name, flag_type, spec.name))
if not spec.concrete and flag.propagate is True:
clauses.append(
f.node_flag_propagation_candidate(spec.name, flag_type, flag, spec.name)
)
clauses.append(f.node_flag_propagate(spec.name, flag_type))

# dependencies
if spec.concrete:
@@ -2043,16 +1931,6 @@ class Body:
clauses.append(fn.attr("package_hash", spec.name, spec._package_hash))
clauses.append(fn.attr("hash", spec.name, spec.dag_hash()))

edges = spec.edges_from_dependents()
virtuals = [x for x in itertools.chain.from_iterable([edge.virtuals for edge in edges])]
if not body:
for virtual in virtuals:
clauses.append(fn.attr("provider_set", spec.name, virtual))
clauses.append(fn.attr("virtual_node", virtual))
else:
for virtual in virtuals:
clauses.append(fn.attr("virtual_on_incoming_edges", spec.name, virtual))

# add all clauses from dependencies
if transitive:
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
@@ -2339,8 +2217,6 @@ def target_defaults(self, specs):

self.default_targets = list(sorted(set(self.default_targets)))

self.target_preferences()

def virtual_providers(self):
self.gen.h2("Virtual providers")
msg = (
@@ -2662,6 +2538,7 @@ def setup(
self.pkg_rules(pkg, tests=self.tests)
self.gen.h2("Package preferences: %s" % pkg)
self.preferred_variants(pkg)
self.target_preferences(pkg)

self.gen.h1("Develop specs")
# Inject dev_path from environment
@@ -2687,45 +2564,20 @@ def setup(
self.define_target_constraints()

def literal_specs(self, specs):
for spec in specs:
for idx, spec in enumerate(specs):
self.gen.h2("Spec: %s" % str(spec))
condition_id = next(self._condition_id_counter)
trigger_id = next(self._trigger_id_counter)
self.gen.fact(fn.literal(idx))

# Special condition triggered by "literal_solved"
self.gen.fact(fn.literal(trigger_id))
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_trigger(condition_id, trigger_id)))
self.gen.fact(fn.condition_reason(condition_id, f"{spec} requested from CLI"))

# Effect imposes the spec
imposed_spec_key = str(spec), None
cache = self._effect_cache[spec.name]
msg = (
"literal specs have different requirements. clear cache before computing literals"
)
assert imposed_spec_key not in cache, msg
effect_id = next(self._effect_id_counter)
requirements = self.spec_clauses(spec)
root_name = spec.name
for clause in requirements:
clause_name = clause.args[0]
if clause_name == "variant_set":
requirements.append(
fn.attr("variant_default_value_from_cli", *clause.args[1:])
self.gen.fact(fn.literal(idx, "virtual_root" if spec.virtual else "root", spec.name))
for clause in self.spec_clauses(spec):
self.gen.fact(fn.literal(idx, *clause.args))
if clause.args[0] == "variant_set":
self.gen.fact(
fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
)
elif clause_name in ("node", "virtual_node", "hash"):
# These facts are needed to compute the "condition_set" of the root
pkg_name = clause.args[1]
self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))

requirements.append(fn.attr("virtual_root" if spec.virtual else "root", spec.name))
cache[imposed_spec_key] = (effect_id, requirements)
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))

if self.concretize_everything:
self.gen.fact(fn.solve_literal(trigger_id))

self.effect_rules()
self.gen.fact(fn.solve_literal(idx))
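Editor's note: the net effect of the rewritten literal_specs is easier to see on a toy input. Each CLI spec at index idx now produces flat literal(...) facts (plus solve_literal(idx) when everything must be concretized), instead of a synthetic condition with separate trigger and effect IDs. A rough sketch of the fact stream, with invented spec data and string-rendered facts:

def literal_facts(idx, name, clauses, virtual=False):
    facts = [f"literal({idx})",
             f'literal({idx},"{"virtual_root" if virtual else "root"}","{name}")']
    for clause in clauses:
        args = ",".join(f'"{a}"' for a in clause)
        facts.append(f"literal({idx},{args})")
        if clause[0] == "variant_set":
            # variant_set also pins the CLI default, mirroring the loop above
            rest = ",".join(f'"{a}"' for a in clause[1:])
            facts.append(f'literal({idx},"variant_default_value_from_cli",{rest})')
    return facts

for fact in literal_facts(0, "hdf5", [("node", "hdf5"), ("variant_set", "hdf5", "mpi", "True")]):
    print(fact)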
def validate_and_define_versions_from_requirements(
self, *, allow_deprecated: bool, require_checksum: bool
@@ -3301,11 +3153,10 @@ def __init__(self, provided, conflicts):
msg = (
"Spack concretizer internal error. Please submit a bug report and include the "
"command, environment if applicable and the following error message."
f"\n {provided} is unsatisfiable"
f"\n {provided} is unsatisfiable, errors are:"
)

if conflicts:
msg += ", errors are:" + "".join([f"\n {conflict}" for conflict in conflicts])
msg += "".join([f"\n {conflict}" for conflict in conflicts])

super(spack.error.UnsatisfiableSpecError, self).__init__(msg)


@@ -10,8 +10,9 @@
% ID of the nodes in the "root" link-run sub-DAG
#const min_dupe_id = 0.

#const direct_link_run = 0.
#const direct_build = 1.
#const link_run = 0.
#const direct_link_run = 1.
#const direct_build = 2.

% Allow clingo to create nodes
{ attr("node", node(0..X-1, Package)) } :- max_dupes(Package, X), not virtual(Package).
@@ -29,21 +30,23 @@
:- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode).
:- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode).
:- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode).
:- attr("node_flag_source", PackageNode, _, _), not attr("node", PackageNode).
:- attr("no_flags", PackageNode, _), not attr("node", PackageNode).
:- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode).
:- attr("depends_on", ParentNode, _, _), not attr("node", ParentNode).
:- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode).
:- attr("node_flag_source", ParentNode, _, _), not attr("node", ParentNode).
:- attr("node_flag_source", _, _, ChildNode), not attr("node", ChildNode).
:- attr("virtual_node", VirtualNode), not provider(_, VirtualNode), internal_error("virtual node with no provider").
:- provider(_, VirtualNode), not attr("virtual_node", VirtualNode), internal_error("provider with no virtual node").
:- provider(PackageNode, _), not attr("node", PackageNode), internal_error("provider with no real node").

:- attr("root", node(ID, PackageNode)), ID > min_dupe_id, internal_error("root with a non-minimal duplicate ID").
:- attr("virtual_node", VirtualNode), not provider(_, VirtualNode).
:- provider(_, VirtualNode), not attr("virtual_node", VirtualNode).
:- provider(PackageNode, _), not attr("node", PackageNode).

:- attr("root", node(ID, PackageNode)), ID > min_dupe_id.

% Nodes in the "root" unification set cannot depend on non-root nodes if the dependency is "link" or "run"
:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "link"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("link dependency out of the root unification set").
:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "run"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("run dependency out of the root unification set").
:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "link"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)).
:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "run"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)).

% Rules on "unification sets", i.e. on sets of nodes allowing a single configuration of any given package
unify(SetID, PackageName) :- unification_set(SetID, node(_, PackageName)).
@@ -83,24 +86,22 @@ unification_set(SetID, VirtualNode)
%----

% In the "root" unification set only ID = 0 is allowed
:- unification_set("root", node(ID, _)), ID != 0, internal_error("root unification set has node with non-zero unification set ID").
:- unification_set("root", node(ID, _)), ID != 0.

% In the "root" unification set we allow only packages from the link-run possible subDAG
:- unification_set("root", node(_, Package)), not possible_in_link_run(Package), not virtual(Package), internal_error("package outside possible link/run graph in root unification set").
:- unification_set("root", node(_, Package)), not possible_in_link_run(Package), not virtual(Package).

% Each node must belong to at least one unification set
:- attr("node", PackageNode), not unification_set(_, PackageNode), internal_error("node belongs to no unification set").
:- attr("node", PackageNode), not unification_set(_, PackageNode).

% Cannot have a node with an ID if lower IDs of the same package are not used
:- attr("node", node(ID1, Package)),
not attr("node", node(ID2, Package)),
max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1,
internal_error("node skipped id number").
max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1.

:- attr("virtual_node", node(ID1, Package)),
not attr("virtual_node", node(ID2, Package)),
max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1,
internal_error("virtual node skipped id number").
max_dupes(Package, X), ID1=0..X-1, ID2=0..X-1, ID2 < ID1.
%-----------------------------------------------------------------------------
% Map literal input specs to facts that drive the solve
@@ -112,30 +113,26 @@ unification_set(SetID, VirtualNode)
multiple_nodes_attribute("node_flag_source").
multiple_nodes_attribute("depends_on").
multiple_nodes_attribute("virtual_on_edge").
multiple_nodes_attribute("provider_set").

trigger_condition_holds(TriggerID, node(min_dupe_id, Package)) :-
solve_literal(TriggerID),
pkg_fact(Package, condition_trigger(_, TriggerID)),
literal(TriggerID).
% Map constraint on the literal ID to facts on the node
attr(Name, node(min_dupe_id, A1)) :- literal(LiteralID, Name, A1), solve_literal(LiteralID).
attr(Name, node(min_dupe_id, A1), A2) :- literal(LiteralID, Name, A1, A2), solve_literal(LiteralID).
attr(Name, node(min_dupe_id, A1), A2, A3) :- literal(LiteralID, Name, A1, A2, A3), solve_literal(LiteralID), not multiple_nodes_attribute(Name).
attr(Name, node(min_dupe_id, A1), A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), solve_literal(LiteralID).

trigger_node(TriggerID, Node, Node) :-
trigger_condition_holds(TriggerID, Node),
literal(TriggerID).

% Since we trigger the existence of literal nodes from a condition, we need to construct
% the condition_set/2 manually below
mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Mentioned)) :- mentioned_in_literal(Root, Mentioned).
% Special cases where nodes occur in arguments other than A1
attr("node_flag_source", node(min_dupe_id, A1), A2, node(min_dupe_id, A3)) :- literal(LiteralID, "node_flag_source", A1, A2, A3), solve_literal(LiteralID).
attr("depends_on", node(min_dupe_id, A1), node(min_dupe_id, A2), A3) :- literal(LiteralID, "depends_on", A1, A2, A3), solve_literal(LiteralID).

% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
explicitly_requested_root(node(min_dupe_id, Package)) :-
solve_literal(TriggerID),
trigger_and_effect(Package, TriggerID, EffectID),
imposed_constraint(EffectID, "root", Package).
explicitly_requested_root(node(min_dupe_id, A1)) :- literal(LiteralID, "root", A1), solve_literal(LiteralID).

#defined concretize_everything/0.
#defined literal/1.
#defined literal/3.
#defined literal/4.
#defined literal/5.
#defined literal/6.

% Attributes for node packages which must have a single value
attr_single_value("version").
@@ -233,8 +230,7 @@ possible_version_weight(node(ID, Package), Weight)

1 { version_weight(node(ID, Package), Weight) : pkg_fact(Package, version_declared(Version, Weight)) } 1
:- attr("version", node(ID, Package), Version),
attr("node", node(ID, Package)),
internal_error("version weights must exist and be unique").
attr("node", node(ID, Package)).

% node_version_satisfies implies that exactly one of the satisfying versions
% is the package's version, and vice versa.
@@ -248,8 +244,7 @@ possible_version_weight(node(ID, Package), Weight)
% bound on the choice rule to avoid false positives with the error below
1 { attr("version", node(ID, Package), Version) : pkg_fact(Package, version_satisfies(Constraint, Version)) }
:- attr("node_version_satisfies", node(ID, Package), Constraint),
pkg_fact(Package, version_satisfies(Constraint, _)),
internal_error("must choose a single version to satisfy version constraints").
pkg_fact(Package, version_satisfies(Constraint, _)).

% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
@@ -318,11 +313,9 @@ trigger_condition_holds(ID, RequestorNode) :-
attr(Name, node(X, A1)) : condition_requirement(ID, Name, A1), condition_nodes(ID, PackageNode, node(X, A1));
attr(Name, node(X, A1), A2) : condition_requirement(ID, Name, A1, A2), condition_nodes(ID, PackageNode, node(X, A1));
attr(Name, node(X, A1), A2, A3) : condition_requirement(ID, Name, A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name);
attr(Name, node(X, A1), A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4), condition_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name);
attr(Name, node(X, A1), A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4), condition_nodes(ID, PackageNode, node(X, A1));
% Special cases
attr("node_flag_source", node(X, A1), A2, node(Y, A3)) : condition_requirement(ID, "node_flag_source", A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), condition_nodes(ID, PackageNode, node(Y, A3));
attr("node_flag_propagation_candidate", node(X, A1), A2, A3, node(Y, A4)) : condition_requirement(ID, "node_flag_propagation_candidate", A1, A2, A3, A4), condition_nodes(ID, PackageNode, node(X, A1)), condition_nodes(ID, PackageNode, node(Y, A4));
% if the structure of the program ever could change so that node_flag_source can be in a condition, we will need a case for it here.
not cannot_hold(ID, PackageNode).

condition_holds(ConditionID, node(X, Package))
@@ -364,7 +357,7 @@ imposed_nodes(ConditionID, PackageNode, node(X, A1))

% Conditions that hold may impose constraints on other specs
attr(Name, node(X, A1)) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1), imposed_nodes(ID, PackageNode, node(X, A1)).
attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2), imposed_nodes(ID, PackageNode, node(X, A1)).
attr(Name, node(X, A1), A2, A3) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
attr(Name, node(X, A1), A2, A3, A4) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3, A4), imposed_nodes(ID, PackageNode, node(X, A1)).
@@ -375,25 +368,6 @@ attr("node_flag_source", node(X, A1), A2, node(Y, A3))
imposed_constraint(ID, "node_flag_source", A1, A2, A3),
condition_set(node(Y, A3), node(X, A1)).

% For node flag propagation, we need to look at the condition_set of the source, since it is the ancestor
% of the package on which we want to impose the constraint
attr("node_flag_propagation_candidate", node(X, A1), A2, A3, node(Y, A4))
:- impose(ID, node(X, A1)),
imposed_constraint(ID, "node_flag_propagation_candidate", A1, A2, A3, A4),
condition_set(node(Y, A4), node(X, A1)).

% if the structure of the program ever could change so that node_flag_source can be in a condition, we will need a case for it here.

% Provider set is relevant only for literals, since it's the only place where `^[virtuals=foo] bar`
% might appear in the HEAD of a rule
attr("provider_set", node(min_dupe_id, Provider), node(min_dupe_id, Virtual))
:- solve_literal(TriggerID),
trigger_and_effect(_, TriggerID, EffectID),
impose(EffectID, _),
imposed_constraint(EffectID, "provider_set", Provider, Virtual).

provider(ProviderNode, VirtualNode) :- attr("provider_set", ProviderNode, VirtualNode).

% Here we can't use the condition set, because it's a recursive definition that doesn't define the
% node index and leads to unsatisfiability. Hence we say that one and only one node index must
% satisfy the dependency.
@@ -453,11 +427,24 @@ depends_on(PackageNode, DependencyNode) :- attr("depends_on", PackageNode, Depen
% concrete. We chop off dependencies for externals, and dependencies of
% concrete specs don't need to be resolved -- they arise from the concrete
% specs themselves.
attr("track_dependencies", Node) :- build(Node), not external(Node).
dependency_holds(node(NodeID, Package), Dependency, Type) :-
pkg_fact(Package, dependency_condition(ID, Dependency)),
dependency_type(ID, Type),
build(node(NodeID, Package)),
not external(node(NodeID, Package)),
condition_holds(ID, node(NodeID, Package)).

% We cut off dependencies of externals (as we don't really know them).
% Don't impose constraints on dependencies that don't exist.
do_not_impose(EffectID, node(NodeID, Package)) :-
not dependency_holds(node(NodeID, Package), Dependency, _),
attr("node", node(NodeID, Package)),
pkg_fact(Package, dependency_condition(ID, Dependency)),
pkg_fact(Package, condition_effect(ID, EffectID)).

% If a dependency holds on a package node, there must be one and only one dependency node satisfying it
1 { attr("depends_on", PackageNode, node(0..Y-1, Dependency), Type) : max_dupes(Dependency, Y) } 1
:- attr("dependency_holds", PackageNode, Dependency, Type),
:- dependency_holds(PackageNode, Dependency, Type),
not virtual(Dependency).

% all nodes in the graph must be reachable from some root
@@ -489,25 +476,10 @@ error(1, Msg)
% Virtual dependencies
%-----------------------------------------------------------------------------

% If the provider is set from the command line, its weight is 0
possible_provider_weight(ProviderNode, VirtualNode, 0, "Set on the command line")
:- attr("provider_set", ProviderNode, VirtualNode).

% Enforce that all virtuals are provided, if multiple of them are provided together
error(100, "Package '{0}' needs to provide both '{1}' and '{2}' together, but provides only '{1}'", Package, Virtual1, Virtual2)
:- condition_holds(ID, node(X, Package)),
pkg_fact(Package, provided_together(ID, SetID, Virtual1)),
pkg_fact(Package, provided_together(ID, SetID, Virtual2)),
Virtual1 != Virtual2,
attr("virtual_on_incoming_edges", node(X, Package), Virtual1),
not attr("virtual_on_incoming_edges", node(X, Package), Virtual2),
attr("virtual_node", node(_, Virtual1)),
attr("virtual_node", node(_, Virtual2)).
% if a package depends on a virtual, is not external, and we have a
% provider for that virtual, then it depends on the provider
node_depends_on_virtual(PackageNode, Virtual, Type)
:- attr("dependency_holds", PackageNode, Virtual, Type),
:- dependency_holds(PackageNode, Virtual, Type),
virtual(Virtual),
not external(PackageNode).

@@ -517,14 +489,11 @@ node_depends_on_virtual(PackageNode, Virtual) :- node_depends_on_virtual(Package
:- node_depends_on_virtual(PackageNode, Virtual, Type).

attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
:- attr("dependency_holds", PackageNode, Virtual, Type),
:- dependency_holds(PackageNode, Virtual, Type),
attr("depends_on", PackageNode, ProviderNode, Type),
provider(ProviderNode, node(_, Virtual)),
not external(PackageNode).

attr("virtual_on_incoming_edges", ProviderNode, Virtual)
:- attr("virtual_on_edge", _, ProviderNode, Virtual).

% dependencies on virtuals also imply that the virtual is a virtual node
1 { attr("virtual_node", node(0..X-1, Virtual)) : max_dupes(Virtual, X) }
:- node_depends_on_virtual(PackageNode, Virtual).
@@ -532,10 +501,6 @@ attr("virtual_on_incoming_edges", ProviderNode, Virtual)
% If there's a virtual node, we must select one and only one provider.
% The provider must be selected among the possible providers.

error(100, "'{0}' cannot be a provider for the '{1}' virtual", Package, Virtual)
:- attr("provider_set", node(min_dupe_id, Package), node(min_dupe_id, Virtual)),
not virtual_condition_holds(node(min_dupe_id, Package), Virtual).

error(100, "Cannot find valid provider for virtual {0}", Virtual)
:- attr("virtual_node", node(X, Virtual)),
not provider(_, node(X, Virtual)).
@@ -556,6 +521,20 @@ attr("root", PackageNode) :- attr("virtual_root", VirtualNode), provider(Package
attr("node", PackageNode), virtual_condition_holds(PackageNode, Virtual) } 1
:- attr("virtual_node", node(X, Virtual)).

% If a spec is selected as a provider, it is so for all the virtuals it could provide
:- provider(PackageNode, node(X, Virtual1)),
virtual_condition_holds(PackageNode, Virtual2),
Virtual2 != Virtual1,
unification_set(SetID, PackageNode),
unification_set(SetID, node(X, Virtual2)),
not provider(PackageNode, node(X, Virtual2)).

% If a spec is a dependency, and could provide a needed virtual, it must be a provider
:- node_depends_on_virtual(PackageNode, Virtual),
depends_on(PackageNode, PossibleProviderNode),
virtual_condition_holds(PossibleProviderNode, Virtual),
not attr("virtual_on_edge", PackageNode, PossibleProviderNode, Virtual).

% The provider provides the virtual if some provider condition holds.
virtual_condition_holds(node(ProviderID, Provider), Virtual) :- virtual_condition_holds(ID, node(ProviderID, Provider), Virtual).
virtual_condition_holds(ID, node(ProviderID, Provider), Virtual) :-
@@ -582,8 +561,6 @@ do_not_impose(EffectID, node(X, Package))
not virtual_condition_holds(PackageNode, Virtual),
internal_error("Virtual when provides not respected").

#defined provided_together/4.

%-----------------------------------------------------------------------------
% Virtual dependency weights
%-----------------------------------------------------------------------------
@@ -600,15 +577,21 @@ possible_provider_weight(DependencyNode, VirtualNode, 0, "external")
:- provider(DependencyNode, VirtualNode),
external(DependencyNode).

% A provider mentioned in packages.yaml can use a weight
% according to its priority in the list of providers
possible_provider_weight(node(DependencyID, Dependency), node(VirtualID, Virtual), Weight, "packages_yaml")
:- provider(node(DependencyID, Dependency), node(VirtualID, Virtual)),
depends_on(node(ID, Package), node(DependencyID, Dependency)),
pkg_fact(Package, provider_preference(Virtual, Dependency, Weight)).

% A provider mentioned in the default configuration can use a weight
% according to its priority in the list of providers
possible_provider_weight(node(ProviderID, Provider), node(VirtualID, Virtual), Weight, "default")
:- provider(node(ProviderID, Provider), node(VirtualID, Virtual)),
default_provider_preference(Virtual, Provider, Weight).
possible_provider_weight(node(DependencyID, Dependency), node(VirtualID, Virtual), Weight, "default")
:- provider(node(DependencyID, Dependency), node(VirtualID, Virtual)),
default_provider_preference(Virtual, Dependency, Weight).

% Any provider can use 100 as a weight, which is very high and discourages its use
possible_provider_weight(node(ProviderID, Provider), VirtualNode, 100, "fallback")
:- provider(node(ProviderID, Provider), VirtualNode).
possible_provider_weight(node(DependencyID, Dependency), VirtualNode, 100, "fallback") :- provider(node(DependencyID, Dependency), VirtualNode).
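Editor's note, reading the weight tiers together: every provider always qualifies for the 100 "fallback" weight, and more specific sources (external, packages.yaml order, default configuration) contribute smaller weights, so minimizing the weight prefers the most specific source. A toy restatement in Python; all provider names and weights below are invented:

def best_provider(candidates):
    # candidates: provider -> list of (weight, origin) pairs from specific
    # sources; the (100, "fallback") weight always applies on top.
    scored = {p: min(ws + [(100, "fallback")]) for p, ws in candidates.items()}
    return min(scored.items(), key=lambda kv: kv[1])

print(best_provider({
    "mpich": [(0, "external")],
    "openmpi": [(1, "packages_yaml")],
    "fallback-mpi": [],
}))
# ('mpich', (0, 'external'))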
% do not warn if generated program contains none of these.
#defined virtual/1.
@@ -626,11 +609,11 @@ possible_provider_weight(node(ProviderID, Provider), VirtualNode, 100, "fallback
pkg_fact(Package, version_declared(Version, Weight, "external")) }
:- external(node(ID, Package)).

error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec version", Package)
error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
:- external(node(ID, Package)),
not external_version(node(ID, Package), _, _).

error(100, "Attempted to use external for '{0}' which does not satisfy a unique configured external spec version", Package)
error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
:- external(node(ID, Package)),
2 { external_version(node(ID, Package), Version, Weight) }.

@@ -659,15 +642,18 @@ external(PackageNode) :- attr("external_spec_selected", PackageNode, _).

% determine if an external spec has been selected
attr("external_spec_selected", node(ID, Package), LocalIndex) :-
attr("external_conditions_hold", node(ID, Package), LocalIndex),
external_conditions_hold(node(ID, Package), LocalIndex),
attr("node", node(ID, Package)),
not attr("hash", node(ID, Package), _).

external_conditions_hold(node(PackageID, Package), LocalIndex) :-
pkg_fact(Package, possible_external(ID, LocalIndex)), condition_holds(ID, node(PackageID, Package)).

% it cannot happen that a spec is external, but none of the external specs'
% conditions hold.
error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
:- external(node(ID, Package)),
not attr("external_conditions_hold", node(ID, Package), _).
not external_conditions_hold(node(ID, Package), _).

%-----------------------------------------------------------------------------
% Config required semantics
@@ -710,18 +696,15 @@ requirement_group_satisfied(node(ID, Package), X) :-
% flags if their only source is from a requirement. This is overly specific
% and should use a more generic approach like in https://github.com/spack/spack/pull/37180

{ attr("node_flag", node(ID, Package), FlagType, FlagValue) } :-
requirement_group_member(ConditionID, Package, RequirementID),
activate_requirement(node(ID, Package), RequirementID),
pkg_fact(Package, condition_effect(ConditionID, EffectID)),
imposed_constraint(EffectID, "node_flag_set", Package, FlagType, FlagValue).
{ attr("node_flag", node(ID, A1), A2, A3) } :-
requirement_group_member(Y, Package, X),
activate_requirement(node(ID, Package), X),
imposed_constraint(Y, "node_flag_set", A1, A2, A3).

{ attr("node_flag_source", node(NodeID1, Package1), FlagType, node(NodeID2, Package2)) } :-
requirement_group_member(ConditionID, Package1, RequirementID),
activate_requirement(node(NodeID1, Package1), RequirementID),
pkg_fact(Package1, condition_effect(ConditionID, EffectID)),
imposed_constraint(EffectID, "node_flag_source", Package1, FlagType, Package2),
imposed_nodes(EffectID, node(NodeID2, Package2), node(NodeID1, Package1)).
{ attr("node_flag_source", node(ID, A1), A2, node(ID, A3)) } :-
requirement_group_member(Y, Package, X),
activate_requirement(node(ID, Package), X),
imposed_constraint(Y, "node_flag_source", A1, A2, A3).

requirement_weight(node(ID, Package), Group, W) :-
W = #min {
@@ -768,36 +751,23 @@ node_has_variant(node(ID, Package), Variant) :-
pkg_fact(Package, variant(Variant)),
attr("node", node(ID, Package)).

% Variant propagation is forwarded to dependencies
attr("variant_propagation_candidate", PackageNode, Variant, Value, Source) :-
attr("variant_propagate", PackageNode, Variant, Value, Source) :-
attr("node", PackageNode),
depends_on(ParentNode, PackageNode),
attr("variant_value", node(_, Source), Variant, Value),
attr("variant_propagation_candidate", ParentNode, Variant, _, Source).
attr("variant_propagate", ParentNode, Variant, Value, Source),
not attr("variant_set", PackageNode, Variant).

% If the node is a candidate and it has the variant and value,
% then that variant and value should be propagated
attr("variant_propagate", node(ID, Package), Variant, Value, Source) :-
attr("variant_propagation_candidate", node(ID, Package), Variant, Value, Source),
attr("variant_value", node(ID, Package), Variant, Value) :-
attr("node", node(ID, Package)),
node_has_variant(node(ID, Package), Variant),
pkg_fact(Package, variant_possible_value(Variant, Value)),
not attr("variant_set", node(ID, Package), Variant).
attr("variant_propagate", node(ID, Package), Variant, Value, _),
pkg_fact(Package, variant_possible_value(Variant, Value)).

% Propagate the value, if there is the corresponding attribute
attr("variant_value", PackageNode, Variant, Value) :- attr("variant_propagate", PackageNode, Variant, Value, _).

% If a variant is propagated, we cannot have extraneous values (this is for multi-valued variants)
variant_is_propagated(PackageNode, Variant) :- attr("variant_propagate", PackageNode, Variant, _, _).
:- variant_is_propagated(PackageNode, Variant),
attr("variant_value", PackageNode, Variant, Value),
not attr("variant_propagate", PackageNode, Variant, Value, _).

% Cannot receive different values from different sources on the same variant
error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :-
attr("variant_propagate", node(X, Package), Variant, Value1, Source1),
attr("variant_propagate", node(X, Package), Variant, Value2, Source2),
node_has_variant(node(X, Package), Variant),
Value1 < Value2, Source1 < Source2.
Value1 < Value2.
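Editor's note: a toy walk of the revised propagation semantics (the graph and names are invented). A value flows down depends_on edges until a node sets the variant itself, and the error rule above fires when two different values reach the same node; the sketch below only models the blocking behavior.

def propagate(edges, start, value, source, variant_set):
    # edges: parent -> children; variant_set: nodes that set the variant
    # explicitly, which stops propagation through them.
    result, frontier = {start: (value, source)}, [start]
    while frontier:
        node = frontier.pop()
        for child in edges.get(node, ()):
            if child in variant_set or child in result:
                continue
            result[child] = (value, source)
            frontier.append(child)
    return result

edges = {"root": ["mid"], "mid": ["leaf"]}
print(propagate(edges, "root", "+shared", "root", variant_set={"mid"}))
# Only "root" keeps the value: "mid" set the variant itself, so nothing
# reaches "leaf" through it.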
|
||||
|
||||
% a variant cannot be set if it is not a variant on the package
error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
@@ -893,15 +863,13 @@ variant_default_not_used(node(ID, Package), Variant, Value)
  :- variant_default_value(Package, Variant, Value),
  node_has_variant(node(ID, Package), Variant),
  not attr("variant_value", node(ID, Package), Variant, Value),
  not attr("variant_propagate", node(ID, Package), Variant, _, _),
  attr("node", node(ID, Package)).

% The variant is set in an external spec
external_with_variant_set(node(NodeID, Package), Variant, Value)
  :- attr("variant_value", node(NodeID, Package), Variant, Value),
  condition_requirement(TriggerID, "variant_value", Package, Variant, Value),
  trigger_and_effect(Package, TriggerID, EffectID),
  imposed_constraint(EffectID, "external_conditions_hold", Package, _),
  condition_requirement(ID, "variant_value", Package, Variant, Value),
  pkg_fact(Package, possible_external(ID, _)),
  external(node(NodeID, Package)),
  attr("node", node(NodeID, Package)).

@@ -1077,7 +1045,7 @@ attr("node_target", PackageNode, Target)
node_target_weight(node(ID, Package), Weight)
  :- attr("node", node(ID, Package)),
  attr("node_target", node(ID, Package), Target),
  target_weight(Target, Weight).
  pkg_fact(Package, target_weight(Target, Weight)).

% compatibility rules for targets among nodes
node_target_match(ParentNode, DependencyNode)
@@ -1199,17 +1167,23 @@ compiler_mismatch_required(PackageNode, DependencyNode)
#defined allow_compiler/2.

% compilers weighted by preference according to packages.yaml
node_compiler_weight(node(ID, Package), Weight)
compiler_weight(node(ID, Package), Weight)
  :- node_compiler(node(ID, Package), CompilerID),
  compiler_name(CompilerID, Compiler),
  compiler_version(CompilerID, V),
  compiler_weight(CompilerID, Weight).

node_compiler_weight(node(ID, Package), 100)
  pkg_fact(Package, node_compiler_preference(Compiler, V, Weight)).
compiler_weight(node(ID, Package), Weight)
  :- node_compiler(node(ID, Package), CompilerID),
  compiler_name(CompilerID, Compiler),
  compiler_version(CompilerID, V),
  not compiler_weight(CompilerID, _).
  not pkg_fact(Package, node_compiler_preference(Compiler, V, _)),
  default_compiler_preference(CompilerID, Weight).
compiler_weight(node(ID, Package), 100)
  :- node_compiler(node(ID, Package), CompilerID),
  compiler_name(CompilerID, Compiler),
  compiler_version(CompilerID, V),
  not pkg_fact(Package, node_compiler_preference(Compiler, V, _)),
  not default_compiler_preference(CompilerID, _).

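The three compiler_weight rules form a fallback chain: a per-package preference from packages.yaml wins, then a global default preference, then a large constant meaning "least preferred". A hedged Python sketch of the same tiering; the dicts are hypothetical stand-ins for the pkg_fact and default_compiler_preference facts:

# Illustrative sketch only, not Spack code.
def compiler_weight(package, compiler, version, package_prefs, default_prefs, fallback=100):
    # 1) per-package preference from packages.yaml
    w = package_prefs.get((package, compiler, version))
    if w is not None:
        return w
    # 2) global default compiler preference
    w = default_prefs.get((compiler, version))
    if w is not None:
        return w
    # 3) no preference recorded anywhere: a large, least-preferred weight
    return fallback
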
% For the time being, be strict and reuse only if the compiler matches one we have on the system
error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version)
@@ -1217,22 +1191,52 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss
  not node_compiler(node(ID, Package), _).

#defined node_compiler_preference/4.
#defined compiler_weight/3.
#defined default_compiler_preference/3.

%-----------------------------------------------------------------------------
% Compiler flags
%-----------------------------------------------------------------------------

% propagate flags when compilers match
can_inherit_flags(PackageNode, DependencyNode, FlagType)
  :- same_compiler(PackageNode, DependencyNode),
  not attr("node_flag_set", DependencyNode, FlagType, _),
  flag_type(FlagType).

same_compiler(PackageNode, DependencyNode)
  :- depends_on(PackageNode, DependencyNode),
  node_compiler(PackageNode, CompilerID),
  node_compiler(DependencyNode, CompilerID),
  compiler_id(CompilerID).

node_flag_inherited(DependencyNode, FlagType, Flag)
  :- attr("node_flag_set", PackageNode, FlagType, Flag),
  can_inherit_flags(PackageNode, DependencyNode, FlagType),
  attr("node_flag_propagate", PackageNode, FlagType).

% Ensure propagation
:- node_flag_inherited(PackageNode, FlagType, Flag),
  can_inherit_flags(PackageNode, DependencyNode, FlagType),
  attr("node_flag_propagate", PackageNode, FlagType).

error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
  depends_on(Source1, Package),
  depends_on(Source2, Package),
  attr("node_flag_propagate", Source1, FlagType),
  attr("node_flag_propagate", Source2, FlagType),
  can_inherit_flags(Source1, Package, FlagType),
  can_inherit_flags(Source2, Package, FlagType),
  Source1 < Source2.

% remember where flags came from
attr("node_flag_source", PackageNode, FlagType, PackageNode)
  :- attr("node_flag_set", PackageNode, FlagType, _).

attr("node_flag_source", DependencyNode, FlagType, Q)
  :- attr("node_flag_source", PackageNode, FlagType, Q),
  node_flag_inherited(DependencyNode, FlagType, _),
  attr("node_flag_propagate", PackageNode, FlagType).

% compiler flags from compilers.yaml are put on nodes if compiler matches
attr("node_flag", PackageNode, FlagType, Flag)
  :- compiler_flag(CompilerID, FlagType, Flag),
@@ -1253,30 +1257,7 @@ attr("node_flag_compiler_default", PackageNode)

% if a flag is set to something or inherited, it's included
attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_set", PackageNode, FlagType, Flag).

attr("node_flag_propagate", PackageNode, FlagType, Flag, Source) :-
  attr("node_flag_propagation_candidate", PackageNode, FlagType, Flag, Source),
  not attr("node_flag_set", PackageNode, FlagType, _).

% source is a node() attribute
attr("node_flag_propagation_candidate", PackageNode, FlagType, Flag, Source) :-
  same_compiler(ParentNode, PackageNode),
  attr("node_flag_propagation_candidate", ParentNode, FlagType, _, Source),
  attr("node_flag", Source, FlagType, Flag),
  flag_type(FlagType).

error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, PackageNode, FlagType) :-
  same_compiler(Source1, PackageNode),
  same_compiler(Source2, PackageNode),
  attr("node_flag_propagate", _, FlagType, _, Source1),
  attr("node_flag_propagate", _, FlagType, _, Source2),
  Source1 < Source2.

attr("node_flag_source", PackageNode, FlagType, Q)
  :- attr("node_flag_propagate", PackageNode, FlagType, _, Q).

attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_propagate", PackageNode, FlagType, Flag, _).

attr("node_flag", PackageNode, FlagType, Flag) :- node_flag_inherited(PackageNode, FlagType, Flag).

% if no node flags are set for a type, there are no flags.
attr("no_flags", PackageNode, FlagType)
@@ -1344,10 +1325,6 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package

#defined installed_hash/2.

% This statement, which is a hidden feature of clingo, lets us avoid cycles in the DAG
#edge (A, B) : depends_on(A, B).

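In these rules a dependency inherits a propagated flag only when it uses the same compiler as its parent and has no explicit flags of that type set itself. A hedged Python sketch of that gating, with illustrative attribute names:

# Illustrative only; attributes like .compiler and .explicit_flags are
# assumptions, not Spack's data model.
def inherited_flags(parent, dep, flag_type):
    if parent.compiler != dep.compiler:
        return []                          # same_compiler(...) fails
    if dep.explicit_flags.get(flag_type):
        return []                          # attr("node_flag_set", ...) blocks inheritance
    if not parent.propagates.get(flag_type):
        return []                          # attr("node_flag_propagate", ...) fails
    return parent.explicit_flags.get(flag_type, [])
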
%-----------------------------------------------------------------
% Optimization to avoid errors
%-----------------------------------------------------------------
@@ -1534,7 +1511,7 @@ opt_criterion(15, "non-preferred compilers").
#minimize{ 0@15: #true }.
#minimize{
  Weight@15+Priority,PackageNode
  : node_compiler_weight(PackageNode, Weight),
  : compiler_weight(PackageNode, Weight),
  build_priority(PackageNode, Priority)
}.

21
lib/spack/spack/solver/cycle_detection.lp
Normal file
@@ -0,0 +1,21 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Avoid cycles in the DAG
%
% Some combinations of conditional dependencies can result in cycles;
% this ensures that we solve around them. Note that these rules are quite
% demanding on both grounding and solving, since they need to compute and
% consider all possible paths between pairs of nodes.
%=============================================================================


#program no_cycle.
path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, A).

#defined depends_on/2.
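The path/2 rules compute the transitive closure of depends_on/2 and forbid any node from reaching itself. A minimal Python sketch of the same check, with an illustrative edge set:

# Hedged sketch of the no_cycle program: fixpoint closure, then test
# for path(A, A).
def has_cycle(depends_on):
    """depends_on: set of (parent, child) edges."""
    path = set(depends_on)
    changed = True
    while changed:                              # fixpoint, like ASP grounding
        changed = False
        for parent, a in list(path):
            for a2, child in depends_on:
                if a == a2 and (parent, child) not in path:
                    path.add((parent, child))
                    changed = True
    return any(a == b for a, b in path)         # :- path(A, A).

assert has_cycle({("a", "b"), ("b", "a")})
assert not has_cycle({("a", "b"), ("b", "c")})
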
@@ -24,29 +24,4 @@
#show error/5.
#show error/6.

% for error causation
#show condition_reason/2.

% For error messages to use later
#show pkg_fact/2.
#show condition_holds/2.
#show imposed_constraint/3.
#show imposed_constraint/4.
#show imposed_constraint/5.
#show imposed_constraint/6.
#show condition_requirement/3.
#show condition_requirement/4.
#show condition_requirement/5.
#show condition_requirement/6.
#show node_has_variant/2.
#show build/1.
#show external/1.
#show external_version/3.
#show trigger_and_effect/3.
#show unification_set/2.
#show provider/2.
#show condition_nodes/3.
#show trigger_node/3.
#show imposed_nodes/3.

% debug

@@ -1,239 +0,0 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% This logic program adds detailed error messages to Spack's concretizer
%=============================================================================

#program error_messages.

% Create a causal tree between trigger conditions by locating the effect conditions
% that are triggers for another condition. Condition2 is caused by Condition1
condition_cause(Condition2, ID2, Condition1, ID1) :-
  condition_holds(Condition2, node(ID2, Package2)),
  pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
  condition_requirement(Trigger, Name, Package),
  condition_nodes(Trigger, TriggerNode, node(ID, Package)),
  trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
  attr(Name, node(ID, Package)),
  condition_holds(Condition1, node(ID1, Package1)),
  pkg_fact(Package1, condition_effect(Condition1, Effect)),
  imposed_constraint(Effect, Name, Package),
  imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)).

condition_cause(Condition2, ID2, Condition1, ID1) :-
  condition_holds(Condition2, node(ID2, Package2)),
  pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
  condition_requirement(Trigger, Name, Package, A1),
  condition_nodes(Trigger, TriggerNode, node(ID, Package)),
  trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
  attr(Name, node(ID, Package), A1),
  condition_holds(Condition1, node(ID1, Package1)),
  pkg_fact(Package1, condition_effect(Condition1, Effect)),
  imposed_constraint(Effect, Name, Package, A1),
  imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)).

condition_cause(Condition2, ID2, Condition1, ID1) :-
  condition_holds(Condition2, node(ID2, Package2)),
  pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
  condition_requirement(Trigger, Name, Package, A1, A2),
  condition_nodes(Trigger, TriggerNode, node(ID, Package)),
  trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
  attr(Name, node(ID, Package), A1, A2),
  condition_holds(Condition1, node(ID1, Package1)),
  pkg_fact(Package1, condition_effect(Condition1, Effect)),
  imposed_constraint(Effect, Name, Package, A1, A2),
  imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)).

condition_cause(Condition2, ID2, Condition1, ID1) :-
  condition_holds(Condition2, node(ID2, Package2)),
  pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
  condition_requirement(Trigger, Name, Package, A1, A2, A3),
  condition_nodes(Trigger, TriggerNode, node(ID, Package)),
  trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
  attr(Name, node(ID, Package), A1, A2, A3),
  condition_holds(Condition1, node(ID1, Package1)),
  pkg_fact(Package1, condition_effect(Condition1, Effect)),
  imposed_constraint(Effect, Name, Package, A1, A2, A3),
  imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)).

% special condition cause for dependency conditions
% we can't simply impose the existence of the node for dependency conditions
% because we need to allow for the choice of which dupe ID the node gets
condition_cause(Condition2, ID2, Condition1, ID1) :-
  condition_holds(Condition2, node(ID2, Package2)),
  pkg_fact(Package2, condition_trigger(Condition2, Trigger)),
  condition_requirement(Trigger, "node", Package),
  condition_nodes(Trigger, TriggerNode, node(ID, Package)),
  trigger_node(Trigger, TriggerNode, node(ID2, Package2)),
  attr("node", node(ID, Package)),
  condition_holds(Condition1, node(ID1, Package1)),
  pkg_fact(Package1, condition_effect(Condition1, Effect)),
  imposed_constraint(Effect, "dependency_holds", Parent, Package, Type),
  imposed_nodes(Effect, node(ID1, Package1), node(ID, Package)),
  attr("depends_on", node(X, Parent), node(ID, Package), Type).

% The literal startcauses is used to separate the variables that are part of the error from the
% ones describing the causal tree of the error. After startcauses, each successive pair must be
% a condition and a condition_set id for which it holds.

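A hedged Python sketch of that convention, splitting an error atom's arguments at the startcauses marker into message arguments and (condition, condition_set id) pairs; the function and values are illustrative, not Spack's implementation:

def split_error_args(args):
    # Everything before "startcauses" belongs to the message; everything
    # after is a flat list of (condition, condition_set_id) pairs.
    if "startcauses" in args:
        idx = args.index("startcauses")
        msg_args, cause_flat = args[:idx], args[idx + 1:]
    else:
        msg_args, cause_flat = args, []
    causes = list(zip(cause_flat[::2], cause_flat[1::2]))
    return msg_args, causes

msg_args, causes = split_error_args(["hdf5", "1.12:", "startcauses", 3, 0, 7, 1])
assert causes == [(3, 0), (7, 1)]
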
% More specific error message if the version cannot satisfy some constraint
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
error(1, "Cannot satisfy '{0}@{1}'", Package, Constraint, startcauses, ConstraintCause, CauseID)
  :- attr("node_version_satisfies", node(ID, Package), Constraint),
  pkg_fact(TriggerPkg, condition_effect(ConstraintCause, EffectID)),
  imposed_constraint(EffectID, "node_version_satisfies", Package, Constraint),
  condition_holds(ConstraintCause, node(CauseID, TriggerPkg)),
  attr("version", node(ID, Package), Version),
  not pkg_fact(Package, version_satisfies(Constraint, Version)).

error(0, "Cannot satisfy '{0}@{1}' and '{0}@{2}'", Package, Constraint1, Constraint2, startcauses, Cause1, C1ID, Cause2, C2ID)
  :- attr("node_version_satisfies", node(ID, Package), Constraint1),
  pkg_fact(TriggerPkg1, condition_effect(Cause1, EffectID1)),
  imposed_constraint(EffectID1, "node_version_satisfies", Package, Constraint1),
  condition_holds(Cause1, node(C1ID, TriggerPkg1)),
  % two constraints
  attr("node_version_satisfies", node(ID, Package), Constraint2),
  pkg_fact(TriggerPkg2, condition_effect(Cause2, EffectID2)),
  imposed_constraint(EffectID2, "node_version_satisfies", Package, Constraint2),
  condition_holds(Cause2, node(C2ID, TriggerPkg2)),
  % version chosen
  attr("version", node(ID, Package), Version),
  % version satisfies one but not the other
  pkg_fact(Package, version_satisfies(Constraint1, Version)),
  not pkg_fact(Package, version_satisfies(Constraint2, Version)).

% causation tracking error for no or multiple virtual providers
error(0, "Cannot find a valid provider for virtual {0}", Virtual, startcauses, Cause, CID)
  :- attr("virtual_node", node(X, Virtual)),
  not provider(_, node(X, Virtual)),
  imposed_constraint(EID, "dependency_holds", Parent, Virtual, Type),
  pkg_fact(TriggerPkg, condition_effect(Cause, EID)),
  condition_holds(Cause, node(CID, TriggerPkg)).


% At most one variant value for single-valued variants
error(0, "'{0}' required multiple values for single-valued variant '{1}'\n    Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X)
  :- attr("node", node(X, Package)),
  node_has_variant(node(X, Package), Variant),
  pkg_fact(Package, variant_single_value(Variant)),
  build(node(X, Package)),
  attr("variant_value", node(X, Package), Variant, Value1),
  imposed_constraint(EID1, "variant_set", Package, Variant, Value1),
  pkg_fact(TriggerPkg1, condition_effect(Cause1, EID1)),
  condition_holds(Cause1, node(X, TriggerPkg1)),
  attr("variant_value", node(X, Package), Variant, Value2),
  imposed_constraint(EID2, "variant_set", Package, Variant, Value2),
  pkg_fact(TriggerPkg2, condition_effect(Cause2, EID2)),
  condition_holds(Cause2, node(X, TriggerPkg2)),
  Value1 < Value2. % see[1] in concretize.lp

% Externals have to specify external conditions
error(0, "Attempted to use external for {0} which does not satisfy any configured external spec version", Package, startcauses, ExternalCause, CID)
  :- external(node(ID, Package)),
  attr("external_spec_selected", node(ID, Package), Index),
  imposed_constraint(EID, "external_conditions_hold", Package, Index),
  pkg_fact(TriggerPkg, condition_effect(ExternalCause, EID)),
  condition_holds(ExternalCause, node(CID, TriggerPkg)),
  not external_version(node(ID, Package), _, _).

error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n    attr('{1}', '{2}') is an external constraint for {0} which was not satisfied", Package, Name, A1)
  :- external(node(ID, Package)),
  not attr("external_conditions_hold", node(ID, Package), _),
  imposed_constraint(EID, "external_conditions_hold", Package, _),
  trigger_and_effect(Package, TID, EID),
  condition_requirement(TID, Name, A1),
  not attr(Name, node(_, A1)).

error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n    attr('{1}', '{2}', '{3}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2)
  :- external(node(ID, Package)),
  not attr("external_conditions_hold", node(ID, Package), _),
  imposed_constraint(EID, "external_conditions_hold", Package, _),
  trigger_and_effect(Package, TID, EID),
  condition_requirement(TID, Name, A1, A2),
  not attr(Name, node(_, A1), A2).

error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n    attr('{1}', '{2}', '{3}', '{4}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2, A3)
  :- external(node(ID, Package)),
  not attr("external_conditions_hold", node(ID, Package), _),
  imposed_constraint(EID, "external_conditions_hold", Package, _),
  trigger_and_effect(Package, TID, EID),
  condition_requirement(TID, Name, A1, A2, A3),
  not attr(Name, node(_, A1), A2, A3).

error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n    'Spec({0} {1}={2})' is an external constraint for {0} which was not satisfied\n    'Spec({0} {1}={3})' required", Package, Variant, Value, OtherValue, startcauses, OtherValueCause, CID)
  :- external(node(ID, Package)),
  not attr("external_conditions_hold", node(ID, Package), _),
  imposed_constraint(EID, "external_conditions_hold", Package, _),
  trigger_and_effect(Package, TID, EID),
  condition_requirement(TID, "variant_value", Package, Variant, Value),
  not attr("variant_value", node(ID, Package), Variant, Value),
  attr("variant_value", node(ID, Package), Variant, OtherValue),
  imposed_constraint(EID2, "variant_set", Package, Variant, OtherValue),
  pkg_fact(TriggerPkg, condition_effect(OtherValueCause, EID2)),
  condition_holds(OtherValueCause, node(CID, TriggerPkg)).

error(0, "Attempted to build package {0} which is not buildable and does not have a satisfying external\n    attr('{1}', '{2}', '{3}', '{4}', '{5}') is an external constraint for {0} which was not satisfied", Package, Name, A1, A2, A3, A4)
  :- external(node(ID, Package)),
  not attr("external_conditions_hold", node(ID, Package), _),
  imposed_constraint(EID, "external_conditions_hold", Package, _),
  trigger_and_effect(Package, TID, EID),
  condition_requirement(TID, Name, A1, A2, A3, A4),
  not attr(Name, node(_, A1), A2, A3, A4).

% error message with causes for conflicts
error(0, Msg, startcauses, TriggerID, ID1, ConstraintID, ID2)
  :- attr("node", node(ID, Package)),
  pkg_fact(Package, conflict(TriggerID, ConstraintID, Msg)),
  % node(ID1, TriggerPackage) is node(ID2, Package) in most, but not all, cases
  condition_holds(TriggerID, node(ID1, TriggerPackage)),
  condition_holds(ConstraintID, node(ID2, Package)),
  unification_set(X, node(ID2, Package)),
  unification_set(X, node(ID1, TriggerPackage)),
  not external(node(ID, Package)), % ignore conflicts for externals
  not attr("hash", node(ID, Package), _). % ignore conflicts for installed packages

% variables to show
#show error/2.
#show error/3.
#show error/4.
#show error/5.
#show error/6.
#show error/7.
#show error/8.
#show error/9.
#show error/10.
#show error/11.

#show condition_cause/4.
#show condition_reason/2.

% Define all variables used to avoid warnings at runtime when the model doesn't happen to have one
#defined error/2.
#defined error/3.
#defined error/4.
#defined error/5.
#defined error/6.
#defined attr/2.
#defined attr/3.
#defined attr/4.
#defined attr/5.
#defined pkg_fact/2.
#defined imposed_constraint/3.
#defined imposed_constraint/4.
#defined imposed_constraint/5.
#defined imposed_constraint/6.
#defined condition_requirement/3.
#defined condition_requirement/4.
#defined condition_requirement/5.
#defined condition_requirement/6.
#defined condition_holds/2.
#defined unification_set/2.
#defined external/1.
#defined trigger_and_effect/3.
#defined build/1.
#defined node_has_variant/2.
#defined provider/2.
#defined external_version/3.

@@ -11,14 +11,19 @@
%-----------------
% Domain heuristic
%-----------------
#heuristic attr("hash", node(0, Package), Hash) : literal(_, "root", Package). [45, init]
#heuristic attr("root", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "node", Package). [45, true]

% Root node
#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
#heuristic attr("node_target", node(0, Package), Target) : target_weight(Target, 0), attr("root", node(0, Package)). [35, true]
#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true]
#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
#heuristic node_compiler(node(0, Package), CompilerID) : compiler_weight(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]
#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]

% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]

@@ -13,7 +13,7 @@
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]

% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
@@ -21,4 +21,4 @@
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]

@@ -1,31 +0,0 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% TODO: Later
%=============================================================================

% propagate flags when compilers match
attr("node_flag_propagate", PackageNode, FlagType, Flag, Source) :- % source is a node() attribute
  attr("node_flag_propagation_candidate", PackageNode, FlagType, Flag, Source),
  not attr("node_flag_set", PackageNode, FlagType, _).

attr("node_flag_propagation_candidate", PackageNode, FlagType, Flag, Source) :- % source is a node() attribute
  same_compiler(ParentNode, PackageNode),
  attr("node_flag_propagation_candidate", ParentNode, FlagType, _, Source),
  attr("node_flag", Source, FlagType, Flag),
  flag_type(FlagType).

error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, PackageNode, FlagType) :-
  same_compiler(Source1, PackageNode),
  same_compiler(Source2, PackageNode),
  attr("node_flag_propagate", _, FlagType, _, Source1),
  attr("node_flag_propagate", _, FlagType, _, Source2),
  Source1 < Source2.

attr("node_flag_source", PackageNode, FlagType, Q)
  :- attr("node_flag_propagate", PackageNode, FlagType, _, Q).

attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_propagate", PackageNode, FlagType, Flag, _).

@@ -59,7 +59,7 @@
import re
import socket
import warnings
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

import llnl.path
import llnl.string
@@ -75,7 +75,6 @@
import spack.deptypes as dt
import spack.error
import spack.hash_types as ht
import spack.parser
import spack.patch
import spack.paths
import spack.platforms
@@ -1319,6 +1318,8 @@ def __init__(
        self.external_path = external_path
        self.external_module = external_module
        """
        import spack.parser

        # Copy if spec_like is a Spec.
        if isinstance(spec_like, Spec):
            self._dup(spec_like)
@@ -1464,26 +1465,6 @@ def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
        """
        return [d for d in self._dependencies.select(child=name, depflag=depflag)]

    @property
    def edge_attributes(self) -> str:
        """Helper method to print edge attributes in spec literals"""
        edges = self.edges_from_dependents()
        if not edges:
            return ""

        union = DependencySpec(parent=Spec(), spec=self, depflag=0, virtuals=())
        for edge in edges:
            union.update_deptypes(edge.depflag)
            union.update_virtuals(edge.virtuals)
        deptypes_str = (
            f"deptypes={','.join(dt.flag_to_tuple(union.depflag))}" if union.depflag else ""
        )
        virtuals_str = f"virtuals={','.join(union.virtuals)}" if union.virtuals else ""
        if not deptypes_str and not virtuals_str:
            return ""
        result = f"{deptypes_str} {virtuals_str}".strip()
        return f"[{result}]"

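The removed edge_attributes helper rendered a bracketed summary of the union of deptypes and virtuals on a node's incoming edges. A hedged illustration of the string it produced, with made-up values:

# Illustrative only; the deptypes/virtuals values are invented.
deptypes = ("build", "link")
virtuals = ("mpi",)
deptypes_str = f"deptypes={','.join(deptypes)}" if deptypes else ""
virtuals_str = f"virtuals={','.join(virtuals)}" if virtuals else ""
result = f"{deptypes_str} {virtuals_str}".strip()
print(f"[{result}]")  # -> [deptypes=build,link virtuals=mpi]
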
    def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
        """Return a list of direct dependencies (nodes in the DAG).

@@ -3708,15 +3689,8 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
        if other.concrete and self.concrete:
            return self.dag_hash() == other.dag_hash()

        elif self.concrete:
            return self.satisfies(other)

        elif other.concrete:
            return other.satisfies(self)

        # From here we know both self and other are not concrete
        self_hash = self.abstract_hash
        other_hash = other.abstract_hash
        self_hash = self.dag_hash() if self.concrete else self.abstract_hash
        other_hash = other.dag_hash() if other.concrete else other.abstract_hash

        if (
            self_hash
@@ -3805,6 +3779,10 @@ def _intersects_dependencies(self, other):
            repository=spack.repo.PATH, specs=other.traverse(), restrict=True
        )

        # This handles cases where there are already providers for both vpkgs
        if not self_index.satisfies(other_index):
            return False

        # These two loops handle cases where there is an overly restrictive
        # vpkg in one spec for a provider in the other (e.g., mpi@3: is not
        # compatible with mpich2)
@@ -3902,46 +3880,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
                return False

        # If we arrived here, then rhs is abstract. At the moment we don't care about the edge
        # structure of an abstract DAG, so we check if any edge could satisfy the properties
        # we ask for.
        lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
        for rhs_edge in other.traverse_edges(root=False, cover="edges"):
            # If we are checking for ^mpi we need to verify if there is any edge
            if rhs_edge.spec.virtual:
                rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))

            if not rhs_edge.virtuals:
                continue

            if not lhs_edges:
                # Construct a map of the link/run subDAG + direct "build" edges,
                # keyed by dependency name
                for lhs_edge in self.traverse_edges(
                    root=False, cover="edges", deptype=("link", "run")
                ):
                    lhs_edges[lhs_edge.spec.name].add(lhs_edge)
                    for virtual_name in lhs_edge.virtuals:
                        lhs_edges[virtual_name].add(lhs_edge)

                build_edges = self.edges_to_dependencies(depflag=dt.BUILD)
                for lhs_edge in build_edges:
                    lhs_edges[lhs_edge.spec.name].add(lhs_edge)
                    for virtual_name in lhs_edge.virtuals:
                        lhs_edges[virtual_name].add(lhs_edge)

            # We don't have edges to this dependency
            current_dependency_name = rhs_edge.spec.name
            if current_dependency_name not in lhs_edges:
                return False

            for virtual in rhs_edge.virtuals:
                has_virtual = any(
                    virtual in edge.virtuals for edge in lhs_edges[current_dependency_name]
                )
                if not has_virtual:
                    return False

        # Edges have been checked above already, hence deps=False
        # structure of an abstract DAG - hence the deps=False parameter.
        return all(
            any(lhs.satisfies(rhs, deps=False) for lhs in self.traverse(root=False))
            for rhs in other.traverse(root=False)
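The removed block matched abstract ^virtual requirements against the concrete edges of the left-hand spec. A hedged example of the behavior it implemented, reusing names from this PR's own tests and assuming the usual mock-package fixtures:

# Sketch in a test context; default_mock_concretization is a pytest fixture.
# A concrete mpileaks ^mpich DAG carries the virtual "mpi" on its edge to
# mpich, so an abstract ^mpi requirement is satisfied by that edge.
s = default_mock_concretization("mpileaks ^mpich")
assert s.satisfies("mpileaks ^mpi")
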
@@ -4143,7 +4082,9 @@ def __getitem__(self, name):
        """
        query_parameters = name.split(":")
        if len(query_parameters) > 2:
            raise KeyError("key has more than one ':' symbol. At most one is admitted.")
            msg = "key has more than one ':' symbol."
            msg += " At most one is admitted."
            raise KeyError(msg)

        name, query_parameters = query_parameters[0], query_parameters[1:]
        if query_parameters:
@@ -4168,17 +4109,11 @@ def __getitem__(self, name):
                itertools.chain(
                    # Regular specs
                    (x for x in order() if x.name == name),
                    (
                        x
                        for x in order()
                        if (not x.virtual)
                        and any(name in edge.virtuals for edge in x.edges_from_dependents())
                    ),
                    (x for x in order() if (not x.virtual) and x.package.provides(name)),
                )
            )
        except StopIteration:
            raise KeyError(f"No spec with name {name} in {self}")
            raise KeyError("No spec with name %s in %s" % (name, self))

        if self._concrete:
            return SpecBuildInterface(value, name, query_parameters)
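__getitem__ resolves a dependency name in three passes: exact node names first, then edges carrying the name as a virtual, then packages that provide it. A hedged usage sketch in a test context, using the same names as test_monkey_patching_works_across_virtual in this PR:

# default_mock_concretization is a pytest fixture; names are from this PR.
s = default_mock_concretization("mpileaks ^mpich")
assert s["mpich"].name == "mpich"   # pass 1: match by node name
assert s["mpi"].name == "mpich"     # later passes: virtual resolves to its provider
# s["nonexistent"] would raise KeyError("No spec with name ...")
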
@@ -4556,26 +4491,10 @@ def format_path(
        return str(path_ctor(*output_path_components))

    def __str__(self):
        root_str = [self.format()]
        sorted_dependencies = sorted(
            self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
        sorted_nodes = [self] + sorted(
            self.traverse(root=False), key=lambda x: x.name or x.abstract_hash
        )
        sorted_dependencies = [
            d.format("{edge_attributes} " + DEFAULT_FORMAT) for d in sorted_dependencies
        ]
        spec_str = " ^".join(root_str + sorted_dependencies)
        return spec_str.strip()

    @property
    def colored_str(self):
        root_str = [self.cformat()]
        sorted_dependencies = sorted(
            self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
        )
        sorted_dependencies = [
            d.cformat("{edge_attributes} " + DISPLAY_FORMAT) for d in sorted_dependencies
        ]
        spec_str = " ^".join(root_str + sorted_dependencies)
        spec_str = " ^".join(d.format() for d in sorted_nodes)
        return spec_str.strip()

    def install_status(self):

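Both versions of __str__ join the root and its sorted dependencies with " ^" separators; the new one drops the edge-attribute prefix on each dependency. A hedged illustration with invented package names:

# old: "mpileaks ^[virtuals=mpi] mpich" (edge attributes prefixed)
# new: "mpileaks ^callpath ^mpich"      (plain node formats)
nodes = ["mpileaks", "callpath", "mpich"]
print(" ^".join(nodes))  # -> mpileaks ^callpath ^mpich
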
@@ -93,8 +93,8 @@ def remove(self, spec):
            if (isinstance(s, str) and not s.startswith("$")) and Spec(s) == Spec(spec)
        ]
        if not remove:
            msg = f"Cannot remove {spec} from SpecList {self.name}.\n"
            msg += f"Either {spec} is not in {self.name} or {spec} is "
            msg = "Cannot remove %s from SpecList %s\n" % (spec, self.name)
            msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
            msg += "expanded from a matrix and cannot be removed directly."
            raise SpecListError(msg)

@@ -133,8 +133,9 @@ def _parse_reference(self, name):

        # Make sure the reference is valid
        if name not in self._reference:
            msg = f"SpecList '{self.name}' refers to named list '{name}'"
            msg += " which does not appear in its reference dict."
            msg = "SpecList %s refers to " % self.name
            msg += "named list %s " % name
            msg += "which does not appear in its reference dict"
            raise UndefinedReferenceError(msg)

        return (name, sigil)

@@ -37,7 +37,6 @@
import spack.fetch_strategy as fs
import spack.mirror
import spack.paths
import spack.resource
import spack.spec
import spack.stage
import spack.util.lock
@@ -456,7 +455,6 @@ def fetch(self, mirror_only=False, err_msg=None):
        mirror_urls = [
            url_util.join(mirror.fetch_url, rel_path)
            for mirror in spack.mirror.MirrorCollection(source=True).values()
            if not mirror.fetch_url.startswith("oci://")
            for rel_path in self.mirror_paths
        ]

@@ -660,14 +658,8 @@ def destroy(self):


class ResourceStage(Stage):
    def __init__(
        self,
        fetch_strategy: fs.FetchStrategy,
        root: Stage,
        resource: spack.resource.Resource,
        **kwargs,
    ):
        super().__init__(fetch_strategy, **kwargs)
    def __init__(self, url_or_fetch_strategy, root, resource, **kwargs):
        super().__init__(url_or_fetch_strategy, **kwargs)
        self.root_stage = root
        self.resource = resource

@@ -878,7 +870,6 @@ def interactive_version_filter(
    url_dict: Dict[StandardVersion, str],
    known_versions: Iterable[StandardVersion] = (),
    *,
    initial_verion_filter: Optional[VersionList] = None,
    url_changes: Set[StandardVersion] = set(),
    input: Callable[..., str] = input,
) -> Optional[Dict[StandardVersion, str]]:
@@ -892,10 +883,9 @@ def interactive_version_filter(
        Filtered dictionary of versions to URLs or None if the user wants to quit
    """
    # Find length of longest string in the list for padding
    version_filter = initial_verion_filter or VersionList([":"])
    max_len = max(len(str(v)) for v in url_dict) if url_dict else 0
    sorted_and_filtered = [v for v in url_dict if v.satisfies(version_filter)]
    sorted_and_filtered.sort(reverse=True)
    sorted_and_filtered = sorted(url_dict.keys(), reverse=True)
    version_filter = VersionList([":"])
    max_len = max(len(str(v)) for v in sorted_and_filtered)
    orig_url_dict = url_dict  # only copy when using editor to modify
    print_header = True
    VERSION_COLOR = spack.spec.VERSION_COLOR
@@ -903,20 +893,21 @@
        if print_header:
            has_filter = version_filter != VersionList([":"])
            header = []
            if len(orig_url_dict) > 0 and len(sorted_and_filtered) == len(orig_url_dict):
            if not sorted_and_filtered:
                header.append("No versions selected")
            elif len(sorted_and_filtered) == len(orig_url_dict):
                header.append(
                    f"Selected {llnl.string.plural(len(sorted_and_filtered), 'version')}"
                )
            else:
                header.append(
                    f"Selected {len(sorted_and_filtered)} of "
                    f"{llnl.string.plural(len(orig_url_dict), 'version')}"
                    f"Selected {len(sorted_and_filtered)} of {len(orig_url_dict)} versions"
                )
            if sorted_and_filtered and known_versions:
                num_new = sum(1 for v in sorted_and_filtered if v not in known_versions)
                header.append(f"{llnl.string.plural(num_new, 'new version')}")
            if has_filter:
                header.append(colorize(f"Filtered by {VERSION_COLOR}@@{version_filter}@."))
                header.append(colorize(f"Filtered by {VERSION_COLOR}{version_filter}@."))

        version_with_url = [
            colorize(

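The removed branch filtered the url_dict keys through a VersionList before sorting. A hedged sketch of that filtering step, assuming spack.version's Version/VersionList semantics as used in the diff above; the version numbers are illustrative:

from spack.version import Version, VersionList

version_filter = VersionList(["1.2:"])  # keep 1.2 and newer
url_dict = {Version("1.1"): "u1", Version("1.2"): "u2", Version("2.0"): "u3"}
sorted_and_filtered = [v for v in url_dict if v.satisfies(version_filter)]
sorted_and_filtered.sort(reverse=True)  # keeps 2.0 and 1.2, newest first
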
@@ -642,28 +642,3 @@ def test_effective_deptype_run_environment(default_mock_concretization):
    for spec, effective_type in spack.build_environment.effective_deptypes(s, context=Context.RUN):
        assert effective_type & expected_flags.pop(spec.name) == effective_type
    assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes"


def test_monkey_patching_works_across_virtual(default_mock_concretization):
    """Assert that a monkeypatched attribute is found regardless of whether we access it
    through the real name or the virtual name.
    """
    s = default_mock_concretization("mpileaks ^mpich")
    s["mpich"].foo = "foo"
    assert s["mpich"].foo == "foo"
    assert s["mpi"].foo == "foo"


def test_clear_compiler_related_runtime_variables_of_build_deps(default_mock_concretization):
    """Verify that Spack drops CC, CXX, FC and F77 from the dependencies' build-environment
    variable changes if they are set in setup_run_environment. Spack manages those variables
    elsewhere."""
    s = default_mock_concretization("build-env-compiler-var-a")
    ctx = spack.build_environment.SetupContext(s, context=Context.BUILD)
    result = {}
    ctx.get_env_modifications().apply_modifications(result)
    assert "CC" not in result
    assert "CXX" not in result
    assert "FC" not in result
    assert "F77" not in result
    assert result["ANOTHER_VAR"] == "this-should-be-present"

@@ -326,8 +326,4 @@ def fake_push(node, push_url, options):

    buildcache(*buildcache_create_args)

    # Order is not guaranteed, so we can't just compare lists
    assert set(packages_to_push) == set(expected)

    # Ensure no duplicates
    assert len(set(packages_to_push)) == len(packages_to_push)
    assert packages_to_push == expected

@@ -8,7 +8,6 @@
import pytest

import spack.cmd.checksum
import spack.parser
import spack.repo
import spack.spec
from spack.main import SpackCommand
@@ -255,10 +254,17 @@ def test_checksum_deprecated_version(mock_packages, mock_clone_repo, mock_fetch,
    assert "Added 0 new versions to" not in output


def test_checksum_at(mock_packages):
    pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
    versions = [str(v) for v in pkg_cls.versions]
    output = spack_checksum(f"zlib@{versions[0]}")
    assert "Found 1 version" in output


def test_checksum_url(mock_packages):
    pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
    with pytest.raises(spack.parser.SpecSyntaxError):
        spack_checksum(f"{pkg_cls.url}")
    output = spack_checksum(f"{pkg_cls.url}", fail_on_error=False)
    assert "accepts package names" in output


def test_checksum_verification_fails(install_mockery, capsys):

@@ -58,24 +58,6 @@ def test_subcommands():
    assert "spack compiler add" in out2


@pytest.mark.not_on_windows("subprocess not supported on Windows")
def test_override_alias():
    """Test that spack commands cannot be overridden by aliases."""

    install = spack.main.SpackCommand("install", subprocess=True)
    instal = spack.main.SpackCommand("instal", subprocess=True)

    out = install(fail_on_error=False, global_args=["-c", "config:aliases:install:find"])
    assert "install requires a package argument or active environment" in out
    assert "Alias 'install' (mapping to 'find') attempts to override built-in command" in out

    out = install(fail_on_error=False, global_args=["-c", "config:aliases:foo bar:find"])
    assert "Alias 'foo bar' (mapping to 'find') contains a space, which is not supported" in out

    out = instal(fail_on_error=False, global_args=["-c", "config:aliases:instal:find"])
    assert "install requires a package argument or active environment" not in out


def test_rst():
    """Do some simple sanity checks of the rst writer."""
    out1 = commands("--format=rst")

@@ -4,14 +4,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
import sys

import pytest

import spack.cmd.compiler
import spack.compilers
import spack.main
import spack.spec
import spack.util.pattern
import spack.version

compiler = spack.main.SpackCommand("compiler")
@@ -148,7 +146,7 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable):

    compilers_before_find = set(spack.compilers.all_compiler_specs())
    args = spack.util.pattern.Bunch(
        all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None, mixed_toolchain=False
        all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None
    )
    spack.cmd.compiler.compiler_find(args)
    compilers_after_find = set(spack.compilers.all_compiler_specs())
@@ -161,15 +159,10 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable):

@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
@pytest.mark.parametrize("mixed_toolchain", [True, False])
def test_compiler_find_mixed_suffixes(
    mixed_toolchain, no_compilers_yaml, working_env, compilers_dir
):
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
    """Ensure that we'll mix compilers with different suffixes when necessary."""
    os.environ["PATH"] = str(compilers_dir)
    output = compiler(
        "find", "--scope=site", "--mixed-toolchain" if mixed_toolchain else "--no-mixed-toolchain"
    )
    output = compiler("find", "--scope=site")

    assert "clang@11.0.0" in output
    assert "gcc@8.4.0" in output
@@ -183,8 +176,9 @@ def test_compiler_find_mixed_suffixes(
    assert clang["paths"] == {
        "cc": str(compilers_dir / "clang"),
        "cxx": str(compilers_dir / "clang++"),
        "f77": gfortran_path if mixed_toolchain else None,
        "fc": gfortran_path if mixed_toolchain else None,
        # we only auto-detect mixed clang on macos
        "f77": gfortran_path if sys.platform == "darwin" else None,
        "fc": gfortran_path if sys.platform == "darwin" else None,
    }

    assert gcc["paths"] == {

@@ -215,10 +215,10 @@ def test_config_add_override_leaf(mutable_empty_config):


def test_config_add_update_dict(mutable_empty_config):
    config("add", "packages:hdf5:version:[1.0.0]")
    config("add", "packages:all:version:[1.0.0]")
    output = config("get", "packages")

    expected = "packages:\n  hdf5:\n    version: [1.0.0]\n"
    expected = "packages:\n  all:\n    version: [1.0.0]\n"
    assert output == expected


@@ -352,7 +352,8 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
    contents = """spack:
  packages:
    all:
      target: [x86_64]
      version:
      - 1.0.0
"""

    # create temp file and add it to config
@@ -367,7 +368,8 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
    # added config comes before prior config
    expected = """packages:
  all:
    target: [x86_64]
    version:
    - 1.0.0
    compiler: [gcc]
"""

@@ -379,7 +381,7 @@ def test_config_add_invalid_file_fails(tmpdir):
    # invalid because version requires a list
    contents = """spack:
  packages:
    hdf5:
    all:
      version: 1.0.0
"""

@@ -629,11 +631,14 @@ def test_config_prefer_upstream(
    packages = syaml.load(open(cfg_file))["packages"]

    # Make sure only the non-default variants are set.
    assert packages["all"] == {"compiler": ["gcc@=10.2.1"]}
    assert packages["boost"] == {"variants": "+debug +graph", "version": ["1.63.0"]}
    assert packages["dependency-install"] == {"version": ["2.0"]}
    assert packages["boost"] == {
        "compiler": ["gcc@=10.2.1"],
        "variants": "+debug +graph",
        "version": ["1.63.0"],
    }
    assert packages["dependency-install"] == {"compiler": ["gcc@=10.2.1"], "version": ["2.0"]}
    # Ensure that neither variant gets listed for hdf5, since they conflict
    assert packages["hdf5"] == {"version": ["2.3"]}
    assert packages["hdf5"] == {"compiler": ["gcc@=10.2.1"], "version": ["2.3"]}

    # Make sure a message about the conflicting hdf5's was given.
    assert "- hdf5" in output

@@ -14,14 +14,7 @@

dependencies = SpackCommand("dependencies")

mpis = [
    "intel-parallel-studio",
    "low-priority-provider",
    "mpich",
    "mpich2",
    "multi-provider-mpi",
    "zmpi",
]
mpis = ["low-priority-provider", "mpich", "mpich2", "multi-provider-mpi", "zmpi"]
mpi_deps = ["fake"]


@@ -163,15 +163,8 @@ def test_dev_build_fails_multiple_specs(mock_packages):


def test_dev_build_fails_nonexistent_package_name(mock_packages):
    output = ""

    try:
        dev_build("no_such_package")
        assert False, "no exception was raised!"
    except spack.repo.UnknownPackageError as e:
        output = e.message

    assert "Package 'no_such_package' not found" in output
    output = dev_build("no_such_package", fail_on_error=False)
    assert "No package for 'no_such_package' was found" in output


def test_dev_build_fails_no_version(mock_packages):

@@ -14,7 +14,6 @@

import llnl.util.filesystem as fs
import llnl.util.link_tree
import llnl.util.tty as tty

import spack.cmd.env
import spack.config
@@ -632,7 +631,7 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
    manifest_dir.mkdir(parents=True, exist_ok=False)
    manifest_file = manifest_dir / ev.manifest_name
    manifest_file.write_text(
        """\
        """
spack:
  specs:
  - a
@@ -720,25 +719,38 @@ def test_env_with_config(environment_from_manifest):

def test_with_config_bad_include(environment_from_manifest):
    """Confirm missing include paths raise expected exception and error."""
    with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"):
        e = environment_from_manifest(
            """
    e = environment_from_manifest(
        """
spack:
  include:
  - /no/such/directory
  - no/such/file.yaml
"""
        )
    )
    with pytest.raises(spack.config.ConfigFileError, match="2 missing include path"):
        with e:
            e.concretize()

    assert ev.active_environment() is None


def test_env_with_include_config_files_same_basename(tmp_path, environment_from_manifest):
    file1 = fs.join_path(tmp_path, "path", "to", "included-config.yaml")
    fs.mkdirp(os.path.dirname(file1))
    with open(file1, "w") as f:
def test_env_with_include_config_files_same_basename(environment_from_manifest):
    e = environment_from_manifest(
        """
spack:
  include:
  - ./path/to/included-config.yaml
  - ./second/path/to/include-config.yaml
  specs:
  - libelf
  - mpileaks
"""
    )

    e = ev.read("test")

    fs.mkdirp(os.path.join(e.path, "path", "to"))
    with open(os.path.join(e.path, "./path/to/included-config.yaml"), "w") as f:
        f.write(
            """\
packages:
@@ -747,9 +759,8 @@ def test_env_with_include_config_files_same_basename(tmp_path, environment_from_
"""
        )

    file2 = fs.join_path(tmp_path, "second", "path", "included-config.yaml")
    fs.mkdirp(os.path.dirname(file2))
    with open(file2, "w") as f:
    fs.mkdirp(os.path.join(e.path, "second", "path", "to"))
    with open(os.path.join(e.path, "./second/path/to/include-config.yaml"), "w") as f:
        f.write(
            """\
packages:
@@ -758,18 +769,6 @@ def test_env_with_include_config_files_same_basename(tmp_path, environment_from_
"""
        )

    e = environment_from_manifest(
        f"""
spack:
  include:
  - {file1}
  - {file2}
  specs:
  - libelf
  - mpileaks
"""
    )

    with e:
        e.concretize()

@@ -806,18 +805,12 @@ def mpileaks_env_config(include_path):
    )


def test_env_with_included_config_file(mutable_mock_env_path, packages_file):
def test_env_with_included_config_file(environment_from_manifest, packages_file):
    """Test inclusion of a relative packages configuration file added to an
    existing environment.
    """
    env_root = mutable_mock_env_path
    fs.mkdirp(env_root)
    include_filename = "included-config.yaml"
    included_path = env_root / include_filename
    shutil.move(packages_file.strpath, included_path)

    spack_yaml = env_root / ev.manifest_name
    spack_yaml.write_text(
    e = environment_from_manifest(
        f"""\
spack:
  include:
@@ -827,7 +820,9 @@ def test_env_with_included_config_file(mutable_mock_env_path, packages_file):
"""
    )

    e = ev.Environment(env_root)
    included_path = os.path.join(e.path, include_filename)
    shutil.move(packages_file.strpath, included_path)

    with e:
        e.concretize()

@@ -860,67 +855,68 @@ def test_env_with_included_config_missing_file(tmpdir, mutable_empty_config):
    with spack_yaml.open("w") as f:
        f.write("spack:\n  include:\n  - {0}\n".format(missing_file.strpath))

    env = ev.Environment(tmpdir.strpath)
    with pytest.raises(spack.config.ConfigError, match="missing include path"):
        ev.Environment(tmpdir.strpath)
        ev.activate(env)


def test_env_with_included_config_scope(mutable_mock_env_path, packages_file):
def test_env_with_included_config_scope(environment_from_manifest, packages_file):
    """Test inclusion of a package file from the environment's configuration
    stage directory. This test is intended to represent a case where a remote
    file has already been staged."""
    env_root = mutable_mock_env_path
    config_scope_path = env_root / "config"
    config_scope_path = os.path.join(ev.root("test"), "config")

    # Configure the environment to include file(s) from the environment's
    # remote configuration stage directory.
    e = environment_from_manifest(mpileaks_env_config(config_scope_path))

    # Copy the packages.yaml file to the environment configuration
    # directory, so it is picked up during concretization. (Using
    # copy instead of rename in case the fixture scope changes.)
    fs.mkdirp(config_scope_path)
    include_filename = os.path.basename(packages_file.strpath)
    included_path = config_scope_path / include_filename
    included_path = os.path.join(config_scope_path, include_filename)
    fs.copy(packages_file.strpath, included_path)

    # Configure the environment to include file(s) from the environment's
    # remote configuration stage directory.
    spack_yaml = env_root / ev.manifest_name
    spack_yaml.write_text(mpileaks_env_config(config_scope_path))

    # Ensure the concretized environment reflects contents of the
    # packages.yaml file.
    e = ev.Environment(env_root)
    with e:
        e.concretize()

    assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())


def test_env_with_included_config_var_path(tmpdir, packages_file):
def test_env_with_included_config_var_path(environment_from_manifest, packages_file):
    """Test inclusion of a package configuration file with path variables
    "staged" in the environment's configuration stage directory."""
    included_file = packages_file.strpath
    env_path = pathlib.PosixPath(tmpdir)
    config_var_path = os.path.join("$tempdir", "included-packages.yaml")

    spack_yaml = env_path / ev.manifest_name
    spack_yaml.write_text(mpileaks_env_config(config_var_path))
    config_var_path = os.path.join("$tempdir", "included-config.yaml")
    e = environment_from_manifest(mpileaks_env_config(config_var_path))

    config_real_path = substitute_path_variables(config_var_path)
    shutil.move(included_file, config_real_path)
    fs.mkdirp(os.path.dirname(config_real_path))
    shutil.move(packages_file.strpath, config_real_path)
    assert os.path.exists(config_real_path)

    e = ev.Environment(env_path)
    with e:
        e.concretize()

    assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())

def test_env_with_included_config_precedence(tmp_path):
|
||||
"""Test included scope and manifest precedence when including a package
|
||||
configuration file."""
|
||||
|
||||
included_file = "included-packages.yaml"
|
||||
included_path = tmp_path / included_file
|
||||
with open(included_path, "w") as f:
|
||||
def test_env_config_precedence(environment_from_manifest):
|
||||
e = environment_from_manifest(
|
||||
"""
|
||||
spack:
|
||||
packages:
|
||||
libelf:
|
||||
version: ["0.8.12"]
|
||||
include:
|
||||
- ./included-config.yaml
|
||||
specs:
|
||||
- mpileaks
|
||||
"""
|
||||
)
|
||||
with open(os.path.join(e.path, "included-config.yaml"), "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
packages:
|
||||
@@ -931,50 +927,29 @@ def test_env_with_included_config_precedence(tmp_path):
|
||||
"""
|
||||
)
|
||||
|
||||
spack_yaml = tmp_path / ev.manifest_name
|
||||
spack_yaml.write_text(
|
||||
f"""\
|
||||
spack:
|
||||
packages:
|
||||
libelf:
|
||||
version: ["0.8.12"]
|
||||
include:
|
||||
- {os.path.join(".", included_file)}
|
||||
specs:
|
||||
- mpileaks
|
||||
"""
|
||||
)
|
||||
|
||||
e = ev.Environment(tmp_path)
|
||||
with e:
|
||||
e.concretize()
|
||||
specs = e._get_environment_specs()
|
||||
|
||||
# ensure included scope took effect
|
||||
assert any(x.satisfies("mpileaks@2.2") for x in specs)
|
||||
assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
|
||||
|
||||
# ensure env file takes precedence
|
||||
assert any(x.satisfies("libelf@0.8.12") for x in specs)
|
||||
assert any(x.satisfies("libelf@0.8.12") for x in e._get_environment_specs())
|
||||
|
||||
|
||||
def test_env_with_included_configs_precedence(tmp_path):
|
||||
"""Test precendence of multiple included configuration files."""
|
||||
file1 = "high-config.yaml"
|
||||
file2 = "low-config.yaml"
|
||||
|
||||
spack_yaml = tmp_path / ev.manifest_name
|
||||
spack_yaml.write_text(
|
||||
f"""\
|
||||
def test_included_config_precedence(environment_from_manifest):
|
||||
e = environment_from_manifest(
|
||||
"""
|
||||
spack:
|
||||
include:
|
||||
- {os.path.join(".", file1)} # this one should take precedence
|
||||
- {os.path.join(".", file2)}
|
||||
- ./high-config.yaml # this one should take precedence
|
||||
- ./low-config.yaml
|
||||
specs:
|
||||
- mpileaks
|
||||
"""
|
||||
)
|
||||
|
||||
with open(tmp_path / file1, "w") as f:
|
||||
with open(os.path.join(e.path, "high-config.yaml"), "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
packages:
|
||||
@@ -983,7 +958,7 @@ def test_env_with_included_configs_precedence(tmp_path):
|
||||
"""
|
||||
)
|
||||
|
||||
with open(tmp_path / file2, "w") as f:
|
||||
with open(os.path.join(e.path, "low-config.yaml"), "w") as f:
|
||||
f.write(
|
||||
"""\
|
||||
packages:
|
||||
@@ -994,115 +969,30 @@ def test_env_with_included_configs_precedence(tmp_path):
|
||||
"""
|
||||
)
|
||||
|
||||
e = ev.Environment(tmp_path)
|
||||
with e:
|
||||
e.concretize()
|
||||
specs = e._get_environment_specs()
|
||||
|
||||
# ensure included package spec took precedence over manifest spec
|
||||
assert any(x.satisfies("mpileaks@2.2") for x in specs)
|
||||
assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
|
||||
|
||||
# ensure first included package spec took precedence over one from second
|
||||
assert any(x.satisfies("libelf@0.8.10") for x in specs)
|
||||
assert any([x.satisfies("libelf@0.8.10") for x in e._get_environment_specs()])
|
||||
|
||||
|
||||
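The two precedence tests above encode a simple rule: the manifest's own configuration outranks included files, and earlier entries under include: outrank later ones. A stdlib-only sketch of that merge order (an illustration of the rule, not Spack's configuration code):

def merge_in_priority_order(scopes):
    """Merge dicts so that earlier scopes take precedence over later ones."""
    merged = {}
    for scope in reversed(scopes):  # apply lowest-priority scopes first
        merged.update(scope)
    return merged

manifest = {"libelf": "0.8.12"}
high_include = {"mpileaks": "2.2", "libelf": "0.8.10"}
low_include = {"libelf": "0.8.11"}

# Priority: manifest > first include > second include
result = merge_in_priority_order([manifest, high_include, low_include])
assert result == {"libelf": "0.8.12", "mpileaks": "2.2"}
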
@pytest.mark.regression("39248")
def test_bad_env_yaml_format_remove(mutable_mock_env_path):
    badenv = "badenv"
    env("create", badenv)
    filename = mutable_mock_env_path / "spack.yaml"
def test_bad_env_yaml_format(tmpdir):
    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:
        f.write(
            """\
spack:
  spacks:
    - mpileaks
"""
        )

    assert badenv in env("list")
    env("remove", "-y", badenv)
    assert badenv not in env("list")


@pytest.mark.regression("39248")
@pytest.mark.parametrize(
    "error,message,contents",
    [
        (
            spack.config.ConfigFormatError,
            "not of type",
            """\
spack:
  specs: mpi@2.0
""",
        ),
        (
            ev.SpackEnvironmentConfigError,
            "duplicate key",
            """\
spack:
  packages:
    all:
      providers:
        mpi: [mvapich2]
        mpi: [mpich]
""",
        ),
        (
            spack.config.ConfigFormatError,
            "'specks' was unexpected",
            """\
spack:
  specks:
    - libdwarf
""",
        ),
    ],
)
def test_bad_env_yaml_create_fails(tmp_path, mutable_mock_env_path, error, message, contents):
    """Ensure creation with invalid yaml does NOT create or leave the environment."""
    filename = tmp_path / ev.manifest_name
    filename.write_text(contents)
    env_name = "bad_env"
    with pytest.raises(error, match=message):
        env("create", env_name, str(filename))

    assert env_name not in env("list")
    manifest = mutable_mock_env_path / env_name / ev.manifest_name
    assert not os.path.exists(str(manifest))

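The "duplicate key" case in the table above depends on the YAML loader refusing a mapping that defines the same key twice. A stdlib-only sketch of that check over already-parsed (key, value) pairs; Spack performs it inside its own YAML loader, so treat this as illustrative:

def mapping_from_pairs(pairs):
    """Build a dict, rejecting duplicate keys the way a strict loader would."""
    mapping = {}
    for key, value in pairs:
        if key in mapping:
            raise ValueError(f"duplicate key: {key}")
        mapping[key] = value
    return mapping

mapping_from_pairs([("mpi", ["mvapich2"])])  # accepted
try:
    mapping_from_pairs([("mpi", ["mvapich2"]), ("mpi", ["mpich"])])
except ValueError as err:
    assert "duplicate key" in str(err)
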
@pytest.mark.regression("39248")
@pytest.mark.parametrize("answer", ["-y", ""])
def test_multi_env_remove(mutable_mock_env_path, monkeypatch, answer):
    """Test removal (or not) of a valid and invalid environment"""
    remove_environment = answer == "-y"
    monkeypatch.setattr(tty, "get_yes_or_no", lambda prompt, default: remove_environment)

    environments = ["goodenv", "badenv"]
    for e in environments:
        env("create", e)

    # Ensure the bad environment contains invalid yaml
    filename = mutable_mock_env_path / environments[1] / ev.manifest_name
    filename.write_text(
        """\
- libdwarf
"""
    )

    assert all(e in env("list") for e in environments)

    args = [answer] if answer else []
    args.extend(environments)
    output = env("remove", *args, fail_on_error=False)

    if remove_environment is True:
        # Successfully removed (and reported removal) of *both* environments
        assert not all(e in env("list") for e in environments)
        assert output.count("Successfully removed") == len(environments)
    else:
        # Not removing any of the environments
        assert all(e in env("list") for e in environments)
    with tmpdir.as_cwd():
        with pytest.raises(spack.config.ConfigFormatError) as e:
            env("create", "test", "./spack.yaml")
        assert "spack.yaml:2" in str(e)
        assert "'spacks' was unexpected" in str(e)

def test_env_loads(install_mockery, mock_fetch):
@@ -1641,10 +1531,11 @@ def test_stack_yaml_remove_from_list(tmpdir):
    assert Spec("callpath") in test.user_specs


def test_stack_yaml_remove_from_list_force(tmp_path):
    spack_yaml = tmp_path / ev.manifest_name
    spack_yaml.write_text(
        """\
def test_stack_yaml_remove_from_list_force(tmpdir):
    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:
        f.write(
            """\
spack:
  definitions:
    - packages: [mpileaks, callpath]
@@ -1653,20 +1544,20 @@ def test_stack_yaml_remove_from_list_force(tmp_path):
        - [$packages]
        - [^mpich, ^zmpi]
"""
    )
        )
    with tmpdir.as_cwd():
        env("create", "test", "./spack.yaml")
        with ev.read("test"):
            concretize()
            remove("-f", "-l", "packages", "mpileaks")
            find_output = find("-c")

    env("create", "test", str(spack_yaml))
    with ev.read("test"):
        concretize()
        remove("-f", "-l", "packages", "mpileaks")
        find_output = find("-c")
        assert "mpileaks" not in find_output

    assert "mpileaks" not in find_output

    test = ev.read("test")
    assert len(test.user_specs) == 2
    assert Spec("callpath ^zmpi") in test.user_specs
    assert Spec("callpath ^mpich") in test.user_specs
        test = ev.read("test")
        assert len(test.user_specs) == 2
        assert Spec("callpath ^zmpi") in test.user_specs
        assert Spec("callpath ^mpich") in test.user_specs


def test_stack_yaml_remove_from_matrix_no_effect(tmpdir):
@@ -1712,7 +1603,7 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):
    with tmpdir.as_cwd():
        env("create", "test", "./spack.yaml")
        with ev.read("test") as e:
            e.concretize()
            concretize()

            before_user = e.user_specs.specs
            before_conc = e.concretized_user_specs
@@ -2552,12 +2443,8 @@ def test_concretize_user_specs_together():
    e.remove("mpich")
    e.add("mpich2")

    exc_cls = spack.error.SpackError
    if spack.config.get("config:concretizer") == "clingo":
        exc_cls = spack.error.UnsatisfiableSpecError

    # Concretizing without invalidating the concrete spec for mpileaks fails
    with pytest.raises(exc_cls):
    with pytest.raises(spack.error.UnsatisfiableSpecError):
        e.concretize()
    e.concretize(force=True)

@@ -2589,12 +2476,9 @@ def test_duplicate_packages_raise_when_concretizing_together():
    e.add("mpileaks~opt")
    e.add("mpich")

    exc_cls, match = spack.error.SpackError, None
    if spack.config.get("config:concretizer") == "clingo":
        exc_cls = spack.error.UnsatisfiableSpecError
        match = r"You could consider setting `concretizer:unify`"

    with pytest.raises(exc_cls, match=match):
    with pytest.raises(
        spack.error.UnsatisfiableSpecError, match=r"You could consider setting `concretizer:unify`"
    ):
        e.concretize()


@@ -2621,7 +2505,7 @@ def test_env_write_only_non_default_nested(tmpdir):
        - matrix:
          - [mpileaks]
      packages:
        all:
        mpileaks:
          compiler: [gcc]
    view: true
"""
@@ -2959,25 +2843,6 @@ def test_activate_temp(monkeypatch, tmpdir):
    assert ev.is_env_dir(str(tmpdir))


def test_activate_default(monkeypatch):
    """Tests whether `spack env activate` creates / activates the default
    environment"""
    assert not ev.exists("default")

    # Activating it the first time should create it
    env("activate", "--sh")
    env("deactivate", "--sh")
    assert ev.exists("default")

    # Activating it while it already exists should work
    env("activate", "--sh")
    env("deactivate", "--sh")
    assert ev.exists("default")

    env("remove", "-y", "default")
    assert not ev.exists("default")

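The test above pins down a create-on-first-use contract: activating the default environment creates it when absent, and activating an existing one is harmless. A hypothetical stand-in (not Spack's API) that captures just that contract:

class EnvRegistry:
    """Toy registry mirroring the create-on-first-activate behavior."""

    def __init__(self):
        self._envs = set()

    def exists(self, name):
        return name in self._envs

    def activate(self, name="default"):
        self._envs.add(name)  # creates on first use, a no-op afterwards

registry = EnvRegistry()
assert not registry.exists("default")
registry.activate()
assert registry.exists("default")
registry.activate()  # already exists: still fine
assert registry.exists("default")
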
def test_env_view_fail_if_symlink_points_elsewhere(tmpdir, install_mockery, mock_fetch):
    view = str(tmpdir.join("view"))
    # Put a symlink to an actual directory in view
@@ -3463,20 +3328,6 @@ def test_spack_package_ids_variable(tmpdir, mock_packages):
    assert "post-install: {}".format(s.dag_hash()) in out


def test_depfile_empty_does_not_error(tmp_path):
    # For empty environments Spack should create a depfile that does nothing
    make = Executable("make")
    makefile = str(tmp_path / "Makefile")

    env("create", "test")
    with ev.read("test"):
        env("depfile", "-o", makefile)

    make("-f", makefile)

    assert make.returncode == 0

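The property asserted above, that a depfile generated for an empty environment is still a valid do-nothing Makefile, can be reproduced with a stdlib sketch, assuming make is on PATH:

import subprocess
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    makefile = Path(tmp) / "Makefile"
    makefile.write_text("all:\n\t@true\n")  # a rule that does nothing
    result = subprocess.run(["make", "-f", str(makefile)], cwd=tmp)
    assert result.returncode == 0
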
def test_unify_when_possible_works_around_conflicts():
    e = ev.create("coconcretization")
    e.unify = "when_possible"

@@ -28,12 +28,21 @@ def _mock_search(path_hints=None):
    return _factory


@pytest.fixture
def _platform_executables(monkeypatch):
    def _win_exe_ext():
        return ".bat"

    monkeypatch.setattr(spack.util.path, "win_exe_ext", _win_exe_ext)


def define_plat_exe(exe):
    if sys.platform == "win32":
        exe += ".bat"
    return exe

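define_plat_exe and the _platform_executables fixture above exist so tests can name executables portably; on Windows the fake executables are .bat scripts. The same helper in isolation:

import sys

def plat_exe(name, win_ext=".bat"):
    """Append the Windows script extension when running on Windows."""
    return name + win_ext if sys.platform == "win32" else name

assert plat_exe("cmake") in ("cmake", "cmake.bat")
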
@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
def test_find_external_single_package(mock_executable):
    cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
    search_dir = cmake_path.parent.parent
@@ -45,7 +54,7 @@ def test_find_external_single_package(mock_executable):
    assert len(detected_spec) == 1 and detected_spec[0].spec == Spec("cmake@1.foo")


def test_find_external_two_instances_same_package(mock_executable):
def test_find_external_two_instances_same_package(mock_executable, _platform_executables):
    # Each of these cmake instances is created in a different prefix
    # In Windows, quoted strings are echo'd with quotes included;
    # we need to avoid that for proper regex.
@@ -227,7 +236,32 @@ def test_list_detectable_packages(mutable_config, mutable_mock_repo):
    assert external.returncode == 0


def test_overriding_prefix(mock_executable, mutable_config, monkeypatch):
@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _platform_executables):
    # Prepare an environment to detect a fake gcc
    gcc_exe = mock_executable("gcc", output="echo 4.2.1")
    prefix = os.path.dirname(gcc_exe)
    monkeypatch.setenv("PATH", prefix)

    # Find the external spec
    external("find", "gcc")

    # Check entries in 'packages.yaml'
    packages_yaml = spack.config.get("packages")
    assert "gcc" in packages_yaml
    assert "externals" in packages_yaml["gcc"]
    externals = packages_yaml["gcc"]["externals"]
    assert len(externals) == 1
    external_gcc = externals[0]
    assert external_gcc["spec"] == "gcc@4.2.1 languages=c"
    assert external_gcc["prefix"] == os.path.dirname(prefix)
    assert "extra_attributes" in external_gcc
    extra_attributes = external_gcc["extra_attributes"]
    assert "prefix" not in extra_attributes
    assert extra_attributes["compilers"]["c"] == str(gcc_exe)

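For reference, the packages.yaml shape asserted by test_packages_yaml_format, written out as plain data with the same checks (the paths are placeholders, not a real toolchain):

packages_yaml = {
    "gcc": {
        "externals": [
            {
                "spec": "gcc@4.2.1 languages=c",
                "prefix": "/fake/prefix",
                "extra_attributes": {"compilers": {"c": "/fake/prefix/bin/gcc"}},
            }
        ]
    }
}

externals = packages_yaml["gcc"]["externals"]
assert len(externals) == 1
entry = externals[0]
assert entry["spec"].startswith("gcc@4.2.1")
# the prefix lives on the entry itself, never under extra_attributes
assert "prefix" not in entry["extra_attributes"]
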
def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
    gcc_exe = mock_executable("gcc", output="echo 4.2.1")
    search_dir = gcc_exe.parent

@@ -248,7 +282,10 @@ def _determine_variants(cls, exes, version_str):
    assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin")


def test_new_entries_are_reported_correctly(mock_executable, mutable_config, monkeypatch):
@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
def test_new_entries_are_reported_correctly(
    mock_executable, mutable_config, monkeypatch, _platform_executables
):
    # Prepare an environment to detect a fake gcc
    gcc_exe = mock_executable("gcc", output="echo 4.2.1")
    prefix = os.path.dirname(gcc_exe)

@@ -349,25 +349,11 @@ def test_compiler_flags_differ_identical_compilers(self):
        spec.concretize()
        assert spec.satisfies("cflags=-O2")

    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_compiler_flag_propagate(self):
        spec = Spec("callpath cflags=='-g'")
        spec = Spec("hypre cflags=='-g' ^openblas")
        spec.concretize()

        for dep in spec.traverse():
            assert dep.satisfies("cflags='-g'")

    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_non_root_compiler_flag_propagate(self):
        spec = Spec("callpath ^dyninst cflags=='-g'")
        spec.concretize()

        assert spec.satisfies("^libdwarf cflags='-g'")
        assert spec.satisfies("^libelf cflags='-g'")
        assert spec.satisfies("^openblas cflags='-g'")

    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
@@ -376,30 +362,17 @@ def test_concretize_compiler_flag_does_not_propagate(self):
        spec = Spec("hypre cflags='-g' ^openblas")
        spec.concretize()

        for dep in spec.traverse(root=False):
            assert not dep.satisfies("cflags='-g'")
        assert not spec.satisfies("^openblas cflags='-g'")

    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_propagate_compiler_flag_not_passed_to_dependent(self):
        spec = Spec("callpath cflags=='-g' ^dyninst cflags='-O3'")
        spec = Spec("hypre cflags=='-g' ^openblas cflags='-O3'")
        spec.concretize()

        assert set(spec.compiler_flags["cflags"]) == set(["-g"])
        assert spec.satisfies("^dyninst cflags='-O3'")
        assert spec.satisfies("^libelf cflags='-g'")

    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_propagate_specified_compiler_flag(self):
        spec = Spec("callpath cflags=='-g' cxxflags='-O3'")
        spec.concretize()

        for dep in spec.traverse(root=False):
            assert dep.satisfies("cflags='-g'")
            assert not dep.satisfies("cxxflags='-O3'")
        assert spec.satisfies("^openblas cflags='-O3'")

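The flag tests above distinguish cflags=='-g' (propagated down the DAG) from cflags='-g' (applies only to the node it is written on). A toy model of that rule, not the concretizer, just the semantics the assertions check:

def apply_flags(dag, node, flags, propagated):
    """Attach flags to node; recurse into dependencies only when propagated."""
    dag[node].setdefault("cflags", set()).update(flags)
    if propagated:
        for dep in dag[node]["deps"]:
            apply_flags(dag, dep, flags, propagated)

dag = {"hypre": {"deps": ["openblas"]}, "openblas": {"deps": []}}
apply_flags(dag, "hypre", {"-g"}, propagated=True)  # hypre cflags=='-g'
assert "-g" in dag["openblas"]["cflags"]

dag = {"hypre": {"deps": ["openblas"]}, "openblas": {"deps": []}}
apply_flags(dag, "hypre", {"-g"}, propagated=False)  # hypre cflags='-g'
assert "cflags" not in dag["openblas"]
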
    def test_mixing_compilers_only_affects_subdag(self):
        spack.config.set("packages:all:compiler", ["clang", "gcc"])
@@ -485,66 +458,19 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    @pytest.mark.parametrize(
        "spec_str,expected_propagation",
        [
            ("hypre~~shared ^openblas+shared", [("hypre", "~shared"), ("openblas", "+shared")]),
            # Propagates past a node that doesn't have the variant
            ("hypre~~shared ^openblas", [("hypre", "~shared"), ("openblas", "~shared")]),
            (
                "ascent~~shared +adios2",
                [("ascent", "~shared"), ("adios2", "~shared"), ("bzip2", "~shared")],
            ),
            # Propagates below a node that uses the other value explicitly
            (
                "ascent~~shared +adios2 ^adios2+shared",
                [("ascent", "~shared"), ("adios2", "+shared"), ("bzip2", "~shared")],
            ),
            (
                "ascent++shared +adios2 ^adios2~shared",
                [("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "+shared")],
            ),
        ],
    )
    def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagation):
        """Tests various patterns of boolean variant propagation"""
        spec = Spec(spec_str).concretized()
        for key, expected_satisfies in expected_propagation:
            spec[key].satisfies(expected_satisfies)
    def test_concretize_propagate_disabled_variant(self):
        """Test a package variant value was passed from its parent."""
        spec = Spec("hypre~~shared ^openblas")
        spec.concretize()

        assert spec.satisfies("^openblas~shared")

    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_propagated_variant_is_not_passed_to_dependent(self):
        """Test a package variant value was passed from its parent."""
        spec = Spec("ascent~~shared +adios2 ^adios2+shared")
        spec = Spec("hypre~~shared ^openblas+shared")
        spec.concretize()

        assert spec.satisfies("^adios2+shared")
        assert spec.satisfies("^bzip2~shared")

    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_propagate_specified_variant(self):
        """Test that only the specified variant is propagated to the dependencies"""
        spec = Spec("parent-foo-bar ~~foo")
        spec.concretize()

        assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo")
        assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar")

    @pytest.mark.only_clingo("Original concretizer is allowed to forego variant propagation")
    def test_concretize_propagate_multivalue_variant(self):
        """Test that multivalue variants are propagating the specified value(s)
        to their dependencies. The dependencies should not have the default value"""
        spec = Spec("multivalue-variant foo==baz,fee")
        spec.concretize()

        assert spec.satisfies("^a foo=baz,fee")
        assert spec.satisfies("^b foo=baz,fee")
        assert not spec.satisfies("^a foo=bar")
        assert not spec.satisfies("^b foo=bar")
        assert spec.satisfies("^openblas+shared")

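Likewise for variants: the parametrized cases above say a propagated boolean value (~~shared or ++shared) flows down the DAG but never overrides a node that sets the variant explicitly. A toy model of just that rule:

def propagate_variant(tree, node, value, explicit):
    """Push a boolean variant down the tree, skipping explicit settings."""
    if node not in explicit:  # explicit settings win locally
        tree[node]["shared"] = value
    for dep in tree[node]["deps"]:
        propagate_variant(tree, dep, value, explicit)

tree = {
    "ascent": {"deps": ["adios2"]},
    "adios2": {"deps": ["bzip2"], "shared": True},  # set explicitly (+shared)
    "bzip2": {"deps": []},
}
# ascent~~shared ^adios2+shared: adios2 keeps +shared, bzip2 still gets ~shared
propagate_variant(tree, "ascent", False, explicit={"adios2"})
assert tree["ascent"]["shared"] is False
assert tree["adios2"]["shared"] is True
assert tree["bzip2"]["shared"] is False
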
    def test_no_matching_compiler_specs(self, mock_low_high_config):
        # only relevant when not building compilers as needed
@@ -1912,8 +1838,7 @@ def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch
        # If we concretize with --reuse it is not, since "mpich~debug" was already installed
        with spack.config.override("concretizer:reuse", True):
            s = Spec("mpich").concretized()
            assert s.installed
            assert s.satisfies("~debug"), s
            assert s.satisfies("~debug")

    @pytest.mark.regression("32471")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
@@ -2207,16 +2132,14 @@ def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database):

@pytest.fixture()
def duplicates_test_repository():
    repository_path = os.path.join(spack.paths.repos_path, "duplicates.test")
    with spack.repo.use_repositories(repository_path) as mock_repo:
    builder_test_path = os.path.join(spack.paths.repos_path, "duplicates.test")
    with spack.repo.use_repositories(builder_test_path) as mock_repo:
        yield mock_repo


@pytest.mark.usefixtures("mutable_config", "duplicates_test_repository")
@pytest.mark.only_clingo("Not supported by the original concretizer")
class TestConcretizeSeparately:
    """Collects tests on separate concretization"""

    @pytest.mark.parametrize("strategy", ["minimal", "full"])
    def test_two_gmake(self, strategy):
        """Tests that we can concretize a spec with nodes using the same build
@@ -2397,53 +2320,3 @@ def test_adding_specs(self, input_specs, default_mock_concretization):
        assert node == container[node.dag_hash()]
        assert node.dag_hash() in container
        assert node is not container[node.dag_hash()]


@pytest.fixture()
def edges_test_repository():
    repository_path = os.path.join(spack.paths.repos_path, "edges.test")
    with spack.repo.use_repositories(repository_path) as mock_repo:
        yield mock_repo


@pytest.mark.usefixtures("mutable_config", "edges_test_repository")
@pytest.mark.only_clingo("Edge properties not supported by the original concretizer")
class TestConcretizeEdges:
    """Collects tests on edge properties"""

    @pytest.mark.parametrize(
        "spec_str,expected_satisfies,expected_not_satisfies",
        [
            ("conditional-edge", ["^zlib@2.0"], ["^zlib-api"]),
            ("conditional-edge~foo", ["^zlib@2.0"], ["^zlib-api"]),
            (
                "conditional-edge+foo",
                ["^zlib@1.0", "^zlib-api", "^[virtuals=zlib-api] zlib"],
                ["^[virtuals=mpi] zlib"],
            ),
        ],
    )
    def test_condition_triggered_by_edge_property(
        self, spec_str, expected_satisfies, expected_not_satisfies
    ):
        """Tests that we can enforce constraints based on edge attributes"""
        s = Spec(spec_str).concretized()

        for expected in expected_satisfies:
            assert s.satisfies(expected), str(expected)

        for not_expected in expected_not_satisfies:
            assert not s.satisfies(not_expected), str(not_expected)

    def test_virtuals_provided_together_but_only_one_required_in_dag(self):
        """Tests that we can use a provider that provides more than one virtual together,
        and is providing only one, iff the others are not needed in the DAG.

        o blas-only-client
        | [virtual=blas]
        o openblas (provides blas and lapack together)

        """
        s = Spec("blas-only-client ^openblas").concretized()
        assert s.satisfies("^[virtuals=blas] openblas")
        assert not s.satisfies("^[virtuals=blas,lapack] openblas")

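The edge tests above query attributes stored on dependency edges, e.g. ^[virtuals=blas] openblas. A toy model of the matching rule the assertions rely on: a query succeeds only when the requested virtuals are a subset of what the edge records.

def edge_satisfies(edge, dep_name, virtuals=()):
    """Match a dependency edge by name and by the virtuals it provides."""
    return edge["dep"] == dep_name and set(virtuals) <= edge["virtuals"]

edge = {"dep": "openblas", "virtuals": {"blas"}}
assert edge_satisfies(edge, "openblas", {"blas"})
assert not edge_satisfies(edge, "openblas", {"blas", "lapack"})
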
@@ -1,68 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import pytest

import spack.solver.asp
import spack.spec

pytestmark = [
    pytest.mark.not_on_windows("Windows uses old concretizer"),
    pytest.mark.only_clingo("Original concretizer does not support configuration requirements"),
]

version_error_messages = [
    "Cannot satisfy 'fftw@:1.0' and 'fftw@1.1:",
    " required because quantum-espresso depends on fftw@:1.0",
    " required because quantum-espresso ^fftw@1.1: requested from CLI",
    " required because quantum-espresso ^fftw@1.1: requested from CLI",
]

external_error_messages = [
    (
        "Attempted to build package quantum-espresso which is not buildable and does not have"
        " a satisfying external"
    ),
    (
        " 'quantum-espresso~veritas' is an external constraint for quantum-espresso"
        " which was not satisfied"
    ),
    " 'quantum-espresso+veritas' required",
    " required because quantum-espresso+veritas requested from CLI",
]

variant_error_messages = [
    "'fftw' required multiple values for single-valued variant 'mpi'",
    " Requested '~mpi' and '+mpi'",
    " required because quantum-espresso depends on fftw+mpi when +invino",
    " required because quantum-espresso+invino ^fftw~mpi requested from CLI",
    " required because quantum-espresso+invino ^fftw~mpi requested from CLI",
]

external_config = {
    "packages:quantum-espresso": {
        "buildable": False,
        "externals": [{"spec": "quantum-espresso@1.0~veritas", "prefix": "/path/to/qe"}],
    }
}


@pytest.mark.parametrize(
    "error_messages,config_set,spec",
    [
        (version_error_messages, {}, "quantum-espresso^fftw@1.1:"),
        (external_error_messages, external_config, "quantum-espresso+veritas"),
        (variant_error_messages, {}, "quantum-espresso+invino^fftw~mpi"),
    ],
)
def test_error_messages(error_messages, config_set, spec, mock_packages, mutable_config):
    for path, conf in config_set.items():
        spack.config.set(path, conf)

    with pytest.raises(spack.solver.asp.UnsatisfiableSpecError) as e:
        _ = spack.spec.Spec(spec).concretized()

    for em in error_messages:
        assert em in str(e.value)
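The file removed above used a pattern worth keeping in mind: capture the solver error with pytest.raises and assert each expected line in its string form. A generic, self-contained version of that pattern (run it under pytest):

import pytest

def fail_with_details():
    raise RuntimeError(
        "Cannot satisfy constraints\n"
        " required because A depends on B\n"
        " required because B requested from CLI"
    )

def test_expected_lines_in_error():
    with pytest.raises(RuntimeError) as excinfo:
        fail_with_details()
    for line in ["Cannot satisfy constraints", "required because A depends on B"]:
        assert line in str(excinfo.value)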