Compare commits
cws/pumiFi ... isolate-ut
19 commits
| Author | SHA1 | Date |
|---|---|---|
|  | ba62db9add |  |
|  | aef5c35065 |  |
|  | 7ee62d8202 |  |
|  | 1ff78a7959 |  |
|  | 0b92a19620 |  |
|  | ee36214f83 |  |
|  | 4516b742dd |  |
|  | 6e24ea55ea |  |
|  | bc06e2bc17 |  |
|  | 330a5c0010 |  |
|  | 1f0a8755c7 |  |
|  | 425d3ba8a6 |  |
|  | 2972dea418 |  |
|  | 953209fe2d |  |
|  | 1d07e4cb8d |  |
|  | 3fd543328b |  |
|  | 30cafc553a |  |
|  | 3a6ad72ac1 |  |
|  | 736c46e22d |  |
.github/workflows/audit.yaml (2 changes)

@@ -23,7 +23,7 @@ jobs:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
.github/workflows/bootstrap.yml (2 changes)

@@ -159,7 +159,7 @@ jobs:
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: "3.12"
- name: Bootstrap clingo
.github/workflows/build-containers.yml (2 changes)

@@ -57,7 +57,7 @@ jobs:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2

- uses: docker/metadata-action@dbef88086f6cef02e264edb7dbf63250c17cef6c
- uses: docker/metadata-action@e6428a5c4e294a61438ed7f43155db912025b6b3
id: docker_meta
with:
images: |
.github/workflows/nightly-win-builds.yml (2 changes)

@@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
.github/workflows/style/requirements.txt (8 changes)

@@ -1,7 +1,7 @@
black==23.12.1
black==23.11.0
clingo==5.6.2
flake8==7.0.0
isort==5.13.2
mypy==1.8.0
flake8==6.1.0
isort==5.12.0
mypy==1.7.1
types-six==1.16.21.9
vermin==1.6.0
.github/workflows/unit_tests.yaml (8 changes)

@@ -54,7 +54,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages

@@ -101,7 +101,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: '3.11'
- name: Install System packages

@@ -159,7 +159,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: '3.11'
- name: Install System packages

@@ -194,7 +194,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
.github/workflows/valid-style.yml (4 changes)

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: '3.11'
cache: 'pip'

@@ -38,7 +38,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: '3.11'
cache: 'pip'
.github/workflows/windows_python.yml (6 changes)

@@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages

@@ -42,7 +42,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages

@@ -66,7 +66,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
CITATION.cff (20 changes)

@@ -31,17 +31,13 @@ type: software
message: "If you are referencing Spack in a publication, please cite the paper below."
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
abstract: >-
Large HPC centers spend considerable time supporting software for thousands of users, but the
complexity of HPC software is quickly outpacing the capabilities of existing software management
tools. Scientific applications require specific versions of compilers, MPI, and other dependency
libraries, so using a single, standard software stack is infeasible. However, managing many
configurations is difficult because the configuration space is combinatorial in size. We
introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
Spack provides a novel, re- cursive specification syntax to invoke parametric builds of packages
and dependencies. It allows any number of builds to coexist on the same system, and it ensures
that installed packages can find their dependencies, regardless of the environment. We show
through real-world use cases that Spack supports diverse and demanding applications, bringing
order to HPC software chaos.
Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
However, managing many configurations is difficult because the configuration space is combinatorial in size.
We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
Spack provides a novel, re- cursive specification syntax to invoke parametric builds of packages and dependencies.
It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
preferred-citation:
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
type: conference-paper

@@ -75,7 +71,7 @@ preferred-citation:
type: doi
value: 10.1145/2807591.2807623
- description: "The DOE Document Release Number of the work"
type: other
type: other
value: "LLNL-CONF-669890"
authors:
- family-names: "Gamblin"
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2013-2024 LLNS, LLC and other Spack Project Developers.
Copyright (c) 2013-2023 LLNS, LLC and other Spack Project Developers.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
README.md (44 changes)

@@ -1,34 +1,13 @@
<div align="left">
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack

<h2>
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-white-text.svg" width="250">
<source media="(prefers-color-scheme: light)" srcset="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-text.svg" width="250">
<img alt="Spack" src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-text.svg" width="250">
</picture>

<br>
<br clear="all">

<a href="https://github.com/spack/spack/actions/workflows/ci.yml"><img src="https://github.com/spack/spack/workflows/ci/badge.svg" alt="CI Status"></a>
<a href="https://github.com/spack/spack/actions/workflows/bootstrapping.yml"><img src="https://github.com/spack/spack/actions/workflows/bootstrap.yml/badge.svg" alt="Bootstrap Status"></a>
<a href="https://github.com/spack/spack/actions/workflows/build-containers.yml"><img src="https://github.com/spack/spack/actions/workflows/build-containers.yml/badge.svg" alt="Containers Status"></a>
<a href="https://spack.readthedocs.io"><img src="https://readthedocs.org/projects/spack/badge/?version=latest" alt="Documentation Status"></a>
<a href="https://codecov.io/gh/spack/spack"><img src="https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg" alt="Code coverage"/></a>
<a href="https://slack.spack.io"><img src="https://slack.spack.io/badge.svg" alt="Slack"/></a>
<a href="https://matrix.to/#/#spack-space:matrix.org"><img src="https://img.shields.io/matrix/spack-space%3Amatrix.org?label=matrix" alt="Matrix"/></a>

</h2>

**[Getting Started] • [Config] • [Community] • [Contributing] • [Packaging Guide]**

[Getting Started]: https://spack.readthedocs.io/en/latest/getting_started.html
[Config]: https://spack.readthedocs.io/en/latest/configuration.html
[Community]: #community
[Contributing]: https://spack.readthedocs.io/en/latest/contribution_guide.html
[Packaging Guide]: https://spack.readthedocs.io/en/latest/packaging_guide.html

</div>
[](https://github.com/spack/spack/actions)
[](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
[](https://codecov.io/gh/spack/spack)
[](https://github.com/spack/spack/actions/workflows/build-containers.yml)
[](https://spack.readthedocs.io)
[](https://github.com/psf/black)
[](https://slack.spack.io)
[](https://matrix.to/#/#spack-space:matrix.org)

Spack is a multi-platform package manager that builds and installs
multiple versions and configurations of software. It works on Linux,

@@ -87,11 +66,10 @@ Resources:
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
for Q&A and discussions. Note the pinned discussions for announcements.
not just for discussions, but also Q&A.
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
only for announcements. Please use other venues for discussions.

Contributing
------------------------
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,6 +1,6 @@
#!/bin/sh
#
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# sbang project developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,7 +1,7 @@
#!/bin/sh
# -*- python -*-
#
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,6 +1,6 @@
#!/bin/sh
#
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
:: Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
:: Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
:: Spack Project Developers. See the top-level COPYRIGHT file for details.
::
:: SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.

# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -153,43 +153,7 @@ keyring, and trusting all downloaded keys.
List of popular build caches
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_'

-------------------
Build cache signing
-------------------

By default, Spack will add a cryptographic signature to each package pushed to
a build cache, and verifies the signature when installing from a build cache.

Keys for signing can be managed with the :ref:`spack gpg <cmd-spack-gpg>` command,
as well as ``spack buildcache keys`` as mentioned above.
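(The exact key-management invocations are not shown in this hunk; a minimal sketch, with an illustrative key name and email and only the most common flags, is:)

.. code-block:: console

   $ spack gpg create "My Name" "my.name@example.com"   # create a new signing key
   $ spack gpg list                                      # list keys known to Spack
   $ spack buildcache keys --install --trust             # fetch and trust keys from configured mirrors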
You can disable signing when pushing with ``spack buildcache push --unsigned``,
and disable verification when installing from any build cache with
``spack install --no-check-signature``.

Alternatively, signing and verification can be enabled or disabled on a per build cache
basis:

.. code-block:: console

$ spack mirror add --signed <name> <url> # enable signing and verification
$ spack mirror add --unsigned <name> <url> # disable signing and verification

$ spack mirror set --signed <name> # enable signing and verification for an existing mirror
$ spack mirror set --unsigned <name> # disable signing and verification for an existing mirror

Or you can directly edit the ``mirrors.yaml`` configuration file:

.. code-block:: yaml

mirrors:
<name>:
url: <url>
signed: false # disable signing and verification

See also :ref:`mirrors`.
* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_

----------
Relocation
@@ -287,13 +251,87 @@ To significantly speed up Spack in GitHub Actions, binaries can be cached in
GitHub Packages. This service is an OCI registry that can be linked to a GitHub
repository.

A typical workflow is to include a ``spack.yaml`` environment in your repository
that specifies the packages to install, the target architecture, and the build
cache to use under ``mirrors``:

.. code-block:: yaml

spack:
specs:
- python@3.11
config:
install_tree:
root: /opt/spack
padded_length: 128
packages:
all:
require: target=x86_64_v2
mirrors:
local-buildcache: oci://ghcr.io/<organization>/<repository>

A GitHub action can then be used to install the packages and push them to the
build cache:

.. code-block:: yaml

name: Install Spack packages

on: push

env:
SPACK_COLOR: always

jobs:
example:
runs-on: ubuntu-22.04
permissions:
packages: write
steps:
- name: Checkout
uses: actions/checkout@v3

- name: Checkout Spack
uses: actions/checkout@v3
with:
repository: spack/spack
path: spack

- name: Setup Spack
run: echo "$PWD/spack/bin" >> "$GITHUB_PATH"

- name: Concretize
run: spack -e . concretize

- name: Install
run: spack -e . install --no-check-signature

- name: Run tests
run: ./my_view/bin/python3 -c 'print("hello world")'

- name: Push to buildcache
run: |
spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache
spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache
if: ${{ !cancelled() }}

The first time this action runs, it will build the packages from source and
push them to the build cache. Subsequent runs will pull the binaries from the
build cache. The concretizer will ensure that prebuilt binaries are favored
over source builds.

The build cache entries appear in the GitHub Packages section of your repository,
and contain instructions for pulling and running them with ``docker`` or ``podman``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Spack's public build cache for GitHub Actions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack offers a public build cache for GitHub Actions with a set of common packages,
which lets you get started quickly. See the following resources for more information:

* `spack/setup-spack <https://github.com/spack/setup-spack>`_ for setting up Spack in GitHub
Actions
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_ for
more details on the public build cache
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_

.. _cmd-spack-buildcache:
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -90,7 +90,7 @@ and optimizers do require a paid license. In Spack, they are packaged as:
TODO: Confirm and possible change(!) the scope of MPI components (runtime
vs. devel) in current (and previous?) *cluster/professional/composer*
editions, i.e., presence in downloads, possibly subject to license
coverage(!); see `discussion in PR #4300
coverage(!); see `disussion in PR #4300
<https://github.com/spack/spack/pull/4300#issuecomment-305582898>`_. [NB:
An "mpi" subdirectory is not indicative of the full MPI SDK being present
(i.e., ``mpicc``, ..., and header files). The directory may just as well
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -88,13 +88,13 @@ command-line. However, Makefiles that use ``?=`` for assignment honor
environment variables. Since Spack already sets ``CC``, ``CXX``, ``F77``,
and ``FC``, you won't need to worry about setting these variables. If
there are any other variables you need to set, you can do this in the
``setup_build_environment`` method:
``edit`` method:

.. code-block:: python

def setup_build_environment(self, env):
env.set("PREFIX", prefix)
env.set("BLASLIB", spec["blas"].libs.ld_flags)
def edit(self, spec, prefix):
env["PREFIX"] = prefix
env["BLASLIB"] = spec["blas"].libs.ld_flags

`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_

@@ -140,7 +140,7 @@ Edit Makefile
Some Makefiles are just plain stubborn and will ignore command-line
variables. The only way to ensure that these packages build correctly
is to directly edit the Makefile. Spack provides a ``FileFilter`` class
and a ``filter`` method to help with this. For example:
and a ``filter_file`` method to help with this. For example:

.. code-block:: python
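(The example body lies outside the context of this hunk. A minimal sketch of such an ``edit`` method, with purely illustrative Makefile variable names, is shown below; both ``FileFilter`` and ``filter_file`` are provided by ``spack.package``.)

.. code-block:: python

   def edit(self, spec, prefix):
       # Rewrite hard-coded values directly in the Makefile
       makefile = FileFilter("Makefile")
       makefile.filter(r"^CC\s*=.*", "CC = cc")
       # filter_file patches a file in place with a regex substitution
       filter_file(r"^PREFIX\s*=.*", "PREFIX = {0}".format(prefix), "Makefile")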
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -9,96 +9,34 @@
Container Images
================

Spack :ref:`environments` can easily be turned into container images. This page
outlines two ways in which this can be done:

1. By installing the environment on the host system, and copying the installations
into the container image. This approach does not require any tools like Docker
or Singularity to be installed.
2. By generating a Docker or Singularity recipe that can be used to build the
container image. In this approach, Spack builds the software inside the
container runtime, not on the host system.

The first approach is easiest if you already have an installed environment,
the second approach gives more control over the container image.

---------------------------
From existing installations
---------------------------

If you already have a Spack environment installed on your system, you can
share the binaries as an OCI compatible container image. To get started you
just have to configure and OCI registry and run ``spack buildcache push``.

.. code-block:: console

# Create and install an environment in the current directory
spack env create -d .
spack -e . add pkg-a pkg-b
spack -e . install

# Configure the registry
spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image

# Push the image
spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry

The resulting container image can then be run as follows:

.. code-block:: console

$ docker run -it example.com/name/image:my_env

The image generated by Spack consists of the specified base image with each package from the
environment as a separate layer on top. The image is minimal by construction, it only contains the
environment roots and its runtime dependencies.

.. note::

When using registries like GHCR and Docker Hub, the ``--oci-password`` flag is not
the password for your account, but a personal access token you need to generate separately.

The specified ``--base-image`` should have a libc that is compatible with the host system.
For example if your host system is Ubuntu 20.04, you can use ``ubuntu:20.04``, ``ubuntu:22.04``
or newer: the libc in the container image must be at least the version of the host system,
assuming ABI compatibility. It is also perfectly fine to use a completely different
Linux distribution as long as the libc is compatible.

For convenience, Spack also turns the OCI registry into a :ref:`build cache <binary_caches_oci>`,
so that future ``spack install`` of the environment will simply pull the binaries from the
registry instead of doing source builds. The flag ``--update-index`` is needed to make Spack
take the build cache into account when concretizing.

.. note::

When generating container images in CI, the approach above is recommended when CI jobs
already run in a sandboxed environment. You can simply use ``spack`` directly
in the CI job and push the resulting image to a registry. Subsequent CI jobs should
run faster because Spack can install from the same registry instead of rebuilding from
sources.

---------------------------------------------
Generating recipes for Docker and Singularity
---------------------------------------------

Apart from copying existing installations into container images, Spack can also
generate recipes for container images. This is useful if you want to run Spack
itself in a sandboxed environment instead of on the host system.

Since recipes need a little bit more boilerplate than
Spack :ref:`environments` are a great tool to create container images, but
preparing one that is suitable for production requires some more boilerplate
than just:

.. code-block:: docker

COPY spack.yaml /environment
RUN spack -e /environment install

Spack provides a command to generate customizable recipes for container images. Customizations
include minimizing the size of the image, installing packages in the base image using the system
package manager, and setting up a proper entrypoint to run the image.
Additional actions may be needed to minimize the size of the
container, or to update the system software that is installed in the base
image, or to set up a proper entrypoint to run the image. These tasks are
usually both necessary and repetitive, so Spack comes with a command
to generate recipes for container images starting from a ``spack.yaml``.

~~~~~~~~~~~~~~~~~~~~
.. seealso::

This page is a reference for generating recipes to build container images.
It means that your environment is built from scratch inside the container
runtime.

Since v0.21, Spack can also create container images from existing package installations
on your host system. See :ref:`binary_caches_oci` for more information on
that topic.

--------------------
A Quick Introduction
~~~~~~~~~~~~~~~~~~~~
--------------------

Consider having a Spack environment like the following:

@@ -109,8 +47,8 @@ Consider having a Spack environment like the following:
- gromacs+mpi
- mpich

Producing a ``Dockerfile`` from it is as simple as changing directories to
where the ``spack.yaml`` file is stored and running the following command:
Producing a ``Dockerfile`` from it is as simple as moving to the directory
where the ``spack.yaml`` file is stored and giving the following command:

.. code-block:: console
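(The command itself lies outside the context of this hunk; it is the ``spack containerize`` command, roughly:)

.. code-block:: console

   $ spack containerize > Dockerfile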
@@ -176,9 +114,9 @@ configuration are discussed in details in the sections below.

.. _container_spack_images:

~~~~~~~~~~~~~~~~~~~~~~~~~~
--------------------------
Spack Images on Docker Hub
~~~~~~~~~~~~~~~~~~~~~~~~~~
--------------------------

Docker images with Spack preinstalled and ready to be used are
built when a release is tagged, or nightly on ``develop``. The images

@@ -248,9 +186,9 @@ by Spack use them as default base images for their ``build`` stage,
even though handles to use custom base images provided by users are
available to accommodate complex use cases.

~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Configuring the Container Recipe
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
---------------------------------
Creating Images From Environments
---------------------------------

Any Spack Environment can be used for the automatic generation of container
recipes. Sensible defaults are provided for things like the base image or the

@@ -291,18 +229,18 @@ under the ``container`` attribute of environments:

A detailed description of the options available can be found in the :ref:`container_config_options` section.

~~~~~~~~~~~~~~~~~~~
-------------------
Setting Base Images
~~~~~~~~~~~~~~~~~~~
-------------------

The ``images`` subsection is used to select both the image where
Spack builds the software and the image where the built software
is installed. This attribute can be set in different ways and
which one to use depends on the use case at hand.

""""""""""""""""""""""""""""""""""""""""
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Use Official Spack Images From Dockerhub
""""""""""""""""""""""""""""""""""""""""
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To generate a recipe that uses an official Docker image from the
Spack organization to build the software and the corresponding official OS image

@@ -507,9 +445,9 @@ responsibility to ensure that:
Therefore we don't recommend its use in cases that can be otherwise
covered by the simplified mode shown first.

~~~~~~~~~~~~~~~~~~~~~~~~~~~~
----------------------------
Singularity Definition Files
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
----------------------------

In addition to producing recipes in ``Dockerfile`` format Spack can produce
Singularity Definition Files by just changing the value of the ``format``

@@ -530,9 +468,9 @@ attribute:

The minimum version of Singularity required to build a SIF (Singularity Image Format)
image from the recipes generated by Spack is ``3.5.3``.

~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
------------------------------
Extending the Jinja2 Templates
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
------------------------------

The Dockerfile and the Singularity definition file that Spack can generate are based on
a few Jinja2 templates that are rendered according to the environment being containerized.

@@ -653,9 +591,9 @@ The recipe that gets generated contains the two extra instruction that we added

.. _container_config_options:

~~~~~~~~~~~~~~~~~~~~~~~
-----------------------
Configuration Reference
~~~~~~~~~~~~~~~~~~~~~~~
-----------------------

The tables below describe all the configuration options that are currently supported
to customize the generation of container recipes:

@@ -752,13 +690,13 @@ to customize the generation of container recipes:
- Description string
- No

~~~~~~~~~~~~~~
--------------
Best Practices
~~~~~~~~~~~~~~
--------------

"""
^^^
MPI
"""
^^^
Due to the dependency on Fortran for OpenMPI, which is the spack default
implementation, consider adding ``gfortran`` to the ``apt-get install`` list.

@@ -769,9 +707,9 @@ For execution on HPC clusters, it can be helpful to import the docker
image into Singularity in order to start a program with an *external*
MPI. Otherwise, also add ``openssh-server`` to the ``apt-get install`` list.

""""
^^^^
CUDA
""""
^^^^
Starting from CUDA 9.0, Nvidia provides minimal CUDA images based on
Ubuntu. Please see `their instructions <https://hub.docker.com/r/nvidia/cuda/>`_.
Avoid double-installing CUDA by adding, e.g.
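(The snippet referred to here falls between the extracted hunks. A sketch of the kind of ``packages`` entry meant, with an illustrative CUDA version and prefix, is:)

.. code-block:: yaml

   packages:
     cuda:
       buildable: false
       externals:
       - spec: cuda@11.0.2        # illustrative version
         prefix: /usr/local/cuda  # illustrative prefix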
@@ -790,9 +728,9 @@ to your ``spack.yaml``.
Users will either need ``nvidia-docker`` or e.g. Singularity to *execute*
device kernels.

"""""""""""""""""""""""""
^^^^^^^^^^^^^^^^^^^^^^^^^
Docker on Windows and OSX
"""""""""""""""""""""""""
^^^^^^^^^^^^^^^^^^^^^^^^^

On Mac OS and Windows, docker runs on a hypervisor that is not allocated much
memory by default, and some spack packages may fail to build due to lack of
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -9,42 +9,46 @@
Custom Extensions
=================

*Spack extensions* allow you to extend Spack capabilities by deploying your
*Spack extensions* permit you to extend Spack capabilities by deploying your
own custom commands or logic in an arbitrary location on your filesystem.
This might be extremely useful e.g. to develop and maintain a command whose purpose is
too specific to be considered for reintegration into the mainline or to
evolve a command through its early stages before starting a discussion to merge
it upstream.

From Spack's point of view an extension is any path in your filesystem which
respects the following naming and layout for files:
respects a prescribed naming and layout for files:

.. code-block:: console

spack-scripting/ # The top level directory must match the format 'spack-{extension_name}'
├── pytest.ini # Optional file if the extension ships its own tests
├── scripting # Folder that may contain modules that are needed for the extension commands
│ ├── cmd # Folder containing extension commands
│ │ └── filter.py # A new command that will be available
│ └── functions.py # Module with internal details
└── tests # Tests for this extension
│ └── cmd # Folder containing extension commands
│ └── filter.py # A new command that will be available
├── tests # Tests for this extension
│ ├── conftest.py
│ └── test_filter.py
└── templates # Templates that may be needed by the extension

In the example above, the extension is named *scripting*. It adds an additional command
(``spack filter``) and unit tests to verify its behavior.
In the example above the extension named *scripting* adds an additional command (``filter``)
and unit tests to verify its behavior. The code for this example can be
obtained by cloning the corresponding git repository:

The extension can import any core Spack module in its implementation. When loaded by
the ``spack`` command, the extension itself is imported as a Python package in the
``spack.extensions`` namespace. In the example above, since the extension is named
"scripting", the corresponding Python module is ``spack.extensions.scripting``.

The code for this example extension can be obtained by cloning the corresponding git repository:
.. TODO: write an ad-hoc "hello world" extension and make it part of the spack organization

.. code-block:: console

$ git -C /tmp clone https://github.com/spack/spack-scripting.git
$ cd ~/
$ mkdir tmp && cd tmp
$ git clone https://github.com/alalazo/spack-scripting.git
Cloning into 'spack-scripting'...
remote: Counting objects: 11, done.
remote: Compressing objects: 100% (7/7), done.
remote: Total 11 (delta 0), reused 11 (delta 0), pack-reused 0
Receiving objects: 100% (11/11), done.

As you can see by inspecting the sources, Python modules that are part of the extension
can import any core Spack module.

---------------------------------
Configure Spack to Use Extensions

@@ -57,7 +61,7 @@ paths to ``config.yaml``. In the case of our example this means ensuring that:

config:
extensions:
- /tmp/spack-scripting
- ~/tmp/spack-scripting

is part of your configuration file. Once this is setup any command that the extension provides
will be available from the command line:

@@ -82,32 +86,37 @@ will be available from the command line:
--implicit select specs that are not installed or were installed implicitly
--output OUTPUT where to dump the result

The corresponding unit tests can be run giving the appropriate options to ``spack unit-test``:
The corresponding unit tests can be run giving the appropriate options
to ``spack unit-test``:

.. code-block:: console

$ spack unit-test --extension=scripting
========================================== test session starts ===========================================
platform linux -- Python 3.11.5, pytest-7.4.3, pluggy-1.3.0
rootdir: /home/culpo/github/spack-scripting
configfile: pytest.ini
testpaths: tests
plugins: xdist-3.5.0

============================================================== test session starts ===============================================================
platform linux2 -- Python 2.7.15rc1, pytest-3.2.5, py-1.4.34, pluggy-0.4.0
rootdir: /home/mculpo/tmp/spack-scripting, inifile: pytest.ini
collected 5 items

tests/test_filter.py ..... [100%]
tests/test_filter.py ...XX
============================================================ short test summary info =============================================================
XPASS tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
XPASS tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]

========================================== slowest 30 durations ==========================================
2.31s setup tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
0.57s call tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
0.56s call tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
0.54s call tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]
0.54s call tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
0.48s call tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]

(5 durations < 0.005s hidden. Use -vv to show these durations.)
=========================================== 5 passed in 5.06s ============================================
=========================================================== slowest 20 test durations ============================================================
3.74s setup tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
0.17s call tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
0.16s call tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
0.15s call tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
0.13s call tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
0.08s call tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
0.04s teardown tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
0.00s setup tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
0.00s setup tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
0.00s setup tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
0.00s setup tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
0.00s teardown tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
0.00s teardown tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
0.00s teardown tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
0.00s teardown tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
====================================================== 3 passed, 2 xpassed in 4.51 seconds =======================================================
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.

SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -111,28 +111,3 @@ CUDA is split into fewer components and is simpler to specify:

      prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.


-----------------------------------
Using an External OpenGL API
-----------------------------------

Depending on whether we have a graphics card, we may choose OSMesa or GLX to provide the OpenGL API.

If a graphics card is unavailable, OSMesa is recommended and can typically be built with Spack.
However, if we prefer to use the system GLX tailored to our graphics card, we need to declare it as an external. Here is how to do it:

.. code-block:: yaml

   packages:
     libglx:
       require: [opengl]
     opengl:
       buildable: false
       externals:
       - prefix: /usr/
         spec: opengl@4.6

Note that the prefix has to be the root of both the libraries and the headers (here ``/usr``), not the path to the lib directory itself.
To find out which ``opengl`` spec is available, use ``cd /usr/include/GL && grep -Ri gl_version``.
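For example, on a system whose GL headers advertise OpenGL 4.6, the suggested grep might report something like the following (hypothetical output; the highest ``GL_VERSION_x_y`` macro found is a reasonable hint for the ``spec`` version to declare):

.. code-block:: console

   $ cd /usr/include/GL && grep -Ri gl_version
   glext.h:#define GL_VERSION_4_5 1
   glext.h:#define GL_VERSION_4_6 1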
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -97,35 +97,6 @@ Each package version and compiler listed in an external should
have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Extra attributes for external packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Sometimes external packages require additional attributes to be used
effectively. This information can be defined on a per-package basis
and stored in the ``extra_attributes`` section of the external package
configuration. In addition to per-package information, this section
can be used to define environment modifications to be performed
whenever the package is used. For example, if an external package is
built without ``rpath`` support, it may require ``LD_LIBRARY_PATH``
settings to find its dependencies. This could be configured as
follows:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: mpich@3.3 %clang@12.0.0 +hwloc
         prefix: /path/to/mpich
         extra_attributes:
           environment:
             prepend_path:
               LD_LIBRARY_PATH: /path/to/hwloc/lib64

See :ref:`configuration_environment_variables` for more information on
how to configure environment modifications in Spack config files.
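As a complement, a package recipe can read these attributes back from the external spec. A minimal, hypothetical sketch (the keys simply mirror the example configuration above, and ``extra_attributes`` is assumed to be populated from that YAML):

.. code-block:: python

   # Hypothetical: inspect the extra attributes of an external mpich dependency
   mpich = self.spec["mpich"]
   if mpich.external:
       env_mods = mpich.extra_attributes.get("environment", {})
       hwloc_libdir = env_mods.get("prepend_path", {}).get("LD_LIBRARY_PATH")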
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prevent packages from being built from sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -237,7 +237,7 @@ for details):
.. code-block:: python
   :linenos:

# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -4379,16 +4379,10 @@ implementation was selected for this build:

        elif "mvapich" in spec:
            configure_args.append("--with-mvapich")

It's also a bit more concise than satisfies.

.. note::

   The ``satisfies()`` method tests whether this spec has, at least, all the constraints of the argument spec,
   while ``in`` tests whether a spec or any of its dependencies satisfy the provided spec.

   If the provided spec is anonymous (e.g., ":1.2:", "+shared") or has the
   same name as the spec being checked, then ``in`` works the same as
   ``satisfies()``; however, use of ``satisfies()`` is more intuitive.
   It's also a bit more concise than satisfies. The difference between
   the two functions is that ``satisfies()`` tests whether spec
   constraints overlap at all, while ``in`` tests whether a spec or any
   of its dependencies satisfy the provided spec.
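To make the distinction concrete, here is a minimal sketch (the spec names and versions are illustrative only):

.. code-block:: python

   # Assume self.spec concretized to something like "hdf5@1.12 ^mvapich2@2.3"
   spec = self.spec

   "mvapich2" in spec          # True: a dependency satisfies "mvapich2"
   spec.satisfies("mvapich2")  # False: this spec is hdf5, not mvapich2
   spec.satisfies("@1.12:")    # True: anonymous specs constrain the spec itself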
^^^^^^^^^^^^^^^^^^^^^^^
Architecture specifiers
@@ -5290,7 +5284,7 @@ installed example.

        example = which(self.prefix.bin.example)
        example()

Output showing the identification of each test part after running the tests
Output showing the identification of each test part after runnig the tests
is illustrated below.

.. code-block:: console

@@ -5787,7 +5781,7 @@ with those implemented in the package itself.
   * - `Cxx
       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
     - Compiles and runs several ``hello`` programs
   * - `Fortran
   * - `Fortan
       <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
     - Compiles and runs ``hello`` programs (``F`` and ``f90``)
   * - `Mpi
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -6,8 +6,8 @@ python-levenshtein==0.23.0
docutils==0.20.1
pygments==2.17.2
urllib3==2.1.0
pytest==7.4.4
isort==5.13.2
black==23.12.1
flake8==7.0.0
mypy==1.8.0
pytest==7.4.3
isort==5.12.0
black==23.11.0
flake8==6.1.0
mypy==1.7.1
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -142,7 +142,7 @@ Reputational Key
----------------

The Reputational Key is the public facing key used to sign complete groups of
development and release packages. Only one key pair exists in this class of
development and release packages. Only one key pair exsits in this class of
keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
used to verify package integrity. At the end of develop and release pipeline a
final pipeline job pulls down all signed package metadata built by the pipeline,

@@ -272,7 +272,7 @@ Internal Implementation

The technical implementation of the pipeline signing process includes components
defined in Amazon Web Services, the Kubernetes cluster, at affilicated
institutions, and the GitLab/GitLab Runner deployment. We present the technical
institutions, and the GitLab/GitLab Runner deployment. We present the techincal
implementation in two interdependent sections. The first addresses how secrets
are managed through the lifecycle of a develop or release pipeline. The second
section describes how Gitlab Runner and pipelines are configured and managed to

@@ -295,7 +295,7 @@ infrastructure.
-----------------------

Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
run in AWS, and one key for each affiliated institution (e.g. University of
run in AWS, and one key for each affiliated institution (e.g. Univerity of
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:

The Intermediate CI Key (including the Signing Intermediate CI Private Key is

@@ -305,7 +305,7 @@ contains an ASCII-armored export of just the *public* components of the
Reputational Key. This secret also contains the *public* components of each of
the affiliated institutions' Intermediate CI Key. These are potentially needed
to verify dependent packages which may have been found in the public mirror or
built by a protected job running on an affiliated institution's infrastructure
built by a protected job running on an affiliated institution's infrastrcuture
in an earlier stage of the pipeline.

Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

2 lib/spack/env/cc vendored
@@ -1,7 +1,7 @@
#!/bin/sh -f
# shellcheck disable=SC2034  # evals in this script fool shellcheck
#
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

2 lib/spack/external/__init__.py vendored
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -1047,9 +1047,9 @@ def __bool__(self):
        """Whether any exceptions were handled."""
        return bool(self.exceptions)

    def forward(self, context: str, base: type = BaseException) -> "GroupedExceptionForwarder":
    def forward(self, context: str) -> "GroupedExceptionForwarder":
        """Return a contextmanager which extracts tracebacks and prefixes a message."""
        return GroupedExceptionForwarder(context, self, base)
        return GroupedExceptionForwarder(context, self)

    def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
        self.exceptions.append((context, exc, tb))

@@ -1072,18 +1072,15 @@ class GroupedExceptionForwarder:
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

    def __init__(self, context: str, handler: GroupedExceptionHandler, base: type):
    def __init__(self, context: str, handler: GroupedExceptionHandler):
        self._context = context
        self._handler = handler
        self._base = base

    def __enter__(self):
        return None

    def __exit__(self, exc_type, exc_value, tb):
        if exc_value is not None:
            if not issubclass(exc_type, self._base):
                return False
            self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))

        # Suppress any exception from being re-raised:
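A short, hypothetical usage sketch of the ``base`` parameter introduced above (the task names and ``bootstrap`` callable are made up, and ``grouped_message()`` is assumed to be the handler's existing summary helper):

.. code-block:: python

   handler = GroupedExceptionHandler()
   for target in ("clingo", "gnupg"):               # illustrative task names
       with handler.forward(target, base=RuntimeError):
           bootstrap(target)                        # hypothetical; only RuntimeErrors are collected
   if handler:                                      # True if any exception was forwarded
       print(handler.grouped_message())             # assumed summary helper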
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -204,23 +204,17 @@ def color_when(value):


class match_to_ansi:
    def __init__(self, color=True, enclose=False, zsh=False):
    def __init__(self, color=True, enclose=False):
        self.color = _color_when_value(color)
        self.enclose = enclose
        self.zsh = zsh

    def escape(self, s):
        """Returns a TTY escape sequence for a color"""
        if self.color:
            if self.zsh:
                result = rf"\e[0;{s}m"
            else:
                result = f"\033[{s}m"

            if self.enclose:
                result = rf"\[{result}\]"

            return result
                return r"\[\033[%sm\]" % s
            else:
                return "\033[%sm" % s
        else:
            return ""

@@ -267,11 +261,9 @@ def colorize(string, **kwargs):
        codes, for output to non-console devices.
    enclose (bool): If True, enclose ansi color sequences with
        square brackets to prevent misestimation of terminal width.
    zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
    """
    color = _color_when_value(kwargs.get("color", get_color_when()))
    zsh = kwargs.get("zsh", False)
    string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)
    string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string)
    string = string.replace("}}", "}")
    return string
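A small, hypothetical usage sketch of the new ``zsh`` keyword (the markup string is illustrative; ``@g{...}`` is Spack's color markup for green):

.. code-block:: python

   from llnl.util.tty.color import colorize

   # Hypothetical: build prompt fragments with escapes wrapped for width estimation
   bash_prompt = colorize("@g{[myenv]}", enclose=True)
   zsh_prompt = colorize("@g{[myenv]}", enclose=True, zsh=True)  # emits \e[...m style codes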
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -499,7 +499,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
|
||||
@package_properties
|
||||
def _ensure_packages_are_unparseable(pkgs, error_cls):
|
||||
"""Ensure that all packages can unparse and that unparsed code is valid Python"""
|
||||
import spack.util.package_hash as ph
|
||||
import spack.package_hash as ph
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
@@ -646,7 +646,11 @@ def _linting_package_file(pkgs, error_cls):
|
||||
if pkg_cls.homepage.startswith("http://"):
|
||||
https = re.sub("http", "https", pkg_cls.homepage, 1)
|
||||
try:
|
||||
response = urlopen(https)
|
||||
response = urlopen(
|
||||
https,
|
||||
verify_ssl=spack.config.get("config:verify_ssl", True),
|
||||
timeout=spack.config.get("config:connect_timeout", 10),
|
||||
)
|
||||
except Exception as e:
|
||||
msg = 'Error with attempting https for "{0}": '
|
||||
errors.append(error_cls(msg.format(pkg_cls.name), [str(e)]))
|
||||
@@ -667,8 +671,8 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
|
||||
# Check "conflicts" directive
|
||||
for trigger, conflicts in pkg_cls.conflicts.items():
|
||||
for conflict, _ in conflicts:
|
||||
for conflict, triggers in pkg_cls.conflicts.items():
|
||||
for trigger, _ in triggers:
|
||||
vrn = spack.spec.Spec(conflict)
|
||||
try:
|
||||
vrn.constrain(trigger)
|
||||
@@ -694,21 +698,25 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
)
|
||||
|
||||
# Check "depends_on" directive
|
||||
for trigger in pkg_cls.dependencies:
|
||||
vrn = spack.spec.Spec(trigger)
|
||||
errors.extend(
|
||||
_analyze_variants_in_directive(
|
||||
pkg_cls, vrn, directive="depends_on", error_cls=error_cls
|
||||
for _, triggers in pkg_cls.dependencies.items():
|
||||
triggers = list(triggers)
|
||||
for trigger in list(triggers):
|
||||
vrn = spack.spec.Spec(trigger)
|
||||
errors.extend(
|
||||
_analyze_variants_in_directive(
|
||||
pkg_cls, vrn, directive="depends_on", error_cls=error_cls
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Check "provides" directive
|
||||
for when_spec in pkg_cls.provided:
|
||||
errors.extend(
|
||||
_analyze_variants_in_directive(
|
||||
pkg_cls, when_spec, directive="provides", error_cls=error_cls
|
||||
# Check "patch" directive
|
||||
for _, triggers in pkg_cls.provided.items():
|
||||
triggers = [spack.spec.Spec(x) for x in triggers]
|
||||
for vrn in triggers:
|
||||
errors.extend(
|
||||
_analyze_variants_in_directive(
|
||||
pkg_cls, vrn, directive="patch", error_cls=error_cls
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Check "resource" directive
|
||||
for vrn in pkg_cls.resources:
|
||||
@@ -722,82 +730,47 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
|
||||
|
||||
@package_directives
|
||||
def _issues_in_depends_on_directive(pkgs, error_cls):
|
||||
"""Reports issues with 'depends_on' directives.
|
||||
|
||||
Issues might be unknown dependencies, unknown variants or variant values, or declaration
|
||||
of nested dependencies.
|
||||
"""
|
||||
def _unknown_variants_in_dependencies(pkgs, error_cls):
|
||||
"""Report unknown dependencies and wrong variants for dependencies"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
for dependency_name, dependency_data in pkg_cls.dependencies.items():
|
||||
# No need to analyze virtual packages
|
||||
if spack.repo.PATH.is_virtual(dependency_name):
|
||||
continue
|
||||
|
||||
for when, deps_by_name in pkg_cls.dependencies.items():
|
||||
for dep_name, dep in deps_by_name.items():
|
||||
# Check if there are nested dependencies declared. We don't want directives like:
|
||||
#
|
||||
# depends_on('foo+bar ^fee+baz')
|
||||
#
|
||||
# but we'd like to have two dependencies listed instead.
|
||||
nested_dependencies = dep.spec.dependencies()
|
||||
if nested_dependencies:
|
||||
summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
|
||||
ndir = len(nested_dependencies) + 1
|
||||
details = [
|
||||
f"split depends_on('{dep.spec}', when='{when}') into {ndir} directives",
|
||||
f"in {filename}",
|
||||
]
|
||||
errors.append(error_cls(summary=summary, details=details))
|
||||
try:
|
||||
dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
|
||||
except spack.repo.UnknownPackageError:
|
||||
# This dependency is completely missing, so report
|
||||
# and continue the analysis
|
||||
summary = pkg_name + ": unknown package '{0}' in " "'depends_on' directive".format(
|
||||
dependency_name
|
||||
)
|
||||
details = [" in " + filename]
|
||||
errors.append(error_cls(summary=summary, details=details))
|
||||
continue
|
||||
|
||||
def check_virtual_with_variants(spec, msg):
|
||||
if not spec.virtual or not spec.variants:
|
||||
return
|
||||
error = error_cls(
|
||||
f"{pkg_name}: {msg}",
|
||||
f"remove variants from '{spec}' in depends_on directive in {filename}",
|
||||
)
|
||||
errors.append(error)
|
||||
|
||||
check_virtual_with_variants(dep.spec, "virtual dependency cannot have variants")
|
||||
check_virtual_with_variants(dep.spec, "virtual when= spec cannot have variants")
|
||||
|
||||
# No need to analyze virtual packages
|
||||
if spack.repo.PATH.is_virtual(dep_name):
|
||||
continue
|
||||
|
||||
# check for unknown dependencies
|
||||
try:
|
||||
dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dep_name)
|
||||
except spack.repo.UnknownPackageError:
|
||||
# This dependency is completely missing, so report
|
||||
# and continue the analysis
|
||||
summary = (
|
||||
f"{pkg_name}: unknown package '{dep_name}' in " "'depends_on' directive"
|
||||
)
|
||||
details = [f" in {filename}"]
|
||||
errors.append(error_cls(summary=summary, details=details))
|
||||
continue
|
||||
|
||||
# check variants
|
||||
dependency_variants = dep.spec.variants
|
||||
for _, dependency_edge in dependency_data.items():
|
||||
dependency_variants = dependency_edge.spec.variants
|
||||
for name, value in dependency_variants.items():
|
||||
try:
|
||||
v, _ = dependency_pkg_cls.variants[name]
|
||||
v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
|
||||
except Exception as e:
|
||||
summary = (
|
||||
f"{pkg_name}: wrong variant used for dependency in 'depends_on()'"
|
||||
pkg_name + ": wrong variant used for a "
|
||||
"dependency in a 'depends_on' directive"
|
||||
)
|
||||
|
||||
error_msg = str(e).strip()
|
||||
if isinstance(e, KeyError):
|
||||
error_msg = (
|
||||
f"variant {str(e).strip()} does not exist in package {dep_name}"
|
||||
)
|
||||
error_msg += f" in package '{dep_name}'"
|
||||
error_msg = "the variant {0} does not " "exist".format(error_msg)
|
||||
error_msg += " in package '" + dependency_name + "'"
|
||||
|
||||
errors.append(
|
||||
error_cls(summary=summary, details=[error_msg, f"in {filename}"])
|
||||
error_cls(summary=summary, details=[error_msg, "in " + filename])
|
||||
)
|
||||
|
||||
return errors
|
||||
@@ -864,17 +837,14 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
|
||||
dependencies_to_check = []
|
||||
for dependency_name, dependency_data in pkg_cls.dependencies.items():
|
||||
# Skip virtual dependencies for the time being, check on
|
||||
# their versions can be added later
|
||||
if spack.repo.PATH.is_virtual(dependency_name):
|
||||
continue
|
||||
|
||||
for _, deps_by_name in pkg_cls.dependencies.items():
|
||||
for dep_name, dep in deps_by_name.items():
|
||||
# Skip virtual dependencies for the time being, check on
|
||||
# their versions can be added later
|
||||
if spack.repo.PATH.is_virtual(dep_name):
|
||||
continue
|
||||
|
||||
dependencies_to_check.append(dep.spec)
|
||||
dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])
|
||||
|
||||
host_architecture = spack.spec.ArchSpec.default_arch()
|
||||
for s in dependencies_to_check:
|
||||
@@ -946,53 +916,39 @@ def _named_specs_in_when_arguments(pkgs, error_cls):
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
|
||||
def _refers_to_pkg(when):
|
||||
when_spec = spack.spec.Spec(when)
|
||||
return when_spec.name is None or when_spec.name == pkg_name
|
||||
|
||||
def _error_items(when_dict):
|
||||
for when, elts in when_dict.items():
|
||||
if not _refers_to_pkg(when):
|
||||
yield when, elts, [f"using '{when}', should be '^{when}'"]
|
||||
|
||||
def _extracts_errors(triggers, summary):
|
||||
_errors = []
|
||||
for trigger in list(triggers):
|
||||
if not _refers_to_pkg(trigger):
|
||||
when_spec = spack.spec.Spec(trigger)
|
||||
if when_spec.name is not None and when_spec.name != pkg_name:
|
||||
details = [f"using '{trigger}', should be '^{trigger}'"]
|
||||
_errors.append(error_cls(summary=summary, details=details))
|
||||
return _errors
|
||||
|
||||
for when, dnames, details in _error_items(pkg_cls.dependencies):
|
||||
errors.extend(
|
||||
error_cls(f"{pkg_name}: wrong 'when=' condition for '{dname}' dependency", details)
|
||||
for dname in dnames
|
||||
)
|
||||
for dname, triggers in pkg_cls.dependencies.items():
|
||||
summary = f"{pkg_name}: wrong 'when=' condition for the '{dname}' dependency"
|
||||
errors.extend(_extracts_errors(triggers, summary))
|
||||
|
||||
for vname, (variant, triggers) in pkg_cls.variants.items():
|
||||
summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
|
||||
errors.extend(_extracts_errors(triggers, summary))
|
||||
|
||||
for when, providers, details in _error_items(pkg_cls.provided):
|
||||
errors.extend(
|
||||
error_cls(f"{pkg_name}: wrong 'when=' condition for '{provided}' virtual", details)
|
||||
for provided in providers
|
||||
)
|
||||
for provided, triggers in pkg_cls.provided.items():
|
||||
summary = f"{pkg_name}: wrong 'when=' condition for the '{provided}' virtual"
|
||||
errors.extend(_extracts_errors(triggers, summary))
|
||||
|
||||
for when, requirements, details in _error_items(pkg_cls.requirements):
|
||||
errors.append(
|
||||
error_cls(f"{pkg_name}: wrong 'when=' condition in 'requires' directive", details)
|
||||
)
|
||||
for _, triggers in pkg_cls.requirements.items():
|
||||
triggers = [when_spec for when_spec, _, _ in triggers]
|
||||
summary = f"{pkg_name}: wrong 'when=' condition in 'requires' directive"
|
||||
errors.extend(_extracts_errors(triggers, summary))
|
||||
|
||||
for when, _, details in _error_items(pkg_cls.patches):
|
||||
errors.append(
|
||||
error_cls(f"{pkg_name}: wrong 'when=' condition in 'patch' directives", details)
|
||||
)
|
||||
triggers = list(pkg_cls.patches)
|
||||
summary = f"{pkg_name}: wrong 'when=' condition in 'patch' directives"
|
||||
errors.extend(_extracts_errors(triggers, summary))
|
||||
|
||||
for when, _, details in _error_items(pkg_cls.resources):
|
||||
errors.append(
|
||||
error_cls(f"{pkg_name}: wrong 'when=' condition in 'resource' directives", details)
|
||||
)
|
||||
triggers = list(pkg_cls.resources)
|
||||
summary = f"{pkg_name}: wrong 'when=' condition in 'resource' directives"
|
||||
errors.extend(_extracts_errors(triggers, summary))
|
||||
|
||||
return llnl.util.lang.dedupe(errors)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -25,7 +25,7 @@
|
||||
import warnings
|
||||
from contextlib import closing, contextmanager
|
||||
from gzip import GzipFile
|
||||
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
|
||||
from typing import Dict, List, NamedTuple, Optional, Set, Tuple
|
||||
from urllib.error import HTTPError, URLError
|
||||
|
||||
import llnl.util.filesystem as fsys
|
||||
@@ -38,12 +38,14 @@
|
||||
import spack.config as config
|
||||
import spack.database as spack_db
|
||||
import spack.error
|
||||
import spack.gpg
|
||||
import spack.hooks
|
||||
import spack.hooks.sbang
|
||||
import spack.mirror
|
||||
import spack.oci.image
|
||||
import spack.oci.oci
|
||||
import spack.oci.opener
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.relocate as relocate
|
||||
import spack.repo
|
||||
@@ -52,7 +54,6 @@
|
||||
import spack.traverse as traverse
|
||||
import spack.util.crypto
|
||||
import spack.util.file_cache as file_cache
|
||||
import spack.util.gpg
|
||||
import spack.util.path
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -68,10 +69,7 @@
|
||||
|
||||
BUILD_CACHE_RELATIVE_PATH = "build_cache"
|
||||
BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"
|
||||
|
||||
#: The build cache layout version that this version of Spack creates.
|
||||
#: Version 2: includes parent directories of the package prefix in the tarball
|
||||
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
|
||||
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 1
|
||||
|
||||
|
||||
class BuildCacheDatabase(spack_db.Database):
|
||||
@@ -489,7 +487,10 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
|
||||
scheme = urllib.parse.urlparse(mirror_url).scheme
|
||||
|
||||
if scheme != "oci" and not web_util.url_exists(
|
||||
url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
|
||||
url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json"),
|
||||
fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
|
||||
verify_ssl=spack.config.get("config:verify_ssl"),
|
||||
timeout=spack.config.get("config:connect_timeout", 10),
|
||||
):
|
||||
return False
|
||||
|
||||
@@ -535,7 +536,9 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
|
||||
def binary_index_location():
|
||||
"""Set up a BinaryCacheIndex for remote buildcache dbs in the user's homedir."""
|
||||
cache_root = os.path.join(misc_cache_location(), "indices")
|
||||
return spack.util.path.canonicalize_path(cache_root)
|
||||
return spack.util.path.canonicalize_path(
|
||||
cache_root, replacements=spack.paths.path_replacements()
|
||||
)
|
||||
|
||||
|
||||
#: Default binary cache index instance
|
||||
@@ -830,7 +833,7 @@ def tarball_path_name(spec, ext):
|
||||
|
||||
def select_signing_key(key=None):
|
||||
if key is None:
|
||||
keys = spack.util.gpg.signing_keys()
|
||||
keys = spack.gpg.signing_keys()
|
||||
if len(keys) == 1:
|
||||
key = keys[0]
|
||||
|
||||
@@ -855,7 +858,7 @@ def sign_specfile(key, force, specfile_path):
|
||||
raise NoOverwriteException(signed_specfile_path)
|
||||
|
||||
key = select_signing_key(key)
|
||||
spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
|
||||
spack.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
|
||||
|
||||
|
||||
def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
|
||||
@@ -907,6 +910,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
|
||||
url_util.join(cache_prefix, "index.json"),
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
|
||||
verify_ssl=spack.config.get("config:verify_ssl", True),
|
||||
)
|
||||
|
||||
# Push the hash
|
||||
@@ -915,6 +919,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
|
||||
url_util.join(cache_prefix, "index.json.hash"),
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
|
||||
verify_ssl=spack.config.get("config:verify_ssl", True),
|
||||
)
|
||||
|
||||
|
||||
@@ -979,9 +984,13 @@ def _specs_from_cache_fallback(cache_prefix):
|
||||
def url_read_method(url):
|
||||
contents = None
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(url)
|
||||
_, _, spec_file = web_util.read_from_url(
|
||||
url,
|
||||
verify_ssl=spack.config.get("config:verify_ssl", True),
|
||||
timeout=spack.config.get("config:connect_timeout", 10),
|
||||
)
|
||||
contents = codecs.getreader("utf-8")(spec_file).read()
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
except (URLError, web_util.WebError) as url_err:
|
||||
tty.error("Error reading specfile: {0}".format(url))
|
||||
tty.error(url_err)
|
||||
return contents
|
||||
@@ -989,7 +998,9 @@ def url_read_method(url):
|
||||
try:
|
||||
file_list = [
|
||||
url_util.join(cache_prefix, entry)
|
||||
for entry in web_util.list_url(cache_prefix)
|
||||
for entry in web_util.list_url(
|
||||
cache_prefix, verify_ssl=spack.config.get("config:verify_ssl", True)
|
||||
)
|
||||
if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
|
||||
]
|
||||
read_fn = url_read_method
|
||||
@@ -1087,7 +1098,9 @@ def generate_key_index(key_prefix, tmpdir=None):
|
||||
try:
|
||||
fingerprints = (
|
||||
entry[:-4]
|
||||
for entry in web_util.list_url(key_prefix, recursive=False)
|
||||
for entry in web_util.list_url(
|
||||
key_prefix, recursive=False, verify_ssl=spack.config.get("config:verify_ssl", True)
|
||||
)
|
||||
if entry.endswith(".pub")
|
||||
)
|
||||
except KeyError as inst:
|
||||
@@ -1124,6 +1137,7 @@ def generate_key_index(key_prefix, tmpdir=None):
|
||||
url_util.join(key_prefix, "index.json"),
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "application/json"},
|
||||
verify_ssl=spack.config.get("config:verify_ssl", True),
|
||||
)
|
||||
except Exception as err:
|
||||
msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
|
||||
@@ -1177,16 +1191,6 @@ def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
|
||||
except OSError:
|
||||
files_to_skip = []
|
||||
|
||||
# First add all directories leading up to `prefix` (Spack <= 0.21 did not do this, leading to
|
||||
# issues when tarballs are used in runtimes like AWS lambda). Skip the file system root.
|
||||
parent_dirs = reversed(pathlib.Path(prefix).parents)
|
||||
next(parent_dirs) # skip the root: slices are supported from python 3.10
|
||||
for parent_dir in parent_dirs:
|
||||
dir_info = tarfile.TarInfo(_tarinfo_name(str(parent_dir)))
|
||||
dir_info.type = tarfile.DIRTYPE
|
||||
dir_info.mode = 0o755
|
||||
tar.addfile(dir_info)
|
||||
|
||||
dir_stack = [prefix]
|
||||
while dir_stack:
|
||||
dir = dir_stack.pop()
|
||||
@@ -1377,10 +1381,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))
|
||||
remote_spackfile_path = url_util.join(out_url, os.path.relpath(spackfile_path, stage_dir))
|
||||
|
||||
fetch_method = (spack.config.get("config:url_fetch_method", "urllib"),)
|
||||
verify_ssl = (spack.config.get("config:verify_ssl"),)
|
||||
timeout = spack.config.get("config:connect_timeout", 10)
|
||||
|
||||
url_args = {"fetch_method": fetch_method, "verify_ssl": verify_ssl, "timeout": timeout}
|
||||
|
||||
mkdirp(tarfile_dir)
|
||||
if web_util.url_exists(remote_spackfile_path):
|
||||
if web_util.url_exists(remote_spackfile_path, **url_args):
|
||||
if options.force:
|
||||
web_util.remove_url(remote_spackfile_path)
|
||||
web_util.remove_url(
|
||||
remote_spackfile_path, verify_ssl=spack.config.get("config:verify_ssl", True)
|
||||
)
|
||||
else:
|
||||
raise NoOverwriteException(url_util.format(remote_spackfile_path))
|
||||
|
||||
@@ -1400,12 +1412,13 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
|
||||
# If force and exists, overwrite. Otherwise raise exception on collision.
|
||||
if options.force:
|
||||
if web_util.url_exists(remote_specfile_path):
|
||||
web_util.remove_url(remote_specfile_path)
|
||||
if web_util.url_exists(remote_signed_specfile_path):
|
||||
web_util.remove_url(remote_signed_specfile_path)
|
||||
elif web_util.url_exists(remote_specfile_path) or web_util.url_exists(
|
||||
remote_signed_specfile_path
|
||||
verify_ssl = spack.config.get("config:verify_ssl", True)
|
||||
if web_util.url_exists(remote_specfile_path, **url_args):
|
||||
web_util.remove_url(remote_specfile_path, verify_ssl=verify_ssl)
|
||||
if web_util.url_exists(remote_signed_specfile_path, **url_args):
|
||||
web_util.remove_url(remote_signed_specfile_path, verify_ssl=verify_ssl)
|
||||
elif web_util.url_exists(remote_specfile_path, **url_args) or web_util.url_exists(
|
||||
remote_signed_specfile_path, **url_args
|
||||
):
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
|
||||
@@ -1439,11 +1452,17 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
sign_specfile(key, options.force, specfile_path)
|
||||
|
||||
# push tarball and signed spec json to remote mirror
|
||||
web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
|
||||
web_util.push_to_url(
|
||||
spackfile_path,
|
||||
remote_spackfile_path,
|
||||
keep_original=False,
|
||||
verify_ssl=spack.config.get("config:verify_ssl", True),
|
||||
)
|
||||
web_util.push_to_url(
|
||||
signed_specfile_path if not options.unsigned else specfile_path,
|
||||
remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
|
||||
keep_original=False,
|
||||
verify_ssl=spack.config.get("config:verify_ssl", True),
|
||||
)
|
||||
|
||||
# push the key to the build cache's _pgp directory so it can be
|
||||
@@ -1547,7 +1566,7 @@ def try_verify(specfile_path):
|
||||
suppress = config.get("config:suppress_gpg_warnings", False)
|
||||
|
||||
try:
|
||||
spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
|
||||
spack.gpg.verify(specfile_path, suppress_warnings=suppress)
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
@@ -1618,14 +1637,14 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
|
||||
return spec_dict, layout_version
|
||||
|
||||
|
||||
def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None):
|
||||
def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
"""
|
||||
Download binary tarball for given package into stage area, returning
|
||||
path to downloaded tarball if successful, None otherwise.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): Concrete spec
|
||||
unsigned: if ``True`` or ``False`` override the mirror signature verification defaults
|
||||
unsigned (bool): Whether or not to require signed binaries
|
||||
mirrors_for_spec (list): Optional list of concrete specs and mirrors
|
||||
obtained by calling binary_distribution.get_mirrors_for_spec().
|
||||
These will be checked in order first before looking in other
|
||||
@@ -1646,9 +1665,7 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
|
||||
"signature_verified": "true-if-binary-pkg-was-already-verified"
|
||||
}
|
||||
"""
|
||||
configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
|
||||
binary=True
|
||||
).values()
|
||||
configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
|
||||
if not configured_mirrors:
|
||||
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
|
||||
|
||||
@@ -1666,16 +1683,8 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
|
||||
# mirror for the spec twice though.
|
||||
try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
|
||||
try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
|
||||
mirror_urls = try_first + try_next
|
||||
|
||||
# TODO: turn `mirrors_for_spec` into a list of Mirror instances, instead of doing that here.
|
||||
def fetch_url_to_mirror(url):
|
||||
for mirror in configured_mirrors:
|
||||
if mirror.fetch_url == url:
|
||||
return mirror
|
||||
return spack.mirror.Mirror(url)
|
||||
|
||||
mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
|
||||
mirrors = try_first + try_next
|
||||
|
||||
tried_to_verify_sigs = []
|
||||
|
||||
@@ -1684,17 +1693,14 @@ def fetch_url_to_mirror(url):
|
||||
# we remove support for deprecated spec formats and buildcache layouts.
|
||||
for try_signed in (True, False):
|
||||
for mirror in mirrors:
|
||||
# Override mirror's default if
|
||||
currently_unsigned = unsigned if unsigned is not None else not mirror.signed
|
||||
|
||||
# If it's an OCI index, do things differently, since we cannot compose URLs.
|
||||
fetch_url = mirror.fetch_url
|
||||
parsed = urllib.parse.urlparse(mirror)
|
||||
|
||||
# TODO: refactor this to some "nice" place.
|
||||
if fetch_url.startswith("oci://"):
|
||||
ref = spack.oci.image.ImageReference.from_string(
|
||||
fetch_url[len("oci://") :]
|
||||
).with_tag(spack.oci.image.default_tag(spec))
|
||||
if parsed.scheme == "oci":
|
||||
ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag(
|
||||
spack.oci.image.default_tag(spec)
|
||||
)
|
||||
|
||||
# Fetch the manifest
|
||||
try:
|
||||
@@ -1731,7 +1737,7 @@ def fetch_url_to_mirror(url):
|
||||
except InvalidMetadataFile as e:
|
||||
tty.warn(
|
||||
f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
|
||||
f"from {fetch_url} due to invalid metadata file: {e}"
|
||||
f"from {mirror} due to invalid metadata file: {e}"
|
||||
)
|
||||
local_specfile_stage.destroy()
|
||||
continue
|
||||
@@ -1753,16 +1759,13 @@ def fetch_url_to_mirror(url):
|
||||
"tarball_stage": tarball_stage,
|
||||
"specfile_stage": local_specfile_stage,
|
||||
"signature_verified": False,
|
||||
"signature_required": not currently_unsigned,
|
||||
}
|
||||
|
||||
else:
|
||||
ext = "json.sig" if try_signed else "json"
|
||||
specfile_path = url_util.join(
|
||||
fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_prefix
|
||||
)
|
||||
specfile_path = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, specfile_prefix)
|
||||
specfile_url = f"{specfile_path}.{ext}"
|
||||
spackfile_url = url_util.join(fetch_url, BUILD_CACHE_RELATIVE_PATH, tarball)
|
||||
spackfile_url = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, tarball)
|
||||
local_specfile_stage = try_fetch(specfile_url)
|
||||
if local_specfile_stage:
|
||||
local_specfile_path = local_specfile_stage.save_filename
|
||||
@@ -1775,21 +1778,21 @@ def fetch_url_to_mirror(url):
|
||||
except InvalidMetadataFile as e:
|
||||
tty.warn(
|
||||
f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
|
||||
f"from {fetch_url} due to invalid metadata file: {e}"
|
||||
f"from {mirror} due to invalid metadata file: {e}"
|
||||
)
|
||||
local_specfile_stage.destroy()
|
||||
continue
|
||||
|
||||
if try_signed and not currently_unsigned:
|
||||
if try_signed and not unsigned:
|
||||
# If we found a signed specfile at the root, try to verify
|
||||
# the signature immediately. We will not download the
|
||||
# tarball if we could not verify the signature.
|
||||
tried_to_verify_sigs.append(specfile_url)
|
||||
signature_verified = try_verify(local_specfile_path)
|
||||
if not signature_verified:
|
||||
tty.warn(f"Failed to verify: {specfile_url}")
|
||||
tty.warn("Failed to verify: {0}".format(specfile_url))
|
||||
|
||||
if currently_unsigned or signature_verified or not try_signed:
|
||||
if unsigned or signature_verified or not try_signed:
|
||||
# We will download the tarball in one of three cases:
|
||||
# 1. user asked for --no-check-signature
|
||||
# 2. user didn't ask for --no-check-signature, but we
|
||||
@@ -1812,7 +1815,6 @@ def fetch_url_to_mirror(url):
|
||||
"tarball_stage": tarball_stage,
|
||||
"specfile_stage": local_specfile_stage,
|
||||
"signature_verified": signature_verified,
|
||||
"signature_required": not currently_unsigned,
|
||||
}
|
||||
|
||||
local_specfile_stage.destroy()
|
||||
@@ -2011,7 +2013,7 @@ def is_backup_file(file):
|
||||
relocate.relocate_text(text_names, prefix_to_prefix_text)
|
||||
|
||||
|
||||
def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
|
||||
def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
|
||||
stagepath = os.path.dirname(filename)
|
||||
spackfile_name = tarball_name(spec, ".spack")
|
||||
spackfile_path = os.path.join(stagepath, spackfile_name)
|
||||
@@ -2031,11 +2033,11 @@ def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool,
|
||||
else:
|
||||
raise ValueError("Cannot find spec file for {0}.".format(extract_to))
|
||||
|
||||
if signature_required:
|
||||
if not unsigned:
|
||||
if os.path.exists("%s.asc" % specfile_path):
|
||||
suppress = config.get("config:suppress_gpg_warnings", False)
|
||||
try:
|
||||
spack.util.gpg.verify("%s.asc" % specfile_path, specfile_path, suppress)
|
||||
spack.gpg.verify("%s.asc" % specfile_path, specfile_path, suppress)
|
||||
except Exception:
|
||||
raise NoVerifyException(
|
||||
"Spack was unable to verify package "
|
||||
@@ -2060,12 +2062,11 @@ def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool,
|
||||
|
||||
|
||||
def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
|
||||
"""Yield all members of tarfile that start with given prefix, and strip that prefix (including
|
||||
symlinks)"""
|
||||
"""Strip the top-level directory `prefix` from the member names in a tarfile."""
|
||||
# Including trailing /, otherwise we end up with absolute paths.
|
||||
regex = re.compile(re.escape(prefix) + "/*")
|
||||
|
||||
# Only yield members in the package prefix.
|
||||
# Remove the top-level directory from the member (link)names.
|
||||
# Note: when a tarfile is created, relative in-prefix symlinks are
|
||||
# expanded to matching member names of tarfile entries. So, we have
|
||||
# to ensure that those are updated too.
|
||||
@@ -2073,17 +2074,15 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
|
||||
# them.
|
||||
for m in tar.getmembers():
|
||||
result = regex.match(m.name)
|
||||
if not result:
|
||||
continue
|
||||
assert result is not None
|
||||
m.name = m.name[result.end() :]
|
||||
if m.linkname:
|
||||
result = regex.match(m.linkname)
|
||||
if result:
|
||||
m.linkname = m.linkname[result.end() :]
|
||||
yield m
|
||||
|
||||
|
||||
def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
|
||||
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
|
||||
"""
|
||||
extract binary tarball for given package into install area
|
||||
"""
|
||||
@@ -2109,8 +2108,7 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
|
||||
bchecksum = spec_dict["binary_cache_checksum"]
|
||||
|
||||
filename = download_result["tarball_stage"].save_filename
|
||||
signature_verified: bool = download_result["signature_verified"]
|
||||
signature_required: bool = download_result["signature_required"]
|
||||
signature_verified = download_result["signature_verified"]
|
||||
tmpdir = None
|
||||
|
||||
if layout_version == 0:
|
||||
@@ -2119,14 +2117,12 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
|
||||
# and another tarball containing the actual install tree.
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
try:
|
||||
tarfile_path = _extract_inner_tarball(
|
||||
spec, filename, tmpdir, signature_required, bchecksum
|
||||
)
|
||||
tarfile_path = _extract_inner_tarball(spec, filename, tmpdir, unsigned, bchecksum)
|
||||
except Exception as e:
|
||||
_delete_staged_downloads(download_result)
|
||||
shutil.rmtree(tmpdir)
|
||||
raise e
|
||||
elif 1 <= layout_version <= 2:
|
||||
elif layout_version == 1:
|
||||
# Newer buildcache layout: the .spack file contains just
|
||||
# in the install tree, the signature, if it exists, is
|
||||
# wrapped around the spec.json at the root. If sig verify
|
||||
@@ -2134,10 +2130,9 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
|
||||
# the tarball.
|
||||
tarfile_path = filename
|
||||
|
||||
if signature_required and not signature_verified:
|
||||
if not unsigned and not signature_verified:
|
||||
raise UnsignedPackageException(
|
||||
"To install unsigned packages, use the --no-check-signature option, "
|
||||
"or configure the mirror with signed: false."
|
||||
"To install unsigned packages, use the --no-check-signature option."
|
||||
)
|
||||
|
||||
# compute the sha256 checksum of the tarball
|
||||
@@ -2154,10 +2149,8 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
|
||||
try:
|
||||
with closing(tarfile.open(tarfile_path, "r")) as tar:
|
||||
# Remove install prefix from tarfil to extract directly into spec.prefix
|
||||
tar.extractall(
|
||||
path=spec.prefix,
|
||||
members=_tar_strip_component(tar, prefix=_ensure_common_prefix(tar)),
|
||||
)
|
||||
_tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
|
||||
tar.extractall(path=spec.prefix)
|
||||
except Exception:
|
||||
shutil.rmtree(spec.prefix, ignore_errors=True)
|
||||
_delete_staged_downloads(download_result)
|
||||
@@ -2192,47 +2185,20 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):


def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
# Find the lowest `binary_distribution` file (hard-coded forward slash is on purpose).
binary_distribution = min(
(
e.name
for e in tar.getmembers()
if e.isfile() and e.name.endswith(".spack/binary_distribution")
),
key=len,
default=None,
)
# Get the shortest length directory.
common_prefix = min((e.name for e in tar.getmembers() if e.isdir()), key=len, default=None)

if binary_distribution is None:
raise ValueError("Tarball is not a Spack package, missing binary_distribution file")
if common_prefix is None:
raise ValueError("Tarball does not contain a common prefix")

pkg_path = pathlib.PurePosixPath(binary_distribution).parent.parent

# Even the most ancient Spack version has required to list the dir of the package itself, so
# guard against broken tarballs where `path.parent.parent` is empty.
if pkg_path == pathlib.PurePosixPath():
raise ValueError("Invalid tarball, missing package prefix dir")

pkg_prefix = str(pkg_path)

# Ensure all tar entries are in the pkg_prefix dir, and if they're not, they should be parent
# dirs of it.
has_prefix = False
# Validate that each file starts with the prefix
for member in tar.getmembers():
stripped = member.name.rstrip("/")
if not (
stripped.startswith(pkg_prefix) or member.isdir() and pkg_prefix.startswith(stripped)
):
raise ValueError(f"Tarball contains file {stripped} outside of prefix {pkg_prefix}")
if member.isdir() and stripped == pkg_prefix:
has_prefix = True
if not member.name.startswith(common_prefix):
raise ValueError(
f"Tarball contains file {member.name} outside of prefix {common_prefix}"
)

# This is technically not required, but let's be defensive about the existence of the package
# prefix dir.
if not has_prefix:
raise ValueError(f"Tarball does not contain a common prefix {pkg_prefix}")

return pkg_prefix
return common_prefix


def install_root_node(spec, unsigned=False, force=False, sha256=None):
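To make the prefix computation in _ensure_common_prefix concrete: the grandparent directory of the shallowest `.spack/binary_distribution` entry becomes the package prefix, and every other member must live under it or be one of its parent directories. A small self-contained example with an invented package layout:

    import io
    import pathlib
    import tarfile

    prefix = "opt/spack/linux-x86_64/gcc-12/zlib-1.3-abc123"  # made-up install prefix
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w") as tar:
        for name, data in [
            (f"{prefix}/.spack/binary_distribution", b"{}"),
            (f"{prefix}/lib/libz.so", b"\x7fELF"),
        ]:
            info = tarfile.TarInfo(name)
            info.size = len(data)
            tar.addfile(info, io.BytesIO(data))

    buf.seek(0)
    with tarfile.open(fileobj=buf, mode="r") as tar:
        candidates = [
            m.name for m in tar.getmembers() if m.name.endswith(".spack/binary_distribution")
        ]
        # Grandparent of ".../<prefix>/.spack/binary_distribution" is the prefix itself.
        pkg_prefix = str(pathlib.PurePosixPath(min(candidates, key=len)).parent.parent)
        assert pkg_prefix == prefix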
@@ -2277,9 +2243,10 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
tty.debug("Verified SHA256 checksum of the build cache")

# don't print long padded paths while extracting/relocating binaries
with spack.util.path.filter_padding():
padding = spack.config.get("config:install_tree:padded_length", None)
with spack.util.path.filter_padding(padding=padding):
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force)
extract_tarball(spec, download_result, unsigned, force)
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, spack.store.STORE.layout)

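The padding change follows the same pattern used throughout this diff: the caller looks up the configuration value once and passes it in, rather than the utility reading global config internally. A rough sketch of that shape, assuming a hypothetical placeholder string and a filter that only wraps stdout (Spack's real filter_padding is more involved):

    import contextlib
    import io
    import re
    import sys

    class _PaddingFilter(io.TextIOBase):
        def __init__(self, stream, padding: int):
            self._stream = stream
            # Assumed placeholder; Spack pads install trees with a repeated marker.
            self._regex = re.compile(r"(/__spack_path_placeholder__)+")
            self._replacement = f"/[padded-to-{padding}-chars]"

        def write(self, text):
            return self._stream.write(self._regex.sub(self._replacement, text))

        def flush(self):
            self._stream.flush()

    @contextlib.contextmanager
    def filter_padding(padding=None):
        # No configured padding means no filtering at all.
        if not padding:
            yield
            return
        original, sys.stdout = sys.stdout, _PaddingFilter(sys.stdout, padding)
        try:
            yield
        finally:
            sys.stdout = original

    # The caller threads the configured value through explicitly, e.g.
    # padding = spack.config.get("config:install_tree:padded_length", None)
    with filter_padding(padding=512):
        print("/opt" + "/__spack_path_placeholder__" * 8 + "/zlib-1.3")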
@@ -2316,12 +2283,20 @@ def try_direct_fetch(spec, mirrors=None):
mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH, signed_specfile_name
)
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
_, _, fs = web_util.read_from_url(
buildcache_fetch_url_signed_json,
verify_ssl=spack.config.get("config:verify_ssl", True),
timeout=spack.config.get("config:connect_timeout", 10),
)
specfile_is_signed = True
except (URLError, web_util.SpackWebError, HTTPError) as url_err:
except (URLError, web_util.WebError, HTTPError) as url_err:
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
_, _, fs = web_util.read_from_url(
buildcache_fetch_url_json,
verify_ssl=spack.config.get("config:verify_ssl", True),
timeout=spack.config.get("config:connect_timeout", 10),
)
except (URLError, web_util.WebError, HTTPError) as url_err_x:
tty.debug(
"Did not find {0} on {1}".format(
specfile_name, buildcache_fetch_url_signed_json
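read_from_url now receives TLS verification and timeout settings from its callers instead of consulting global configuration itself. A minimal sketch of a helper with that signature, built directly on urllib (this is not Spack's web_util implementation, only the shape of the interface):

    import ssl
    import urllib.request

    def read_from_url(url: str, verify_ssl: bool = True, timeout: int = 10):
        # Build an SSL context that honours the caller's verify_ssl choice.
        context = ssl.create_default_context()
        if not verify_ssl:
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
        request = urllib.request.Request(url, headers={"User-Agent": "example-fetch"})
        response = urllib.request.urlopen(request, timeout=timeout, context=context)
        # Mirror the (url, headers, stream) triple the call sites above unpack.
        return url, response.headers, response

    # Hypothetical call site, with config values resolved by the caller:
    # _, _, fs = read_from_url(
    #     "https://mirror.example.com/build_cache/index.json",
    #     verify_ssl=True,
    #     timeout=10,
    # )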
@@ -2425,10 +2400,19 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
tty.debug("Finding public keys in {0}".format(url_util.format(fetch_url)))

try:
_, _, json_file = web_util.read_from_url(keys_index)
_, _, json_file = web_util.read_from_url(
keys_index,
verify_ssl=spack.config.get("config:verify_ssl", True),
timeout=spack.config.get("config:connect_timeout", 10),
)
json_index = sjson.load(codecs.getreader("utf-8")(json_file))
except (URLError, web_util.SpackWebError) as url_err:
if web_util.url_exists(keys_index):
except (URLError, web_util.WebError) as url_err:
if web_util.url_exists(
keys_index,
fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
verify_ssl=spack.config.get("config:verify_ssl"),
timeout=spack.config.get("config:connect_timeout", 10),
):
err_msg = [
"Unable to find public keys in {0},",
" caught exception attempting to read from {1}.",
@@ -2459,7 +2443,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
tty.debug("Found key {0}".format(fingerprint))
if install:
if trust:
spack.util.gpg.trust(stage.save_filename)
spack.gpg.trust(stage.save_filename)
tty.debug("Added this key to trusted keys.")
else:
tty.debug(
@@ -2477,7 +2461,7 @@ def push_keys(*mirrors, **kwargs):
tmpdir = kwargs.get("tmpdir")
remove_tmpdir = False

keys = spack.util.gpg.public_keys(*(keys or []))
keys = spack.gpg.public_keys(*(keys or []))

try:
for mirror in mirrors:
@@ -2509,7 +2493,7 @@ def push_keys(*mirrors, **kwargs):
export_target = os.path.join(prefix, filename)

# Export public keys (private is set to False)
spack.util.gpg.export_keys(export_target, [fingerprint])
spack.gpg.export_keys(export_target, [fingerprint])

# If mirror is local, the above export writes directly to the
# mirror (export_target points directly to the mirror).
@@ -2518,7 +2502,10 @@ def push_keys(*mirrors, **kwargs):
# uploaded to the mirror.
if not keys_local:
spack.util.web.push_to_url(
export_target, url_util.join(keys_url, filename), keep_original=False
export_target,
url_util.join(keys_url, filename),
keep_original=False,
verify_ssl=spack.config.get("config:verify_ssl", True),
)

if regenerate_index:
@@ -2552,7 +2539,12 @@ def needs_rebuild(spec, mirror_url):
# Only check for the presence of the json version of the spec. If the
# mirror only has the json version, or doesn't have the spec at all, we
# need to rebuild.
return not web_util.url_exists(specfile_path)
return not web_util.url_exists(
specfile_path,
fetch_method=spack.config.get("config:url_fetch_method", "urllib"),
verify_ssl=spack.config.get("config:verify_ssl"),
timeout=spack.config.get("config:connect_timeout", 10),
)


def check_specs_against_mirrors(mirrors, specs, output_file=None):
@@ -2718,7 +2710,11 @@ def get_remote_hash(self):
# Failure to fetch index.json.hash is not fatal
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
response = self.urlopen(
urllib.request.Request(url_index_hash, headers=self.headers),
verify_ssl=spack.config.get("config:verify_ssl", True),
timeout=spack.config.get("config:connect_timeout", 10),
)
except urllib.error.URLError:
return None

@@ -2740,7 +2736,11 @@ def conditional_fetch(self) -> FetchIndexResult:
url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")

try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
response = self.urlopen(
urllib.request.Request(url_index, headers=self.headers),
verify_ssl=spack.config.get("config:verify_ssl", True),
timeout=spack.config.get("config:connect_timeout", 10),
)
except urllib.error.URLError as e:
raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

@@ -2788,7 +2788,11 @@ def conditional_fetch(self) -> FetchIndexResult:
}

try:
response = self.urlopen(urllib.request.Request(url, headers=headers))
response = self.urlopen(
urllib.request.Request(url, headers=headers),
verify_ssl=spack.config.get("config:verify_ssl", True),
timeout=spack.config.get("config:connect_timeout", 10),
)
except urllib.error.HTTPError as e:
if e.getcode() == 304:
# Not modified; that means fresh.

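The last hunk above handles HTTP 304 in the ETag-based index fetch. A small sketch of that conditional-fetch pattern, assuming an If-None-Match header and treating 304 as "the cached index is still fresh" (the return shape is invented):

    import urllib.error
    import urllib.request

    def fetch_index_if_changed(url: str, etag: str = None, timeout: int = 10):
        headers = {"User-Agent": "example-fetch"}
        if etag:
            # Quote the cached ETag so the server can answer 304 if it still matches.
            headers["If-None-Match"] = f'"{etag}"'
        request = urllib.request.Request(url, headers=headers)
        try:
            response = urllib.request.urlopen(request, timeout=timeout)
        except urllib.error.HTTPError as e:
            if e.getcode() == 304:
                return None  # not modified: keep using the cached index
            raise
        return response.read()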
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -16,7 +16,6 @@
import llnl.util.filesystem as fs
from llnl.util import tty

import spack.platforms
import spack.store
import spack.util.environment
import spack.util.executable
@@ -207,19 +206,17 @@ def _root_spec(spec_str: str) -> str:
"""Add a proper compiler and target to a spec used during bootstrapping.

Args:
spec_str: spec to be bootstrapped. Must be without compiler and target.
spec_str (str): spec to be bootstrapped. Must be without compiler and target.
"""
# Add a compiler requirement to the root spec.
platform = str(spack.platforms.host())
if platform == "darwin":
# Add a proper compiler hint to the root spec. We use GCC for
# everything but MacOS and Windows.
if str(spack.platforms.host()) == "darwin":
spec_str += " %apple-clang"
elif platform == "windows":
elif str(spack.platforms.host()) == "windows":
# TODO (johnwparent): Remove version constraint when clingo patch is up
spec_str += " %msvc@:19.37"
elif platform == "linux":
else:
spec_str += " %gcc"
elif platform == "freebsd":
spec_str += " %clang"

target = archspec.cpu.host().family
spec_str += f" target={target}"

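The reworked _root_spec branches on the host platform once and appends a compiler hint plus the host CPU family. A worked example of what that produces, using the real archspec API but an invented spec string:

    import archspec.cpu

    def root_spec_with_hints(spec_str: str, platform: str) -> str:
        # Compiler hint per platform, mirroring the branch structure above.
        if platform == "darwin":
            spec_str += " %apple-clang"
        elif platform == "windows":
            spec_str += " %msvc@:19.37"
        elif platform == "linux":
            spec_str += " %gcc"
        elif platform == "freebsd":
            spec_str += " %clang"
        # Pin the target to the host CPU family so the bootstrap build is portable
        # across microarchitectures of the same family.
        return spec_str + f" target={archspec.cpu.host().family}"

    # e.g. root_spec_with_hints("clingo-bootstrap@spack+python", "linux")
    # -> "clingo-bootstrap@spack+python %gcc target=x86_64" on an x86_64 host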
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -45,7 +45,8 @@ def spec_for_current_python() -> str:
def root_path() -> str:
"""Root of all the bootstrap related folders"""
return spack.util.path.canonicalize_path(
spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path)
spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path),
replacements=spack.paths.path_replacements(),
)


@@ -79,12 +80,16 @@ def spack_python_interpreter() -> Generator:

def _store_path() -> str:
bootstrap_root_path = root_path()
return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store"))
return spack.util.path.canonicalize_path(
os.path.join(bootstrap_root_path, "store"), replacements=spack.paths.path_replacements()
)


def _config_path() -> str:
bootstrap_root_path = root_path()
return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config"))
return spack.util.path.canonicalize_path(
os.path.join(bootstrap_root_path, "config"), replacements=spack.paths.path_replacements()
)


@contextlib.contextmanager

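All three bootstrap path helpers now pass an explicit replacements table into canonicalize_path. A toy version of that pattern, with made-up placeholder names (Spack's spack.paths.path_replacements() and its canonicalization handle more cases):

    import os

    def canonicalize_path(path: str, replacements=None) -> str:
        # Substitute caller-provided placeholders first, then do the usual
        # user/env expansion and normalization.
        for placeholder, value in (replacements or {}).items():
            path = path.replace(placeholder, value)
        return os.path.abspath(os.path.expanduser(os.path.expandvars(path)))

    # Hypothetical replacement table, similar in spirit to path_replacements():
    replacements = {"$spack": "/opt/spack", "$user": os.path.expanduser("~")}
    print(canonicalize_path("$spack/bootstrap/store", replacements=replacements))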
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -92,7 +92,9 @@ class Bootstrapper:
def __init__(self, conf: ConfigDictionary) -> None:
self.conf = conf
self.name = conf["name"]
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
self.metadata_dir = spack.util.path.canonicalize_path(
conf["metadata"], replacements=spack.paths.path_replacements()
)

# Promote (relative) paths to file urls
url = conf["info"]["url"]
@@ -386,7 +388,7 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
exception_handler = GroupedExceptionHandler()

for current_config in bootstrapping_sources():
with exception_handler.forward(current_config["name"], Exception):
with exception_handler.forward(current_config["name"]):
source_is_enabled_or_raise(current_config)
current_bootstrapper = create_bootstrapper(current_config)
if current_bootstrapper.try_import(module, abstract_spec):
@@ -441,7 +443,7 @@ def ensure_executables_in_path_or_raise(
exception_handler = GroupedExceptionHandler()

for current_config in bootstrapping_sources():
with exception_handler.forward(current_config["name"], Exception):
with exception_handler.forward(current_config["name"]):
source_is_enabled_or_raise(current_config)
current_bootstrapper = create_bootstrapper(current_config)
if current_bootstrapper.try_search_path(executables, abstract_spec):
@@ -585,7 +587,9 @@ def bootstrapping_sources(scope: Optional[str] = None):
list_of_sources = []
for entry in source_configs:
current = copy.copy(entry)
metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
metadata_dir = spack.util.path.canonicalize_path(
entry["metadata"], replacements=spack.paths.path_replacements()
)
metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
with open(metadata_yaml, encoding="utf-8") as stream:
current.update(spack.util.spack_yaml.load(stream))

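Both bootstrap loops now pass an explicit exception base class to GroupedExceptionHandler.forward. A compact sketch of the grouped-handler idea — each source is tried inside forward(), failures are recorded rather than raised, and the caller reports them together if no source succeeded. Only the forward(name, base_class) call shape is taken from the diff; the rest is invented for illustration:

    import contextlib
    import traceback

    class GroupedExceptionHandler:
        def __init__(self):
            self.errors = []

        @contextlib.contextmanager
        def forward(self, name: str, base_class=Exception):
            # Swallow matching exceptions and remember who raised them.
            try:
                yield
            except base_class as exc:
                self.errors.append((name, exc, traceback.format_exc()))

        def __bool__(self):
            return bool(self.errors)

    handler = GroupedExceptionHandler()
    for source in ("github-actions-v0.5", "spack-install"):  # hypothetical source names
        with handler.forward(source, Exception):
            raise RuntimeError(f"{source} is unreachable")
    assert len(handler.errors) == 2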
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -16,9 +16,11 @@
from llnl.util import tty

import spack.environment
import spack.paths
import spack.tengine
import spack.util.cpus
import spack.util.executable
from spack.environment import depfile

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path
@@ -49,7 +51,8 @@ def environment_root(cls) -> pathlib.Path:
environment_dir = f"{python_part}-{arch_part}-{interpreter_part}"
return pathlib.Path(
spack.util.path.canonicalize_path(
os.path.join(bootstrap_root_path, "environments", environment_dir)
os.path.join(bootstrap_root_path, "environments", environment_dir),
replacements=spack.paths.path_replacements(),
)
)

@@ -85,9 +88,12 @@ def __init__(self) -> None:
super().__init__(self.environment_root())

def update_installations(self) -> None:
"""Update the installations of this environment."""
log_enabled = tty.is_debug() or tty.is_verbose()
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
"""Update the installations of this environment.

The update is done using a depfile on Linux and macOS, and using the ``install_all``
method of environments on Windows.
"""
with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
specs = self.concretize()
if specs:
colorized_specs = [
@@ -96,9 +102,11 @@ def update_installations(self) -> None:
]
tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
self.write(regenerate=False)
with tty.SuppressOutput(msg_enabled=log_enabled, warn_enabled=log_enabled):
if sys.platform == "win32":
self.install_all()
self.write(regenerate=True)
else:
self._install_with_depfile()
self.write(regenerate=True)

def update_syspath_and_environ(self) -> None:
"""Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
@@ -116,6 +124,27 @@ def update_syspath_and_environ(self) -> None:
+ [str(x) for x in self.pythonpaths()]
)

def _install_with_depfile(self) -> None:
model = depfile.MakefileModel.from_env(self)
template = spack.tengine.make_environment().get_template(
os.path.join("depfile", "Makefile")
)
makefile = self.environment_root() / "Makefile"
makefile.write_text(template.render(model.to_dict()))
make = spack.util.executable.which("make")
kwargs = {}
if not tty.is_debug():
kwargs = {"output": os.devnull, "error": os.devnull}
make(
"-C",
str(self.environment_root()),
"-j",
str(
spack.util.cpus.determine_number_of_jobs(parallel=True, config=spack.config.CONFIG)
),
**kwargs,
)

def _write_spack_yaml_file(self) -> None:
tty.msg(
"[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"

Some files were not shown because too many files have changed in this diff.