Compare commits

20 Commits: install-st...features/r

SHA1
6bcea7e9f4
74263b2016
8f480c5c13
2129eca951
c920ad14e6
13a0d3cdfa
c1665567a5
7b5e468a5b
5d7cf204f6
4c3d9ca525
7a174666e8
37894de044
b5d9e5da63
fea922e92e
39914cc5ef
1a0a4e1237
5a144722a0
f048fd2a18
ee8954ecec
bf23c83861
.github/pull_request_template.md (6 changes, vendored)

@@ -1,6 +0,0 @@
<!--
Remember that `spackbot` can help with your PR in multiple ways:
- `@spackbot help` shows all the commands that are currently available
- `@spackbot fix style` tries to push a commit to fix style issues in this PR
- `@spackbot re-run pipeline` runs the pipelines again, if you have write access to the repository
-->
.github/workflows/audit.yaml (4 changes, vendored)

@@ -23,7 +23,7 @@ jobs:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
@@ -43,7 +43,7 @@ jobs:
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # @v2.1.0
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits
.github/workflows/bootstrap.yml (2 changes, vendored)

@@ -159,7 +159,7 @@ jobs:
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: "3.12"
- name: Bootstrap clingo
.github/workflows/build-containers.yml (4 changes, vendored)

@@ -57,7 +57,7 @@ jobs:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
- uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
id: docker_meta
with:
images: |
@@ -118,5 +118,7 @@ jobs:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
.github/workflows/ci.yaml (2 changes, vendored)

@@ -40,7 +40,7 @@ jobs:
with:
fetch-depth: 0
# For pull requests it's not necessary to checkout the code
- uses: dorny/paths-filter@ebc4d7e9ebcb0b1eb21480bb8f43113e996ac77a
- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
id: filter
with:
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
.github/workflows/nightly-win-builds.yml (2 changes, vendored)

@@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
.github/workflows/style/requirements.txt (10 changes, vendored)

@@ -1,7 +1,7 @@
black==24.2.0
clingo==5.7.1
flake8==7.0.0
isort==5.13.2
mypy==1.8.0
black==23.11.0
clingo==5.6.2
flake8==6.1.0
isort==5.12.0
mypy==1.6.1
types-six==1.16.21.9
vermin==1.6.0
.github/workflows/unit_tests.yaml (16 changes, vendored)

@@ -54,7 +54,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -91,7 +91,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,linux,${{ matrix.concretizer }}
# Test shell integration
@@ -101,7 +101,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -122,7 +122,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: shelltests,linux
@@ -159,7 +159,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -181,7 +181,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # @v2.1.0
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
with:
flags: unittests,linux,clingo
# Run unit tests on MacOS
@@ -194,7 +194,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
@@ -216,6 +216,6 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,macos
.github/workflows/valid-style.yml (4 changes, vendored)

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: '3.11'
cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: '3.11'
cache: 'pip'
.github/workflows/windows_python.yml (10 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
@@ -33,7 +33,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,windows
unit-tests-cmd:
@@ -42,7 +42,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
@@ -57,7 +57,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,windows
build-abseil:
@@ -66,7 +66,7 @@ jobs:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
with:
fetch-depth: 0
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
with:
python-version: 3.9
- name: Install Python packages
CITATION.cff (20 changes)

@@ -31,17 +31,13 @@ type: software
message: "If you are referencing Spack in a publication, please cite the paper below."
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
abstract: >-
Large HPC centers spend considerable time supporting software for thousands of users, but the
complexity of HPC software is quickly outpacing the capabilities of existing software management
tools. Scientific applications require specific versions of compilers, MPI, and other dependency
libraries, so using a single, standard software stack is infeasible. However, managing many
configurations is difficult because the configuration space is combinatorial in size. We
introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
Spack provides a novel, re- cursive specification syntax to invoke parametric builds of packages
and dependencies. It allows any number of builds to coexist on the same system, and it ensures
that installed packages can find their dependencies, regardless of the environment. We show
through real-world use cases that Spack supports diverse and demanding applications, bringing
order to HPC software chaos.
Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
However, managing many configurations is difficult because the configuration space is combinatorial in size.
We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
Spack provides a novel, re- cursive specification syntax to invoke parametric builds of packages and dependencies.
It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
preferred-citation:
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
type: conference-paper
@@ -75,7 +71,7 @@ preferred-citation:
type: doi
value: 10.1145/2807591.2807623
- description: "The DOE Document Release Number of the work"
type: other
type: other
value: "LLNL-CONF-669890"
authors:
- family-names: "Gamblin"
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2013-2024 LLNS, LLC and other Spack Project Developers.
Copyright (c) 2013-2023 LLNS, LLC and other Spack Project Developers.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
README.md (44 changes)

@@ -1,34 +1,13 @@
<div align="left">
# <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack

<h2>
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-white-text.svg" width="250">
<source media="(prefers-color-scheme: light)" srcset="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-text.svg" width="250">
<img alt="Spack" src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo-text.svg" width="250">
</picture>

<br>
<br clear="all">

<a href="https://github.com/spack/spack/actions/workflows/ci.yml"><img src="https://github.com/spack/spack/workflows/ci/badge.svg" alt="CI Status"></a>
<a href="https://github.com/spack/spack/actions/workflows/bootstrapping.yml"><img src="https://github.com/spack/spack/actions/workflows/bootstrap.yml/badge.svg" alt="Bootstrap Status"></a>
<a href="https://github.com/spack/spack/actions/workflows/build-containers.yml"><img src="https://github.com/spack/spack/actions/workflows/build-containers.yml/badge.svg" alt="Containers Status"></a>
<a href="https://spack.readthedocs.io"><img src="https://readthedocs.org/projects/spack/badge/?version=latest" alt="Documentation Status"></a>
<a href="https://codecov.io/gh/spack/spack"><img src="https://codecov.io/gh/spack/spack/branch/develop/graph/badge.svg" alt="Code coverage"/></a>
<a href="https://slack.spack.io"><img src="https://slack.spack.io/badge.svg" alt="Slack"/></a>
<a href="https://matrix.to/#/#spack-space:matrix.org"><img src="https://img.shields.io/matrix/spack-space%3Amatrix.org?label=matrix" alt="Matrix"/></a>

</h2>

**[Getting Started] • [Config] • [Community] • [Contributing] • [Packaging Guide]**

[Getting Started]: https://spack.readthedocs.io/en/latest/getting_started.html
[Config]: https://spack.readthedocs.io/en/latest/configuration.html
[Community]: #community
[Contributing]: https://spack.readthedocs.io/en/latest/contribution_guide.html
[Packaging Guide]: https://spack.readthedocs.io/en/latest/packaging_guide.html

</div>
[](https://github.com/spack/spack/actions)
[](https://github.com/spack/spack/actions/workflows/bootstrap.yml)
[](https://codecov.io/gh/spack/spack)
[](https://github.com/spack/spack/actions/workflows/build-containers.yml)
[](https://spack.readthedocs.io)
[](https://github.com/psf/black)
[](https://slack.spack.io)
[](https://matrix.to/#/#spack-space:matrix.org)

Spack is a multi-platform package manager that builds and installs
multiple versions and configurations of software. It works on Linux,
@@ -87,11 +66,10 @@ Resources:
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
for Q&A and discussions. Note the pinned discussions for announcements.
not just for discussions, but also Q&A.
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
only for announcements. Please use other venues for discussions.

Contributing
------------------------
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,6 +1,6 @@
#!/bin/sh
#
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# sbang project developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,7 +1,7 @@
#!/bin/sh
# -*- python -*-
#
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,6 +1,6 @@
#!/bin/sh
#
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
:: Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
:: Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
:: Spack Project Developers. See the top-level COPYRIGHT file for details.
::
:: SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.

# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1130,10 +1130,6 @@ A version specifier can also be a list of ranges and specific versions,
separated by commas. For example, ``@1.0:1.5,=1.7.1`` matches any version
in the range ``1.0:1.5`` and the specific version ``1.7.1``.

^^^^^^^^^^^^
Git versions
^^^^^^^^^^^^

For packages with a ``git`` attribute, ``git`` references
may be specified instead of a numerical version i.e. branches, tags
and commits. Spack will stage and build based off the ``git``
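For context, a minimal sketch of the syntax this hunk describes — a package with a ``git`` attribute exposing branch- and tag-based versions next to a comma-separated version list. The package, branch, and version names are hypothetical and not taken from this diff:

```python
# Hypothetical package sketch, not a real Spack package.
from spack.package import *


class Foo(Package):
    homepage = "https://example.com/foo"
    git = "https://github.com/example/foo.git"

    # Git references usable as versions: a branch and a tag.
    version("develop", branch="develop")
    version("1.2.3", tag="v1.2.3")

    # A version specifier that is a list of ranges and specific versions.
    depends_on("bar@1.0:1.5,=1.7.1")
```

On the command line, a branch of such a package can typically be requested with a spec like ``foo@git.develop`` (hedged: exact spelling depends on the Spack version in use).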
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -153,43 +153,7 @@ keyring, and trusting all downloaded keys.
List of popular build caches
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_'

-------------------
Build cache signing
-------------------

By default, Spack will add a cryptographic signature to each package pushed to
a build cache, and verifies the signature when installing from a build cache.

Keys for signing can be managed with the :ref:`spack gpg <cmd-spack-gpg>` command,
as well as ``spack buildcache keys`` as mentioned above.

You can disable signing when pushing with ``spack buildcache push --unsigned``,
and disable verification when installing from any build cache with
``spack install --no-check-signature``.

Alternatively, signing and verification can be enabled or disabled on a per build cache
basis:

.. code-block:: console

   $ spack mirror add --signed <name> <url>  # enable signing and verification
   $ spack mirror add --unsigned <name> <url>  # disable signing and verification

   $ spack mirror set --signed <name>  # enable signing and verification for an existing mirror
   $ spack mirror set --unsigned <name>  # disable signing and verification for an existing mirror

Or you can directly edit the ``mirrors.yaml`` configuration file:

.. code-block:: yaml

   mirrors:
     <name>:
       url: <url>
       signed: false # disable signing and verification

See also :ref:`mirrors`.
* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_

----------
Relocation
@@ -287,13 +251,87 @@ To significantly speed up Spack in GitHub Actions, binaries can be cached in
GitHub Packages. This service is an OCI registry that can be linked to a GitHub
repository.

A typical workflow is to include a ``spack.yaml`` environment in your repository
that specifies the packages to install, the target architecture, and the build
cache to use under ``mirrors``:

.. code-block:: yaml

   spack:
     specs:
     - python@3.11
     config:
       install_tree:
         root: /opt/spack
         padded_length: 128
     packages:
       all:
         require: target=x86_64_v2
     mirrors:
       local-buildcache: oci://ghcr.io/<organization>/<repository>

A GitHub action can then be used to install the packages and push them to the
build cache:

.. code-block:: yaml

   name: Install Spack packages

   on: push

   env:
     SPACK_COLOR: always

   jobs:
     example:
       runs-on: ubuntu-22.04
       permissions:
         packages: write
       steps:
       - name: Checkout
         uses: actions/checkout@v3

       - name: Checkout Spack
         uses: actions/checkout@v3
         with:
           repository: spack/spack
           path: spack

       - name: Setup Spack
         run: echo "$PWD/spack/bin" >> "$GITHUB_PATH"

       - name: Concretize
         run: spack -e . concretize

       - name: Install
         run: spack -e . install --no-check-signature

       - name: Run tests
         run: ./my_view/bin/python3 -c 'print("hello world")'

       - name: Push to buildcache
         run: |
           spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache
           spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache
         if: ${{ !cancelled() }}

The first time this action runs, it will build the packages from source and
push them to the build cache. Subsequent runs will pull the binaries from the
build cache. The concretizer will ensure that prebuilt binaries are favored
over source builds.

The build cache entries appear in the GitHub Packages section of your repository,
and contain instructions for pulling and running them with ``docker`` or ``podman``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Spack's public build cache for GitHub Actions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack offers a public build cache for GitHub Actions with a set of common packages,
which lets you get started quickly. See the following resources for more information:

* `spack/setup-spack <https://github.com/spack/setup-spack>`_ for setting up Spack in GitHub
  Actions
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_ for
  more details on the public build cache
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_

.. _cmd-spack-buildcache:
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -82,7 +82,7 @@ class already contains:

.. code-block:: python

   depends_on("cmake", type="build")
   depends_on('cmake', type='build')

If you need to specify a particular version requirement, you can
@@ -90,7 +90,7 @@ override this in your package:

.. code-block:: python

   depends_on("cmake@2.8.12:", type="build")
   depends_on('cmake@2.8.12:', type='build')

^^^^^^^^^^^^^^^^^^^
@@ -137,10 +137,10 @@ and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and

   def cmake_args(self):
       args = [
           "-DWHATEVER:STRING=somevalue",
           self.define("ENABLE_BROKEN_FEATURE", False),
           self.define_from_variant("DETECT_HDF5", "hdf5"),
           self.define_from_variant("THREADS"),  # True if +threads
           '-DWHATEVER:STRING=somevalue',
           self.define('ENABLE_BROKEN_FEATURE', False),
           self.define_from_variant('DETECT_HDF5', 'hdf5'),
           self.define_from_variant('THREADS'),  # True if +threads
       ]

       return args
@@ -151,10 +151,10 @@ and CMake simply ignores the empty command line argument. For example the follow

.. code-block:: python

   variant("example", default=True, when="@2.0:")
   variant('example', default=True, when='@2.0:')

   def cmake_args(self):
       return [self.define_from_variant("EXAMPLE", "example")]
       return [self.define_from_variant('EXAMPLE', 'example')]

will generate ``'cmake' '-DEXAMPLE=ON' ...`` when `@2.0: +example` is met, but will
result in ``'cmake' '' ...`` when the spec version is below ``2.0``.
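As a hedged illustration of what the two helpers in these hunks expand to (the flag spellings shown in comments reflect the commonly documented ``CMakePackage`` behavior and are not taken from this diff):

```python
# Hypothetical sketch: typical expansions of the CMakePackage helpers above.
def cmake_args(self):
    return [
        # define(name, value) turns a Python value into a -D flag;
        # booleans are rendered as ON/OFF:
        self.define("ENABLE_BROKEN_FEATURE", False),      # ~ "-DENABLE_BROKEN_FEATURE:BOOL=OFF"
        # define_from_variant(name, variant) reads the value from the spec,
        # e.g. a spec containing "+hdf5" yields:
        self.define_from_variant("DETECT_HDF5", "hdf5"),  # ~ "-DDETECT_HDF5:BOOL=ON"
    ]
```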
@@ -193,9 +193,9 @@ a variant to control this:

.. code-block:: python

   variant("build_type", default="RelWithDebInfo",
           description="CMake build type",
           values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"))
   variant('build_type', default='RelWithDebInfo',
           description='CMake build type',
           values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))

However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
@@ -205,9 +205,9 @@ package overrides the default variant with:

.. code-block:: python

   variant("build_type", default="DebugRelease",
           description="The build type to build",
           values=("Debug", "Release", "DebugRelease"))
   variant('build_type', default='DebugRelease',
           description='The build type to build',
           values=('Debug', 'Release', 'DebugRelease'))

For more information on ``CMAKE_BUILD_TYPE``, see:
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:

.. code-block:: python

   generator = "Ninja"
   generator = 'Ninja'

``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
@@ -258,7 +258,7 @@ Ninja generator, make sure to add:

.. code-block:: python

   depends_on("ninja", type="build")
   depends_on('ninja', type='build')

to the package as well. Aside from that, you shouldn't need to do
anything else. Spack will automatically detect that you are using
@@ -288,7 +288,7 @@ like so:

.. code-block:: python

   root_cmakelists_dir = "src"
   root_cmakelists_dir = 'src'

Note that this path is relative to the root of the extracted tarball,
@@ -304,7 +304,7 @@ different sub-directory, simply override ``build_directory`` like so:

.. code-block:: python

   build_directory = "my-build"
   build_directory = 'my-build'

^^^^^^^^^^^^^^^^^^^^^^^^^
Build and install targets
@@ -324,8 +324,8 @@ library or build the documentation, you can add these like so:

.. code-block:: python

   build_targets = ["all", "docs"]
   install_targets = ["install", "docs"]
   build_targets = ['all', 'docs']
   install_targets = ['install', 'docs']

^^^^^^^
Testing
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -90,7 +90,7 @@ and optimizers do require a paid license. In Spack, they are packaged as:
TODO: Confirm and possible change(!) the scope of MPI components (runtime
vs. devel) in current (and previous?) *cluster/professional/composer*
editions, i.e., presence in downloads, possibly subject to license
coverage(!); see `discussion in PR #4300
coverage(!); see `disussion in PR #4300
<https://github.com/spack/spack/pull/4300#issuecomment-305582898>`_. [NB:
An "mpi" subdirectory is not indicative of the full MPI SDK being present
(i.e., ``mpicc``, ..., and header files). The directory may just as well
@@ -934,9 +934,9 @@ a *virtual* ``mkl`` package is declared in Spack.

.. code-block:: python

   # Examples for absolute and conditional dependencies:
   depends_on("mkl")
   depends_on("mkl", when="+mkl")
   depends_on("mkl", when="fftw=mkl")
   depends_on('mkl')
   depends_on('mkl', when='+mkl')
   depends_on('mkl', when='fftw=mkl')

The ``MKLROOT`` environment variable (part of the documented API) will be set
during all stages of client package installation, and is available to both
@@ -972,8 +972,8 @@ a *virtual* ``mkl`` package is declared in Spack.

   def configure_args(self):
       args = []
       ...
       args.append("--with-blas=%s" % self.spec["blas"].libs.ld_flags)
       args.append("--with-lapack=%s" % self.spec["lapack"].libs.ld_flags)
       args.append('--with-blas=%s' % self.spec['blas'].libs.ld_flags)
       args.append('--with-lapack=%s' % self.spec['lapack'].libs.ld_flags)
       ...

.. tip::
@@ -989,13 +989,13 @@ a *virtual* ``mkl`` package is declared in Spack.

.. code-block:: python

   self.spec["blas"].headers.include_flags
   self.spec['blas'].headers.include_flags

and to generate linker options (``-L<dir> -llibname ...``), use the same as above,

.. code-block:: python

   self.spec["blas"].libs.ld_flags
   self.spec['blas'].libs.ld_flags

See
:ref:`MakefilePackage <makefilepackage>`
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -88,7 +88,7 @@ override the ``luarocks_args`` method like so:

.. code-block:: python

   def luarocks_args(self):
       return ["flag1", "flag2"]
       return ['flag1', 'flag2']

One common use of this is to override warnings or flags for newer compilers, as in:
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -88,13 +88,13 @@ command-line. However, Makefiles that use ``?=`` for assignment honor
environment variables. Since Spack already sets ``CC``, ``CXX``, ``F77``,
and ``FC``, you won't need to worry about setting these variables. If
there are any other variables you need to set, you can do this in the
``setup_build_environment`` method:
``edit`` method:

.. code-block:: python

   def setup_build_environment(self, env):
       env.set("PREFIX", prefix)
       env.set("BLASLIB", spec["blas"].libs.ld_flags)
   def edit(self, spec, prefix):
       env["PREFIX"] = prefix
       env["BLASLIB"] = spec["blas"].libs.ld_flags

`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
@@ -140,7 +140,7 @@ Edit Makefile
Some Makefiles are just plain stubborn and will ignore command-line
variables. The only way to ensure that these packages build correctly
is to directly edit the Makefile. Spack provides a ``FileFilter`` class
and a ``filter`` method to help with this. For example:
and a ``filter_file`` method to help with this. For example:

.. code-block:: python
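The hunk stops at the ``code-block`` directive. As a hedged sketch of the kind of ``FileFilter``-based ``edit()`` method the surrounding text describes (package, variable, and file names are hypothetical, not taken from this diff):

```python
# Hypothetical sketch: rewrite hard-coded Makefile variables in place.
def edit(self, spec, prefix):
    makefile = FileFilter("Makefile")
    # Point the build at Spack's compiler wrappers
    makefile.filter(r"^\s*CC\s*=.*", "CC = " + spack_cc)
    makefile.filter(r"^\s*CXX\s*=.*", "CXX = " + spack_cxx)
    # Install into Spack's prefix rather than a hard-coded location
    makefile.filter(r"^\s*PREFIX\s*=.*", "PREFIX = " + prefix)
```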
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -48,8 +48,8 @@ class automatically adds the following dependencies:

.. code-block:: python

   depends_on("java", type=("build", "run"))
   depends_on("maven", type="build")
   depends_on('java', type=('build', 'run'))
   depends_on('maven', type='build')

In the ``pom.xml`` file, you may see sections like:
@@ -72,8 +72,8 @@ should add:

.. code-block:: python

   depends_on("java@7:", type="build")
   depends_on("maven@3.5.4:", type="build")
   depends_on('java@7:', type='build')
   depends_on('maven@3.5.4:', type='build')

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -88,9 +88,9 @@ the build phase. For example:

   def build_args(self):
       return [
           "-Pdist,native",
           "-Dtar",
           "-Dmaven.javadoc.skip=true"
           '-Pdist,native',
           '-Dtar',
           '-Dmaven.javadoc.skip=true'
       ]
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -86,8 +86,8 @@ the ``MesonPackage`` base class already contains:

.. code-block:: python

   depends_on("meson", type="build")
   depends_on("ninja", type="build")
   depends_on('meson', type='build')
   depends_on('ninja', type='build')

If you need to specify a particular version requirement, you can
@@ -95,8 +95,8 @@ override this in your package:

.. code-block:: python

   depends_on("meson@0.43.0:", type="build")
   depends_on("ninja", type="build")
   depends_on('meson@0.43.0:', type='build')
   depends_on('ninja', type='build')

^^^^^^^^^^^^^^^^^^^
@@ -121,7 +121,7 @@ override the ``meson_args`` method like so:

.. code-block:: python

   def meson_args(self):
       return ["--warnlevel=3"]
       return ['--warnlevel=3']

This method can be used to pass flags as well as variables.
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -118,7 +118,7 @@ so ``PerlPackage`` contains:

.. code-block:: python

   extends("perl")
   extends('perl')

If your package requires a specific version of Perl, you should
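The sentence above is cut off by the hunk boundary. A hedged sketch of how a minimum Perl version requirement is typically expressed in a package (the version number is illustrative, not from this diff):

```python
# Hypothetical sketch: require a minimum Perl version at build and run time.
depends_on("perl@5.24.0:", type=("build", "run"))
```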
@@ -132,14 +132,14 @@ properly. If your package uses ``Makefile.PL`` to build, add:

.. code-block:: python

   depends_on("perl-extutils-makemaker", type="build")
   depends_on('perl-extutils-makemaker', type='build')

If your package uses ``Build.PL`` to build, add:

.. code-block:: python

   depends_on("perl-module-build", type="build")
   depends_on('perl-module-build', type='build')

^^^^^^^^^^^^^^^^^
@@ -165,11 +165,11 @@ arguments to ``Makefile.PL`` or ``Build.PL`` by overriding

.. code-block:: python

   def configure_args(self):
       expat = self.spec["expat"].prefix
       expat = self.spec['expat'].prefix

       return [
           "EXPATLIBPATH={0}".format(expat.lib),
           "EXPATINCPATH={0}".format(expat.include),
           'EXPATLIBPATH={0}'.format(expat.lib),
           'EXPATINCPATH={0}'.format(expat.include),
       ]
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -83,7 +83,7 @@ base class already contains:

.. code-block:: python

   depends_on("qt", type="build")
   depends_on('qt', type='build')

If you want to specify a particular version requirement, or need to
@@ -91,7 +91,7 @@ link to the ``qt`` libraries, you can override this in your package:

.. code-block:: python

   depends_on("qt@5.6.0:")
   depends_on('qt@5.6.0:')

^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to qmake
@@ -103,7 +103,7 @@ override the ``qmake_args`` method like so:

.. code-block:: python

   def qmake_args(self):
       return ["-recursive"]
       return ['-recursive']

This method can be used to pass flags as well as variables.
@@ -118,7 +118,7 @@ sub-directory by adding the following to the package:

.. code-block:: python

   build_directory = "src"
   build_directory = 'src'

^^^^^^^^^^^^^^^^^^^^^^
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -163,28 +163,28 @@ attributes that can be used to set ``homepage``, ``url``, ``list_url``, and

.. code-block:: python

   cran = "caret"
   cran = 'caret'

is equivalent to:

.. code-block:: python

   homepage = "https://cloud.r-project.org/package=caret"
   url = "https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz"
   list_url = "https://cloud.r-project.org/src/contrib/Archive/caret"
   homepage = 'https://cloud.r-project.org/package=caret'
   url = 'https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz'
   list_url = 'https://cloud.r-project.org/src/contrib/Archive/caret'

Likewise, the following ``bioc`` attribute:

.. code-block:: python

   bioc = "BiocVersion"
   bioc = 'BiocVersion'

is equivalent to:

.. code-block:: python

   homepage = "https://bioconductor.org/packages/BiocVersion/"
   git = "https://git.bioconductor.org/packages/BiocVersion"
   homepage = 'https://bioconductor.org/packages/BiocVersion/'
   git = 'https://git.bioconductor.org/packages/BiocVersion'

^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -200,7 +200,7 @@ base class contains:

.. code-block:: python

   extends("r")
   extends('r')

Take a close look at the homepage for ``caret``. If you look at the
@@ -209,7 +209,7 @@ You should add this to your package like so:

.. code-block:: python

   depends_on("r@3.2.0:", type=("build", "run"))
   depends_on('r@3.2.0:', type=('build', 'run'))

^^^^^^^^^^^^^^
@@ -227,7 +227,7 @@ and list all of their dependencies in the following sections:
* LinkingTo

As far as Spack is concerned, all 3 of these dependency types
correspond to ``type=("build", "run")``, so you don't have to worry
correspond to ``type=('build', 'run')``, so you don't have to worry
about the details. If you are curious what they mean,
https://github.com/spack/spack/issues/2951 has a pretty good summary:
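A hedged sketch of how an entry in an R package's ``DESCRIPTION`` file typically maps onto a Spack dependency (the package name and version are illustrative, not taken from this diff):

```python
# Hypothetical sketch: the DESCRIPTION line
#   Imports: ggplot2 (>= 3.4.0)
# becomes a build+run dependency on the corresponding "r-" package:
depends_on("r-ggplot2@3.4.0:", type=("build", "run"))
```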
@@ -330,7 +330,7 @@ the dependency:

.. code-block:: python

   depends_on("r-lattice@0.20:", type=("build", "run"))
   depends_on('r-lattice@0.20:', type=('build', 'run'))

^^^^^^^^^^^^^^^^^^
@@ -361,20 +361,20 @@ like so:

.. code-block:: python

   def configure_args(self):
       mpi_name = self.spec["mpi"].name
       mpi_name = self.spec['mpi'].name

       # The type of MPI. Supported values are:
       # OPENMPI, LAM, MPICH, MPICH2, or CRAY
       if mpi_name == "openmpi":
           Rmpi_type = "OPENMPI"
       elif mpi_name == "mpich":
           Rmpi_type = "MPICH2"
       if mpi_name == 'openmpi':
           Rmpi_type = 'OPENMPI'
       elif mpi_name == 'mpich':
           Rmpi_type = 'MPICH2'
       else:
           raise InstallError("Unsupported MPI type")
           raise InstallError('Unsupported MPI type')

       return [
           "--with-Rmpi-type={0}".format(Rmpi_type),
           "--with-mpi={0}".format(spec["mpi"].prefix),
           '--with-Rmpi-type={0}'.format(Rmpi_type),
           '--with-mpi={0}'.format(spec['mpi'].prefix),
       ]
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -84,8 +84,8 @@ The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   summary = "An implementation of the AsciiDoc text processor and publishing toolchain"
   description = "A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats."
   summary = 'An implementation of the AsciiDoc text processor and publishing toolchain'
   description = 'A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats.'

Either of these can be used for the description of the Spack package.
@@ -98,7 +98,7 @@ The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   homepage = "https://asciidoctor.org"
   homepage = 'https://asciidoctor.org'

This should be used as the official homepage of the Spack package.
@@ -112,21 +112,21 @@ the base class contains:

.. code-block:: python

   extends("ruby")
   extends('ruby')

The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   required_ruby_version = ">= 2.3.0"
   required_ruby_version = '>= 2.3.0'

This can be added to the Spack package using:

.. code-block:: python

   depends_on("ruby@2.3.0:", type=("build", "run"))
   depends_on('ruby@2.3.0:', type=('build', 'run'))

^^^^^^^^^^^^^^^^^
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -124,7 +124,7 @@ are wrong, you can provide the names yourself by overriding

.. code-block:: python

   import_modules = ["PyQt5"]
   import_modules = ['PyQt5']

These tests often catch missing dependencies and non-RPATHed
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -63,8 +63,8 @@ run package-specific unit tests.

.. code-block:: python

   def installtest(self):
       with working_dir("test"):
           pytest = which("py.test")
       with working_dir('test'):
           pytest = which('py.test')
           pytest()

@@ -93,7 +93,7 @@ the following dependency automatically:

.. code-block:: python

   depends_on("python@2.5:", type="build")
   depends_on('python@2.5:', type='build')

Waf only supports Python 2.5 and up.
@@ -113,7 +113,7 @@ phase, you can use:

       args = []

       if self.run_tests:
           args.append("--test")
           args.append('--test')

       return args
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -199,7 +199,6 @@ def setup(sphinx):
    ("py:class", "contextlib.contextmanager"),
    ("py:class", "module"),
    ("py:class", "_io.BufferedReader"),
    ("py:class", "_io.BytesIO"),
    ("py:class", "unittest.case.TestCase"),
    ("py:class", "_frozen_importlib_external.SourceFileLoader"),
    ("py:class", "clingo.Control"),
@@ -216,7 +215,6 @@ def setup(sphinx):
    ("py:class", "spack.spec.InstallStatus"),
    ("py:class", "spack.spec.SpecfileReaderBase"),
    ("py:class", "spack.install_test.Pb"),
    ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
]

# The reST default role (used for this markup: `text`) to use for all documents.
@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -9,96 +9,34 @@
|
||||
Container Images
|
||||
================
|
||||
|
||||
Spack :ref:`environments` can easily be turned into container images. This page
|
||||
outlines two ways in which this can be done:
|
||||
|
||||
1. By installing the environment on the host system, and copying the installations
|
||||
into the container image. This approach does not require any tools like Docker
|
||||
or Singularity to be installed.
|
||||
2. By generating a Docker or Singularity recipe that can be used to build the
|
||||
container image. In this approach, Spack builds the software inside the
|
||||
container runtime, not on the host system.
|
||||
|
||||
The first approach is easiest if you already have an installed environment,
|
||||
the second approach gives more control over the container image.
|
||||
|
||||
---------------------------
|
||||
From existing installations
|
||||
---------------------------
|
||||
|
||||
If you already have a Spack environment installed on your system, you can
|
||||
share the binaries as an OCI-compatible container image. To get started you
just have to configure an OCI registry and run ``spack buildcache push``.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
# Create and install an environment in the current directory
|
||||
spack env create -d .
|
||||
spack -e . add pkg-a pkg-b
|
||||
spack -e . install
|
||||
|
||||
# Configure the registry
|
||||
spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image
|
||||
|
||||
# Push the image
|
||||
spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry
|
||||
|
||||
The resulting container image can then be run as follows:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ docker run -it example.com/name/image:my_env
|
||||
|
||||
The image generated by Spack consists of the specified base image with each package from the
environment as a separate layer on top. The image is minimal by construction: it only contains
the environment roots and their runtime dependencies.
|
||||
|
||||
.. note::
|
||||
|
||||
When using registries like GHCR and Docker Hub, the ``--oci-password`` flag is not
|
||||
the password for your account, but a personal access token you need to generate separately.
|
||||
|
||||
The specified ``--base-image`` should have a libc that is compatible with the host system.
|
||||
For example if your host system is Ubuntu 20.04, you can use ``ubuntu:20.04``, ``ubuntu:22.04``
|
||||
or newer: the libc in the container image must be at least the version of the host system,
|
||||
assuming ABI compatibility. It is also perfectly fine to use a completely different
|
||||
Linux distribution as long as the libc is compatible.
|
||||
|
||||
For convenience, Spack also turns the OCI registry into a :ref:`build cache <binary_caches_oci>`,
|
||||
so that future ``spack install`` of the environment will simply pull the binaries from the
|
||||
registry instead of doing source builds. The flag ``--update-index`` is needed to make Spack
|
||||
take the build cache into account when concretizing.
|
||||
|
||||
.. note::
|
||||
|
||||
When generating container images in CI, the approach above is recommended when CI jobs
|
||||
already run in a sandboxed environment. You can simply use ``spack`` directly
|
||||
in the CI job and push the resulting image to a registry. Subsequent CI jobs should
|
||||
run faster because Spack can install from the same registry instead of rebuilding from
|
||||
sources.
|
||||
|
||||
---------------------------------------------
|
||||
Generating recipes for Docker and Singularity
|
||||
---------------------------------------------
|
||||
|
||||
Apart from copying existing installations into container images, Spack can also
|
||||
generate recipes for container images. This is useful if you want to run Spack
|
||||
itself in a sandboxed environment instead of on the host system.
|
||||
|
||||
Since recipes need a little bit more boilerplate than
|
||||
Spack :ref:`environments` are a great tool to create container images, but
|
||||
preparing one that is suitable for production requires some more boilerplate
|
||||
than just:
|
||||
|
||||
.. code-block:: docker
|
||||
|
||||
COPY spack.yaml /environment
|
||||
RUN spack -e /environment install
|
||||
|
||||
Spack provides a command to generate customizable recipes for container images. Customizations
|
||||
include minimizing the size of the image, installing packages in the base image using the system
|
||||
package manager, and setting up a proper entrypoint to run the image.
|
||||
Additional actions may be needed to minimize the size of the
|
||||
container, or to update the system software that is installed in the base
|
||||
image, or to set up a proper entrypoint to run the image. These tasks are
|
||||
usually both necessary and repetitive, so Spack comes with a command
|
||||
to generate recipes for container images starting from a ``spack.yaml``.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
.. seealso::
|
||||
|
||||
This page is a reference for generating recipes to build container images.
|
||||
It means that your environment is built from scratch inside the container
|
||||
runtime.
|
||||
|
||||
Since v0.21, Spack can also create container images from existing package installations
|
||||
on your host system. See :ref:`binary_caches_oci` for more information on
|
||||
that topic.
|
||||
|
||||
--------------------
|
||||
A Quick Introduction
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
--------------------
|
||||
|
||||
Consider having a Spack environment like the following:
|
||||
|
||||
@@ -109,8 +47,8 @@ Consider having a Spack environment like the following:
|
||||
- gromacs+mpi
|
||||
- mpich
|
||||
|
||||
Producing a ``Dockerfile`` from it is as simple as changing directories to
|
||||
where the ``spack.yaml`` file is stored and running the following command:
|
||||
Producing a ``Dockerfile`` from it is as simple as moving to the directory
|
||||
where the ``spack.yaml`` file is stored and giving the following command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
@@ -176,9 +114,9 @@ configuration are discussed in details in the sections below.
|
||||
|
||||
.. _container_spack_images:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
--------------------------
|
||||
Spack Images on Docker Hub
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
--------------------------
|
||||
|
||||
Docker images with Spack preinstalled and ready to be used are
|
||||
built when a release is tagged, or nightly on ``develop``. The images
|
||||
@@ -248,9 +186,9 @@ by Spack use them as default base images for their ``build`` stage,
|
||||
even though options to use custom base images provided by users are
available to accommodate complex use cases.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
Configuring the Container Recipe
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
---------------------------------
|
||||
Creating Images From Environments
|
||||
---------------------------------
|
||||
|
||||
Any Spack Environment can be used for the automatic generation of container
|
||||
recipes. Sensible defaults are provided for things like the base image or the
|
||||
@@ -291,18 +229,18 @@ under the ``container`` attribute of environments:
|
||||
|
||||
A detailed description of the options available can be found in the :ref:`container_config_options` section.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
-------------------
|
||||
Setting Base Images
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
-------------------
|
||||
|
||||
The ``images`` subsection is used to select both the image where
|
||||
Spack builds the software and the image where the built software
|
||||
is installed. This attribute can be set in different ways and
|
||||
which one to use depends on the use case at hand.
|
||||
|
||||
""""""""""""""""""""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Use Official Spack Images From Dockerhub
|
||||
""""""""""""""""""""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To generate a recipe that uses an official Docker image from the
|
||||
Spack organization to build the software and the corresponding official OS image
|
||||
@@ -507,9 +445,9 @@ responsibility to ensure that:
|
||||
Therefore we don't recommend its use in cases that can be otherwise
|
||||
covered by the simplified mode shown first.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
----------------------------
|
||||
Singularity Definition Files
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
----------------------------
|
||||
|
||||
In addition to producing recipes in ``Dockerfile`` format Spack can produce
|
||||
Singularity Definition Files by just changing the value of the ``format``
|
||||
@@ -530,9 +468,9 @@ attribute:
|
||||
The minimum version of Singularity required to build a SIF (Singularity Image Format)
|
||||
image from the recipes generated by Spack is ``3.5.3``.
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
------------------------------
|
||||
Extending the Jinja2 Templates
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
------------------------------
|
||||
|
||||
The Dockerfile and the Singularity definition file that Spack can generate are based on
|
||||
a few Jinja2 templates that are rendered according to the environment being containerized.
|
||||
@@ -653,9 +591,9 @@ The recipe that gets generated contains the two extra instruction that we added
|
||||
|
||||
.. _container_config_options:
|
||||
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
-----------------------
|
||||
Configuration Reference
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
-----------------------
|
||||
|
||||
The tables below describe all the configuration options that are currently supported
|
||||
to customize the generation of container recipes:
|
||||
@@ -752,13 +690,13 @@ to customize the generation of container recipes:
|
||||
- Description string
|
||||
- No
|
||||
|
||||
~~~~~~~~~~~~~~
|
||||
--------------
|
||||
Best Practices
|
||||
~~~~~~~~~~~~~~
|
||||
--------------
|
||||
|
||||
"""
|
||||
^^^
|
||||
MPI
|
||||
"""
|
||||
^^^
|
||||
Because OpenMPI, Spack's default MPI implementation, depends on Fortran,
consider adding ``gfortran`` to the ``apt-get install`` list.
|
||||
|
||||
@@ -769,9 +707,9 @@ For execution on HPC clusters, it can be helpful to import the docker
|
||||
image into Singularity in order to start a program with an *external*
|
||||
MPI. Otherwise, also add ``openssh-server`` to the ``apt-get install`` list.
|
||||
|
||||
""""
|
||||
^^^^
|
||||
CUDA
|
||||
""""
|
||||
^^^^
|
||||
Starting from CUDA 9.0, Nvidia provides minimal CUDA images based on
|
||||
Ubuntu. Please see `their instructions <https://hub.docker.com/r/nvidia/cuda/>`_.
|
||||
Avoid double-installing CUDA by adding, e.g.
|
||||
@@ -790,9 +728,9 @@ to your ``spack.yaml``.
|
||||
Users will either need ``nvidia-docker`` or e.g. Singularity to *execute*
|
||||
device kernels.
|
||||
|
||||
"""""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Docker on Windows and OSX
|
||||
"""""""""""""""""""""""""
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
On Mac OS and Windows, docker runs on a hypervisor that is not allocated much
|
||||
memory by default, and some spack packages may fail to build due to lack of
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -357,23 +357,91 @@ If there is a hook that you would like and is missing, you can propose to add a
|
||||
``pre_install(spec)``
|
||||
"""""""""""""""""""""
|
||||
|
||||
A ``pre_install`` hook is run within the install subprocess, directly before the install starts.
|
||||
It expects a single argument of a spec.
|
||||
A ``pre_install`` hook is run within an install subprocess, directly before
|
||||
the install starts. It expects a single argument of a spec, and is run in
|
||||
a multiprocessing subprocess. Note that if you see ``pre_install`` functions
associated with packages, these are not hooks as we have defined them here, but
rather callback functions associated with a package install.
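As a hedged illustration, a ``pre_install`` hook only needs to define a function with the documented name and signature; the body below is hypothetical and simply logs the spec:

.. code-block:: python

    def pre_install(spec):
        """Minimal sketch: log the spec that is about to be installed."""
        print(f"about to install {spec.name}")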
|
||||
|
||||
|
||||
"""""""""""""""""""""""""""""""""""""
|
||||
``post_install(spec, explicit=None)``
|
||||
"""""""""""""""""""""""""""""""""""""
|
||||
""""""""""""""""""""""
|
||||
``post_install(spec)``
|
||||
""""""""""""""""""""""
|
||||
|
||||
A ``post_install`` hook is run within the install subprocess, directly after the install finishes,
|
||||
but before the build stage is removed and the spec is registered in the database. It expects two
|
||||
arguments: spec and an optional boolean indicating whether this spec is being installed explicitly.
|
||||
A ``post_install`` hook is run within an install subprocess, directly after
|
||||
the install finishes, but before the build stage is removed. If you
|
||||
write one of these hooks, you should expect it to accept a spec as the only
|
||||
argument. This is run in a multiprocessing subprocess. This ``post_install`` is
|
||||
also seen in packages, but in this context not related to the hooks described
|
||||
here.
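Following the new signature described above, a minimal ``post_install`` hook could look like the sketch below; the body is illustrative only:

.. code-block:: python

    def post_install(spec, explicit=None):
        """Minimal sketch: report the installed spec and, when known,
        whether it was requested explicitly."""
        suffix = "" if explicit is None else (" (explicit)" if explicit else " (dependency)")
        print(f"installed {spec.name}{suffix}")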
|
||||
|
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""
|
||||
``pre_uninstall(spec)`` and ``post_uninstall(spec)``
|
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
These hooks are currently used for cleaning up module files after uninstall.
|
||||
""""""""""""""""""""""""""
|
||||
``on_install_start(spec)``
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
This hook is run at the beginning of ``lib/spack/spack/installer.py``,
|
||||
in the install function of a ``PackageInstaller``,
|
||||
and importantly is not part of a build process, but before it. This is when
|
||||
we have just newly grabbed the task, and are preparing to install. If you
|
||||
write a hook of this type, you should provide the spec to it.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def on_install_start(spec):
|
||||
"""On start of an install, we want to...
|
||||
"""
|
||||
print('on_install_start')
|
||||
|
||||
|
||||
""""""""""""""""""""""""""""
|
||||
``on_install_success(spec)``
|
||||
""""""""""""""""""""""""""""
|
||||
|
||||
This hook is run on a successful install, and is also run inside the build
|
||||
process, akin to ``post_install``. The main difference is that this hook
|
||||
is run outside of the context of the stage directory, meaning after the
|
||||
build stage has been removed and the user is alerted that the install was
|
||||
successful. If you need to write a hook that is run on success of a particular
|
||||
phase, you should use ``on_phase_success``.
|
||||
|
||||
""""""""""""""""""""""""""""
|
||||
``on_install_failure(spec)``
|
||||
""""""""""""""""""""""""""""
|
||||
|
||||
This hook is run given an install failure that happens outside of the build
|
||||
subprocess, but somewhere in ``installer.py`` when something else goes wrong.
|
||||
If you need to write a hook that is relevant to a failure within a build
process, you would want to instead use ``on_phase_error``.
|
||||
|
||||
|
||||
"""""""""""""""""""""""""""
|
||||
``on_install_cancel(spec)``
|
||||
"""""""""""""""""""""""""""
|
||||
|
||||
The same, but triggered if a spec install is cancelled for any reason.
|
||||
|
||||
|
||||
"""""""""""""""""""""""""""""""""""""""""""""""
|
||||
``on_phase_success(pkg, phase_name, log_file)``
|
||||
"""""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
This hook is run within the install subprocess, and specifically when a phase
|
||||
successfully finishes. Since we are interested in the package, the name of
|
||||
the phase, and any output from it, we require:
|
||||
|
||||
- **pkg**: the package variable, which also has the attached spec at ``pkg.spec``
|
||||
- **phase_name**: the name of the phase that was successful (e.g., configure)
|
||||
- **log_file**: the path to the file with output, in case you need to inspect or otherwise interact with it.
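A hedged sketch using those three arguments (the body is illustrative only):

.. code-block:: python

    def on_phase_success(pkg, phase_name, log_file):
        """Minimal sketch: note which phase of which package just succeeded."""
        print(f"{pkg.spec.name}: phase '{phase_name}' succeeded (log: {log_file})")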
|
||||
|
||||
"""""""""""""""""""""""""""""""""""""""""""""
|
||||
``on_phase_error(pkg, phase_name, log_file)``
|
||||
"""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
In the case of an error during a phase, we might want to trigger some event
|
||||
with a hook, and this is the purpose of this particular hook. Akin to
|
||||
``on_phase_success`` we require the same variables - the package that failed,
|
||||
the name of the phase, and the log file where we might find errors.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -142,21 +142,6 @@ user's prompt to begin with the environment name in brackets.
|
||||
$ spack env activate -p myenv
|
||||
[myenv] $ ...
|
||||
|
||||
The ``activate`` command can also be used to create a new environment, if it is
not already defined, by adding the ``--create`` flag. Managed and anonymous
environments (anonymous environments are explained in the next section)
can both be created using the same flags that ``spack env create`` accepts.
If an environment already exists, then Spack will simply activate it and ignore the
create-specific flags.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate --create -p myenv
|
||||
# ...
|
||||
# [creates if myenv does not exist yet]
|
||||
# ...
|
||||
[myenv] $ ...
|
||||
|
||||
To deactivate an environment, use the command:
|
||||
|
||||
.. code-block:: console
|
||||
@@ -416,23 +401,6 @@ that git clone if ``foo`` is in the environment.
|
||||
Further development on ``foo`` can be tested by reinstalling the environment,
|
||||
and eventually committed and pushed to the upstream git repo.
|
||||
|
||||
If the package being developed supports out-of-source builds then users can use the
|
||||
``--build_directory`` flag to control the location and name of the build directory.
|
||||
This is a shortcut to set the ``package_attributes:build_directory`` in the
|
||||
``packages`` configuration (see :ref:`assigning-package-attributes`).
|
||||
The supplied location will become the build-directory for that package in all future builds.
|
||||
|
||||
.. warning::
|
||||
Potential pitfalls of setting the build directory
Spack does not check the packages for out-of-source build compatibility, so
the onus of making sure the package supports out-of-source builds is on
the user.
For example, most ``autotools`` and ``makefile`` packages do not support out-of-source builds,
while all ``CMake`` packages do.
Understanding these nuances is the responsibility of the software developers, and we strongly
encourage developers to only redirect the build directory if they understand their package's
build system.
|
||||
|
||||
^^^^^^^
|
||||
Loading
|
||||
^^^^^^^
|
||||
@@ -489,11 +457,11 @@ a ``packages.yaml`` file) could contain:
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
...
|
||||
packages:
|
||||
all:
|
||||
compiler: [intel]
|
||||
# ...
|
||||
...
|
||||
|
||||
This configuration sets the default compiler for all packages to
|
||||
``intel``.
|
||||
@@ -839,7 +807,7 @@ directories.
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
...
|
||||
view:
|
||||
mpis:
|
||||
root: /path/to/view
|
||||
@@ -883,7 +851,7 @@ automatically named ``default``, so that
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
...
|
||||
view: True
|
||||
|
||||
is equivalent to
|
||||
@@ -891,7 +859,7 @@ is equivalent to
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
...
|
||||
view:
|
||||
default:
|
||||
root: .spack-env/view
|
||||
@@ -901,7 +869,7 @@ and
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
...
|
||||
view: /path/to/view
|
||||
|
||||
is equivalent to
|
||||
@@ -909,7 +877,7 @@ is equivalent to
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
...
|
||||
view:
|
||||
default:
|
||||
root: /path/to/view
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -9,42 +9,46 @@
|
||||
Custom Extensions
|
||||
=================
|
||||
|
||||
*Spack extensions* allow you to extend Spack capabilities by deploying your
|
||||
*Spack extensions* permit you to extend Spack capabilities by deploying your
|
||||
own custom commands or logic in an arbitrary location on your filesystem.
|
||||
This might be extremely useful e.g. to develop and maintain a command whose purpose is
|
||||
too specific to be considered for reintegration into the mainline or to
|
||||
evolve a command through its early stages before starting a discussion to merge
|
||||
it upstream.
|
||||
|
||||
From Spack's point of view an extension is any path in your filesystem which
|
||||
respects the following naming and layout for files:
|
||||
respects a prescribed naming and layout for files:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack-scripting/ # The top level directory must match the format 'spack-{extension_name}'
|
||||
├── pytest.ini # Optional file if the extension ships its own tests
|
||||
├── scripting # Folder that may contain modules that are needed for the extension commands
|
||||
│ ├── cmd # Folder containing extension commands
|
||||
│ │ └── filter.py # A new command that will be available
|
||||
│ └── functions.py # Module with internal details
|
||||
└── tests # Tests for this extension
|
||||
│ └── cmd # Folder containing extension commands
|
||||
│ └── filter.py # A new command that will be available
|
||||
├── tests # Tests for this extension
|
||||
│ ├── conftest.py
|
||||
│ └── test_filter.py
|
||||
└── templates # Templates that may be needed by the extension
|
||||
|
||||
In the example above, the extension is named *scripting*. It adds an additional command
|
||||
(``spack filter``) and unit tests to verify its behavior.
|
||||
In the example above the extension named *scripting* adds an additional command (``filter``)
|
||||
and unit tests to verify its behavior. The code for this example can be
|
||||
obtained by cloning the corresponding git repository:
|
||||
|
||||
The extension can import any core Spack module in its implementation. When loaded by
|
||||
the ``spack`` command, the extension itself is imported as a Python package in the
|
||||
``spack.extensions`` namespace. In the example above, since the extension is named
|
||||
"scripting", the corresponding Python module is ``spack.extensions.scripting``.
|
||||
|
||||
The code for this example extension can be obtained by cloning the corresponding git repository:
|
||||
.. TODO: write an ad-hoc "hello world" extension and make it part of the spack organization
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git -C /tmp clone https://github.com/spack/spack-scripting.git
|
||||
$ cd ~/
|
||||
$ mkdir tmp && cd tmp
|
||||
$ git clone https://github.com/alalazo/spack-scripting.git
|
||||
Cloning into 'spack-scripting'...
|
||||
remote: Counting objects: 11, done.
|
||||
remote: Compressing objects: 100% (7/7), done.
|
||||
remote: Total 11 (delta 0), reused 11 (delta 0), pack-reused 0
|
||||
Receiving objects: 100% (11/11), done.
|
||||
|
||||
As you can see by inspecting the sources, Python modules that are part of the extension
|
||||
can import any core Spack module.
|
||||
|
||||
---------------------------------
|
||||
Configure Spack to Use Extensions
|
||||
@@ -57,7 +61,7 @@ paths to ``config.yaml``. In the case of our example this means ensuring that:
|
||||
|
||||
config:
|
||||
extensions:
|
||||
- /tmp/spack-scripting
|
||||
- ~/tmp/spack-scripting
|
||||
|
||||
is part of your configuration file. Once this is set up, any command that the extension
provides will be available from the command line:
|
||||
@@ -82,32 +86,37 @@ will be available from the command line:
|
||||
--implicit select specs that are not installed or were installed implicitly
|
||||
--output OUTPUT where to dump the result
|
||||
|
||||
The corresponding unit tests can be run giving the appropriate options to ``spack unit-test``:
|
||||
The corresponding unit tests can be run giving the appropriate options
|
||||
to ``spack unit-test``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack unit-test --extension=scripting
|
||||
========================================== test session starts ===========================================
|
||||
platform linux -- Python 3.11.5, pytest-7.4.3, pluggy-1.3.0
|
||||
rootdir: /home/culpo/github/spack-scripting
|
||||
configfile: pytest.ini
|
||||
testpaths: tests
|
||||
plugins: xdist-3.5.0
|
||||
|
||||
============================================================== test session starts ===============================================================
|
||||
platform linux2 -- Python 2.7.15rc1, pytest-3.2.5, py-1.4.34, pluggy-0.4.0
|
||||
rootdir: /home/mculpo/tmp/spack-scripting, inifile: pytest.ini
|
||||
collected 5 items
|
||||
|
||||
tests/test_filter.py ..... [100%]
|
||||
tests/test_filter.py ...XX
|
||||
============================================================ short test summary info =============================================================
|
||||
XPASS tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
|
||||
XPASS tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
|
||||
|
||||
========================================== slowest 30 durations ==========================================
|
||||
2.31s setup tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
|
||||
0.57s call tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
|
||||
0.56s call tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
|
||||
0.54s call tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]
|
||||
0.54s call tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
|
||||
0.48s call tests/test_filter.py::test_filtering_specs[kwargs0-specs0-expected0]
|
||||
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs4-specs4-expected4]
|
||||
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs2-specs2-expected2]
|
||||
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs1-specs1-expected1]
|
||||
0.01s setup tests/test_filter.py::test_filtering_specs[kwargs3-specs3-expected3]
|
||||
|
||||
(5 durations < 0.005s hidden. Use -vv to show these durations.)
|
||||
=========================================== 5 passed in 5.06s ============================================
|
||||
=========================================================== slowest 20 test durations ============================================================
|
||||
3.74s setup tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
|
||||
0.17s call tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
|
||||
0.16s call tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
|
||||
0.15s call tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
|
||||
0.13s call tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
|
||||
0.08s call tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
|
||||
0.04s teardown tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
|
||||
0.00s setup tests/test_filter.py::test_filtering_specs[flags4-specs4-expected4]
|
||||
0.00s setup tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
|
||||
0.00s setup tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
|
||||
0.00s setup tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
|
||||
0.00s teardown tests/test_filter.py::test_filtering_specs[flags2-specs2-expected2]
|
||||
0.00s teardown tests/test_filter.py::test_filtering_specs[flags1-specs1-expected1]
|
||||
0.00s teardown tests/test_filter.py::test_filtering_specs[flags0-specs0-expected0]
|
||||
0.00s teardown tests/test_filter.py::test_filtering_specs[flags3-specs3-expected3]
|
||||
====================================================== 3 passed, 2 xpassed in 4.51 seconds =======================================================
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -250,9 +250,10 @@ Compiler configuration
|
||||
|
||||
Spack has the ability to build packages with multiple compilers and
|
||||
compiler versions. Compilers can be made available to Spack by
|
||||
specifying them manually in ``compilers.yaml``, or automatically by
|
||||
running ``spack compiler find``, but for convenience Spack will
|
||||
automatically detect compilers the first time it needs them.
|
||||
specifying them manually in ``compilers.yaml`` or ``packages.yaml``,
|
||||
or automatically by running ``spack compiler find``, but for
|
||||
convenience Spack will automatically detect compilers the first time
|
||||
it needs them.
|
||||
|
||||
.. _cmd-spack-compilers:
|
||||
|
||||
@@ -457,6 +458,52 @@ specification. The operations available to modify the environment are ``set``, `
|
||||
prepend_path: # Similar for append|remove_path
|
||||
LD_LIBRARY_PATH: /ld/paths/added/by/setvars/sh
|
||||
|
||||
.. note::
|
||||
|
||||
Spack is in the process of moving compilers from a separate
|
||||
attribute to be handled like all other packages. As part of this
|
||||
process, the ``compilers.yaml`` section will eventually be replaced
|
||||
by configuration in the ``packages.yaml`` section. This new
|
||||
configuration is now available, although it is not yet the default
|
||||
behavior.
|
||||
|
||||
Compilers can also be configured as external packages in the
|
||||
``packages.yaml`` config file. Any external package for a compiler
|
||||
(e.g. ``gcc`` or ``llvm``) will be treated as a configured compiler
|
||||
assuming the paths to the compiler executables are determinable from
|
||||
the prefix.
|
||||
|
||||
If the paths to the compiler executable are not determinable from the
|
||||
prefix, you can add them to the ``extra_attributes`` field using the
|
||||
``compilers`` key. The ``compilers`` key accepts compilers for ``c``,
|
||||
``cxx``, ``fortran``, and ``f77``.
|
||||
|
||||
For all other fields from the ``compilers`` config, they can be added
|
||||
to the ``extra_attributes`` field for an external representing a
|
||||
compiler. These fields are used as-is in the internal representation
|
||||
of the compiler config.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
gcc:
|
||||
external:
|
||||
- spec: gcc@12.2.0 arch=linux-rhel8-skylake
|
||||
prefix: /usr
|
||||
extra_attributes:
|
||||
environment:
|
||||
set:
|
||||
GCC_ROOT: /usr
|
||||
external:
|
||||
- spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
|
||||
prefix: /usr
|
||||
extra_attributes:
|
||||
compilers:
|
||||
c: /usr/bin/clang-with-suffix
|
||||
cxx: /usr/bin/clang++-with-extra-info
|
||||
fortran: /usr/bin/gfortran
|
||||
extra_rpaths:
|
||||
- /usr/lib/llvm/
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Build Your Own Compiler
|
||||
@@ -623,7 +670,7 @@ Fortran.
|
||||
|
||||
compilers:
|
||||
- compiler:
|
||||
# ...
|
||||
...
|
||||
paths:
|
||||
cc: /usr/bin/clang
|
||||
cxx: /usr/bin/clang++
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -111,28 +111,3 @@ CUDA is split into fewer components and is simpler to specify:
|
||||
prefix: /opt/cuda/cuda-11.0.2/
|
||||
|
||||
where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
|
||||
|
||||
|
||||
|
||||
-----------------------------------
|
||||
Using an External OpenGL API
|
||||
-----------------------------------
|
||||
Depending on whether we have a graphics card or not, we may choose to use OSMesa or GLX to implement the OpenGL API.
|
||||
|
||||
If a graphics card is unavailable, OSMesa is recommended and can typically be built with Spack.
|
||||
However, if we prefer to utilize the system GLX tailored to our graphics card, we need to declare it as an external. Here's how to do it:
|
||||
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
libglx:
|
||||
require: [opengl]
|
||||
opengl:
|
||||
buildable: false
|
||||
externals:
|
||||
- prefix: /usr/
|
||||
spec: opengl@4.6
|
||||
|
||||
Note that the prefix has to be the root of both the libraries and the headers, i.e. ``/usr``,
not the path to the ``lib`` directory.
To find out which ``opengl`` spec is available, use ``cd /usr/include/GL && grep -Ri gl_version``.
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -10,7 +10,7 @@ Modules (modules.yaml)
|
||||
======================
|
||||
|
||||
The use of module systems to manage user environment in a controlled way
|
||||
is a common practice at HPC centers that is sometimes embraced also by
|
||||
is a common practice at HPC centers that is often embraced also by
|
||||
individual programmers on their development machines. To support this
|
||||
common practice Spack integrates with `Environment Modules
|
||||
<http://modules.sourceforge.net/>`_ and `Lmod
|
||||
@@ -21,38 +21,14 @@ Modules are one of several ways you can use Spack packages. For other
|
||||
options that may fit your use case better, you should also look at
|
||||
:ref:`spack load <spack-load>` and :ref:`environments <environments>`.
|
||||
|
||||
-----------
|
||||
Quick start
|
||||
-----------
|
||||
----------------------------
|
||||
Using module files via Spack
|
||||
----------------------------
|
||||
|
||||
In the current version of Spack, module files are not generated by default. To get started, you
|
||||
can generate module files for all currently installed packages by running either
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack module tcl refresh
|
||||
|
||||
or
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack module lmod refresh
|
||||
|
||||
Spack can also generate module files for all future installations automatically through the
|
||||
following configuration:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack config add modules:default:enable:[tcl]
|
||||
|
||||
or
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack config add modules:default:enable:[lmod]
|
||||
|
||||
Assuming you have a module system installed, you should now be able to use the ``module`` command
|
||||
to interact with them:
|
||||
If you have installed a supported module system you should be able to
|
||||
run ``module avail`` to see what module
|
||||
files have been installed. Here is sample output of those programs,
|
||||
showing lots of installed packages:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
@@ -89,17 +65,33 @@ scheme used at your site.
|
||||
Module file customization
|
||||
-------------------------
|
||||
|
||||
Module files are generated by post-install hooks after the successful
|
||||
installation of a package.
|
||||
|
||||
.. note::
|
||||
|
||||
Spack only generates modulefiles when a package is installed. If
|
||||
you attempt to install a package and it is already installed, Spack
|
||||
will not regenerate modulefiles for the package. This may lead to
|
||||
inconsistent modulefiles if the Spack module configuration has
|
||||
changed since the package was installed, either by editing a file
|
||||
or changing scopes or environments.
|
||||
|
||||
Later in this section there is a subsection on :ref:`regenerating
|
||||
modules <cmd-spack-module-refresh>` that will allow you to bring
|
||||
your modules to a consistent state.
|
||||
|
||||
The table below summarizes the essential information associated with
|
||||
the different file formats that can be generated by Spack:
|
||||
|
||||
|
||||
+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
|
||||
| | Hierarchical | **Default root directory** | **Default template file** | **Compatible tools** |
|
||||
+===========+==============+==============================+==============================================+======================+
|
||||
| ``tcl`` | No | share/spack/modules | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod |
|
||||
+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
|
||||
| ``lmod`` | Yes | share/spack/lmod | share/spack/templates/modules/modulefile.lua | Lmod |
|
||||
+-----------+--------------+------------------------------+----------------------------------------------+----------------------+
|
||||
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
|
||||
| | **Hook name** | **Default root directory** | **Default template file** | **Compatible tools** |
|
||||
+=============================+====================+===============================+==============================================+======================+
|
||||
| **Tcl - Non-Hierarchical** | ``tcl`` | share/spack/modules | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod |
|
||||
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
|
||||
| **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | share/spack/templates/modules/modulefile.lua | Lmod |
|
||||
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
|
||||
|
||||
|
||||
Spack ships with sensible defaults for the generation of module files, but
|
||||
@@ -110,7 +102,7 @@ In general you can override or extend the default behavior by:
|
||||
2. writing specific rules in the ``modules.yaml`` configuration file
|
||||
3. writing your own templates to override or extend the defaults
|
||||
|
||||
The former method lets you express changes in the run-time environment
|
||||
The former method let you express changes in the run-time environment
|
||||
that are needed to use the installed software properly, e.g. injecting variables
|
||||
from language interpreters into their extensions. The latter two instead permit you to
fine-tune the filesystem layout, content and creation of module files to meet
|
||||
@@ -118,62 +110,79 @@ site specific conventions.
|
||||
|
||||
.. _overide-api-calls-in-package-py:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Setting environment variables dynamically in ``package.py``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Override API calls in ``package.py``
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
There are two methods that you can implement in any ``package.py`` to dynamically affect the
|
||||
content of the module files generated by Spack. The most important one is
|
||||
``setup_run_environment``, which can be used to set environment variables in the module file that
|
||||
depend on the spec:
|
||||
There are two methods that you can override in any ``package.py`` to affect the
|
||||
content of the module files generated by Spack. The first one:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def setup_run_environment(self, env):
|
||||
if self.spec.satisfies("+foo"):
|
||||
env.set("FOO", "bar")
|
||||
pass
|
||||
|
||||
The second, less commonly used, is ``setup_dependent_run_environment(self, env, dependent_spec)``,
|
||||
which allows a dependency to set variables in the module file of its dependents. This is typically
|
||||
used in packages like ``python``, ``r``, or ``perl`` to prepend the dependent's prefix to the
|
||||
search path of the interpreter (``PYTHONPATH``, ``R_LIBS``, ``PERL5LIB`` resp.), so it can locate
|
||||
the packages at runtime.
|
||||
|
||||
For example, a simplified version of the ``python`` package could look like this:
|
||||
can alter the content of the module file associated with the same package where it is overridden.
|
||||
The second method:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def setup_dependent_run_environment(self, env, dependent_spec):
|
||||
if dependent_spec.package.extends(self.spec):
|
||||
env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
|
||||
pass
|
||||
|
||||
and would make any package that ``extends("python")`` have its library directory added to the
|
||||
``PYTHONPATH`` environment variable in the module file. It's much more convenient to set this
|
||||
variable here, than to repeat it in every Python extension's ``setup_run_environment`` method.
|
||||
can instead inject run-time environment modifications in the module files of packages
|
||||
that depend on it. In both cases you need to fill ``env`` with the desired
|
||||
list of environment modifications.
|
||||
|
||||
.. admonition:: The ``r`` package and callback APIs
|
||||
|
||||
An example in which it is crucial to override both methods
|
||||
is given by the ``r`` package. This package installs libraries and headers
|
||||
in non-standard locations and it is possible to prepend the appropriate directory
|
||||
to the corresponding environment variables:
|
||||
|
||||
================== =================================
|
||||
LD_LIBRARY_PATH ``self.prefix/rlib/R/lib``
|
||||
PKG_CONFIG_PATH ``self.prefix/rlib/pkgconfig``
|
||||
================== =================================
|
||||
|
||||
with the following snippet:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/r/package.py
|
||||
:pyobject: R.setup_run_environment
|
||||
|
||||
The ``r`` package also knows which environment variable should be modified
|
||||
to make language extensions provided by other packages available, and modifies
|
||||
it appropriately in the override of the second method:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/r/package.py
|
||||
:pyobject: R.setup_dependent_run_environment
|
||||
|
||||
.. _modules-yaml:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
The ``modules.yaml`` config file and module sets
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Write a configuration file
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The configuration files that control module generation behavior are named ``modules.yaml``. The
|
||||
default configuration looks like this:
|
||||
The configuration files that control module generation behavior
|
||||
are named ``modules.yaml``. The default configuration:
|
||||
|
||||
.. literalinclude:: _spack_root/etc/spack/defaults/modules.yaml
|
||||
:language: yaml
|
||||
|
||||
You can define one or more **module sets**, each of which can be configured separately with regard
|
||||
to install location, naming scheme, inclusion and exclusion, autoloading, et cetera.
|
||||
activates the hooks to generate ``tcl`` module files and inspects
|
||||
the installation folder of each package for the presence of a set of subdirectories
|
||||
(``bin``, ``man``, ``share/man``, etc.). If any is found its full path is prepended
|
||||
to the environment variables listed below the folder name.
|
||||
|
||||
The default module set is aptly named ``default``. All
|
||||
:ref:`Spack commands that operate on modules <maintaining-module-files>` apply to the ``default``
|
||||
module set, unless another module set is specified explicitly (with the ``--name`` flag).
|
||||
Spack modules can be configured for multiple module sets. The default
|
||||
module set is named ``default``. All Spack commands which operate on
|
||||
modules default to apply the ``default`` module set, but can be
|
||||
applied to any module set in the configuration.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""""
|
||||
Changing the modules root
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""""
|
||||
|
||||
As shown in the table above, the default module root for ``lmod`` is
|
||||
``$spack/share/spack/lmod`` and the default root for ``tcl`` is
|
||||
@@ -189,7 +198,7 @@ set by changing the ``roots`` key of the configuration.
|
||||
my_custom_lmod_modules:
|
||||
roots:
|
||||
lmod: /path/to/install/custom/lmod/modules
|
||||
# ...
|
||||
...
|
||||
|
||||
This configuration will create two module sets. The default module set
|
||||
will install its ``tcl`` modules to ``/path/to/install/tcl/modules``
|
||||
@@ -215,32 +224,25 @@ location could be confusing to users of your modules. In the next
|
||||
section, we will discuss enabling and disabling module types (module
|
||||
file generators) for each module set.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Automatically generating module files
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""
|
||||
Activate other hooks
|
||||
""""""""""""""""""""
|
||||
|
||||
Spack can be configured to automatically generate module files as part of package installation.
|
||||
This is done by adding the desired module systems to the ``enable`` list.
|
||||
Any other module file generator shipped with Spack can be activated by adding it to the
list under the ``enable`` key in the modules configuration file. Currently the only generator
that is not active by default is ``lmod``, which produces hierarchical Lua module files.
|
||||
|
||||
Each module system can then be configured separately. In fact, you should list configuration
|
||||
options that affect a particular type of module files under a top level key corresponding
|
||||
to the generator being customized:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
default:
|
||||
enable:
|
||||
- tcl
|
||||
- lmod
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Configuring ``tcl`` and ``lmod`` modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can configure the behavior of either module system separately, under a key corresponding to
|
||||
the generator being customized:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
default:
|
||||
- tcl
|
||||
- lmod
|
||||
tcl:
|
||||
# contains environment modules specific customizations
|
||||
lmod:
|
||||
@@ -251,70 +253,16 @@ either change the layout of the module files on the filesystem, or they will aff
|
||||
their content. For the latter point it is possible to use anonymous specs
|
||||
to fine tune the set of packages on which the modifications should be applied.
|
||||
|
||||
.. _autoloading-dependencies:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Autoloading and hiding dependencies
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
A module file should set the variables that are needed for an application to work. But since an
|
||||
application often has many dependencies, where should all the environment variables for those be
|
||||
set? In Spack the rule is that each package sets the runtime variables that are needed by the
|
||||
package itself, and no more. This way, dependencies can be loaded standalone too, and duplication
|
||||
of environment variables is avoided.
|
||||
|
||||
That means however that if you want to use an application, you need to load the modules for all its
|
||||
dependencies. Of course this is not something you would want users to do manually.
|
||||
|
||||
Since Spack knows the dependency graph of every package, it can easily generate module files that
|
||||
automatically load the modules for its dependencies recursively. It is enabled by default for both
|
||||
Lmod and Environment Modules under the ``autoload: direct`` config option. The former system has
|
||||
builtin support through the ``depends_on`` function, the latter simply uses a ``module load``
|
||||
statement. Both module systems (at least in newer versions) do reference counting, so that if a
|
||||
module is loaded by two different modules, it will only be unloaded after the others are.
|
||||
|
||||
The ``autoload`` key accepts the values ``none``, ``direct``, and ``all``. To disable it, use
|
||||
``none``, and to enable, it's best to stick to ``direct``, which only autoloads the direct link and
|
||||
run type dependencies, relying on recursive autoloading to load the rest.
|
||||
|
||||
A common complaint about autoloading is the large number of modules that are visible to the user.
|
||||
Spack has a solution for this as well: ``hide_implicits: true``. This ensures that only those
|
||||
packages you've explicitly installed are exposed by ``module avail``, but still allows for
|
||||
autoloading of hidden dependencies. Lmod should support hiding implicits in general, while
|
||||
Environment Modules requires version 4.7 or higher.
|
||||
|
||||
.. note::
|
||||
If supported by your module system, we highly encourage the following configuration that enables
|
||||
autoloading and hiding of implicits. It ensures all runtime variables are set correctly,
|
||||
including those for dependencies, without overwhelming the user with a large number of available
|
||||
modules. Further, it makes it easier to get readable module names without collisions, see the
|
||||
section below on :ref:`modules-projections`.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
modules:
|
||||
default:
|
||||
tcl:
|
||||
hide_implicits: true
|
||||
all:
|
||||
autoload: direct
|
||||
lmod:
|
||||
hide_implicits: true
|
||||
all:
|
||||
autoload: direct
|
||||
|
||||
.. _anonymous_specs:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Setting environment variables for selected packages in config
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""""""
|
||||
Selection by anonymous specs
|
||||
""""""""""""""""""""""""""""
|
||||
|
||||
In the configuration file you can filter particular specs, and make further changes to the
|
||||
environment variables that go into their module files. This is very powerful when you want to avoid
|
||||
:ref:`modifying the package itself <overide-api-calls-in-package-py>`, or when you want to set
|
||||
certain variables on multiple selected packages at once.
|
||||
|
||||
For instance, in the snippet below:
|
||||
In the configuration file you can use *anonymous specs* (i.e. specs
|
||||
that **are not required to have a root package** and are thus used just
|
||||
to express constraints) to apply certain modifications on a selected set
|
||||
of the installed software. For instance, in the snippet below:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
@@ -357,28 +305,12 @@ the variable ``FOOBAR`` will be unset.
|
||||
.. note::
|
||||
Order does matter
|
||||
The modifications associated with the ``all`` keyword are always evaluated
|
||||
first, no matter where they appear in the configuration file. All the other changes to
|
||||
environment variables for matching specs are evaluated from top to bottom.
|
||||
first, no matter where they appear in the configuration file. All the other
|
||||
spec constraints are instead evaluated top to bottom.
|
||||
|
||||
.. warning::
|
||||
|
||||
As general advice, it's often better to set as few unnecessary variables as possible. For
|
||||
example, the following seemingly innocent and potentially useful configuration
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
all:
|
||||
environment:
|
||||
set:
|
||||
"{name}_ROOT": "{prefix}"
|
||||
|
||||
sets ``BINUTILS_ROOT`` to its prefix in modules for ``binutils``, which happens to break
|
||||
the ``gcc`` compiler: it uses this variable as its default search path for certain object
|
||||
files and libraries, and by merely setting it, everything fails to link.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""""""""""""""""""""""
|
||||
Exclude or include specific module files
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
""""""""""""""""""""""""""""""""""""""""""""
|
||||
|
||||
You can use anonymous specs also to prevent module files from being written or
|
||||
to force them to be written. Consider the case where you want to hide from users
|
||||
@@ -398,19 +330,14 @@ you will prevent the generation of module files for any package that
|
||||
is compiled with ``gcc@4.4.7``, with the only exception of any ``gcc``
|
||||
or any ``llvm`` installation.
|
||||
|
||||
It is safe to combine ``exclude`` and ``autoload``
|
||||
:ref:`mentioned above <autoloading-dependencies>`. When ``exclude`` prevents a module file from
being generated for a dependency, the ``autoload`` feature will simply not generate a statement
to load it.
|
||||
|
||||
|
||||
.. _modules-projections:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""""""""""
|
||||
Customize the naming of modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
"""""""""""""""""""""""""""""""
|
||||
|
||||
The names of environment modules generated by Spack are not always easy to
|
||||
The names of environment modules generated by spack are not always easy to
|
||||
fully comprehend due to the long hash in the name. There are three module
|
||||
configuration options to help with that. The first is a global setting to
|
||||
adjust the hash length. It can be set anywhere from 0 to 32 and has a default
|
||||
@@ -426,13 +353,6 @@ shows how to set hash length in the module file names:
|
||||
tcl:
|
||||
hash_length: 7
|
||||
|
||||
.. tip::
|
||||
|
||||
Using ``hide_implicits: true`` (see :ref:`autoloading-dependencies`) vastly reduces the number
of modules exposed to the user. The hidden modules always contain the hash in their name, and are
|
||||
not influenced by the ``hash_length`` setting. Hidden implicits thus make it easier to use a
|
||||
short hash length or no hash at all, without risking name conflicts.

To help make module names more readable, and to help alleviate name conflicts
with a short hash, one can use the ``suffixes`` option in the modules
configuration file. This option will add strings to modules that match a spec.

@@ -445,12 +365,12 @@ For instance, the following config options,

      tcl:
        all:
          suffixes:
            ^python@3.12: 'python-3.12'
            ^openblas: 'openblas'

will add a ``python-3.12`` version string to any packages compiled with
Python matching the spec, ``python@3.12``. This is useful to know which
version of Python a set of Python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification.
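
As an illustration (the package, versions, and hash below are made up), a module
generated under this configuration could be named so that the Python it was built
against is visible at a glance:

.. code-block:: console

   $ module avail py-numpy
   py-numpy/1.26.4-python-3.12-abcdefg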

@@ -548,11 +468,41 @@ that are already in the Lmod hierarchy.

For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
See `this discussion on the Lmod project <https://github.com/TACC/Lmod/issues/114>`_.

""""""""""""""""""""""
Select default modules
""""""""""""""""""""""

By default, when multiple modules of the same name share a directory,
the highest version number will be the default module. This behavior
of the ``module`` command can be overridden with a symlink named
``default`` to the desired default module. If you wish to configure
default modules with Spack, add a ``defaults`` key to your modules
configuration:

.. code-block:: yaml

   modules:
     my-module-set:
       tcl:
         defaults:
           - gcc@10.2.1
           - hdf5@1.2.10+mpi+hl%gcc

These defaults may be arbitrarily specific. For any package that
satisfies a default, Spack will generate the module file in the
appropriate path, and will generate a default symlink to the module
file as well.
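For example, with the ``gcc@10.2.1`` default above, the relevant module directory might
end up looking roughly like this (the architecture directory, second version, and hashes
are made up):

.. code-block:: console

   $ ls modules/linux-rhel8-x86_64/gcc
   10.2.1-abcdefg  9.4.0-hijklmn  default -> 10.2.1-abcdefg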
.. warning::
   If Spack is configured to generate multiple default packages in the
   same directory, the last modulefile to be generated will be the
   default module.

.. _customize-env-modifications:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Customize environment modifications
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

You can control which prefixes in a Spack package are added to
environment variables with the ``prefix_inspections`` section; this

@@ -650,9 +600,9 @@ stack to users who are likely to inspect the modules to find full
paths to software, when it is desirable to present the users with a
simpler set of paths than those generated by the Spack install tree.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Filter out environment modifications
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Modifications to certain environment variables in module files are there by
default, for instance because they are generated by prefix inspections.

@@ -672,37 +622,49 @@ do so by using the ``exclude_env_vars``:

The configuration above will generate module files that will not contain
modifications to either ``CPATH`` or ``LIBRARY_PATH``.

.. _autoloading-dependencies:

"""""""""""""""""""""
Autoload dependencies
"""""""""""""""""""""

Often it is required for a module to have its (transitive) dependencies loaded as well.
One example where this is useful is when one package needs to use executables provided
by its dependency; when the dependency is autoloaded, the executable will be in the
PATH. Similarly for scripting languages such as Python, packages and their dependencies
have to be loaded together.

Autoloading is enabled by default for Lmod and Environment Modules. The former
has built-in support for it through the ``depends_on`` function. The latter uses
a ``module load`` statement to load and track dependencies.

Autoloading can also be enabled conditionally:

.. code-block:: yaml

   modules:
     default:
       tcl:
         all:
           autoload: none
         ^python:
           autoload: direct

The configuration file above will produce module files that will
load their direct dependencies if the installed package depends on ``python``.
The allowed values for the ``autoload`` statement are ``none``,
``direct``, or ``all``.
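
With ``autoload: direct``, the generated Tcl module file for such a package would
contain load statements for its direct dependencies, roughly along these lines (the
module names below are made up):

.. code-block:: tcl

   # excerpt of a generated module file
   module load python/3.12.1-abcdefg
   module load py-setuptools/69.0.3-hijklmn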

.. _maintaining-module-files:

.. note::
   Tcl prerequisites

   In the ``tcl`` section of the configuration file it is possible to use
   the ``prerequisites`` directive that accepts the same values as
   ``autoload``. It will produce module files that have a ``prereq``
   statement, which autoloads dependencies on Environment Modules when its
   ``auto_handling`` configuration option is enabled. If Environment Modules
   is installed with Spack, ``auto_handling`` is enabled by default starting
   with version 4.2. Otherwise it is enabled by default since version 5.0.

------------------------
|
||||
Maintaining Module Files
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -97,35 +97,6 @@ Each package version and compiler listed in an external should
|
||||
have entries in Spack's packages and compiler configuration, even
|
||||
though the package and compiler may not ever be built.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Extra attributes for external packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Sometimes external packages require additional attributes to be used
effectively. This information can be defined on a per-package basis
and stored in the ``extra_attributes`` section of the external package
configuration. In addition to per-package information, this section
can be used to define environment modifications to be performed
whenever the package is used. For example, if an external package is
built without ``rpath`` support, it may require ``LD_LIBRARY_PATH``
settings to find its dependencies. This could be configured as
follows:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: mpich@3.3 %clang@12.0.0 +hwloc
         prefix: /path/to/mpich
         extra_attributes:
           environment:
             prepend_path:
               LD_LIBRARY_PATH: /path/to/hwloc/lib64

See :ref:`configuration_environment_variables` for more information on
how to configure environment modifications in Spack config files.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Prevent packages from being built from sources
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -487,56 +458,6 @@ present. For instance with a configuration like:
|
||||
|
||||
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Conflicts and strong preferences
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If the semantics of requirements are too strong, you can also express "strong preferences"
and "conflicts" from configuration files:

.. code-block:: yaml

   packages:
     all:
       prefer:
       - '%clang'
       conflict:
       - '+shared'

The ``prefer`` and ``conflict`` sections can be used whenever a ``require`` section is allowed.
The argument is always a list of constraints, and each constraint can be either a simple string,
or a more complex object:

.. code-block:: yaml

   packages:
     all:
       conflict:
       - spec: '%clang'
         when: 'target=x86_64_v3'
         message: 'reason why clang cannot be used'

The ``spec`` attribute is mandatory, while both ``when`` and ``message`` are optional.

.. note::

   Requirements allow for expressing both "strong preferences" and "conflicts".
   The syntax for doing so, though, may not be immediately clear. For
   instance, if we want to prevent any package from using ``%clang``, we can set:

   .. code-block:: yaml

      packages:
        all:
          require:
          - one_of: ['%clang', '@:']

   Since only one of the requirements must hold, and ``@:`` is always true, the rule above is
   equivalent to a conflict. For "strong preferences" we need to substitute the ``one_of`` policy
   with ``any_of``.
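
   For completeness, the strong-preference counterpart of the rule above is sketched
   below; since ``@:`` always holds, the requirement can always be satisfied, but the
   solver is pushed toward ``%clang`` whenever possible:

   .. code-block:: yaml

      packages:
        all:
          require:
          - any_of: ['%clang', '@:']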
|
||||
|
||||
|
||||
|
||||
.. _package-preferences:
|
||||
|
||||
-------------------
|
||||
@@ -647,8 +568,6 @@ manually placed files within the install prefix are owned by the
|
||||
assigned group. If no group is assigned, Spack will allow the OS
|
||||
default behavior to go as expected.
|
||||
|
||||
.. _assigning-package-attributes:
|
||||
|
||||
----------------------------
|
||||
Assigning Package Attributes
|
||||
----------------------------
|
||||
@@ -659,11 +578,10 @@ You can assign class-level attributes in the configuration:
|
||||
|
||||
   packages:
     mpileaks:
       package_attributes:
         # Override existing attributes
         url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
         # ... or add new ones
         x: 1

Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -237,7 +237,7 @@ for details):
|
||||
.. code-block:: python
|
||||
:linenos:
|
||||
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -4379,16 +4379,10 @@ implementation was selected for this build:

   elif "mvapich" in spec:
       configure_args.append("--with-mvapich")

It's also a bit more concise than ``satisfies()``.

.. note::

   The ``satisfies()`` method tests whether this spec has, at least, all the constraints of the argument spec,
   while ``in`` tests whether a spec or any of its dependencies satisfy the provided spec.
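
   A small illustrative sketch of the distinction (the constraints shown are made up):

   .. code-block:: python

      # inside a package method, ``spec`` is this package's concrete spec
      spec.satisfies("%gcc@12:")   # checks constraints on this spec itself
      "mvapich" in spec            # True if the spec or any of its dependencies satisfies it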
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Architecture specifiers
|
||||
@@ -5290,7 +5284,7 @@ installed example.
|
||||
example = which(self.prefix.bin.example)
|
||||
example()
|
||||
|
||||
Output showing the identification of each test part after running the tests
is illustrated below.
|
||||
|
||||
.. code-block:: console
|
||||
@@ -5787,7 +5781,7 @@ with those implemented in the package itself.
|
||||
* - `Cxx
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
|
||||
- Compiles and runs several ``hello`` programs
|
||||
* - `Fortran
  <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
|
||||
- Compiles and runs ``hello`` programs (``F`` and ``f90``)
|
||||
* - `Mpi
|
||||
@@ -6979,18 +6973,3 @@ you probably care most about are:
|
||||
You may also care about `license exceptions
|
||||
<https://spdx.org/licenses/exceptions-index.html>`_ that use the ``WITH`` operator,
|
||||
e.g. ``Apache-2.0 WITH LLVM-exception``.
|
||||
|
||||
Many of the licenses that are currently in the Spack repositories have been
automatically determined. While this is great for bulk adding license
information and is most likely correct, there are sometimes edge cases that
require manual intervention. To determine which licenses are validated and
which are not, there is the ``checked_by`` parameter in the license directive:

.. code-block:: python

   license("<license>", when="<when>", checked_by="<github username>")

When you have validated a GitHub license, either when doing so explicitly or
as part of packaging a new package, please set the ``checked_by`` parameter
to your GitHub username to signal that the license has been manually
verified.
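
As a concrete illustration (the license identifier, version condition, and username
here are hypothetical), a manually verified entry could look like:

.. code-block:: python

   license("Apache-2.0 WITH LLVM-exception", when="@14:", checked_by="your-github-handle")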
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -810,7 +810,7 @@ generated by ``spack ci generate``. You also want your generated rebuild jobs
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
# ...
|
||||
...
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -17,7 +17,7 @@ experimental software separately from the built-in repository. Spack
|
||||
allows you to configure local repositories using either the
|
||||
``repos.yaml`` or the ``spack repo`` command.
|
||||
|
||||
A package repository is a directory structured like this::
|
||||
|
||||
repo/
|
||||
repo.yaml
|
||||
|
@@ -1,13 +1,13 @@
|
||||
sphinx==7.2.6
|
||||
sphinxcontrib-programoutput==0.17
|
||||
sphinx_design==0.5.0
|
||||
sphinx-rtd-theme==2.0.0
|
||||
python-levenshtein==0.25.0
|
||||
docutils==0.20.1
|
||||
pygments==2.17.2
|
||||
urllib3==2.2.0
|
||||
pytest==8.0.1
|
||||
isort==5.13.2
|
||||
black==24.2.0
|
||||
flake8==7.0.0
|
||||
mypy==1.8.0
|
||||
sphinx-rtd-theme==1.3.0
|
||||
python-levenshtein==0.23.0
|
||||
docutils==0.18.1
|
||||
pygments==2.17.1
|
||||
urllib3==2.1.0
|
||||
pytest==7.4.3
|
||||
isort==5.12.0
|
||||
black==23.11.0
|
||||
flake8==6.1.0
|
||||
mypy==1.7.0
|
||||
|
@@ -1,4 +1,4 @@
|
||||
.. Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -142,7 +142,7 @@ Reputational Key
|
||||
----------------
|
||||
|
||||
The Reputational Key is the public facing key used to sign complete groups of
|
||||
development and release packages. Only one key pair exists in this class of
|
||||
keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
|
||||
used to verify package integrity. At the end of develop and release pipeline a
|
||||
final pipeline job pulls down all signed package metadata built by the pipeline,
|
||||
@@ -272,7 +272,7 @@ Internal Implementation
|
||||
|
||||
The technical implementation of the pipeline signing process includes components
|
||||
defined in Amazon Web Services, the Kubernetes cluster, at affiliated
institutions, and the GitLab/GitLab Runner deployment. We present the technical
|
||||
implementation in two interdependent sections. The first addresses how secrets
|
||||
are managed through the lifecycle of a develop or release pipeline. The second
|
||||
section describes how Gitlab Runner and pipelines are configured and managed to
|
||||
@@ -295,7 +295,7 @@ infrastructure.
|
||||
-----------------------
|
||||
|
||||
Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
|
||||
run in AWS, and one key for each affiliated institution (e.g. University of
|
||||
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:
|
||||
|
||||
The Intermediate CI Key (including the Signing Intermediate CI Private Key is
|
||||
@@ -305,7 +305,7 @@ contains an ASCII-armored export of just the *public* components of the
|
||||
Reputational Key. This secret also contains the *public* components of each of
|
||||
the affiliated institutions' Intermediate CI Key. These are potentially needed
|
||||
to verify dependent packages which may have been found in the public mirror or
|
||||
built by a protected job running on an affiliated institution's infrastructure
|
||||
in an earlier stage of the pipeline.
|
||||
|
||||
Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
2
lib/spack/env/cc
vendored
2
lib/spack/env/cc
vendored
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh -f
|
||||
# shellcheck disable=SC2034 # evals in this script fool shellcheck
|
||||
#
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
2
lib/spack/external/__init__.py
vendored
2
lib/spack/external/__init__.py
vendored
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -171,7 +171,7 @@ def polite_path(components: Iterable[str]):
|
||||
@memoized
|
||||
def _polite_antipattern():
|
||||
# A regex of all the characters we don't want in a filename
|
||||
return re.compile(r"[^A-Za-z0-9_+.-]")
|
||||
return re.compile(r"[^A-Za-z0-9_.-]")
|
||||
|
||||
|
||||
def polite_filename(filename: str) -> str:
|
||||
@@ -920,34 +920,28 @@ def get_filetype(path_name):
|
||||
return output.strip()
|
||||
|
||||
|
||||
def has_shebang(path):
    """Returns whether a path has a shebang line. Returns False if the file cannot be opened."""
    try:
        with open(path, "rb") as f:
            return f.read(2) == b"#!"
    except OSError:
        return False


@system_path_filter
def is_nonsymlink_exe_with_shebang(path):
    """Returns whether the path is an executable regular file with a shebang. Returns False too
    when the path is a symlink to a script, and also when the file cannot be opened."""
    try:
        st = os.lstat(path)
    except OSError:
        return False

    # Should not be a symlink
    if stat.S_ISLNK(st.st_mode):
        return False

    # Should be executable
    if not st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
        return False

    # Should start with a shebang
    return has_shebang(path)
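
# --- Illustrative usage sketch (not part of the original file) ---
# The path below is hypothetical. ``is_nonsymlink_exe_with_shebang`` returns True only
# for a regular, executable file whose first two bytes are "#!", so it can serve as a
# guard before rewriting a script's interpreter line:
#
#     if is_nonsymlink_exe_with_shebang("/opt/tools/bin/run.sh"):
#         ...  # safe to rewrite the shebang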
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def chgrp_if_not_world_writable(path, group):
|
||||
@@ -1383,89 +1377,120 @@ def traverse_tree(
|
||||
yield (source_path, dest_path)
|
||||
|
||||
|
||||
def lexists_islink_isdir(path):
|
||||
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
|
||||
number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
|
||||
if sys.platform == "win32":
|
||||
if not os.path.lexists(path):
|
||||
return False, False, False
|
||||
return os.path.lexists(path), islink(path), os.path.isdir(path)
|
||||
# First try to lstat, so we know if it's a link or not.
|
||||
try:
|
||||
lst = os.lstat(path)
|
||||
except (IOError, OSError):
|
||||
return False, False, False
|
||||
|
||||
is_link = stat.S_ISLNK(lst.st_mode)
|
||||
|
||||
# Check whether file is a dir.
|
||||
if not is_link:
|
||||
is_dir = stat.S_ISDIR(lst.st_mode)
|
||||
return True, is_link, is_dir
|
||||
|
||||
# Check whether symlink points to a dir.
|
||||
try:
|
||||
st = os.stat(path)
|
||||
is_dir = stat.S_ISDIR(st.st_mode)
|
||||
except (IOError, OSError):
|
||||
# Dangling symlink (i.e. it lexists but not exists)
|
||||
is_dir = False
|
||||
|
||||
return True, is_link, is_dir
|
||||
|
||||
|
||||
class BaseDirectoryVisitor:
|
||||
"""Base class and interface for :py:func:`visit_directory_tree`."""
|
||||
|
||||
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
def visit_file(self, root, rel_path, depth):
|
||||
"""Handle the non-symlink file at ``os.path.join(root, rel_path)``
|
||||
|
||||
Parameters:
|
||||
root: root directory
|
||||
rel_path: relative path to current file from ``root``
|
||||
root (str): root directory
|
||||
rel_path (str): relative path to current file from ``root``
|
||||
depth (int): depth of current file from the ``root`` directory"""
|
||||
pass
|
||||
|
||||
def visit_symlinked_file(self, root: str, rel_path: str, depth) -> None:
|
||||
"""Handle the symlink to a file at ``os.path.join(root, rel_path)``. Note: ``rel_path`` is
|
||||
the location of the symlink, not to what it is pointing to. The symlink may be dangling.
|
||||
def visit_symlinked_file(self, root, rel_path, depth):
|
||||
"""Handle the symlink to a file at ``os.path.join(root, rel_path)``.
|
||||
Note: ``rel_path`` is the location of the symlink, not to what it is
|
||||
pointing to. The symlink may be dangling.
|
||||
|
||||
Parameters:
|
||||
root: root directory
|
||||
rel_path: relative path to current symlink from ``root``
|
||||
depth: depth of current symlink from the ``root`` directory"""
|
||||
root (str): root directory
|
||||
rel_path (str): relative path to current symlink from ``root``
|
||||
depth (int): depth of current symlink from the ``root`` directory"""
|
||||
pass
|
||||
|
||||
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
def before_visit_dir(self, root, rel_path, depth):
|
||||
"""Return True from this function to recurse into the directory at
|
||||
os.path.join(root, rel_path). Return False in order not to recurse further.
|
||||
|
||||
Parameters:
|
||||
root: root directory
|
||||
rel_path: relative path to current directory from ``root``
|
||||
depth: depth of current directory from the ``root`` directory
|
||||
root (str): root directory
|
||||
rel_path (str): relative path to current directory from ``root``
|
||||
depth (int): depth of current directory from the ``root`` directory
|
||||
|
||||
Returns:
|
||||
bool: ``True`` when the directory should be recursed into. ``False`` when
|
||||
not"""
|
||||
return False
|
||||
|
||||
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
"""Return ``True`` to recurse into the symlinked directory and ``False`` in order not to.
|
||||
Note: ``rel_path`` is the path to the symlink itself. Following symlinked directories
|
||||
blindly can cause infinite recursion due to cycles.
|
||||
def before_visit_symlinked_dir(self, root, rel_path, depth):
|
||||
"""Return ``True`` to recurse into the symlinked directory and ``False`` in
|
||||
order not to. Note: ``rel_path`` is the path to the symlink itself.
|
||||
Following symlinked directories blindly can cause infinite recursion due to
|
||||
cycles.
|
||||
|
||||
Parameters:
|
||||
root: root directory
|
||||
rel_path: relative path to current symlink from ``root``
|
||||
depth: depth of current symlink from the ``root`` directory
|
||||
root (str): root directory
|
||||
rel_path (str): relative path to current symlink from ``root``
|
||||
depth (int): depth of current symlink from the ``root`` directory
|
||||
|
||||
Returns:
|
||||
bool: ``True`` when the directory should be recursed into. ``False`` when
|
||||
not"""
|
||||
return False
|
||||
|
||||
def after_visit_dir(self, root: str, rel_path: str, depth: int) -> None:
|
||||
"""Called after recursion into ``rel_path`` finished. This function is not called when
|
||||
``rel_path`` was not recursed into.
|
||||
def after_visit_dir(self, root, rel_path, depth):
|
||||
"""Called after recursion into ``rel_path`` finished. This function is not
|
||||
called when ``rel_path`` was not recursed into.
|
||||
|
||||
Parameters:
|
||||
root: root directory
|
||||
rel_path: relative path to current directory from ``root``
|
||||
depth: depth of current directory from the ``root`` directory"""
|
||||
root (str): root directory
|
||||
rel_path (str): relative path to current directory from ``root``
|
||||
depth (int): depth of current directory from the ``root`` directory"""
|
||||
pass
|
||||
|
||||
def after_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> None:
|
||||
"""Called after recursion into ``rel_path`` finished. This function is not called when
|
||||
``rel_path`` was not recursed into.
|
||||
def after_visit_symlinked_dir(self, root, rel_path, depth):
|
||||
"""Called after recursion into ``rel_path`` finished. This function is not
|
||||
called when ``rel_path`` was not recursed into.
|
||||
|
||||
Parameters:
|
||||
root: root directory
|
||||
rel_path: relative path to current symlink from ``root``
|
||||
depth: depth of current symlink from the ``root`` directory"""
|
||||
root (str): root directory
|
||||
rel_path (str): relative path to current symlink from ``root``
|
||||
depth (int): depth of current symlink from the ``root`` directory"""
|
||||
pass
|
||||
|
||||
|
||||
def visit_directory_tree(
|
||||
root: str, visitor: BaseDirectoryVisitor, rel_path: str = "", depth: int = 0
|
||||
):
|
||||
"""Recurses the directory root depth-first through a visitor pattern using the interface from
|
||||
:py:class:`BaseDirectoryVisitor`
|
||||
def visit_directory_tree(root, visitor, rel_path="", depth=0):
|
||||
"""Recurses the directory root depth-first through a visitor pattern using the
|
||||
interface from :py:class:`BaseDirectoryVisitor`
|
||||
|
||||
Parameters:
|
||||
root: path of directory to recurse into
|
||||
visitor: what visitor to use
|
||||
rel_path: current relative path from the root
|
||||
depth: current depth from the root
|
||||
root (str): path of directory to recurse into
|
||||
visitor (BaseDirectoryVisitor): what visitor to use
|
||||
rel_path (str): current relative path from the root
|
||||
depth (str): current depth from the root
|
||||
"""
|
||||
dir = os.path.join(root, rel_path)
|
||||
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)
|
||||
@@ -1473,19 +1498,26 @@ def visit_directory_tree(
|
||||
for f in dir_entries:
|
||||
rel_child = os.path.join(rel_path, f.name)
|
||||
islink = f.is_symlink()
|
||||
# On Windows, symlinks to directories are distinct from symlinks to files, and it is
|
||||
# possible to create a broken symlink to a directory (e.g. using os.symlink without
|
||||
# `target_is_directory=True`), invoking `isdir` on a symlink on Windows that is broken in
|
||||
# this manner will result in an error. In this case we can work around the issue by reading
|
||||
# the target and resolving the directory ourselves
|
||||
# On Windows, symlinks to directories are distinct from
|
||||
# symlinks to files, and it is possible to create a
|
||||
# broken symlink to a directory (e.g. using os.symlink
|
||||
# without `target_is_directory=True`), invoking `isdir`
|
||||
# on a symlink on Windows that is broken in this manner
|
||||
# will result in an error. In this case we can work around
|
||||
# the issue by reading the target and resolving the
|
||||
# directory ourselves
|
||||
try:
|
||||
isdir = f.is_dir()
|
||||
except OSError as e:
|
||||
if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
|
||||
# if path is a symlink, determine destination and evaluate file vs directory
|
||||
# if path is a symlink, determine destination and
|
||||
# evaluate file vs directory
|
||||
link_target = resolve_link_target_relative_to_the_link(f)
|
||||
# link_target might be relative but resolve_link_target_relative_to_the_link
|
||||
# will ensure that if so, that it is relative to the CWD and therefore makes sense
|
||||
# link_target might be relative but
|
||||
# resolve_link_target_relative_to_the_link
|
||||
# will ensure that if so, that it is relative
|
||||
# to the CWD and therefore
|
||||
# makes sense
|
||||
isdir = os.path.isdir(link_target)
|
||||
else:
|
||||
raise e
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -1047,9 +1047,9 @@ def __bool__(self):
|
||||
"""Whether any exceptions were handled."""
|
||||
return bool(self.exceptions)
|
||||
|
||||
def forward(self, context: str, base: type = BaseException) -> "GroupedExceptionForwarder":
    """Return a contextmanager which extracts tracebacks and prefixes a message."""
    return GroupedExceptionForwarder(context, self, base)
|
||||
|
||||
def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
|
||||
self.exceptions.append((context, exc, tb))
|
||||
@@ -1072,18 +1072,15 @@ class GroupedExceptionForwarder:
|
||||
"""A contextmanager to capture exceptions and forward them to a
|
||||
GroupedExceptionHandler."""
|
||||
|
||||
def __init__(self, context: str, handler: GroupedExceptionHandler, base: type):
    self._context = context
    self._handler = handler
    self._base = base
|
||||
|
||||
def __enter__(self):
|
||||
return None
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
if exc_value is not None:
|
||||
if not issubclass(exc_type, self._base):
|
||||
return False
|
||||
self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))
|
||||
|
||||
# Suppress any exception from being re-raised:
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -8,7 +8,7 @@
|
||||
import filecmp
|
||||
import os
|
||||
import shutil
|
||||
from typing import Callable, Dict, List, Optional, Tuple
|
||||
from collections import OrderedDict
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, touch, traverse_tree
|
||||
@@ -51,32 +51,32 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
|
||||
- A list of merge conflicts in dst/
|
||||
"""
|
||||
|
||||
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
|
||||
def __init__(self, ignore=None):
|
||||
self.ignore = ignore if ignore is not None else lambda f: False
|
||||
|
||||
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
|
||||
# bit to the relative path in the destination dir.
|
||||
self.projection: str = ""
|
||||
# When mapping <src root> to <dst root>/<projection>, we need
|
||||
# to prepend the <projection> bit to the relative path in the
|
||||
# destination dir.
|
||||
self.projection = ""
|
||||
|
||||
# Two files f and g conflict if they are not os.path.samefile(f, g) and they are both
|
||||
# projected to the same destination file. These conflicts are not necessarily fatal, and
|
||||
# can be resolved or ignored. For example <prefix>/LICENSE or
|
||||
# <site-packages>/<namespace>/__init__.py conflicts can be ignored).
|
||||
self.file_conflicts: List[MergeConflict] = []
|
||||
# When a file blocks another file, the conflict can sometimes
|
||||
# be resolved / ignored (e.g. <prefix>/LICENSE or
|
||||
# or <site-packages>/<namespace>/__init__.py conflicts can be
|
||||
# ignored).
|
||||
self.file_conflicts = []
|
||||
|
||||
# When we have to create a dir where a file is, or a file where a dir is, we have fatal
|
||||
# errors, listed here.
|
||||
self.fatal_conflicts: List[MergeConflict] = []
|
||||
# When we have to create a dir where a file is, or a file
|
||||
# where a dir is, we have fatal errors, listed here.
|
||||
self.fatal_conflicts = []
|
||||
|
||||
# What directories we have to make; this is an ordered dict, so that we have a fast lookup
|
||||
# and can run mkdir in order.
|
||||
self.directories: Dict[str, Tuple[str, str]] = {}
|
||||
# What directories we have to make; this is an ordered set,
|
||||
# so that we have a fast lookup and can run mkdir in order.
|
||||
self.directories = OrderedDict()
|
||||
|
||||
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
|
||||
# are guaranteed to be grouped by src_root in the order they were visited.
|
||||
self.files: Dict[str, Tuple[str, str]] = {}
|
||||
# Files to link. Maps dst_rel to (src_root, src_rel)
|
||||
self.files = OrderedDict()
|
||||
|
||||
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
def before_visit_dir(self, root, rel_path, depth):
|
||||
"""
|
||||
Register a directory if dst / rel_path is not blocked by a file or ignored.
|
||||
"""
|
||||
@@ -104,7 +104,7 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
self.directories[proj_rel_path] = (root, rel_path)
|
||||
return True
|
||||
|
||||
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
def before_visit_symlinked_dir(self, root, rel_path, depth):
|
||||
"""
|
||||
Replace symlinked dirs with actual directories when possible in low depths,
|
||||
otherwise handle it as a file (i.e. we link to the symlink).
|
||||
@@ -136,56 +136,40 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
|
||||
self.visit_file(root, rel_path, depth)
|
||||
return False
|
||||
|
||||
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
|
||||
def visit_file(self, root, rel_path, depth):
|
||||
proj_rel_path = os.path.join(self.projection, rel_path)
|
||||
|
||||
if self.ignore(rel_path):
|
||||
pass
|
||||
elif proj_rel_path in self.directories:
|
||||
# Can't create a file where a dir is; fatal error
|
||||
src_a_root, src_a_relpath = self.directories[proj_rel_path]
|
||||
self.fatal_conflicts.append(
|
||||
MergeConflict(
|
||||
dst=proj_rel_path,
|
||||
src_a=os.path.join(*self.directories[proj_rel_path]),
|
||||
src_a=os.path.join(src_a_root, src_a_relpath),
|
||||
src_b=os.path.join(root, rel_path),
|
||||
)
|
||||
)
|
||||
elif proj_rel_path in self.files:
|
||||
# When two files project to the same path, they conflict iff they are distinct.
|
||||
# If they are the same (i.e. one links to the other), register regular files rather
|
||||
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
|
||||
# file, not the symlink.
|
||||
|
||||
src_a = os.path.join(*self.files[proj_rel_path])
|
||||
src_b = os.path.join(root, rel_path)
|
||||
|
||||
try:
|
||||
samefile = os.path.samefile(src_a, src_b)
|
||||
except OSError:
|
||||
samefile = False
|
||||
|
||||
if not samefile:
|
||||
# Distinct files produce a conflict.
|
||||
self.file_conflicts.append(
|
||||
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
|
||||
# In some cases we can resolve file-file conflicts
|
||||
src_a_root, src_a_relpath = self.files[proj_rel_path]
|
||||
self.file_conflicts.append(
|
||||
MergeConflict(
|
||||
dst=proj_rel_path,
|
||||
src_a=os.path.join(src_a_root, src_a_relpath),
|
||||
src_b=os.path.join(root, rel_path),
|
||||
)
|
||||
return
|
||||
|
||||
if not symlink:
|
||||
# Remove the link in favor of the actual file. The del is necessary to maintain the
|
||||
# order of the files dict, which is grouped by root.
|
||||
del self.files[proj_rel_path]
|
||||
self.files[proj_rel_path] = (root, rel_path)
|
||||
|
||||
)
|
||||
else:
|
||||
# Otherwise register this file to be linked.
|
||||
self.files[proj_rel_path] = (root, rel_path)
|
||||
|
||||
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
def visit_symlinked_file(self, root, rel_path, depth):
|
||||
# Treat symlinked files as ordinary files (without "dereferencing")
|
||||
self.visit_file(root, rel_path, depth, symlink=True)
|
||||
self.visit_file(root, rel_path, depth)
|
||||
|
||||
def set_projection(self, projection: str) -> None:
|
||||
def set_projection(self, projection):
|
||||
self.projection = os.path.normpath(projection)
|
||||
|
||||
# Todo, is this how to check in general for empty projection?
|
||||
@@ -213,19 +197,24 @@ def set_projection(self, projection: str) -> None:
|
||||
|
||||
|
||||
class DestinationMergeVisitor(BaseDirectoryVisitor):
|
||||
"""DestinatinoMergeVisitor takes a SourceMergeVisitor and:
|
||||
"""DestinatinoMergeVisitor takes a SourceMergeVisitor
|
||||
and:
|
||||
|
||||
a. registers additional conflicts when merging to the destination prefix
|
||||
b. removes redundant mkdir operations when directories already exist in the destination prefix.
|
||||
a. registers additional conflicts when merging
|
||||
to the destination prefix
|
||||
b. removes redundant mkdir operations when
|
||||
directories already exist in the destination
|
||||
prefix.
|
||||
|
||||
This also makes sure that symlinked directories in the target prefix will never be merged with
|
||||
This also makes sure that symlinked directories
|
||||
in the target prefix will never be merged with
|
||||
directories in the sources directories.
|
||||
"""
|
||||
|
||||
def __init__(self, source_merge_visitor: SourceMergeVisitor):
|
||||
def __init__(self, source_merge_visitor):
|
||||
self.src = source_merge_visitor
|
||||
|
||||
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
def before_visit_dir(self, root, rel_path, depth):
|
||||
# If destination dir is a file in a src dir, add a conflict,
|
||||
# and don't traverse deeper
|
||||
if rel_path in self.src.files:
|
||||
@@ -247,7 +236,7 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
# don't descend into it.
|
||||
return False
|
||||
|
||||
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
|
||||
def before_visit_symlinked_dir(self, root, rel_path, depth):
|
||||
"""
|
||||
Symlinked directories in the destination prefix should
|
||||
be seen as files; we should not accidentally merge
|
||||
@@ -273,7 +262,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
|
||||
# Never descend into symlinked target dirs.
|
||||
return False
|
||||
|
||||
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
def visit_file(self, root, rel_path, depth):
|
||||
# Can't merge a file if target already exists
|
||||
if rel_path in self.src.directories:
|
||||
src_a_root, src_a_relpath = self.src.directories[rel_path]
|
||||
@@ -291,7 +280,7 @@ def visit_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
)
|
||||
)
|
||||
|
||||
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
|
||||
def visit_symlinked_file(self, root, rel_path, depth):
|
||||
# Treat symlinked files as ordinary files (without "dereferencing")
|
||||
self.visit_file(root, rel_path, depth)
|
||||
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -204,23 +204,17 @@ def color_when(value):
|
||||
|
||||
|
||||
class match_to_ansi:
    def __init__(self, color=True, enclose=False, zsh=False):
        self.color = _color_when_value(color)
        self.enclose = enclose
        self.zsh = zsh

    def escape(self, s):
        """Returns a TTY escape sequence for a color"""
        if self.color:
            if self.zsh:
                result = rf"\e[0;{s}m"
            else:
                result = f"\033[{s}m"

            if self.enclose:
                result = rf"\[{result}\]"

            return result
        else:
            return ""
|
||||
|
||||
@@ -267,11 +261,9 @@ def colorize(string, **kwargs):
|
||||
codes, for output to non-console devices.
|
||||
enclose (bool): If True, enclose ansi color sequences with
|
||||
square brackets to prevent misestimation of terminal width.
|
||||
zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
|
||||
"""
|
||||
color = _color_when_value(kwargs.get("color", get_color_when()))
|
||||
zsh = kwargs.get("zsh", False)
string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose"), zsh), string)
|
||||
string = string.replace("}}", "}")
|
||||
return string
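
# --- Illustrative usage sketch (not part of the original file) ---
# With zsh=True the escape sequences are emitted in zsh prompt syntax (e.g. for PS1);
# the prompt text below is made up, only the keyword arguments are real:
#
#     prompt = colorize("@G{[spack]} $ ", zsh=True, enclose=True)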
|
||||
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
@@ -244,7 +244,7 @@ def _search_duplicate_specs_in_externals(error_cls):
|
||||
+ lines
|
||||
+ ["as they might result in non-deterministic hashes"]
|
||||
)
|
||||
except (TypeError, AttributeError):
|
||||
details = []
|
||||
|
||||
errors.append(error_cls(summary=error_msg, details=details))
|
||||
@@ -292,6 +292,12 @@ def _avoid_mismatched_variants(error_cls):
|
||||
errors = []
|
||||
packages_yaml = spack.config.CONFIG.get_config("packages")
|
||||
|
||||
def make_error(config_data, summary):
|
||||
s = io.StringIO()
|
||||
s.write("Occurring in the following file:\n")
|
||||
syaml.dump_config(config_data, stream=s, blame=True)
|
||||
return error_cls(summary=summary, details=[s.getvalue()])
|
||||
|
||||
for pkg_name in packages_yaml:
|
||||
# 'all:' must be more forgiving, since it is setting defaults for everything
|
||||
if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]:
|
||||
@@ -311,7 +317,7 @@ def _avoid_mismatched_variants(error_cls):
|
||||
f"Setting a preference for the '{pkg_name}' package to the "
|
||||
f"non-existing variant '{variant.name}'"
|
||||
)
|
||||
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
|
||||
errors.append(make_error(preferences, summary))
|
||||
continue
|
||||
|
||||
# Variant cannot accept this value
|
||||
@@ -323,41 +329,11 @@ def _avoid_mismatched_variants(error_cls):
|
||||
f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
|
||||
f"to the invalid value '{str(variant)}'"
|
||||
)
|
||||
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
|
||||
errors.append(make_error(preferences, summary))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@config_packages
|
||||
def _wrongly_named_spec(error_cls):
|
||||
"""Warns if the wrong name is used for an external spec"""
|
||||
errors = []
|
||||
packages_yaml = spack.config.CONFIG.get_config("packages")
|
||||
for pkg_name in packages_yaml:
|
||||
if pkg_name == "all":
|
||||
continue
|
||||
|
||||
externals = packages_yaml[pkg_name].get("externals", [])
|
||||
is_virtual = spack.repo.PATH.is_virtual(pkg_name)
|
||||
for entry in externals:
|
||||
spec = spack.spec.Spec(entry["spec"])
|
||||
regular_pkg_is_wrong = not is_virtual and pkg_name != spec.name
|
||||
virtual_pkg_is_wrong = is_virtual and not any(
|
||||
p.name == spec.name for p in spack.repo.PATH.providers_for(pkg_name)
|
||||
)
|
||||
if regular_pkg_is_wrong or virtual_pkg_is_wrong:
|
||||
summary = f"Wrong external spec detected for '{pkg_name}': {spec}"
|
||||
errors.append(_make_config_error(entry, summary, error_cls=error_cls))
|
||||
return errors
|
||||
|
||||
|
||||
def _make_config_error(config_data, summary, error_cls):
|
||||
s = io.StringIO()
|
||||
s.write("Occurring in the following file:\n")
|
||||
syaml.dump_config(config_data, stream=s, blame=True)
|
||||
return error_cls(summary=summary, details=[s.getvalue()])
|
||||
|
||||
|
||||
#: Sanity checks on package directives
|
||||
package_directives = AuditClass(
|
||||
group="packages",
|
||||
@@ -691,8 +667,8 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
|
||||
# Check "conflicts" directive
|
||||
for trigger, conflicts in pkg_cls.conflicts.items():
|
||||
for conflict, _ in conflicts:
|
||||
for conflict, triggers in pkg_cls.conflicts.items():
|
||||
for trigger, _ in triggers:
|
||||
vrn = spack.spec.Spec(conflict)
|
||||
try:
|
||||
vrn.constrain(trigger)
|
||||
@@ -718,21 +694,25 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
)
|
||||
|
||||
# Check "depends_on" directive
|
||||
for trigger in pkg_cls.dependencies:
|
||||
vrn = spack.spec.Spec(trigger)
|
||||
errors.extend(
|
||||
_analyze_variants_in_directive(
|
||||
pkg_cls, vrn, directive="depends_on", error_cls=error_cls
|
||||
for _, triggers in pkg_cls.dependencies.items():
|
||||
triggers = list(triggers)
|
||||
for trigger in list(triggers):
|
||||
vrn = spack.spec.Spec(trigger)
|
||||
errors.extend(
|
||||
_analyze_variants_in_directive(
|
||||
pkg_cls, vrn, directive="depends_on", error_cls=error_cls
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Check "provides" directive
|
||||
for when_spec in pkg_cls.provided:
|
||||
errors.extend(
|
||||
_analyze_variants_in_directive(
|
||||
pkg_cls, when_spec, directive="provides", error_cls=error_cls
|
||||
# Check "patch" directive
|
||||
for _, triggers in pkg_cls.provided.items():
|
||||
triggers = [spack.spec.Spec(x) for x in triggers]
|
||||
for vrn in triggers:
|
||||
errors.extend(
|
||||
_analyze_variants_in_directive(
|
||||
pkg_cls, vrn, directive="patch", error_cls=error_cls
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Check "resource" directive
|
||||
for vrn in pkg_cls.resources:
|
||||
@@ -746,102 +726,47 @@ def _unknown_variants_in_directives(pkgs, error_cls):
|
||||
|
||||
|
||||
@package_directives
def _issues_in_depends_on_directive(pkgs, error_cls):
    """Reports issues with 'depends_on' directives.

    Issues might be unknown dependencies, unknown variants or variant values, or declaration
    of nested dependencies.
    """
def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.PATH.is_virtual(dependency_name):
                continue

        for when, deps_by_name in pkg_cls.dependencies.items():
            for dep_name, dep in deps_by_name.items():
                # Check if there are nested dependencies declared. We don't want directives like:
                #
                # depends_on('foo+bar ^fee+baz')
                #
                # but we'd like to have two dependencies listed instead.
                nested_dependencies = dep.spec.dependencies()
                if nested_dependencies:
                    summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
                    ndir = len(nested_dependencies) + 1
                    details = [
                        f"split depends_on('{dep.spec}', when='{when}') into {ndir} directives",
                        f"in {filename}",
                    ]
                    errors.append(error_cls(summary=summary, details=details))
            try:
                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
                summary = pkg_name + ": unknown package '{0}' in " "'depends_on' directive".format(
                    dependency_name
                )
                details = [" in " + filename]
                errors.append(error_cls(summary=summary, details=details))
                continue

                def check_virtual_with_variants(spec, msg):
                    if not spec.virtual or not spec.variants:
                        return
                    error = error_cls(
                        f"{pkg_name}: {msg}",
                        f"remove variants from '{spec}' in depends_on directive in {filename}",
                    )
                    errors.append(error)

                check_virtual_with_variants(dep.spec, "virtual dependency cannot have variants")
                check_virtual_with_variants(dep.spec, "virtual when= spec cannot have variants")

                # No need to analyze virtual packages
                if spack.repo.PATH.is_virtual(dep_name):
                    continue

                # check for unknown dependencies
                try:
                    dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dep_name)
                except spack.repo.UnknownPackageError:
                    # This dependency is completely missing, so report
                    # and continue the analysis
                    summary = f"{pkg_name}: unknown package '{dep_name}' in 'depends_on' directive"
                    details = [f" in {filename}"]
                    errors.append(error_cls(summary=summary, details=details))
                    continue

                # Check for self-referential specs similar to:
                #
                # depends_on("foo@X.Y", when="^foo+bar")
                #
                # That would allow clingo to choose whether to have foo@X.Y+bar in the graph.
                problematic_edges = [
                    x for x in when.edges_to_dependencies(dep_name) if not x.virtuals
                ]
                if problematic_edges and not dep.patches:
                    summary = (
                        f"{pkg_name}: dependency on '{dep.spec}' when '{when}' is self-referential"
                    )
                    details = [
                        (
                            f" please specify better using '^[virtuals=...] {dep_name}', or "
                            f"substitute with an equivalent condition on '{pkg_name}'"
                        ),
                        f" in {filename}",
                    ]
                    errors.append(error_cls(summary=summary, details=details))
                    continue

                # check variants
                dependency_variants = dep.spec.variants
            for _, dependency_edge in dependency_data.items():
                dependency_variants = dependency_edge.spec.variants
                for name, value in dependency_variants.items():
                    try:
                        v, _ = dependency_pkg_cls.variants[name]
                        v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
                    except Exception as e:
                        summary = (
                            f"{pkg_name}: wrong variant used for dependency in 'depends_on()'"
                            pkg_name + ": wrong variant used for a "
                            "dependency in a 'depends_on' directive"
                        )

                        error_msg = str(e).strip()
                        if isinstance(e, KeyError):
                            error_msg = (
                                f"variant {str(e).strip()} does not exist in package {dep_name}"
                            )
                            error_msg += f" in package '{dep_name}'"
                        error_msg = "the variant {0} does not " "exist".format(error_msg)
                        error_msg += " in package '" + dependency_name + "'"

                        errors.append(
                            error_cls(summary=summary, details=[error_msg, f"in {filename}"])
                            error_cls(summary=summary, details=[error_msg, "in " + filename])
                        )

    return errors
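
The nested-dependency check above flags directives like `depends_on('foo+bar ^fee+baz')` and asks for one directive per dependency edge. A minimal, hypothetical `package.py` sketch of that split (all package, variant, URL and checksum values are made up):

```python
# Hypothetical package.py sketch; real Spack directives, invented names.
from spack.package import *  # assumes a Spack checkout on PYTHONPATH


class Example(Package):
    """Toy package illustrating the split suggested by the audit above."""

    homepage = "https://example.org"
    url = "https://example.org/example-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    # Instead of a single nested directive such as
    #     depends_on("foo+bar ^fee+baz")
    # state each dependency edge separately:
    depends_on("foo+bar")  # direct dependency, with its own variants
    depends_on("fee+baz")  # the transitive requirement, as its own directive
```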
@@ -908,17 +833,14 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)

        dependencies_to_check = []
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # Skip virtual dependencies for the time being, check on
            # their versions can be added later
            if spack.repo.PATH.is_virtual(dependency_name):
                continue

        for _, deps_by_name in pkg_cls.dependencies.items():
            for dep_name, dep in deps_by_name.items():
                # Skip virtual dependencies for the time being, check on
                # their versions can be added later
                if spack.repo.PATH.is_virtual(dep_name):
                    continue

                dependencies_to_check.append(dep.spec)
            dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])

        host_architecture = spack.spec.ArchSpec.default_arch()
        for s in dependencies_to_check:
@@ -990,53 +912,39 @@ def _named_specs_in_when_arguments(pkgs, error_cls):
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        def _refers_to_pkg(when):
            when_spec = spack.spec.Spec(when)
            return when_spec.name is None or when_spec.name == pkg_name

        def _error_items(when_dict):
            for when, elts in when_dict.items():
                if not _refers_to_pkg(when):
                    yield when, elts, [f"using '{when}', should be '^{when}'"]

        def _extracts_errors(triggers, summary):
            _errors = []
            for trigger in list(triggers):
                if not _refers_to_pkg(trigger):
                when_spec = spack.spec.Spec(trigger)
                if when_spec.name is not None and when_spec.name != pkg_name:
                    details = [f"using '{trigger}', should be '^{trigger}'"]
                    _errors.append(error_cls(summary=summary, details=details))
            return _errors

        for when, dnames, details in _error_items(pkg_cls.dependencies):
            errors.extend(
                error_cls(f"{pkg_name}: wrong 'when=' condition for '{dname}' dependency", details)
                for dname in dnames
            )
        for dname, triggers in pkg_cls.dependencies.items():
            summary = f"{pkg_name}: wrong 'when=' condition for the '{dname}' dependency"
            errors.extend(_extracts_errors(triggers, summary))

        for vname, (variant, triggers) in pkg_cls.variants.items():
            summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
            errors.extend(_extracts_errors(triggers, summary))

        for when, providers, details in _error_items(pkg_cls.provided):
            errors.extend(
                error_cls(f"{pkg_name}: wrong 'when=' condition for '{provided}' virtual", details)
                for provided in providers
            )
        for provided, triggers in pkg_cls.provided.items():
            summary = f"{pkg_name}: wrong 'when=' condition for the '{provided}' virtual"
            errors.extend(_extracts_errors(triggers, summary))

        for when, requirements, details in _error_items(pkg_cls.requirements):
            errors.append(
                error_cls(f"{pkg_name}: wrong 'when=' condition in 'requires' directive", details)
            )
        for _, triggers in pkg_cls.requirements.items():
            triggers = [when_spec for when_spec, _, _ in triggers]
            summary = f"{pkg_name}: wrong 'when=' condition in 'requires' directive"
            errors.extend(_extracts_errors(triggers, summary))

        for when, _, details in _error_items(pkg_cls.patches):
            errors.append(
                error_cls(f"{pkg_name}: wrong 'when=' condition in 'patch' directives", details)
            )
        triggers = list(pkg_cls.patches)
        summary = f"{pkg_name}: wrong 'when=' condition in 'patch' directives"
        errors.extend(_extracts_errors(triggers, summary))

        for when, _, details in _error_items(pkg_cls.resources):
            errors.append(
                error_cls(f"{pkg_name}: wrong 'when=' condition in 'resource' directives", details)
            )
        triggers = list(pkg_cls.resources)
        summary = f"{pkg_name}: wrong 'when=' condition in 'resource' directives"
        errors.extend(_extracts_errors(triggers, summary))

    return llnl.util.lang.dedupe(errors)
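
The `when=` audit above reports conditions that name another package without the leading `^` (its detail message suggests `'^{when}'`). A hypothetical `package.py` fragment showing the flagged form and the intended one (package names, versions and checksums are invented):

```python
# Hypothetical fragment; real Spack directives, made-up names and versions.
from spack.package import *


class Demo(Package):
    homepage = "https://example.org"
    url = "https://example.org/demo-1.0.tar.gz"
    version("1.0", sha256="0" * 64)  # placeholder checksum

    # Flagged by the audit: the when= spec names another package directly,
    #     depends_on("hdf5+mpi", when="openmpi@4:")
    # while the intended condition is on the dependency openmpi:
    depends_on("hdf5+mpi", when="^openmpi@4:")
```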

@@ -1,10 +1,11 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import codecs
import collections
import errno
import hashlib
import io
import itertools
@@ -22,8 +23,9 @@
import urllib.parse
import urllib.request
import warnings
from contextlib import closing
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Dict, List, NamedTuple, Optional, Set, Tuple
from urllib.error import HTTPError, URLError

import llnl.util.filesystem as fsys
@@ -48,7 +50,6 @@
import spack.stage
import spack.store
import spack.traverse as traverse
import spack.util.archive
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
@@ -67,10 +68,7 @@

BUILD_CACHE_RELATIVE_PATH = "build_cache"
BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"

#: The build cache layout version that this version of Spack creates.
#: Version 2: includes parent directories of the package prefix in the tarball
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
CURRENT_BUILD_CACHE_LAYOUT_VERSION = 1


class BuildCacheDatabase(spack_db.Database):
@@ -232,11 +230,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
            )
            return

        spec_list = [
            s
            for s in db.query_local(installed=any, in_buildcache=any)
            if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
        ]
        spec_list = db.query_local(installed=False, in_buildcache=True)

        for indexed_spec in spec_list:
            dag_hash = indexed_spec.dag_hash()
@@ -1132,46 +1126,195 @@ def generate_key_index(key_prefix, tmpdir=None):
        shutil.rmtree(tmpdir)


@contextmanager
def gzip_compressed_tarfile(path):
    """Create a reproducible, compressed tarfile"""
    # Create gzip compressed tarball of the install prefix
    # 1) Use explicit empty filename and mtime 0 for gzip header reproducibility.
    #    If the filename="" is dropped, Python will use fileobj.name instead.
    #    This should effectively mimick `gzip --no-name`.
    # 2) On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
    # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
    # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
    # So we follow gzip.
    with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing(
        GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum)
    ) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile(
        name="", mode="w", fileobj=outer_checksum
    ) as tar:
        yield tar, inner_checksum, outer_checksum
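
As a standalone illustration of the reproducibility trick used in `gzip_compressed_tarfile` (empty gzip filename, `mtime=0`, `compresslevel=6`), here is a stdlib-only sketch, not Spack's implementation; the function name, paths and the metadata-normalizing filter are assumptions:

```python
import os
import tarfile
from contextlib import closing
from gzip import GzipFile


def _normalize(info: tarfile.TarInfo) -> tarfile.TarInfo:
    # Drop metadata that would make the archive depend on time or user.
    info.mtime = 0
    info.uid = info.gid = 0
    info.uname = info.gname = ""
    return info


def reproducible_tgz(path: str, prefix: str) -> None:
    """Write `prefix` into `path` so the bytes do not depend on the current
    time or on the output file name (mimics `gzip --no-name`)."""
    with open(path, "wb") as f, closing(
        GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=f)
    ) as gz, tarfile.TarFile(name="", mode="w", fileobj=gz) as tar:
        tar.add(prefix, arcname=os.path.basename(prefix), filter=_normalize)


# reproducible_tgz("/tmp/out.tar.gz", "/tmp/some-prefix")  # placeholder paths
```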


def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str:
    """Compute tarfile entry name as the relative path from the (system) root."""
    return _path(*_path(absolute_path).parts[1:]).as_posix()


def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
    """Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.
    Only adds regular files, symlinks and dirs. Skips devices, fifos. Preserves hardlinks.
    Normalizes permissions like git. Tar entries are added in depth-first pre-order, with
    dir entries partitioned by file | dir, and sorted alphabetically, for reproducibility.
    Partitioning ensures only one dir is in memory at a time, and sorting improves compression.

    Args:
        tar: tarfile object to add files to
        prefix: absolute install prefix of spec"""
    if not os.path.isabs(prefix) or not os.path.isdir(prefix):
        raise ValueError(f"prefix '{prefix}' must be an absolute path to a directory")
    hardlink_to_tarinfo_name: Dict[Tuple[int, int], str] = dict()
    stat_key = lambda stat: (stat.st_dev, stat.st_ino)

    try:  # skip buildinfo file if it exists
        files_to_skip = [stat_key(os.lstat(buildinfo_file_name(prefix)))]
        skip = lambda entry: stat_key(entry.stat(follow_symlinks=False)) in files_to_skip
    except OSError:
        skip = lambda entry: False
        files_to_skip = []

    spack.util.archive.reproducible_tarfile_from_prefix(
        tar,
        prefix,
        # Spack <= 0.21 did not include parent directories, leading to issues when tarballs are
        # used in runtimes like AWS lambda.
        include_parent_directories=True,
        skip=skip,
    )
    dir_stack = [prefix]
    while dir_stack:
        dir = dir_stack.pop()

        # Add the dir before its contents
        dir_info = tarfile.TarInfo(_tarinfo_name(dir))
        dir_info.type = tarfile.DIRTYPE
        dir_info.mode = 0o755
        tar.addfile(dir_info)

        # Sort by name: reproducible & improves compression
        with os.scandir(dir) as it:
            entries = sorted(it, key=lambda entry: entry.name)

        new_dirs = []
        for entry in entries:
            if entry.is_dir(follow_symlinks=False):
                new_dirs.append(entry.path)
                continue

            file_info = tarfile.TarInfo(_tarinfo_name(entry.path))

            s = entry.stat(follow_symlinks=False)

            # Skip existing binary distribution files.
            id = stat_key(s)
            if id in files_to_skip:
                continue

            # Normalize the mode
            file_info.mode = 0o644 if s.st_mode & 0o100 == 0 else 0o755

            if entry.is_symlink():
                file_info.type = tarfile.SYMTYPE
                file_info.linkname = os.readlink(entry.path)
                tar.addfile(file_info)

            elif entry.is_file(follow_symlinks=False):
                # Deduplicate hardlinks
                if s.st_nlink > 1:
                    if id in hardlink_to_tarinfo_name:
                        file_info.type = tarfile.LNKTYPE
                        file_info.linkname = hardlink_to_tarinfo_name[id]
                        tar.addfile(file_info)
                        continue
                    hardlink_to_tarinfo_name[id] = file_info.name

                # If file not yet seen, copy it.
                file_info.type = tarfile.REGTYPE
                file_info.size = s.st_size

                with open(entry.path, "rb") as f:
                    tar.addfile(file_info, f)

        dir_stack.extend(reversed(new_dirs))  # we pop, so reverse to stay alphabetical
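
The loop above normalizes permissions git-style and stores repeated hardlinks as `LNKTYPE` members keyed on `(st_dev, st_ino)`. A small stdlib-only sketch of that same idea, detached from Spack (function name and inputs are made up):

```python
import os
import tarfile
from typing import Dict, Iterable, Tuple


def add_files_with_hardlink_dedup(tar: tarfile.TarFile, paths: Iterable[str]) -> None:
    """Add regular files to `tar`; repeated hardlinks become LNKTYPE members."""
    seen: Dict[Tuple[int, int], str] = {}
    for path in paths:
        st = os.lstat(path)
        info = tarfile.TarInfo(path.lstrip("/"))
        # git-style permission normalization: only the executable bit matters
        info.mode = 0o755 if st.st_mode & 0o100 else 0o644
        key = (st.st_dev, st.st_ino)
        if st.st_nlink > 1 and key in seen:
            # Second occurrence of the same inode: store a hardlink member.
            info.type = tarfile.LNKTYPE
            info.linkname = seen[key]
            tar.addfile(info)
            continue
        seen[key] = info.name
        info.type = tarfile.REGTYPE
        info.size = st.st_size
        with open(path, "rb") as f:
            tar.addfile(info, f)
```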


class ChecksumWriter(io.BufferedIOBase):
    """Checksum writer computes a checksum while writing to a file."""

    myfileobj = None

    def __init__(self, fileobj, algorithm=hashlib.sha256):
        self.fileobj = fileobj
        self.hasher = algorithm()
        self.length = 0

    def hexdigest(self):
        return self.hasher.hexdigest()

    def write(self, data):
        if isinstance(data, (bytes, bytearray)):
            length = len(data)
        else:
            data = memoryview(data)
            length = data.nbytes

        if length > 0:
            self.fileobj.write(data)
            self.hasher.update(data)

        self.length += length

        return length

    def read(self, size=-1):
        raise OSError(errno.EBADF, "read() on write-only object")

    def read1(self, size=-1):
        raise OSError(errno.EBADF, "read1() on write-only object")

    def peek(self, n):
        raise OSError(errno.EBADF, "peek() on write-only object")

    @property
    def closed(self):
        return self.fileobj is None

    def close(self):
        fileobj = self.fileobj
        if fileobj is None:
            return
        self.fileobj.close()
        self.fileobj = None

    def flush(self):
        self.fileobj.flush()

    def fileno(self):
        return self.fileobj.fileno()

    def rewind(self):
        raise OSError("Can't rewind while computing checksum")

    def readable(self):
        return False

    def writable(self):
        return True

    def seekable(self):
        return True

    def tell(self):
        return self.fileobj.tell()

    def seek(self, offset, whence=io.SEEK_SET):
        # In principle forward seek is possible with b"0" padding,
        # but this is not implemented.
        if offset == 0 and whence == io.SEEK_CUR:
            return
        raise OSError("Can't seek while computing checksum")

    def readline(self, size=-1):
        raise OSError(errno.EBADF, "readline() on write-only object")
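
A short usage sketch for the writer above, assuming the `ChecksumWriter` class shown here is in scope: the inner writer hashes the compressed bytes actually written to disk, while the outer one hashes the uncompressed stream (the output path and payload are placeholders):

```python
from contextlib import closing
from gzip import GzipFile

# Assumes ChecksumWriter (defined above) is available in this module.
with open("/tmp/archive.gz", "wb") as f, ChecksumWriter(f) as inner, closing(
    GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner)
) as gz, ChecksumWriter(gz) as outer:
    outer.write(b"payload bytes")  # hashed uncompressed, then compressed

print("sha256 of compressed file:   ", inner.hexdigest())
print("sha256 of uncompressed data: ", outer.hexdigest())
```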


def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
    with spack.util.archive.gzip_compressed_tarfile(tarfile_path) as (
        tar,
        inner_checksum,
        outer_checksum,
    ):
    with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum):
        # Tarball the install prefix
        tarfile_of_spec_prefix(tar, binaries_dir)

        # Serialize buildinfo for the tarball
        bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
        tarinfo = tarfile.TarInfo(
            name=spack.util.archive.default_path_to_name(buildinfo_file_name(binaries_dir))
        )
        tarinfo = tarfile.TarInfo(name=_tarinfo_name(buildinfo_file_name(binaries_dir)))
        tarinfo.type = tarfile.REGTYPE
        tarinfo.size = len(bstring)
        tarinfo.mode = 0o644
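
The buildinfo serialization above amounts to adding an in-memory blob to a tar archive. A generic stdlib-only sketch of that step (the helper name, member name and payload are placeholders):

```python
import io
import tarfile


def add_bytes_member(tar: tarfile.TarFile, name: str, payload: bytes) -> None:
    """Add `payload` to `tar` under `name` without touching the filesystem."""
    info = tarfile.TarInfo(name=name)
    info.type = tarfile.REGTYPE
    info.size = len(payload)
    info.mode = 0o644
    info.mtime = 0  # keep the archive reproducible
    tar.addfile(info, io.BytesIO(payload))


# e.g. add_bytes_member(tar, "opt/prefix/.spack/binary_distribution", b"metadata: ...")
```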
@@ -1458,14 +1601,14 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
    return spec_dict, layout_version


def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=None):
def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
    """
    Download binary tarball for given package into stage area, returning
    path to downloaded tarball if successful, None otherwise.

    Args:
        spec (spack.spec.Spec): Concrete spec
        unsigned: if ``True`` or ``False`` override the mirror signature verification defaults
        unsigned (bool): Whether or not to require signed binaries
        mirrors_for_spec (list): Optional list of concrete specs and mirrors
            obtained by calling binary_distribution.get_mirrors_for_spec().
            These will be checked in order first before looking in other
@@ -1486,9 +1629,7 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
        "signature_verified": "true-if-binary-pkg-was-already-verified"
    }
    """
    configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
        binary=True
    ).values()
    configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
    if not configured_mirrors:
        tty.die("Please add a spack mirror to allow download of pre-compiled packages.")

@@ -1506,16 +1647,8 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
    # mirror for the spec twice though.
    try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
    try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
    mirror_urls = try_first + try_next

    # TODO: turn `mirrors_for_spec` into a list of Mirror instances, instead of doing that here.
    def fetch_url_to_mirror(url):
        for mirror in configured_mirrors:
            if mirror.fetch_url == url:
                return mirror
        return spack.mirror.Mirror(url)

    mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
    mirrors = try_first + try_next

    tried_to_verify_sigs = []

@@ -1524,17 +1657,14 @@ def fetch_url_to_mirror(url):
    # we remove support for deprecated spec formats and buildcache layouts.
    for try_signed in (True, False):
        for mirror in mirrors:
            # Override mirror's default if
            currently_unsigned = unsigned if unsigned is not None else not mirror.signed

            # If it's an OCI index, do things differently, since we cannot compose URLs.
            fetch_url = mirror.fetch_url
            parsed = urllib.parse.urlparse(mirror)

            # TODO: refactor this to some "nice" place.
            if fetch_url.startswith("oci://"):
                ref = spack.oci.image.ImageReference.from_string(
                    fetch_url[len("oci://") :]
                ).with_tag(spack.oci.image.default_tag(spec))
            if parsed.scheme == "oci":
                ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag(
                    spack.oci.image.default_tag(spec)
                )

                # Fetch the manifest
                try:
@@ -1571,7 +1701,7 @@ def fetch_url_to_mirror(url):
                except InvalidMetadataFile as e:
                    tty.warn(
                        f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
                        f"from {fetch_url} due to invalid metadata file: {e}"
                        f"from {mirror} due to invalid metadata file: {e}"
                    )
                    local_specfile_stage.destroy()
                    continue
@@ -1593,16 +1723,13 @@ def fetch_url_to_mirror(url):
                    "tarball_stage": tarball_stage,
                    "specfile_stage": local_specfile_stage,
                    "signature_verified": False,
                    "signature_required": not currently_unsigned,
                }

            else:
                ext = "json.sig" if try_signed else "json"
                specfile_path = url_util.join(
                    fetch_url, BUILD_CACHE_RELATIVE_PATH, specfile_prefix
                )
                specfile_path = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, specfile_prefix)
                specfile_url = f"{specfile_path}.{ext}"
                spackfile_url = url_util.join(fetch_url, BUILD_CACHE_RELATIVE_PATH, tarball)
                spackfile_url = url_util.join(mirror, BUILD_CACHE_RELATIVE_PATH, tarball)
                local_specfile_stage = try_fetch(specfile_url)
                if local_specfile_stage:
                    local_specfile_path = local_specfile_stage.save_filename
@@ -1615,21 +1742,21 @@ def fetch_url_to_mirror(url):
                    except InvalidMetadataFile as e:
                        tty.warn(
                            f"Ignoring binary package for {spec.name}/{spec.dag_hash()[:7]} "
                            f"from {fetch_url} due to invalid metadata file: {e}"
                            f"from {mirror} due to invalid metadata file: {e}"
                        )
                        local_specfile_stage.destroy()
                        continue

                    if try_signed and not currently_unsigned:
                    if try_signed and not unsigned:
                        # If we found a signed specfile at the root, try to verify
                        # the signature immediately. We will not download the
                        # tarball if we could not verify the signature.
                        tried_to_verify_sigs.append(specfile_url)
                        signature_verified = try_verify(local_specfile_path)
                        if not signature_verified:
                            tty.warn(f"Failed to verify: {specfile_url}")
                            tty.warn("Failed to verify: {0}".format(specfile_url))

                    if currently_unsigned or signature_verified or not try_signed:
                    if unsigned or signature_verified or not try_signed:
                        # We will download the tarball in one of three cases:
                        # 1. user asked for --no-check-signature
                        # 2. user didn't ask for --no-check-signature, but we
@@ -1652,7 +1779,6 @@ def fetch_url_to_mirror(url):
                            "tarball_stage": tarball_stage,
                            "specfile_stage": local_specfile_stage,
                            "signature_verified": signature_verified,
                            "signature_required": not currently_unsigned,
                        }

                    local_specfile_stage.destroy()
@@ -1851,7 +1977,7 @@ def is_backup_file(file):
    relocate.relocate_text(text_names, prefix_to_prefix_text)


def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, ".spack")
    spackfile_path = os.path.join(stagepath, spackfile_name)
@@ -1871,7 +1997,7 @@ def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool,
    else:
        raise ValueError("Cannot find spec file for {0}.".format(extract_to))

    if signature_required:
    if not unsigned:
        if os.path.exists("%s.asc" % specfile_path):
            suppress = config.get("config:suppress_gpg_warnings", False)
            try:
@@ -1900,12 +2026,11 @@ def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool,


def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
    """Yield all members of tarfile that start with given prefix, and strip that prefix (including
    symlinks)"""
    """Strip the top-level directory `prefix` from the member names in a tarfile."""
    # Including trailing /, otherwise we end up with absolute paths.
    regex = re.compile(re.escape(prefix) + "/*")

    # Only yield members in the package prefix.
    # Remove the top-level directory from the member (link)names.
    # Note: when a tarfile is created, relative in-prefix symlinks are
    # expanded to matching member names of tarfile entries. So, we have
    # to ensure that those are updated too.
@@ -1913,17 +2038,15 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
    # them.
    for m in tar.getmembers():
        result = regex.match(m.name)
        if not result:
            continue
        assert result is not None
        m.name = m.name[result.end() :]
        if m.linkname:
            result = regex.match(m.linkname)
            if result:
                m.linkname = m.linkname[result.end() :]
        yield m
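
The generator above is meant to be passed to `TarFile.extractall(members=...)`. A self-contained sketch of the same strip-one-prefix idea (the archive path and prefix are placeholders):

```python
import re
import tarfile


def strip_prefix(tar: tarfile.TarFile, prefix: str):
    """Yield members under `prefix` with that prefix removed from (link)names."""
    pattern = re.compile(re.escape(prefix) + "/*")
    for m in tar.getmembers():
        match = pattern.match(m.name)
        if not match:
            continue  # skip members outside the prefix
        m.name = m.name[match.end():]
        if m.linkname:
            link = pattern.match(m.linkname)
            if link:
                m.linkname = m.linkname[link.end():]
        yield m


# with tarfile.open("/tmp/pkg.tar.gz") as tar:  # placeholder path
#     tar.extractall("/tmp/dest", members=strip_prefix(tar, "opt/spack/pkg-1.0"))
```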


def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
    """
    extract binary tarball for given package into install area
    """
@@ -1949,8 +2072,7 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
    bchecksum = spec_dict["binary_cache_checksum"]

    filename = download_result["tarball_stage"].save_filename
    signature_verified: bool = download_result["signature_verified"]
    signature_required: bool = download_result["signature_required"]
    signature_verified = download_result["signature_verified"]
    tmpdir = None

    if layout_version == 0:
@@ -1959,14 +2081,12 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
        # and another tarball containing the actual install tree.
        tmpdir = tempfile.mkdtemp()
        try:
            tarfile_path = _extract_inner_tarball(
                spec, filename, tmpdir, signature_required, bchecksum
            )
            tarfile_path = _extract_inner_tarball(spec, filename, tmpdir, unsigned, bchecksum)
        except Exception as e:
            _delete_staged_downloads(download_result)
            shutil.rmtree(tmpdir)
            raise e
    elif 1 <= layout_version <= 2:
    elif layout_version == 1:
        # Newer buildcache layout: the .spack file contains just
        # in the install tree, the signature, if it exists, is
        # wrapped around the spec.json at the root. If sig verify
@@ -1974,10 +2094,9 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
        # the tarball.
        tarfile_path = filename

        if signature_required and not signature_verified:
        if not unsigned and not signature_verified:
            raise UnsignedPackageException(
                "To install unsigned packages, use the --no-check-signature option, "
                "or configure the mirror with signed: false."
                "To install unsigned packages, use the --no-check-signature option."
            )

    # compute the sha256 checksum of the tarball
@@ -1994,10 +2113,8 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
    try:
        with closing(tarfile.open(tarfile_path, "r")) as tar:
            # Remove install prefix from tarfil to extract directly into spec.prefix
            tar.extractall(
                path=spec.prefix,
                members=_tar_strip_component(tar, prefix=_ensure_common_prefix(tar)),
            )
            _tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
            tar.extractall(path=spec.prefix)
    except Exception:
        shutil.rmtree(spec.prefix, ignore_errors=True)
        _delete_staged_downloads(download_result)
@@ -2032,47 +2149,20 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):


def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
    # Find the lowest `binary_distribution` file (hard-coded forward slash is on purpose).
    binary_distribution = min(
        (
            e.name
            for e in tar.getmembers()
            if e.isfile() and e.name.endswith(".spack/binary_distribution")
        ),
        key=len,
        default=None,
    )
    # Get the shortest length directory.
    common_prefix = min((e.name for e in tar.getmembers() if e.isdir()), key=len, default=None)

    if binary_distribution is None:
        raise ValueError("Tarball is not a Spack package, missing binary_distribution file")
    if common_prefix is None:
        raise ValueError("Tarball does not contain a common prefix")

    pkg_path = pathlib.PurePosixPath(binary_distribution).parent.parent

    # Even the most ancient Spack version has required to list the dir of the package itself, so
    # guard against broken tarballs where `path.parent.parent` is empty.
    if pkg_path == pathlib.PurePosixPath():
        raise ValueError("Invalid tarball, missing package prefix dir")

    pkg_prefix = str(pkg_path)

    # Ensure all tar entries are in the pkg_prefix dir, and if they're not, they should be parent
    # dirs of it.
    has_prefix = False
    # Validate that each file starts with the prefix
    for member in tar.getmembers():
        stripped = member.name.rstrip("/")
        if not (
            stripped.startswith(pkg_prefix) or member.isdir() and pkg_prefix.startswith(stripped)
        ):
            raise ValueError(f"Tarball contains file {stripped} outside of prefix {pkg_prefix}")
        if member.isdir() and stripped == pkg_prefix:
            has_prefix = True
        if not member.name.startswith(common_prefix):
            raise ValueError(
                f"Tarball contains file {member.name} outside of prefix {common_prefix}"
            )

    # This is technically not required, but let's be defensive about the existence of the package
    # prefix dir.
    if not has_prefix:
        raise ValueError(f"Tarball does not contain a common prefix {pkg_prefix}")

    return pkg_prefix
    return common_prefix
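
For reference, the newer variant above derives the package prefix from the `.spack/binary_distribution` marker, which sits directly inside the prefix. A tiny illustration of that path arithmetic with a made-up member name:

```python
import pathlib

# The `.spack/binary_distribution` member lives directly inside the package prefix,
# so the prefix is two levels up from it (hypothetical member name below).
member = "opt/spack/linux-ubuntu22.04-x86_64/gcc-12/zlib-1.3-abcdef/.spack/binary_distribution"
pkg_prefix = pathlib.PurePosixPath(member).parent.parent
assert str(pkg_prefix) == "opt/spack/linux-ubuntu22.04-x86_64/gcc-12/zlib-1.3-abcdef"
```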


def install_root_node(spec, unsigned=False, force=False, sha256=None):
@@ -2119,7 +2209,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
    # don't print long padded paths while extracting/relocating binaries
    with spack.util.path.filter_padding():
        tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
        extract_tarball(spec, download_result, force)
        extract_tarball(spec, download_result, unsigned, force)
        spack.hooks.post_install(spec, False)
        spack.store.STORE.db.add(spec, spack.store.STORE.layout)


@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -16,7 +16,6 @@
import llnl.util.filesystem as fs
from llnl.util import tty

import spack.platforms
import spack.store
import spack.util.environment
import spack.util.executable
@@ -207,19 +206,17 @@ def _root_spec(spec_str: str) -> str:
    """Add a proper compiler and target to a spec used during bootstrapping.

    Args:
        spec_str: spec to be bootstrapped. Must be without compiler and target.
        spec_str (str): spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a compiler requirement to the root spec.
    platform = str(spack.platforms.host())
    if platform == "darwin":
    # Add a proper compiler hint to the root spec. We use GCC for
    # everything but MacOS and Windows.
    if str(spack.platforms.host()) == "darwin":
        spec_str += " %apple-clang"
    elif platform == "windows":
    elif str(spack.platforms.host()) == "windows":
        # TODO (johnwparent): Remove version constraint when clingo patch is up
        spec_str += " %msvc@:19.37"
    elif platform == "linux":
    else:
        spec_str += " %gcc"
    elif platform == "freebsd":
        spec_str += " %clang"

    target = archspec.cpu.host().family
    spec_str += f" target={target}"
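
The branching above is essentially a platform-to-compiler-hint table. A minimal sketch of that mapping, detached from Spack's API and mirroring one side of the diff (the msvc version pin follows the TODO above):

```python
def compiler_hint(platform: str) -> str:
    """Return the compiler constraint appended to the bootstrap root spec."""
    hints = {
        "darwin": " %apple-clang",
        "windows": " %msvc@:19.37",  # constrained until the clingo patch lands
        "linux": " %gcc",
        "freebsd": " %clang",
    }
    return hints.get(platform, "")


# e.g. "clingo-bootstrap@spack" + compiler_hint("linux") -> "clingo-bootstrap@spack %gcc"
```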

@@ -1,4 +1,4 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -147,7 +147,7 @@ def _add_compilers_if_missing() -> None:
        mixed_toolchain=sys.platform == "darwin"
    )
    if new_compilers:
        spack.compilers.add_compilers_to_config(new_compilers, init_config=False)
        spack.compilers.add_compilers_to_config(new_compilers)


@contextlib.contextmanager
Some files were not shown because too many files have changed in this diff.