Compare commits
1 commit
package-me ... fix-linux-

| Author | SHA1 | Date |
|---|---|---|
|  | cc0fed4539 |  |

.github/workflows/audit.yaml (2 changes, vendored)
@@ -22,7 +22,7 @@ jobs:
 matrix:
 operating_system: ["ubuntu-latest", "macos-latest"]
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
 with:
 python-version: ${{inputs.python_version}}

.github/workflows/bootstrap.yml (36 changes, vendored)
@@ -24,7 +24,7 @@ jobs:
 make patch unzip which xz python3 python3-devel tree \
 cmake bison bison-devel libstdc++-static
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -42,8 +42,8 @@ jobs:
 shell: runuser -u spack-test -- bash {0}
 run: |
 source share/spack/setup-env.sh
-spack bootstrap disable github-actions-v0.5
 spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack external find cmake bison
 spack -d solve zlib
 tree ~/.spack/bootstrap/store/
@@ -62,7 +62,7 @@ jobs:
 make patch unzip xz-utils python3 python3-dev tree \
 cmake bison
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -80,8 +80,8 @@ jobs:
 shell: runuser -u spack-test -- bash {0}
 run: |
 source share/spack/setup-env.sh
-spack bootstrap disable github-actions-v0.5
 spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack external find cmake bison
 spack -d solve zlib
 tree ~/.spack/bootstrap/store/
@@ -99,7 +99,7 @@ jobs:
 bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
 make patch unzip xz-utils python3 python3-dev tree
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
 make patch unzip which xz python3 python3-devel tree \
 cmake bison
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - name: Setup repo
@@ -145,8 +145,8 @@ jobs:
 - name: Bootstrap clingo
 run: |
 source share/spack/setup-env.sh
-spack bootstrap disable github-actions-v0.5
 spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack external find cmake bison
 spack -d solve zlib
 tree ~/.spack/bootstrap/store/
@@ -158,13 +158,13 @@ jobs:
 run: |
 brew install cmake bison@2.7 tree
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 - name: Bootstrap clingo
 run: |
 source share/spack/setup-env.sh
 export PATH=/usr/local/opt/bison@2.7/bin:$PATH
-spack bootstrap disable github-actions-v0.5
 spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack external find --not-buildable cmake bison
 spack -d solve zlib
 tree ~/.spack/bootstrap/store/
@@ -179,7 +179,7 @@ jobs:
 run: |
 brew install tree
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 - name: Bootstrap clingo
 run: |
 set -ex
@@ -204,7 +204,7 @@ jobs:
 runs-on: ubuntu-20.04
 steps:
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
 bzip2 curl file g++ gcc patchelf gfortran git gzip \
 make patch unzip xz-utils python3 python3-dev tree
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -265,7 +265,6 @@ jobs:
 shell: runuser -u spack-test -- bash {0}
 run: |
 source share/spack/setup-env.sh
-spack bootstrap disable github-actions-v0.4
 spack bootstrap disable spack-install
 spack -d gpg list
 tree ~/.spack/bootstrap/store/
@@ -284,7 +283,7 @@ jobs:
 make patch unzip xz-utils python3 python3-dev tree \
 gawk
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - name: Setup non-root user
@@ -303,8 +302,8 @@ jobs:
 run: |
 source share/spack/setup-env.sh
 spack solve zlib
-spack bootstrap disable github-actions-v0.5
 spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack -d gpg list
 tree ~/.spack/bootstrap/store/
@@ -317,11 +316,10 @@ jobs:
 # Remove GnuPG since we want to bootstrap it
 sudo rm -rf /usr/local/bin/gpg
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 - name: Bootstrap GnuPG
 run: |
 source share/spack/setup-env.sh
-spack bootstrap disable github-actions-v0.4
 spack bootstrap disable spack-install
 spack -d gpg list
 tree ~/.spack/bootstrap/store/
@@ -335,13 +333,13 @@ jobs:
 # Remove GnuPG since we want to bootstrap it
 sudo rm -rf /usr/local/bin/gpg
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 - name: Bootstrap GnuPG
 run: |
 source share/spack/setup-env.sh
 spack solve zlib
-spack bootstrap disable github-actions-v0.5
 spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack -d gpg list
 tree ~/.spack/bootstrap/store/

.github/workflows/build-containers.yml (2 changes, vendored)
@@ -56,7 +56,7 @@ jobs:
 if: github.repository == 'spack/spack'
 steps:
 - name: Checkout
-uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2

 - name: Set Container Tag Normal (Nightly)
 run: |

.github/workflows/ci.yaml (2 changes, vendored)
@@ -35,7 +35,7 @@ jobs:
 core: ${{ steps.filter.outputs.core }}
 packages: ${{ steps.filter.outputs.packages }}
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
 if: ${{ github.event_name == 'push' }}
 with:
 fetch-depth: 0

.github/workflows/nightly-win-builds.yml (2 changes, vendored)
@@ -14,7 +14,7 @@ jobs:
 build-paraview-deps:
 runs-on: windows-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236

.github/workflows/style/requirements.txt (2 changes, vendored)
@@ -2,6 +2,6 @@ black==23.9.1
 clingo==5.6.2
 flake8==6.1.0
 isort==5.12.0
-mypy==1.6.1
+mypy==1.5.1
 types-six==1.16.21.9
 vermin==1.5.2

.github/workflows/unit_tests.yaml (18 changes, vendored)
@@ -15,7 +15,7 @@ jobs:
 strategy:
 matrix:
 os: [ubuntu-latest]
-python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
+python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
 concretizer: ['clingo']
 on_develop:
 - ${{ github.ref == 'refs/heads/develop' }}
@@ -45,13 +45,9 @@ jobs:
 os: ubuntu-latest
 concretizer: 'clingo'
 on_develop: false
-- python-version: '3.11'
-os: ubuntu-latest
-concretizer: 'clingo'
-on_develop: false

 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
@@ -98,7 +94,7 @@ jobs:
 shell:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
@@ -137,7 +133,7 @@ jobs:
 dnf install -y \
 bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
 make patch tcl unzip which xz
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
 - name: Setup repo and non-root user
 run: |
 git --version
@@ -156,7 +152,7 @@ jobs:
 clingo-cffi:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
@@ -189,9 +185,9 @@ jobs:
 runs-on: macos-latest
 strategy:
 matrix:
-python-version: ["3.11"]
+python-version: ["3.10"]
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2

.github/workflows/valid-style.yml (6 changes, vendored)
@@ -18,7 +18,7 @@ jobs:
 validate:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
 with:
 python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
 style:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
@@ -69,7 +69,7 @@ jobs:
 dnf install -y \
 bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
 make patch tcl unzip which xz
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
 - name: Setup repo and non-root user
 run: |
 git --version

.github/workflows/windows_python.yml (6 changes, vendored)
@@ -15,7 +15,7 @@ jobs:
 unit-tests:
 runs-on: windows-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
@@ -39,7 +39,7 @@ jobs:
 unit-tests-cmd:
 runs-on: windows-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
@@ -63,7 +63,7 @@ jobs:
 build-abseil:
 runs-on: windows-latest
 steps:
-- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
+- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
 with:
 fetch-depth: 0
 - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
@@ -7,7 +7,6 @@
 [![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io)
 [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
 [![Slack](https://slack.spack.io/badge.svg)](https://slack.spack.io)
-[![Matrix](https://img.shields.io/matrix/spack-space%3Amatrix.org?label=matrix)](https://matrix.to/#/#spack-space:matrix.org)

 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
@@ -63,10 +62,7 @@ Resources:

 * **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com).
   To get an invitation, visit [slack.spack.io](https://slack.spack.io).
-* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
-  [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
-* [**Github Discussions**](https://github.com/spack/spack/discussions):
-  not just for discussions, also Q&A.
+* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A.
 * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
 * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
   `@mention` us!
@@ -9,15 +9,15 @@ bootstrap:
 # may not be able to bootstrap all the software that Spack needs,
 # depending on its type.
 sources:
-- name: 'github-actions-v0.5'
-  metadata: $spack/share/spack/bootstrap/github-actions-v0.5
 - name: 'github-actions-v0.4'
   metadata: $spack/share/spack/bootstrap/github-actions-v0.4
+- name: 'github-actions-v0.3'
+  metadata: $spack/share/spack/bootstrap/github-actions-v0.3
 - name: 'spack-install'
   metadata: $spack/share/spack/bootstrap/spack-install
 trusted:
 # By default we trust bootstrapping from sources and from binaries
 # produced on Github via the workflow
-github-actions-v0.5: true
 github-actions-v0.4: true
+github-actions-v0.3: true
 spack-install: true
@@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add:

 .. code-block:: python

-   depends_on("autoconf", type="build", when="@master")
-   depends_on("automake", type="build", when="@master")
-   depends_on("libtool", type="build", when="@master")
+   depends_on('autoconf', type='build', when='@master')
+   depends_on('automake', type='build', when='@master')
+   depends_on('libtool', type='build', when='@master')

 It is typically redundant to list the ``m4`` macro processor package as a
 dependency, since ``autoconf`` already depends on it.
@@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.

 .. code-block:: python

    def autoreconf(self, spec, prefix):
-       which("bash")("autogen.sh")
+       which('bash')('autogen.sh')

 """""""""""""""""""""""""""""""""""""""
 patching configure or Makefile.in files
@@ -186,9 +186,9 @@ To opt out of this feature, use the following setting:

 To enable it conditionally on different architectures, define a property and
 make the package depend on ``gnuconfig`` as a build dependency:

-.. code-block:: python
+.. code-block

-   depends_on("gnuconfig", when="@1.0:")
+   depends_on('gnuconfig', when='@1.0:')

    @property
    def patch_config_files(self):
@@ -230,7 +230,7 @@ version, this can be done like so:

    @property
    def force_autoreconf(self):
-       return self.version == Version("1.2.3")
+       return self.version == Version('1.2.3')

 ^^^^^^^^^^^^^^^^^^^^^^^
 Finding configure flags
@@ -278,22 +278,13 @@ function like so:

    def configure_args(self):
        args = []

-       if self.spec.satisfies("+mpi"):
-           args.append("--enable-mpi")
+       if '+mpi' in self.spec:
+           args.append('--enable-mpi')
        else:
-           args.append("--disable-mpi")
+           args.append('--disable-mpi')

        return args

-Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_disable>` helper:
-
-.. code-block:: python
-
-   def configure_args(self):
-       return [self.enable_or_disable("mpi")]
-
 Note that we are explicitly disabling MPI support if it is not
 requested. This is important, as many Autotools packages will enable
 options by default if the dependencies are found, and disable them
@@ -304,11 +295,9 @@ and `here <https://wiki.gentoo.org/wiki/Project:Quality_Assurance/Automagic_depe
 for a rationale as to why these so-called "automagic" dependencies
 are a problem.

-.. note::
-
-   By default, Autotools installs packages to ``/usr``. We don't want this,
-   so Spack automatically adds ``--prefix=/path/to/installation/prefix``
-   to your list of ``configure_args``. You don't need to add this yourself.
+By default, Autotools installs packages to ``/usr``. We don't want this,
+so Spack automatically adds ``--prefix=/path/to/installation/prefix``
+to your list of ``configure_args``. You don't need to add this yourself.

@@ -319,8 +308,6 @@ You may have noticed that most of the Autotools flags are of the form
 ``--without-baz``. Since these flags are so common, Spack provides a
 couple of helper functions to make your life easier.

-.. _autotools_enable_or_disable:
-
 """""""""""""""""
 enable_or_disable
 """""""""""""""""
@@ -332,11 +319,11 @@ typically used to enable or disable some feature within the package.

 .. code-block:: python

    variant(
-       "memchecker",
+       'memchecker',
        default=False,
-       description="Memchecker support for debugging [degrades performance]"
+       description='Memchecker support for debugging [degrades performance]'
    )
-   config_args.extend(self.enable_or_disable("memchecker"))
+   config_args.extend(self.enable_or_disable('memchecker'))

 In this example, specifying the variant ``+memchecker`` will generate
 the following configuration options:
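
The concrete flags fall outside this hunk's context. As a minimal sketch of the documented ``enable_or_disable`` behavior (assuming the default ``--enable``/``--disable`` prefixes), the call above would expand as:

.. code-block:: python

   # hypothetical illustration, not a line from the file under diff
   self.enable_or_disable("memchecker")
   # with +memchecker  -> ["--enable-memchecker"]
   # with ~memchecker  -> ["--disable-memchecker"]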
@@ -356,15 +343,15 @@ the ``with_or_without`` method.

 .. code-block:: python

    variant(
-       "schedulers",
+       'schedulers',
        values=disjoint_sets(
-           ("auto",), ("alps", "lsf", "tm", "slurm", "sge", "loadleveler")
-       ).with_non_feature_values("auto", "none"),
+           ('auto',), ('alps', 'lsf', 'tm', 'slurm', 'sge', 'loadleveler')
+       ).with_non_feature_values('auto', 'none'),
        description="List of schedulers for which support is enabled; "
        "'auto' lets openmpi determine",
    )
-   if not spec.satisfies("schedulers=auto"):
-       config_args.extend(self.with_or_without("schedulers"))
+   if 'schedulers=auto' not in spec:
+       config_args.extend(self.with_or_without('schedulers'))

 In this example, specifying the variant ``schedulers=slurm,sge`` will
 generate the following configuration options:
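
The generated options also sit outside the hunk's context; as a sketch (assuming ``with_or_without`` emits ``--with-<value>`` for active values and ``--without-<value>`` for the inactive feature values):

.. code-block:: python

   # hypothetical illustration for schedulers=slurm,sge
   self.with_or_without("schedulers")
   # -> ["--with-slurm", "--with-sge",
   #     "--without-alps", "--without-lsf", "--without-tm", "--without-loadleveler"]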
@@ -389,16 +376,16 @@ generated, using the ``activation_value`` argument to

 .. code-block:: python

    variant(
-       "fabrics",
+       'fabrics',
        values=disjoint_sets(
-           ("auto",), ("psm", "psm2", "verbs", "mxm", "ucx", "libfabric")
-       ).with_non_feature_values("auto", "none"),
+           ('auto',), ('psm', 'psm2', 'verbs', 'mxm', 'ucx', 'libfabric')
+       ).with_non_feature_values('auto', 'none'),
        description="List of fabrics that are enabled; "
        "'auto' lets openmpi determine",
    )
-   if not spec.satisfies("fabrics=auto"):
-       config_args.extend(self.with_or_without("fabrics",
-                                               activation_value="prefix"))
+   if 'fabrics=auto' not in spec:
+       config_args.extend(self.with_or_without('fabrics',
+                                               activation_value='prefix'))

 ``activation_value`` accepts a callable that generates the configure
 parameter value given the variant value; but the special value
@@ -422,16 +409,16 @@ When Spack variants and configure flags do not correspond one-to-one, the

 .. code-block:: python

-   variant("debug_tools", default=False)
-   config_args += self.enable_or_disable("debug-tools", variant="debug_tools")
+   variant('debug_tools', default=False)
+   config_args += self.enable_or_disable('debug-tools', variant='debug_tools')

 Or when one variant controls multiple flags:

 .. code-block:: python

-   variant("debug_tools", default=False)
-   config_args += self.with_or_without("memchecker", variant="debug_tools")
-   config_args += self.with_or_without("profiler", variant="debug_tools")
+   variant('debug_tools', default=False)
+   config_args += self.with_or_without('memchecker', variant='debug_tools')
+   config_args += self.with_or_without('profiler', variant='debug_tools')

 """"""""""""""""""""
@@ -445,8 +432,8 @@ For example:

 .. code-block:: python

-   variant("profiler", when="@2.0:")
-   config_args += self.with_or_without("profiler")
+   variant('profiler', when='@2.0:')
+   config_args += self.with_or_without('profiler')

 will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
 below ``2.0``.
@@ -465,10 +452,10 @@ the variant values require atypical behavior.

    def with_or_without_verbs(self, activated):
        # Up through version 1.6, this option was named --with-openib.
        # In version 1.7, it was renamed to be --with-verbs.
-       opt = "verbs" if self.spec.satisfies("@1.7:") else "openib"
+       opt = 'verbs' if self.spec.satisfies('@1.7:') else 'openib'
        if not activated:
-           return f"--without-{opt}"
-       return f"--with-{opt}={self.spec['rdma-core'].prefix}"
+           return '--without-{0}'.format(opt)
+       return '--with-{0}={1}'.format(opt, self.spec['rdma-core'].prefix)

 Defining ``with_or_without_verbs`` overrides the behavior of a
 ``fabrics=verbs`` variant, changing the configure-time option to
@@ -492,7 +479,7 @@ do this like so:

 .. code-block:: python

-   configure_directory = "src"
+   configure_directory = 'src'

 ^^^^^^^^^^^^^^^^^^^^^^
 Building out of source
@@ -504,7 +491,7 @@ This can be done using the ``build_directory`` variable:

 .. code-block:: python

-   build_directory = "spack-build"
+   build_directory = 'spack-build'

 By default, Spack will build the package in the same directory that
 contains the ``configure`` script
@@ -527,8 +514,8 @@ library or build the documentation, you can add these like so:

 .. code-block:: python

-   build_targets = ["all", "docs"]
-   install_targets = ["install", "docs"]
+   build_targets = ['all', 'docs']
+   install_targets = ['install', 'docs']

 ^^^^^^^
 Testing
@@ -87,7 +87,7 @@ A typical usage of these methods may look something like this:

 .. code-block:: python

-   def initconfig_mpi_entries(self):
+   def initconfig_mpi_entries(self)
        # Get existing MPI configurations
        entries = super(self, Foo).initconfig_mpi_entries()

@@ -95,25 +95,25 @@ A typical usage of these methods may look something like this:
        # This spec has an MPI variant, and we need to enable MPI when it is on.
        # This hypothetical package controls MPI with the ``FOO_MPI`` option to
        # cmake.
-       if self.spec.satisfies("+mpi"):
-           entries.append(cmake_cache_option("FOO_MPI", True, "enable mpi"))
+       if '+mpi' in self.spec:
+           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
        else:
-           entries.append(cmake_cache_option("FOO_MPI", False, "disable mpi"))
+           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))

    def initconfig_package_entries(self):
        # Package specific options
        entries = []

-       entries.append("#Entries for build options")
+       entries.append('#Entries for build options')

-       bar_on = self.spec.satisfies("+bar")
-       entries.append(cmake_cache_option("FOO_BAR", bar_on, "toggle bar"))
+       bar_on = '+bar' in self.spec
+       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

-       entries.append("#Entries for dependencies")
+       entries.append('#Entries for dependencies')

-       if self.spec["blas"].name == "baz":  # baz is our blas provider
-           entries.append(cmake_cache_string("FOO_BLAS", "baz", "Use baz"))
-           entries.append(cmake_cache_path("BAZ_PREFIX", self.spec["baz"].prefix))
+       if self.spec['blas'].name == 'baz':  # baz is our blas provider
+           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
+           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))

 ^^^^^^^^^^^^^^^^^^^^^^
 External documentation
@@ -54,8 +54,8 @@ to terminate such build attempts with a suitable message:

 .. code-block:: python

-   conflicts("cuda_arch=none", when="+cuda",
-             msg="CUDA architecture is required")
+   conflicts('cuda_arch=none', when='+cuda',
+             msg='CUDA architecture is required')

 Similarly, if your software does not support all versions of the property,
 you could add ``conflicts`` to your package for those versions. For example,
@@ -66,13 +66,13 @@ custom message should a user attempt such a build:

 .. code-block:: python

    unsupported_cuda_archs = [
-       "10", "11", "12", "13",
-       "20", "21",
-       "30", "32", "35", "37"
+       '10', '11', '12', '13',
+       '20', '21',
+       '30', '32', '35', '37'
    ]
    for value in unsupported_cuda_archs:
-       conflicts(f"cuda_arch={value}", when="+cuda",
-                 msg=f"CUDA architecture {value} is not supported")
+       conflicts('cuda_arch={0}'.format(value), when='+cuda',
+                 msg='CUDA architecture {0} is not supported'.format(value))

 ^^^^^^^
 Methods
@@ -107,16 +107,16 @@ class of your package. For example, you can add it to your
        spec = self.spec
        args = []
        ...
-       if spec.satisfies("+cuda"):
+       if '+cuda' in spec:
            # Set up the cuda macros needed by the build
-           args.append("-DWITH_CUDA=ON")
-           cuda_arch_list = spec.variants["cuda_arch"].value
+           args.append('-DWITH_CUDA=ON')
+           cuda_arch_list = spec.variants['cuda_arch'].value
            cuda_arch = cuda_arch_list[0]
-           if cuda_arch != "none":
-               args.append(f"-DCUDA_FLAGS=-arch=sm_{cuda_arch}")
+           if cuda_arch != 'none':
+               args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch))
        else:
            # Ensure build with cuda is disabled
-           args.append("-DWITH_CUDA=OFF")
+           args.append('-DWITH_CUDA=OFF')
        ...
        return args

@@ -125,7 +125,7 @@ You will need to customize options as needed for your build.

 This example also illustrates how to check for the ``cuda`` variant using
 ``self.spec`` and how to retrieve the ``cuda_arch`` variant's value, which
-is a list, using ``self.spec.variants["cuda_arch"].value``.
+is a list, using ``self.spec.variants['cuda_arch'].value``.

 With over 70 packages using ``CudaPackage`` as of January 2021 there are
 lots of examples to choose from to get more ideas for using this package.

@@ -57,13 +57,13 @@ If you look at the ``perl`` package, you'll see:

 .. code-block:: python

-   phases = ["configure", "build", "install"]
+   phases = ['configure', 'build', 'install']

 Similarly, ``cmake`` defines:

 .. code-block:: python

-   phases = ["bootstrap", "build", "install"]
+   phases = ['bootstrap', 'build', 'install']

 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
@@ -78,7 +78,7 @@ If we look at ``perl``, we see that it defines a ``configure`` method:

 .. code-block:: python

    def configure(self, spec, prefix):
-       configure = Executable("./Configure")
+       configure = Executable('./Configure')
        configure(*self.configure_args())

 There is also a corresponding ``configure_args`` function that handles
@@ -92,7 +92,7 @@ phases are pretty simple:
        make()

    def install(self, spec, prefix):
-       make("install")
+       make('install')

 The ``cmake`` package looks very similar, but with a ``bootstrap``
 function instead of ``configure``:
@@ -100,14 +100,14 @@ function instead of ``configure``:

 .. code-block:: python

    def bootstrap(self, spec, prefix):
-       bootstrap = Executable("./bootstrap")
+       bootstrap = Executable('./bootstrap')
        bootstrap(*self.bootstrap_args())

    def build(self, spec, prefix):
        make()

    def install(self, spec, prefix):
-       make("install")
+       make('install')

 Again, there is a ``bootstrap_args`` function that determines the
 correct bootstrap flags to use.
@@ -128,16 +128,16 @@ before or after a particular phase. For example, in ``perl``, we see:

 .. code-block:: python

-   @run_after("install")
+   @run_after('install')
    def install_cpanm(self):
        spec = self.spec

-       if spec.satisfies("+cpanm"):
-           with working_dir(join_path("cpanm", "cpanm")):
-               perl = spec["perl"].command
-               perl("Makefile.PL")
+       if '+cpanm' in spec:
+           with working_dir(join_path('cpanm', 'cpanm')):
+               perl = spec['perl'].command
+               perl('Makefile.PL')
                make()
-               make("install")
+               make('install')

 This extra step automatically installs ``cpanm`` in addition to the
 base Perl installation.
@@ -174,10 +174,10 @@ In the ``perl`` package, we can see:

 .. code-block:: python

-   @run_after("build")
+   @run_after('build')
    @on_package_attributes(run_tests=True)
    def test(self):
-       make("test")
+       make('test')

 As you can guess, this runs ``make test`` *after* building the package,
 if and only if testing is requested. Again, this is not specific to
@@ -189,7 +189,7 @@ custom build systems, it can be added to existing build systems as well.

 .. code-block:: python

-   @run_after("install")
+   @run_after('install')
    @on_package_attributes(run_tests=True)

 works as expected. However, if you reverse the ordering:
@@ -197,7 +197,7 @@ custom build systems, it can be added to existing build systems as well.

 .. code-block:: python

    @on_package_attributes(run_tests=True)
-   @run_after("install")
+   @run_after('install')

 the tests will always be run regardless of whether or not
 ``--test=root`` is requested. See https://github.com/spack/spack/issues/3833

@@ -59,7 +59,7 @@ using GNU Make, you should add a dependency on ``gmake``:

 .. code-block:: python

-   depends_on("gmake", type="build")
+   depends_on('gmake', type='build')

 ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -93,8 +93,8 @@ there are any other variables you need to set, you can do this in the

 .. code-block:: python

    def edit(self, spec, prefix):
-       env["PREFIX"] = prefix
-       env["BLASLIB"] = spec["blas"].libs.ld_flags
+       env['PREFIX'] = prefix
+       env['BLASLIB'] = spec['blas'].libs.ld_flags

 `cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
@@ -113,7 +113,7 @@ you can do this like so:

 .. code-block:: python

-   build_targets = ["CC=cc"]
+   build_targets = ['CC=cc']

 If you do need access to the spec, you can create a property like so:
@@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so:
        spec = self.spec

        return [
-           "CC=cc",
-           f"BLASLIB={spec['blas'].libs.ld_flags}",
+           'CC=cc',
+           'BLASLIB={0}'.format(spec['blas'].libs.ld_flags),
        ]

@@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example:

 .. code-block:: python

    def edit(self, spec, prefix):
-       makefile = FileFilter("Makefile")
+       makefile = FileFilter('Makefile')

-       makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}")
-       makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}")
-       makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}")
-       makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
+       makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc)
+       makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx)
+       makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77)
+       makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc)

 `stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
@@ -181,16 +181,16 @@ well for storing variables:

    def edit(self, spec, prefix):
        config = {
-           "CC": "cc",
-           "MAKE": "make",
+           'CC': 'cc',
+           'MAKE': 'make',
        }

-       if spec.satisfies("+blas"):
-           config["BLAS_LIBS"] = spec["blas"].libs.joined()
+       if '+blas' in spec:
+           config['BLAS_LIBS'] = spec['blas'].libs.joined()

-       with open("make.inc", "w") as inc:
+       with open('make.inc', 'w') as inc:
            for key in config:
-               inc.write(f"{key} = {config[key]}\n")
+               inc.write('{0} = {1}\n'.format(key, config[key]))

 `elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
@@ -204,14 +204,14 @@ them in a list:

    def edit(self, spec, prefix):
        config = [
-           f"INSTALL_DIR = {prefix}",
-           "INCLUDE_DIR = $(INSTALL_DIR)/include",
-           "LIBRARY_DIR = $(INSTALL_DIR)/lib",
+           'INSTALL_DIR = {0}'.format(prefix),
+           'INCLUDE_DIR = $(INSTALL_DIR)/include',
+           'LIBRARY_DIR = $(INSTALL_DIR)/lib',
        ]

-       with open("make.inc", "w") as inc:
+       with open('make.inc', 'w') as inc:
            for var in config:
-               inc.write(f"{var}\n")
+               inc.write('{0}\n'.format(var))

 `hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
@@ -284,7 +284,7 @@ can tell Spack where to locate it like so:

 .. code-block:: python

-   build_directory = "src"
+   build_directory = 'src'

 ^^^^^^^^^^^^^^^^^^^
@@ -299,8 +299,8 @@ install the package:

    def install(self, spec, prefix):
        mkdir(prefix.bin)
-       install("foo", prefix.bin)
-       install_tree("lib", prefix.lib)
+       install('foo', prefix.bin)
+       install_tree('lib', prefix.lib)

 ^^^^^^^^^^^^^^^^^^^^^^
@@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``,

 .. code-block:: python

-   homepage = "https://pypi.org/project/setuptools/"
-   url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip"
-   list_url = "https://pypi.org/simple/setuptools/"
+   homepage = 'https://pypi.org/project/setuptools/'
+   url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip'
+   list_url = 'https://pypi.org/simple/setuptools/'

 is equivalent to:

 .. code-block:: python

-   pypi = "setuptools/setuptools-49.2.0.zip"
+   pypi = 'setuptools/setuptools-49.2.0.zip'

 If a package has a different homepage listed on PyPI, you can
@@ -208,7 +208,7 @@ dependencies to your package:

 .. code-block:: python

-   depends_on("py-setuptools@42:", type="build")
+   depends_on('py-setuptools@42:', type='build')

 Note that ``py-wheel`` is already listed as a build dependency in the
@@ -232,7 +232,7 @@ Look for dependencies under the following keys:

 * ``dependencies`` under ``[project]``

   These packages are required for building and installation. You can
-  add them with ``type=("build", "run")``.
+  add them with ``type=('build', 'run')``.

 * ``[project.optional-dependencies]``
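
As a minimal sketch of how entries under those keys map onto a recipe (``py-requests`` is a hypothetical placeholder, not taken from the hunk above):

.. code-block:: python

   # a [project] dependencies entry: needed to build and to run
   depends_on("py-requests", type=("build", "run"))
   # a [build-system] requires entry: needed only to build
   depends_on("py-setuptools", type="build")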
@@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. In addition to
 * ``setup_requires``

   These packages are usually only needed at build-time, so you can
-  add them with ``type="build"``.
+  add them with ``type='build'``.

 * ``install_requires``

   These packages are required for building and installation. You can
-  add them with ``type=("build", "run")``.
+  add them with ``type=('build', 'run')``.

 * ``extras_require``

@@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to

   These are packages that are required to run the unit tests for the
   package. These dependencies can be specified using the
-  ``type="test"`` dependency type. However, the PyPI tarballs rarely
+  ``type='test'`` dependency type. However, the PyPI tarballs rarely
   contain unit tests, so there is usually no reason to add these.

 See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html
@@ -321,7 +321,7 @@ older versions of flit may use the following keys:

 * ``requires`` under ``[tool.flit.metadata]``

   These packages are required for building and installation. You can
-  add them with ``type=("build", "run")``.
+  add them with ``type=('build', 'run')``.

 * ``[tool.flit.metadata.requires-extra]``

@@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for:

 .. code-block:: python

-   depends_on("py-pip@22.1:", type="build")
+   depends_on('py-pip@22.1:', type='build')

    def config_settings(self, spec, prefix):
        return {
-           "blas": spec["blas"].libs.names[0],
-           "lapack": spec["lapack"].libs.names[0],
+           'blas': spec['blas'].libs.names[0],
+           'lapack': spec['lapack'].libs.names[0],
        }

@@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so:

    def global_options(self, spec, prefix):
        options = []
-       if spec.satisfies("+libyaml"):
-           options.append("--with-libyaml")
+       if '+libyaml' in spec:
+           options.append('--with-libyaml')
        else:
-           options.append("--without-libyaml")
+           options.append('--without-libyaml')
        return options

@@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``:

    def install_options(self, spec, prefix):
        options = []
-       if spec.satisfies("+libyaml"):
+       if '+libyaml' in spec:
            options.extend([
-               spec["libyaml"].libs.search_flags,
-               spec["libyaml"].headers.include_flags,
+               spec['libyaml'].libs.search_flags,
+               spec['libyaml'].headers.include_flags,
            ])
        return options

@@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding

 .. code-block:: python

-   import_modules = ["six"]
+   import_modules = ['six']

 Sometimes the list of module names to import depends on how the
@@ -571,9 +571,9 @@ This can be expressed like so:

    @property
    def import_modules(self):
-       modules = ["yaml"]
-       if self.spec.satisfies("+libyaml"):
-           modules.append("yaml.cyaml")
+       modules = ['yaml']
+       if '+libyaml' in self.spec:
+           modules.append('yaml.cyaml')
        return modules

@@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset
 of module names to be skipped can be defined by using ``skip_modules``.
 If a defined module has submodules, they are skipped as well, e.g.,
 in case the ``plotting`` modules should be excluded from the
-automatically detected ``import_modules`` ``["nilearn", "nilearn.surface",
-"nilearn.plotting", "nilearn.plotting.data"]`` set:
+automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
+'nilearn.plotting', 'nilearn.plotting.data']`` set:

 .. code-block:: python

-   skip_modules = ["nilearn.plotting"]
+   skip_modules = ['nilearn.plotting']

-This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]``
+This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``

 Import tests can be run during the installation using ``spack install
 --test=root`` or at any time after the installation using
@@ -612,11 +612,11 @@ after the ``install`` phase:

 .. code-block:: python

-   @run_after("install")
+   @run_after('install')
    @on_package_attributes(run_tests=True)
    def install_test(self):
-       with working_dir("spack-test", create=True):
-           python("-c", "import numpy; numpy.test('full', verbose=2)")
+       with working_dir('spack-test', create=True):
+           python('-c', 'import numpy; numpy.test("full", verbose=2)')

 when testing is enabled during the installation (i.e., ``spack install
@@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use:

 .. code-block:: python

-   build_directory = "python"
+   build_directory = 'python'

 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -81,27 +81,28 @@ class of your package. For example, you can add it to your
    class MyRocmPackage(CMakePackage, ROCmPackage):
        ...
        # Ensure +rocm and amdgpu_targets are passed to dependencies
-       depends_on("mydeppackage", when="+rocm")
+       depends_on('mydeppackage', when='+rocm')
        for val in ROCmPackage.amdgpu_targets:
-           depends_on(f"mydeppackage amdgpu_target={val}",
-                      when=f"amdgpu_target={val}")
+           depends_on('mydeppackage amdgpu_target={0}'.format(val),
+                      when='amdgpu_target={0}'.format(val))
        ...

        def cmake_args(self):
            spec = self.spec
            args = []
            ...
-           if spec.satisfies("+rocm"):
+           if '+rocm' in spec:
                # Set up the hip macros needed by the build
                args.extend([
-                   "-DENABLE_HIP=ON",
-                   f"-DHIP_ROOT_DIR={spec['hip'].prefix}"])
-               rocm_archs = spec.variants["amdgpu_target"].value
-               if "none" not in rocm_archs:
-                   args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs)}")
+                   '-DENABLE_HIP=ON',
+                   '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
+               rocm_archs = spec.variants['amdgpu_target'].value
+               if 'none' not in rocm_archs:
+                   args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
+                               .format(",".join(rocm_archs)))
            else:
                # Ensure build with hip is disabled
-               args.append("-DENABLE_HIP=OFF")
+               args.append('-DENABLE_HIP=OFF')
            ...
            return args
        ...

@@ -113,7 +114,7 @@ build.

 This example also illustrates how to check for the ``rocm`` variant using
 ``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value
-using ``self.spec.variants["amdgpu_target"].value``.
+using ``self.spec.variants['amdgpu_target'].value``.

 All five packages using ``ROCmPackage`` as of January 2021 also use the
 :ref:`CudaPackage <cudapackage>`. So it is worth looking at those packages

@@ -57,7 +57,7 @@ overridden like so:

 .. code-block:: python

    def test(self):
-       scons("check")
+       scons('check')

 ^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ base class already contains:

 .. code-block:: python

-   depends_on("scons", type="build")
+   depends_on('scons', type='build')

 If you want to specify a particular version requirement, you can override
@@ -96,7 +96,7 @@ this in your package:

 .. code-block:: python

-   depends_on("scons@2.3.0:", type="build")
+   depends_on('scons@2.3.0:', type='build')

 ^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so:

    def build_args(self, spec, prefix):
        args = [
-           f"PREFIX={prefix}",
-           f"ZLIB={spec['zlib'].prefix}",
+           'PREFIX={0}'.format(prefix),
+           'ZLIB={0}'.format(spec['zlib'].prefix),
        ]

-       if spec.satisfies("+debug"):
-           args.append("DEBUG=yes")
+       if '+debug' in spec:
+           args.append('DEBUG=yes')
        else:
-           args.append("DEBUG=no")
+           args.append('DEBUG=no')

        return args

@@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option:
    * env_vars: [ string ]
        Environment variables to propagate through to SCons. Either the
        string "all" or a comma separated list of variable names, e.g.
-       "LD_LIBRARY_PATH,HOME".
-   - default: "LD_LIBRARY_PATH,PYTHONPATH"
+       'LD_LIBRARY_PATH,HOME'.
+   - default: 'LD_LIBRARY_PATH,PYTHONPATH'

 In the case of cantera, using ``env_vars=all`` allows us to use
@@ -212,12 +212,18 @@ under the ``container`` attribute of environments:
    final:
    - libgomp

+   # Extra instructions
+   extra_instructions:
+     final: |
+       RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc

    # Labels for the image
    labels:
      app: "gromacs"
      mpi: "mpich"

-A detailed description of the options available can be found in the :ref:`container_config_options` section.
+A detailed description of the options available can be found in the
+:ref:`container_config_options` section.

 -------------------
 Setting Base Images
@@ -519,13 +525,6 @@ the example below:
      COPY data /share/myapp/data
    {% endblock %}

-The Dockerfile is generated by running:
-
-.. code-block:: console
-
-   $ spack -e /opt/environment containerize
-
-Note that the environment must be active for spack to read the template.
 The recipe that gets generated contains the two extra instructions that we added in our template extension:

 .. code-block:: Dockerfile

@@ -1549,7 +1549,7 @@ its value:

    def configure_args(self):
        ...
-       if self.spec.satisfies("+shared"):
+       if "+shared" in self.spec:
            extra_args.append("--enable-shared")
        else:
            extra_args.append("--disable-shared")
@@ -1636,7 +1636,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s

 .. code-block:: python

-   if spec.satisfies("languages=jit"):
+   if "languages=jit" in spec:
        options.append("--enable-host-shared")

 """""""""""""""""""""""""""""""""""""""""""
@@ -2557,10 +2557,9 @@ Conditional dependencies
 ^^^^^^^^^^^^^^^^^^^^^^^^

 You may have a package that only requires a dependency under certain
-conditions. For example, you may have a package with optional MPI support.
-You would then provide a variant to reflect that the feature is optional
-and specify the MPI dependency only applies when MPI support is enabled.
-In that case, you could say something like:
+conditions. For example, you may have a package that has optional MPI support,
+- MPI is only a dependency when you want to enable MPI support for the
+package. In that case, you could say something like:

 .. code-block:: python

@@ -2568,39 +2567,13 @@ In that case, you could say something like:

    depends_on("mpi", when="+mpi")

 ``when`` can include constraints on the variant, version, compiler, etc. and
 the :mod:`syntax<spack.spec>` is the same as for Specs written on the command
 line.
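
The range of constraints ``when`` accepts is easiest to see in a short sketch; the package names below are hypothetical placeholders rather than examples taken from the surrounding hunk:

.. code-block:: python

   # version constraint: dependency needed only from 2.0 onwards
   depends_on("libfoo", when="@2.0:")
   # combined version and variant constraint
   depends_on("mpi", when="@3: +mpi")
   # compiler constraint: only for GCC builds
   depends_on("libbar", when="%gcc")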

 Suppose the above package also has, since version 3, optional `Trilinos`
 support and you want them both to build either with or without MPI. Further
 suppose you require a version of `Trilinos` no older than 12.6. In that case,
 the `trilinos` variant and dependency directives would be:

 .. code-block:: python

    variant("trilinos", default=False, description="Enable Trilinos support")

    depends_on("trilinos@12.6:", when="@3: +trilinos")
    depends_on("trilinos@12.6: +mpi", when="@3: +trilinos +mpi")

 Alternatively, you could use the `when` context manager to equivalently specify
 the `trilinos` variant dependencies as follows:

 .. code-block:: python

    with when("@3: +trilinos"):
        depends_on("trilinos@12.6:")
        depends_on("trilinos +mpi", when="+mpi")

 The argument to ``when`` in either case can include any Spec constraints that
 are supported on the command line using the same :ref:`syntax <sec-specs>`.

 .. note::

-   If a dependency isn't typically used, you can save time by making it
-   conditional since Spack will not build the dependency unless it is
-   required for the Spec.
+   If a dependency/feature of a package isn't typically used, you can save time
+   by making it conditional (since Spack will not build the dependency unless it
+   is required for the Spec).

 .. _dependency_dependency_patching:
@@ -3528,7 +3501,7 @@ need to override methods like ``configure_args``:

    def configure_args(self):
        args = ["--enable-cxx"] + self.enable_or_disable("libs")
-       if self.spec.satisfies("libs=static"):
+       if "libs=static" in self.spec:
            args.append("--with-pic")
        return args

@@ -3662,8 +3635,7 @@ regardless of the build system. The arguments for the phase are:

 The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always
 correspond to ``self.spec`` and ``self.spec.prefix`` respectively.

-If the ``package.py`` has build instructions in a separate
-:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
+If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly:

 .. code-block:: python

@@ -3673,6 +3645,56 @@ If the ``package.py`` has build instructions in a separate

 In this case the package is passed as the second argument, and ``self`` is the builder instance.

+.. _multiple_build_systems:
+
+^^^^^^^^^^^^^^^^^^^^^^
+Multiple build systems
+^^^^^^^^^^^^^^^^^^^^^^
+
+There are cases where a software actively supports two build systems, or changes build systems
+as it evolves, or needs different build systems on different platforms. Spack allows dealing with
+these cases natively, if a recipe is written using builders explicitly.
+
+For instance, software that supports two build systems unconditionally should derive from
+both ``*Package`` base classes, and declare the possible use of multiple build systems using
+a directive:
+
+.. code-block:: python
+
+   class ArpackNg(CMakePackage, AutotoolsPackage):
+
+       build_system("cmake", "autotools", default="cmake")
+
+In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
+supports multiple build systems, it is necessary to declare which one is the default. The ``package.py``
+will likely contain some overriding of default builder methods:
+
+.. code-block:: python
+
+   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
+       def cmake_args(self):
+           pass
+
+   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
+       def configure_args(self):
+           pass
+
+In more complex cases it might happen that the build system changes according to certain conditions,
+for instance across versions. That can be expressed with conditional variant values:
+
+.. code-block:: python
+
+   class ArpackNg(CMakePackage, AutotoolsPackage):
+
+       build_system(
+           conditional("cmake", when="@0.64:"),
+           conditional("autotools", when="@:0.63"),
+           default="cmake",
+       )
+
+In the example the directive imposes a change from ``Autotools`` to ``CMake`` going
+from ``v0.63`` to ``v0.64``.

 ^^^^^^^^^^^^^^^^^^
 Mixin base classes
 ^^^^^^^^^^^^^^^^^^
@@ -3719,106 +3741,6 @@ for instance:
|
||||
|
||||
In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines.
|
||||
|
||||
.. _multiple_build_systems:
|
||||
|
||||
----------------------
|
||||
Multiple build systems
|
||||
----------------------
|
||||
|
||||
There are cases where a package actively supports two build systems, or changes build systems
|
||||
as it evolves, or needs different build systems on different platforms. Spack allows dealing with
|
||||
these cases by splitting the build instructions into separate builder classes.
|
||||
|
||||
For instance, software that supports two build systems unconditionally should derive from
|
||||
both ``*Package`` base classes, and declare the possible use of multiple build systems using
|
||||
a directive:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class Example(CMakePackage, AutotoolsPackage):
|
||||
|
||||
variant("my_feature", default=True)
|
||||
|
||||
build_system("cmake", "autotools", default="cmake")
|
||||
|
||||
In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
supports multiple build systems, it is necessary to declare which one is the default.

Additional build instructions are split into separate builder classes:

.. code-block:: python

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           return [
               self.define_from_variant("MY_FEATURE", "my_feature")
           ]


   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           return self.with_or_without("my-feature", variant="my_feature")
In this example, ``spack install example +my_feature build_system=cmake`` will
pick the ``CMakeBuilder`` and invoke ``cmake -DMY_FEATURE:BOOL=ON``.

Similarly, ``spack install example +my_feature build_system=autotools`` will pick
the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``.

Dependencies are always specified in the package class. When some dependencies
depend on the choice of the build system, it is possible to use ``when`` conditions as
usual:
.. code-block:: python

   class Example(CMakePackage, AutotoolsPackage):

       build_system("cmake", "autotools", default="cmake")

       # Runtime dependencies
       depends_on("ncurses")
       depends_on("libxml2")

       # Lower bounds for cmake only apply when using cmake as the build system
       with when("build_system=cmake"):
           depends_on("cmake@3.18:", when="@2.0:", type="build")
           depends_on("cmake@3:", type="build")

       # Specify extra build dependencies used only in the configure script
       with when("build_system=autotools"):
           depends_on("perl", type="build")
           depends_on("pkgconfig", type="build")
Very often projects switch from one build system to another, or add support
for a new build system from a certain version, which means that the choice
of the build system typically depends on a version range. Those situations can
be handled by using conditional values in the ``build_system`` directive:

.. code-block:: python

   class Example(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )

In the example the directive imposes a change from ``Autotools`` to ``CMake`` going
from ``v0.63`` to ``v0.64``.
The ``build_system`` can be used as an ordinary variant, which also means that it can
be used in ``depends_on`` statements. This can be useful when a package *requires* that
its dependency has a CMake config file, meaning that the dependent can only build when the
dependency is built with CMake, and not Autotools. In that case, you can force the choice
of the build system in the dependent:

.. code-block:: python

   class Dependent(CMakePackage):

       depends_on("example build_system=cmake")
.. _install-environment:

-----------------------

@@ -4391,7 +4313,7 @@ for supported features, for instance:

.. code-block:: python

   if spec.satisfies("target=avx512"):
   if "avx512" in spec.target:
       args.append("--with-avx512")
The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding

@@ -6826,30 +6748,3 @@ To achieve backward compatibility with the single-class format Spack creates in

Overall the role of the adapter is to route access to attributes or methods first through the ``*Package``
hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where
the adapter role is to "emulate" a method resolution order like the one represented by the red arrows.
------------------------------
Specifying License Information
------------------------------

A significant portion of software that Spack packages is open source. Most open
source software is released under one or more common open source licenses.
Specifying the license that a package is released under in its
``package.py`` is good practice. To specify a license, find the SPDX identifier for
a project and then add it using the license directive:

.. code-block:: python

   license("<SPDX Identifier HERE>")

Note that specifying a license without a when clause makes it apply to all
versions and variants of the package, which might not actually be the case.
For example, a project might have switched licenses at some point or have
certain build configurations that include files that are licensed differently.
To account for this, you can specify when licenses should be applied. For
example, to specify that a specific license identifier should only apply
to versions up to and including 1.5, you could write the following directive:

.. code-block:: python

   license("...", when="@:1.5")
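As a concrete sketch (the identifiers and the version boundary here are made up for
illustration), a project that relicensed in version ``2.0`` could declare both ranges
explicitly:

.. code-block:: python

   license("GPL-2.0-only", when="@:1.9")
   license("Apache-2.0", when="@2.0:")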
@@ -213,16 +213,6 @@ pipeline jobs.

``spack ci generate``
^^^^^^^^^^^^^^^^^^^^^

Throughout this documentation, references to the "mirror" mean the target
mirror which is checked for the presence of up-to-date specs, and where
any scheduled jobs should push built binary packages. In the past, this
defaulted to the mirror at index 0 in the mirror configs, and could be
overridden using the ``--buildcache-destination`` argument. Starting with
Spack 0.23, ``spack ci generate`` will require you to identify this mirror
by the name "buildcache-destination". While you can configure any number
of mirrors as sources for your pipelines, you will need to identify the
destination mirror by name.

Concretizes the specs in the active environment, stages them (as described in
:ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk.
During concretization of the environment, ``spack ci generate`` also writes a
@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.23.0
python-levenshtein==0.22.0
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.7
urllib3==2.0.6
pytest==7.4.2
isort==5.12.0
black==23.9.1
flake8==6.1.0
mypy==1.6.1
mypy==1.5.1
@@ -1,6 +1,7 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 3.6--3.12, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
@@ -156,37 +156,6 @@ def lookup(name):
shutil.copystat = copystat


def polite_path(components: Iterable[str]):
    """
    Given a list of strings which are intended to be path components,
    generate a path, and format each component to avoid generating extra
    path entries.

    For example all "/", "\", and ":" characters will be replaced with
    "_". Other characters like "=" will also be replaced.
    """
    return os.path.join(*[polite_filename(x) for x in components])


@memoized
def _polite_antipattern():
    # A regex of all the characters we don't want in a filename
    return re.compile(r"[^A-Za-z0-9_.-]")


def polite_filename(filename: str) -> str:
    """
    Replace generally problematic filename characters with underscores.

    This differs from sanitize_filename in that it is more aggressive in
    changing characters in the name. For example it removes "=" which can
    confuse path parsing in external tools.
    """
    # This character set applies for both Windows and Linux. It does not
    # account for reserved filenames in Windows.
    return _polite_antipattern().sub("_", filename)
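# Illustrative usage (editor's sketch, not part of the diff): these helpers
# tame characters that confuse path parsing, e.g.
#   polite_filename("x=1:y")        -> "x_1_y"
#   polite_path(["a/b", "c=d"])     -> os.path.join("a_b", "c_d")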


def getuid():
    if sys.platform == "win32":
        import ctypes
@@ -8,8 +8,8 @@
from llnl.util.lang import memoized

import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError
@@ -17,9 +17,7 @@ class ABI:
    """This class provides methods to test ABI compatibility between specs.
    The current implementation is rather rough and could be improved."""

    def architecture_compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec
    ) -> bool:
    def architecture_compatible(self, target, constraint):
        """Return true if architecture of target spec is ABI compatible
        to the architecture of constraint spec. If either the target
        or constraint specs have no architecture, target is also defined
@@ -36,7 +34,7 @@ def _gcc_get_libstdcxx_version(self, version):
        a compiler's libstdc++ or libgcc_s"""
        from spack.build_environment import dso_suffix

        spec = spack.spec.CompilerSpec("gcc", version)
        spec = CompilerSpec("gcc", version)
        compilers = spack.compilers.compilers_for_spec(spec)
        if not compilers:
            return None
@@ -79,20 +77,16 @@ def _gcc_compiler_compare(self, pversion, cversion):
            return False
        return plib == clib

    def _intel_compiler_compare(
        self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
    ) -> bool:
    def _intel_compiler_compare(self, pversion, cversion):
        """Returns true iff the intel version pversion and cversion
        are ABI compatible"""

        # Test major and minor versions. Ignore build version.
        pv = pversion.lo
        cv = cversion.lo
        return pv.up_to(2) == cv.up_to(2)
        if len(pversion.version) < 2 or len(cversion.version) < 2:
            return False
        return pversion.version[:2] == cversion.version[:2]
    def compiler_compatible(
        self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
    ) -> bool:
    def compiler_compatible(self, parent, child, **kwargs):
        """Return true if compilers for parent and child are ABI compatible."""
        if not parent.compiler or not child.compiler:
            return True

@@ -101,7 +95,7 @@ def compiler_compatible(
            # Different compiler families are assumed ABI incompatible
            return False

        if loose:
        if kwargs.get("loose", False):
            return True

        # TODO: Can we move the specialized ABI matching stuff
@@ -122,10 +116,9 @@ def compiler_compatible(
            return True
        return False

    def compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
    ) -> bool:
    def compatible(self, target, constraint, **kwargs):
        """Returns true if target spec is ABI compatible to constraint spec"""
        loosematch = kwargs.get("loose", False)
        return self.architecture_compatible(target, constraint) and self.compiler_compatible(
            target, constraint, loose=loose
            target, constraint, loose=loosematch
        )
@@ -307,17 +307,10 @@ def _check_build_test_callbacks(pkgs, error_cls):

@package_directives
def _check_patch_urls(pkgs, error_cls):
    """Ensure that patches fetched from GitHub and GitLab have stable sha256
    hashes."""
    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
    github_patch_url_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
    )
    # Only .diff URLs have stable/full hashes:
    # https://forum.gitlab.com/t/patches-with-full-index/29313
    gitlab_patch_url_re = (
        r"^https?://(?:.+)?gitlab(?:.+)/"
        r".+/.+/-/(?:commit|merge_requests)/[a-fA-F0-9]+\.(?:patch|diff)"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )
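    # Illustrative URL (hypothetical repository, editor's sketch) accepted by the
    # GitHub pattern above, carrying the query string the audit below insists on:
    #   https://github.com/owner/repo/pull/1234.patch?full_index=1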
    errors = []

@@ -328,27 +321,19 @@ def _check_patch_urls(pkgs, error_cls):
                if not isinstance(patch, spack.patch.UrlPatch):
                    continue

                if re.match(github_patch_url_re, patch.url):
                    full_index_arg = "?full_index=1"
                    if not patch.url.endswith(full_index_arg):
                        errors.append(
                            error_cls(
                                "patch URL in package {0} must end with {1}".format(
                                    pkg_cls.name, full_index_arg
                                ),
                                [patch.url],
                            )
                        )
                elif re.match(gitlab_patch_url_re, patch.url):
                    if not patch.url.endswith(".diff"):
                        errors.append(
                            error_cls(
                                "patch URL in package {0} must end with .diff".format(
                                    pkg_cls.name
                                ),
                                [patch.url],
                            )
                        )
                if not re.match(github_patch_url_re, patch.url):
                    continue

                full_index_arg = "?full_index=1"
                if not patch.url.endswith(full_index_arg):
                    errors.append(
                        error_cls(
                            "patch URL in package {0} must end with {1}".format(
                                pkg_cls.name, full_index_arg
                            ),
                            [patch.url],
                        )
                    )

    return errors
@@ -797,7 +797,11 @@ def tarball_directory_name(spec):
    Return name of the tarball directory according to the convention
    <os>-<architecture>/<compiler>/<package>-<version>/
    """
    return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")
    return os.path.join(
        str(spec.architecture),
        f"{spec.compiler.name}-{spec.compiler.version}",
        f"{spec.name}-{spec.version}",
    )


def tarball_name(spec, ext):

@@ -805,10 +809,10 @@ def tarball_name(spec, ext):
    Return the name of the tarfile according to the convention
    <os>-<architecture>-<package>-<dag_hash><ext>
    """
    spec_formatted = spec.format_path(
        "{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
    )
    return (
        f"{spec.architecture}-{spec.compiler.name}-{spec.compiler.version}-"
        f"{spec.name}-{spec.version}-{spec.dag_hash()}{ext}"
    )
    return f"{spec_formatted}{ext}"
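# Illustrative tarball file name produced by either variant above, for a
# made-up spec (editor's sketch; ext=".spack", hash left as a placeholder):
#   linux-ubuntu22.04-x86_64-gcc-12.2.0-zlib-1.2.13-<dag_hash>.spack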
def tarball_path_name(spec, ext):

@@ -909,7 +913,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
        index_json_path,
        url_util.join(cache_prefix, "index.json"),
        keep_original=False,
        extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
        extra_args={"ContentType": "application/json"},
    )

    # Push the hash

@@ -917,7 +921,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
        index_hash_path,
        url_util.join(cache_prefix, "index.json.hash"),
        keep_original=False,
        extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
        extra_args={"ContentType": "text/plain"},
    )
@@ -228,7 +228,7 @@ def _install_and_test(
            if not abstract_spec.intersects(candidate_spec):
                continue

            if python_spec is not None and not abstract_spec.intersects(f"^{python_spec}"):
            if python_spec is not None and python_spec not in abstract_spec:
                continue

            for _, pkg_hash, pkg_sha256 in item["binaries"]:

@@ -446,11 +446,16 @@ def ensure_executables_in_path_or_raise(
                current_bootstrapper.last_search["spec"],
                current_bootstrapper.last_search["command"],
            )
            cmd.add_default_envmod(
                spack.user_environment.environment_modifications_for_specs(
                    concrete_spec, set_package_py_globals=False
            env_mods = spack.util.environment.EnvironmentModifications()
            for dep in concrete_spec.traverse(
                root=True, order="post", deptype=("link", "run")
            ):
                env_mods.extend(
                    spack.user_environment.environment_modifications_for_spec(
                        dep, set_package_py_globals=False
                    )
                )
            )
            cmd.add_default_envmod(env_mods)
            return cmd

    assert exception_handler, (
@@ -40,15 +40,12 @@
import sys
import traceback
import types
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import List, Tuple

import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import join_path
from llnl.util.lang import dedupe, stable_partition
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd
@@ -58,21 +55,17 @@
import spack.build_systems.python
import spack.builder
import spack.config
import spack.deptypes as dt
import spack.main
import spack.package_base
import spack.paths
import spack.platforms
import spack.repo
import spack.schema.environment
import spack.spec
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.path
import spack.util.pattern
from spack import traverse
from spack.context import Context
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError

@@ -83,6 +76,7 @@
    env_flag,
    filter_system_paths,
    get_path,
    inspect_path,
    is_system_path,
    validate,
)
@@ -115,6 +109,7 @@
SPACK_CCACHE_BINARY = "SPACK_CCACHE_BINARY"
SPACK_SYSTEM_DIRS = "SPACK_SYSTEM_DIRS"


# Platform-specific library suffix.
if sys.platform == "darwin":
    dso_suffix = "dylib"
@@ -411,13 +406,19 @@ def set_compiler_environment_variables(pkg, env):


def set_wrapper_variables(pkg, env):
    """Set environment variables used by the Spack compiler wrapper (which have the prefix
    `SPACK_`) and also add the compiler wrappers to PATH.
    """Set environment variables used by the Spack compiler wrapper
    (which have the prefix `SPACK_`) and also add the compiler wrappers
    to PATH.

    This determines the injected -L/-I/-rpath options; each of these specifies a search order and
    this function computes these options in a manner that is intended to match the DAG traversal
    order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
    is using topo order."""
    This determines the injected -L/-I/-rpath options; each
    of these specifies a search order and this function computes these
    options in a manner that is intended to match the DAG traversal order
    in `modifications_from_dependencies`: that method uses a post-order
    traversal so that `PrependPath` actions from dependencies take lower
    precedence; we use a post-order traversal here to match the visitation
    order of `modifications_from_dependencies` (so we are visiting the
    lowest priority packages first).
    """
    # Set environment variables if specified for
    # the given compiler
    compiler = pkg.compiler
@@ -536,42 +537,45 @@ def update_compiler_args_for_dep(dep):
    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))


def set_package_py_globals(pkg, context: Context = Context.BUILD):
def set_module_variables_for_package(pkg):
    """Populate the Python module of a package with some useful global names.
    This makes things easier for package writers.
    """
    # Put a marker on this module so that it won't execute the body of this
    # function again, since it is not needed
    marker = "_set_run_already_called"
    if getattr(pkg.module, marker, False):
        return

    module = ModuleChangePropagator(pkg)

    jobs = determine_number_of_jobs(parallel=pkg.parallel)

    m = module
    m.make_jobs = jobs

    if context == Context.BUILD:
        jobs = determine_number_of_jobs(parallel=pkg.parallel)
        m.make_jobs = jobs
        # TODO: make these build deps that can be installed if not found.
        m.make = MakeExecutable("make", jobs)
        m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
        # TODO: johnwparent: add package or builder support to define these build tools
        # for now there is no entrypoint for builders to define these on their
        # own
        if sys.platform == "win32":
            m.nmake = Executable("nmake")
            m.msbuild = Executable("msbuild")
            # analog to configure for win32
            m.cscript = Executable("cscript")

    # TODO: make these build deps that can be installed if not found.
    m.make = MakeExecutable("make", jobs)
    m.gmake = MakeExecutable("gmake", jobs)
    m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
    # TODO: johnwparent: add package or builder support to define these build tools
    # for now there is no entrypoint for builders to define these on their
    # own
    if sys.platform == "win32":
        m.nmake = Executable("nmake")
        m.msbuild = Executable("msbuild")
        # analog to configure for win32
        m.cscript = Executable("cscript")

        # Find the configure script in the archive path
        # Don't use which for this; we want to find it in the current dir.
        m.configure = Executable("./configure")

    # Find the configure script in the archive path
    # Don't use which for this; we want to find it in the current dir.
    m.configure = Executable("./configure")

        # Standard CMake arguments
        m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
        m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
        m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    # Standard CMake arguments
    m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
    m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
    m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    # Put spack compiler paths in module scope. (Some packages use it
    # in setup_run_environment etc, so don't put it context == build)
    # Put spack compiler paths in module scope.
    link_dir = spack.paths.build_env_path
    m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
    m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])

@@ -595,6 +599,9 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):

    m.static_to_shared_library = static_to_shared_library

    # Put a marker on this module so that it won't execute the body of this
    # function again, since it is not needed
    setattr(m, marker, True)
    module.propagate_changes_to_mro()
@@ -720,15 +727,12 @@ def load_external_modules(pkg):
        load_module(external_module)


def setup_package(pkg, dirty, context: Context = Context.BUILD):
def setup_package(pkg, dirty, context="build"):
    """Execute all environment setup routines."""
    if context not in (Context.BUILD, Context.TEST):
        raise ValueError(f"'context' must be Context.BUILD or Context.TEST - got {context}")
    if context not in ["build", "test"]:
        raise ValueError("'context' must be one of ['build', 'test'] - got: {0}".format(context))

    # First populate the package.py's module with the relevant globals that could be used in any
    # of the setup_* functions.
    setup_context = SetupContext(pkg.spec, context=context)
    setup_context.set_all_package_py_globals()
    set_module_variables_for_package(pkg)

    # Keep track of env changes from packages separately, since we want to
    # issue warnings when packages make "suspicious" modifications.
@@ -736,15 +740,13 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    env_mods = EnvironmentModifications()

    # setup compilers for build contexts
    need_compiler = context == Context.BUILD or (
        context == Context.TEST and pkg.test_requires_compiler
    )
    need_compiler = context == "build" or (context == "test" and pkg.test_requires_compiler)
    if need_compiler:
        set_compiler_environment_variables(pkg, env_mods)
        set_wrapper_variables(pkg, env_mods)

    tty.debug("setup_package: grabbing modifications from dependencies")
    env_mods.extend(setup_context.get_env_modifications())
    env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
    tty.debug("setup_package: collected all modifications from dependencies")

    # architecture specific setup

@@ -752,7 +754,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    target = platform.target(pkg.spec.architecture.target)
    platform.setup_platform_environment(pkg, env_mods)

    if context == Context.BUILD:
    if context == "build":
        tty.debug("setup_package: setup build environment for root")
        builder = spack.builder.create(pkg)
        builder.setup_build_environment(env_mods)

@@ -763,7 +765,16 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
            "config to assume that the package is part of the system"
            " includes and omit it when invoked with '--cflags'."
        )
    elif context == Context.TEST:
    elif context == "test":
        tty.debug("setup_package: setup test environment for root")
        env_mods.extend(
            inspect_path(
                pkg.spec.prefix,
                spack.user_environment.prefix_inspections(pkg.spec.platform),
                exclude=is_system_path,
            )
        )
        pkg.setup_run_environment(env_mods)
        env_mods.prepend_path("PATH", ".")

    # First apply the clean environment changes
@@ -802,245 +813,158 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    return env_base


class EnvironmentVisitor:
    def __init__(self, *roots: spack.spec.Spec, context: Context):
        # For the roots (well, marked specs) we follow different edges
        # than for their deps, depending on the context.
        self.root_hashes = set(s.dag_hash() for s in roots)
def _make_runnable(pkg, env):
    # Helper method which prepends a Package's bin/ prefix to the PATH
    # environment variable
    prefix = pkg.prefix

        if context == Context.BUILD:
            # Drop direct run deps in build context
            # We don't really distinguish between install and build time test deps,
            # so we include them here as build-time test deps.
            self.root_depflag = dt.BUILD | dt.TEST | dt.LINK
        elif context == Context.TEST:
            # This is more of an extended run environment
            self.root_depflag = dt.TEST | dt.RUN | dt.LINK
        elif context == Context.RUN:
            self.root_depflag = dt.RUN | dt.LINK

    def neighbors(self, item):
        spec = item.edge.spec
        if spec.dag_hash() in self.root_hashes:
            depflag = self.root_depflag
        else:
            depflag = dt.LINK | dt.RUN
        return traverse.sort_edges(spec.edges_to_dependencies(depflag=depflag))
    for dirname in ["bin", "bin64"]:
        bin_dir = os.path.join(prefix, dirname)
        if os.path.isdir(bin_dir):
            env.prepend_path("PATH", bin_dir)
class UseMode(Flag):
    #: Entrypoint spec (a spec to be built; an env root, etc)
    ROOT = auto()
def modifications_from_dependencies(
    spec, context, custom_mods_only=True, set_package_py_globals=True
):
    """Returns the environment modifications that are required by
    the dependencies of a spec and also applies modifications
    to this spec's package at module scope, if need be.

    #: A spec used at runtime, but no executables in PATH
    RUNTIME = auto()
    Environment modifications include:

    #: A spec used at runtime, with executables in PATH
    RUNTIME_EXECUTABLE = auto()
    - Updating PATH so that executables can be found
    - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
      tools can find Spack-built dependencies
    - Running custom package environment modifications

    #: A spec that's a direct build or test dep
    BUILDTIME_DIRECT = auto()
    Custom package modifications can conflict with the default PATH changes
    we make (specifically for the PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH
    environment variables), so this applies changes in a fixed order:

    #: A spec that should be visible in search paths in a build env.
    BUILDTIME = auto()
    - All modifications (custom and default) from external deps first
    - All modifications from non-external deps afterwards

    #: Flag is set when the (node, mode) is finalized
    ADDED = auto()
    With that order, `PrependPath` actions from non-external default
    environment modifications will take precedence over custom modifications
    from external packages.

    A secondary constraint is that custom and default modifications are
    grouped on a per-package basis: combined with the post-order traversal this
    means that default modifications of dependents can override custom
    modifications of dependencies (again, this would only occur for PATH,
    CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).


def effective_deptypes(
    *specs: spack.spec.Spec, context: Context = Context.BUILD
) -> List[Tuple[spack.spec.Spec, UseMode]]:
    """Given a list of input specs and a context, return a list of tuples of
    all specs that contribute to (environment) modifications, together with
    a flag specifying in what way they do so. The list is ordered topologically
    from root to leaf, meaning that environment modifications should be applied
    in reverse so that dependents override dependencies, not the other way around."""
    visitor = traverse.TopoVisitor(
        EnvironmentVisitor(*specs, context=context),
        key=lambda x: x.dag_hash(),
        root=True,
        all_edges=True,
    )
    traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)

    # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
    use_modes = defaultdict(lambda: UseMode(0))
    nodes_with_type = []

    for edge in visitor.edges:
        parent, child, depflag = edge.parent, edge.spec, edge.depflag

        # Mark the starting point
        if parent is None:
            use_modes[child] = UseMode.ROOT
            continue

        parent_mode = use_modes[parent]

        # Nothing to propagate.
        if not parent_mode:
            continue

        # Depending on the context, include particular deps from the root.
        if UseMode.ROOT & parent_mode:
            if context == Context.BUILD:
                if (dt.BUILD | dt.TEST) & depflag:
                    use_modes[child] |= UseMode.BUILDTIME_DIRECT
                if dt.LINK & depflag:
                    use_modes[child] |= UseMode.BUILDTIME

            elif context == Context.TEST:
                if (dt.RUN | dt.TEST) & depflag:
                    use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
                elif dt.LINK & depflag:
                    use_modes[child] |= UseMode.RUNTIME

            elif context == Context.RUN:
                if dt.RUN & depflag:
                    use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
                elif dt.LINK & depflag:
                    use_modes[child] |= UseMode.RUNTIME

        # Propagate RUNTIME and RUNTIME_EXECUTABLE through link and run deps.
        if (UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE | UseMode.BUILDTIME_DIRECT) & parent_mode:
            if dt.LINK & depflag:
                use_modes[child] |= UseMode.RUNTIME
            if dt.RUN & depflag:
                use_modes[child] |= UseMode.RUNTIME_EXECUTABLE

        # Propagate BUILDTIME through link deps.
        if UseMode.BUILDTIME & parent_mode:
            if dt.LINK & depflag:
                use_modes[child] |= UseMode.BUILDTIME

        # Finalize the spec; the invariant is that all in-edges are processed
        # before out-edges, meaning that parent is done.
        if not (UseMode.ADDED & parent_mode):
            use_modes[parent] |= UseMode.ADDED
            nodes_with_type.append((parent, parent_mode))

    # Attach the leaf nodes, since we only added nodes with out-edges.
    for spec, parent_mode in use_modes.items():
        if parent_mode and not (UseMode.ADDED & parent_mode):
            nodes_with_type.append((spec, parent_mode))

    return nodes_with_type
class SetupContext:
    """This class encapsulates the logic to determine environment modifications, and is used as
    well to set globals in modules of package.py."""

    def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
        """Construct a ModificationsFromDag object.
        Args:
            specs: single root spec for build/test context, possibly more for run context
            context: build, run, or test"""
        if (context == Context.BUILD or context == Context.TEST) and not len(specs) == 1:
            raise ValueError("Cannot setup build environment for multiple specs")
        specs_with_type = effective_deptypes(*specs, context=context)

        self.specs = specs
        self.context = context
        self.external: List[Tuple[spack.spec.Spec, UseMode]]
        self.nonexternal: List[Tuple[spack.spec.Spec, UseMode]]
        # Reverse so we go from leaf to root
        self.nodes_in_subdag = set(id(s) for s, _ in specs_with_type)

        # Split into non-external and external, maintaining topo order per group.
        self.external, self.nonexternal = stable_partition(
            reversed(specs_with_type), lambda t: t[0].external
        )
    Args:
        spec (spack.spec.Spec): spec for which we want the modifications
        context (str): either 'build' for build-time modifications or 'run'
            for run-time modifications
        custom_mods_only (bool): if True returns only custom modifications, if False
            returns custom and default modifications
        set_package_py_globals (bool): whether or not to set the global variables in the
            package.py files (this may be problematic when using buildcaches that have
            been built on a different but compatible OS)
    """
    if context not in ["build", "run", "test"]:
        raise ValueError(
            "Expecting context to be one of ['build', 'run', 'test'], " "got: {0}".format(context)
        )
        self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE
        self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
        self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT

        if context == Context.RUN or context == Context.TEST:
            self.should_be_runnable |= UseMode.ROOT
            self.should_setup_run_env |= UseMode.ROOT
    env = EnvironmentModifications()

        # Everything that calls setup_run_environment and setup_dependent_* needs globals set.
        self.should_set_package_py_globals = (
            self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT
        )
        # In a build context, the root and direct build deps need build-specific globals set.
        self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT
    # Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this
    # function; these sets form the building blocks of those collections.
    build_deps = set(spec.dependencies(deptype=("build", "test")))
    link_deps = set(spec.traverse(root=False, deptype="link"))
    build_link_deps = build_deps | link_deps
    build_and_supporting_deps = set()
    for build_dep in build_deps:
        build_and_supporting_deps.update(build_dep.traverse(deptype="run"))
    run_and_supporting_deps = set(spec.traverse(root=False, deptype=("run", "link")))
    test_and_supporting_deps = set()
    for test_dep in set(spec.dependencies(deptype="test")):
        test_and_supporting_deps.update(test_dep.traverse(deptype="run"))

    def set_all_package_py_globals(self):
        """Set the globals in modules of package.py files."""
        for dspec, flag in chain(self.external, self.nonexternal):
            pkg = dspec.package
    # All dependencies that might have environment modifications to apply
    custom_mod_deps = set()
    if context == "build":
        custom_mod_deps.update(build_and_supporting_deps)
        # Tests may be performed after build
        custom_mod_deps.update(test_and_supporting_deps)
    else:
        # test/run context
        custom_mod_deps.update(run_and_supporting_deps)
        if context == "test":
            custom_mod_deps.update(test_and_supporting_deps)
    custom_mod_deps.update(link_deps)

            if self.should_set_package_py_globals & flag:
                if self.context == Context.BUILD and self.needs_build_context & flag:
                    set_package_py_globals(pkg, context=Context.BUILD)
                else:
                    # This includes runtime dependencies, also runtime deps of direct build deps.
                    set_package_py_globals(pkg, context=Context.RUN)
    # Determine 'exe_deps': the set of packages with binaries we want to use
    if context == "build":
        exe_deps = build_and_supporting_deps | test_and_supporting_deps
    elif context == "run":
        exe_deps = set(spec.traverse(deptype="run"))
    elif context == "test":
        exe_deps = test_and_supporting_deps

            for spec in dspec.dependents():
                # Note: some specs have dependents that are unreachable from the root, so avoid
                # setting globals for those.
                if id(spec) not in self.nodes_in_subdag:
                    continue
                dependent_module = ModuleChangePropagator(spec.package)
                pkg.setup_dependent_package(dependent_module, spec)
                dependent_module.propagate_changes_to_mro()
    def default_modifications_for_dep(dep):
        if dep in build_link_deps and not is_system_path(dep.prefix) and context == "build":
            prefix = dep.prefix

    def get_env_modifications(self) -> EnvironmentModifications:
        """Returns the environment variable modifications for the given input specs and context.
        Environment modifications include:
        - Updating PATH for packages that are required at runtime
        - Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
          tools can find Spack-built dependencies (when context=build)
        - Running custom package environment modifications (setup_run_environment,
          setup_dependent_build_environment, setup_dependent_run_environment)
            env.prepend_path("CMAKE_PREFIX_PATH", prefix)

        The (partial) order imposed on the specs is externals first, then topological
        from leaf to root. That way externals cannot contribute search paths that would shadow
        Spack's prefixes, and dependents override variables set by dependencies."""
        env = EnvironmentModifications()
        for dspec, flag in chain(self.external, self.nonexternal):
            tty.debug(f"Adding env modifications for {dspec.name}")
            pkg = dspec.package
            for directory in ("lib", "lib64", "share"):
                pcdir = os.path.join(prefix, directory, "pkgconfig")
                if os.path.isdir(pcdir):
                    env.prepend_path("PKG_CONFIG_PATH", pcdir)

            if self.should_setup_dependent_build_env & flag:
                self._make_buildtime_detectable(dspec, env)
        if dep in exe_deps and not is_system_path(dep.prefix):
            _make_runnable(dep, env)

                for spec in self.specs:
                    builder = spack.builder.create(pkg)
                    builder.setup_dependent_build_environment(env, spec)
    def add_modifications_for_dep(dep):
        tty.debug("Adding env modifications for {0}".format(dep.name))
        # Some callers of this function only want the custom modifications.
        # For callers that want both custom and default modifications, we want
        # to perform the default modifications here (this groups custom
        # and default modifications together on a per-package basis).
        if not custom_mods_only:
            default_modifications_for_dep(dep)

            if self.should_be_runnable & flag:
                self._make_runnable(dspec, env)
        # Perform custom modifications here (PrependPath actions performed in
        # the custom method override the default environment modifications
        # we do to help the build, namely for PATH, CMAKE_PREFIX_PATH, and
        # PKG_CONFIG_PATH)
        if dep in custom_mod_deps:
            dpkg = dep.package
            if set_package_py_globals:
                set_module_variables_for_package(dpkg)

            if self.should_setup_run_env & flag:
                # TODO: remove setup_dependent_run_environment...
                for spec in dspec.dependents(deptype=dt.RUN):
                    if id(spec) in self.nodes_in_subdag:
                        pkg.setup_dependent_run_environment(env, spec)
                pkg.setup_run_environment(env)
        return env
            current_module = ModuleChangePropagator(spec.package)
            dpkg.setup_dependent_package(current_module, spec)
            current_module.propagate_changes_to_mro()

    def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
        if is_system_path(dep.prefix):
            return
            if context == "build":
                builder = spack.builder.create(dpkg)
                builder.setup_dependent_build_environment(env, spec)
            else:
                dpkg.setup_dependent_run_environment(env, spec)
        tty.debug("Added env modifications for {0}".format(dep.name))

        env.prepend_path("CMAKE_PREFIX_PATH", dep.prefix)
        for d in ("lib", "lib64", "share"):
            pcdir = os.path.join(dep.prefix, d, "pkgconfig")
            if os.path.isdir(pcdir):
                env.prepend_path("PKG_CONFIG_PATH", pcdir)
    # Note that we want to perform environment modifications in a fixed order.
    # The Spec.traverse method provides this: i.e. in addition to
    # the post-order semantics, it also guarantees a fixed traversal order
    # among dependencies which are not constrained by post-order semantics.
    for dspec in spec.traverse(root=False, order="post"):
        if dspec.external:
            add_modifications_for_dep(dspec)

    def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
        if is_system_path(dep.prefix):
            return
    for dspec in spec.traverse(root=False, order="post"):
        # Default env modifications for non-external packages can override
        # custom modifications of external packages (this can only occur
        # for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH)
        if not dspec.external:
            add_modifications_for_dep(dspec)

        for d in ("bin", "bin64"):
            bin_dir = os.path.join(dep.prefix, d)
            if os.path.isdir(bin_dir):
                env.prepend_path("PATH", bin_dir)
    return env
def get_cmake_prefix_path(pkg):

@@ -1072,7 +996,7 @@ def get_cmake_prefix_path(pkg):

def _setup_pkg_and_run(
    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
):
    context: str = kwargs.get("context", "build")
    context = kwargs.get("context", "build")

    try:
        # We are in the child process. Python sets sys.stdin to

@@ -1088,7 +1012,7 @@ def _setup_pkg_and_run(
        if not kwargs.get("fake", False):
            kwargs["unmodified_env"] = os.environ.copy()
            kwargs["env_modifications"] = setup_package(
                pkg, dirty=kwargs.get("dirty", False), context=Context.from_string(context)
                pkg, dirty=kwargs.get("dirty", False), context=context
            )
        return_value = function(pkg, kwargs)
        write_pipe.send(return_value)
@@ -46,7 +46,6 @@ class AutotoolsPackage(spack.package_base.PackageBase):
        depends_on("gnuconfig", type="build", when="target=ppc64le:")
        depends_on("gnuconfig", type="build", when="target=aarch64:")
        depends_on("gnuconfig", type="build", when="target=riscv64:")
        depends_on("gmake", type="build")
        conflicts("platform=windows")

    def flags_to_build_system_args(self, flags):

@@ -142,10 +142,10 @@ def flags_to_build_system_args(self, flags):
        # We specify for each of them.
        if flags["ldflags"]:
            ldflags = " ".join(flags["ldflags"])
            ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
            # cmake has separate linker arguments for types of builds.
            self.cmake_flag_args.append(f"-DCMAKE_EXE_LINKER_FLAGS={ldflags}")
            self.cmake_flag_args.append(f"-DCMAKE_MODULE_LINKER_FLAGS={ldflags}")
            self.cmake_flag_args.append(f"-DCMAKE_SHARED_LINKER_FLAGS={ldflags}")
            for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
                self.cmake_flag_args.append(ld_string.format(type, ldflags))

        # CMake has libs options separated by language. Apply ours to each.
        if flags["ldlibs"]:
@@ -9,8 +9,7 @@

import spack.builder
import spack.package_base
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.directives import build_system, conflicts

from ._checks import (
    BaseBuilder,

@@ -30,10 +29,7 @@ class MakefilePackage(spack.package_base.PackageBase):
    legacy_buildsystem = "makefile"

    build_system("makefile")

    with when("build_system=makefile"):
        conflicts("platform=windows")
        depends_on("gmake", type="build")
    conflicts("platform=windows", when="build_system=makefile")


@spack.builder.builder("makefile")
@@ -49,7 +49,6 @@

TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
    "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"

@@ -679,7 +678,7 @@ def generate_gitlab_ci_yaml(
        remote_mirror_override (str): Typically only needed when one spack.yaml
            is used to populate several mirrors with binaries, based on some
            criteria. Spack protected pipelines populate different mirrors based
            on branch name, facilitated by this option. DEPRECATED
            on branch name, facilitated by this option.
    """
    with spack.concretize.disable_compiler_existence_check():
        with env.write_transaction():
@@ -776,39 +775,17 @@ def generate_gitlab_ci_yaml(
            "instead.",
        )

    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    deprecated_mirror_config = False
    buildcache_destination = None
    if "buildcache-destination" in pipeline_mirrors:
        if remote_mirror_override:
            tty.die(
                "Using the deprecated --buildcache-destination cli option and "
                "having a mirror named 'buildcache-destination' at the same time "
                "is not allowed"
            )
        buildcache_destination = pipeline_mirrors["buildcache-destination"]
    else:
        deprecated_mirror_config = True
        # TODO: This will be an error in Spack 0.23
    if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
        tty.die("spack ci generate requires an env containing a mirror")

    # TODO: Remove this block in spack 0.23
    remote_mirror_url = None
    if deprecated_mirror_config:
        if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
            tty.die("spack ci generate requires an env containing a mirror")

        ci_mirrors = yaml_root["mirrors"]
        mirror_urls = [url for url in ci_mirrors.values()]
        remote_mirror_url = mirror_urls[0]
    ci_mirrors = yaml_root["mirrors"]
    mirror_urls = [url for url in ci_mirrors.values()]
    remote_mirror_url = mirror_urls[0]

    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
    if spack_buildcache_copy:
        buildcache_copies = {}
        buildcache_copy_src_prefix = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )
        buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url
        buildcache_copy_dest_prefix = spack_buildcache_copy
    # Check for a list of "known broken" specs that we should not bother

@@ -820,7 +797,6 @@ def generate_gitlab_ci_yaml(

    enable_artifacts_buildcache = False
    if "enable-artifacts-buildcache" in ci_config:
        tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
        enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]

    rebuild_index_enabled = True

@@ -829,15 +805,13 @@ def generate_gitlab_ci_yaml(

    temp_storage_url_prefix = None
    if "temporary-storage-url-prefix" in ci_config:
        tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

    # If a remote mirror override (alternate buildcache destination) was
    # specified, add it here in case it has already built hashes we might
    # generate.
    # TODO: Remove this block in Spack 0.23
    mirrors_to_check = None
    if deprecated_mirror_config and remote_mirror_override:
    if remote_mirror_override:
        if spack_pipeline_type == "spack_protected_branch":
            # Overriding the main mirror in this case might result
            # in skipping jobs on a release pipeline because specs are

@@ -857,9 +831,8 @@ def generate_gitlab_ci_yaml(
            cfg.default_modify_scope(),
        )

    # TODO: Remove this block in Spack 0.23
    shared_pr_mirror = None
    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
    if spack_pipeline_type == "spack_pull_request":
        stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
        shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
        spack.mirror.add(
@@ -911,7 +884,6 @@ def generate_gitlab_ci_yaml(
    job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
    job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
    # TODO: Remove this line in Spack 0.23
    local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
    user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

@@ -926,11 +898,11 @@ def generate_gitlab_ci_yaml(
    rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
    rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
    rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
    # TODO: Remove this line in Spack 0.23
    rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
    rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

    # Speed up staging by first fetching binary indices from all mirrors
    # (including the override mirror we may have just added above).
    try:
        bindist.binary_index.update()
    except bindist.FetchCacheError as e:
@@ -1141,7 +1113,6 @@ def main_script_replacements(cmd):
        },
    )

    # TODO: Remove this block in Spack 0.23
    if enable_artifacts_buildcache:
        bc_root = os.path.join(local_mirror_dir, "build_cache")
        job_object["artifacts"]["paths"].extend(

@@ -1171,12 +1142,10 @@ def main_script_replacements(cmd):
    _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)

    # Clean up remote mirror override if enabled
    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config:
        if remote_mirror_override:
            spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
        if spack_pipeline_type == "spack_pull_request":
            spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
    if remote_mirror_override:
        spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
    if spack_pipeline_type == "spack_pull_request":
        spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())

    tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))
@@ -1207,28 +1176,10 @@ def main_script_replacements(cmd):
            sync_job["needs"] = [
                {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
            ]

            if "variables" not in sync_job:
                sync_job["variables"] = {}

            sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
                buildcache_destination.fetch_url
                if buildcache_destination
                else remote_mirror_override or remote_mirror_url
            )

            if "buildcache-source" in pipeline_mirrors:
                buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
            else:
                # TODO: Remove this condition in Spack 0.23
                buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
            sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source

            output_object["copy"] = sync_job
            job_id += 1

    if job_id > 0:
        # TODO: Remove this block in Spack 0.23
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
            # schedule a job to clean up the temporary storage location

@@ -1262,13 +1213,6 @@ def main_script_replacements(cmd):
            signing_job["when"] = "always"
            signing_job["retry"] = {"max": 2, "when": ["always"]}
            signing_job["interruptible"] = True
            if "variables" not in signing_job:
                signing_job["variables"] = {}
            signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
                buildcache_destination.push_url  # need the s3 url for aws s3 sync
                if buildcache_destination
                else remote_mirror_override or remote_mirror_url
            )

            output_object["sign-pkgs"] = signing_job
@@ -1277,13 +1221,13 @@ def main_script_replacements(cmd):
        stage_names.append("stage-rebuild-index")
        final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

        index_target_mirror = mirror_urls[0]
        if remote_mirror_override:
            index_target_mirror = remote_mirror_override
        final_job["stage"] = "stage-rebuild-index"
        target_mirror = remote_mirror_override or remote_mirror_url
        if buildcache_destination:
            target_mirror = buildcache_destination.push_url
        final_job["script"] = _unpack_script(
            final_job["script"],
            op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
            op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror),
        )

        final_job["when"] = "always"

@@ -1305,24 +1249,20 @@ def main_script_replacements(cmd):
        "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
        "SPACK_VERSION": spack_version,
        "SPACK_CHECKOUT_VERSION": version_to_clone,
        # TODO: Remove this line in Spack 0.23
        "SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
        "SPACK_JOB_LOG_DIR": rel_job_log_dir,
        "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
        "SPACK_JOB_TEST_DIR": rel_job_test_dir,
        # TODO: Remove this line in Spack 0.23
        "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
        "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
        "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
        # TODO: Remove this line in Spack 0.23
        "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
        "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
        "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
        "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
    }

    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config and remote_mirror_override:
    if remote_mirror_override:
        (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override

    spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
@@ -2062,23 +2002,43 @@ def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):


def create_buildcache(
input_spec: spack.spec.Spec, *, destination_mirror_urls: List[str], sign_binaries: bool = False
input_spec: spack.spec.Spec,
*,
pipeline_mirror_url: Optional[str] = None,
buildcache_mirror_url: Optional[str] = None,
sign_binaries: bool = False,
) -> List[PushResult]:
"""Create the buildcache at the provided mirror(s).

Arguments:
input_spec: Installed spec to package and push
destination_mirror_urls: List of urls to push to
buildcache_mirror_url: URL for the buildcache mirror
pipeline_mirror_url: URL for the pipeline mirror
sign_binaries: Whether or not to sign buildcache entry

Returns: A list of PushResults, indicating success or failure.
"""
results = []

for mirror_url in destination_mirror_urls:
# Create buildcache in either the main remote mirror, or in the
# per-PR mirror, if this is a PR pipeline
if buildcache_mirror_url:
results.append(
PushResult(
success=push_mirror_contents(input_spec, mirror_url, sign_binaries), url=mirror_url
success=push_mirror_contents(input_spec, buildcache_mirror_url, sign_binaries),
url=buildcache_mirror_url,
)
)

# Create another copy of that buildcache in the per-pipeline
# temporary storage mirror (this is only done if either
# artifacts buildcache is enabled or a temporary storage url
# prefix is set)
if pipeline_mirror_url:
results.append(
PushResult(
success=push_mirror_contents(input_spec, pipeline_mirror_url, sign_binaries),
url=pipeline_mirror_url,
)
)

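After this change, callers pass each mirror as its own keyword argument instead of a single list. A minimal sketch of the reverted call style (the spec variable and mirror URL are illustrative, not from this diff):

results = create_buildcache(
    input_spec=installed_spec,                        # a concrete, installed Spec (hypothetical)
    buildcache_mirror_url="s3://example-buildcache",  # hypothetical mirror URL
    pipeline_mirror_url=None,                         # no per-pipeline temporary storage
    sign_binaries=False,
)
for result in results:
    print(result.success, result.url)  # each PushResult records outcome and target URL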
@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
from spack.context import Context

description = (
"run a command in a spec's install environment, or dump its environment to screen or file"
@@ -15,4 +14,4 @@


def build_env(parser, args):
env_utility.emulate_env_utility("build-env", Context.BUILD, args)
env_utility.emulate_env_utility("build-env", "build", args)

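For orientation: this function backs the `spack build-env` command described by the string above, so an invocation such as `spack build-env <spec> -- <command>` reaches emulate_env_utility with the plain string "build" after this revert, rather than Context.BUILD.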
@@ -7,7 +7,6 @@
import re
import sys

import llnl.string
import llnl.util.lang
from llnl.util import tty

@@ -16,7 +15,6 @@
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.web as web_util
from spack.cmd.common import arguments
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
@@ -130,38 +128,18 @@ def checksum(parser, args):
remote_versions = pkg.fetch_remote_versions(args.jobs)
url_dict = remote_versions

# A spidered URL can differ from the package.py *computed* URL, pointing to different tarballs.
# For example, GitHub release pages sometimes have multiple tarballs with different shasum:
# - releases/download/1.0/<pkg>-1.0.tar.gz (uploaded tarball)
# - archive/refs/tags/1.0.tar.gz (generated tarball)
# We want to ensure that `spack checksum` and `spack install` ultimately use the same URL, so
# here we check whether the crawled and computed URLs disagree, and if so, prioritize the
# computed URL if it exists (checked by just sending a HEAD request).
url_changed_for_version = set()
for version, url in url_dict.items():
possible_urls = pkg.all_urls_for_version(version)
if url not in possible_urls:
for possible_url in possible_urls:
if web_util.url_exists(possible_url):
url_dict[version] = possible_url
break
else:
url_changed_for_version.add(version)

if not url_dict:
tty.die(f"Could not find any remote versions for {pkg.name}")
elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
filtered_url_dict = spack.stage.interactive_version_filter(
url_dict, pkg.versions, url_changes=url_changed_for_version
)
if not filtered_url_dict:
exit(0)
url_dict = filtered_url_dict
else:
tty.info(f"Found {llnl.string.plural(len(url_dict), 'version')} of {pkg.name}")

# print an empty line to create a new output section block
print()

version_hashes = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, keep_stage=args.keep_stage, fetch_options=pkg.fetch_options
url_dict,
pkg.name,
keep_stage=args.keep_stage,
batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
fetch_options=pkg.fetch_options,
)

if args.verify:

@@ -191,14 +191,6 @@ def ci_generate(args):
"""
env = spack.cmd.require_active_env(cmd_name="ci generate")

if args.copy_to:
tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")

if args.buildcache_destination:
tty.warn(
"The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
)

output_file = args.output_file
copy_yaml_to = args.copy_to
run_optimizer = args.optimize
@@ -272,6 +264,12 @@ def ci_rebuild(args):
if not ci_config:
tty.die("spack ci rebuild requires an env containing ci cfg")

tty.msg(
"SPACK_BUILDCACHE_DESTINATION={0}".format(
os.environ.get("SPACK_BUILDCACHE_DESTINATION", None)
)
)

# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.
@@ -279,7 +277,6 @@ def ci_rebuild(args):
job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
# TODO: Remove this in Spack 0.23
local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
@@ -288,12 +285,9 @@ def ci_rebuild(args):
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
# TODO: Remove this in Spack 0.23
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
# TODO: Remove this in Spack 0.23
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
# TODO: Remove this in Spack 0.23
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
@@ -350,36 +344,21 @@ def ci_rebuild(args):

full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False

pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False
buildcache_destination = None
if "buildcache-destination" in pipeline_mirrors:
buildcache_destination = pipeline_mirrors["buildcache-destination"]
else:
deprecated_mirror_config = True
# TODO: This will be an error in Spack 0.23

# If no override url exists, then just push binary package to the
# normal remote mirror url.
# TODO: Remove in Spack 0.23
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
if buildcache_destination:
buildcache_mirror_url = buildcache_destination.push_url

# Figure out what our temporary storage mirror is: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build
# artifacts from upstream to downstream jobs.
# TODO: Remove this in Spack 0.23
pipeline_mirror_url = None

# TODO: Remove this in Spack 0.23
temp_storage_url_prefix = None
if "temporary-storage-url-prefix" in ci_config:
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)

# TODO: Remove this in Spack 0.23
enable_artifacts_mirror = False
if "enable-artifacts-buildcache" in ci_config:
enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
@@ -475,14 +454,12 @@ def ci_rebuild(args):
# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# built for this spec.
# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
spack.mirror.add(mirror, cfg.default_modify_scope())
pipeline_mirrors.append(pipeline_mirror_url)

# Check configured mirrors for a built spec with a matching hash
# TODO: Remove this block in Spack 0.23
mirrors_to_check = None
if remote_mirror_override:
if spack_pipeline_type == "spack_protected_branch":
@@ -500,8 +477,7 @@ def ci_rebuild(args):
)
pipeline_mirrors.append(remote_mirror_override)

# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
if spack_pipeline_type == "spack_pull_request":
if shared_pr_mirror_url != "None":
pipeline_mirrors.append(shared_pr_mirror_url)

@@ -523,7 +499,6 @@ def ci_rebuild(args):
tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
for match in matches:
tty.msg(" {0}".format(match["mirror_url"]))
# TODO: Remove this block in Spack 0.23
if enable_artifacts_mirror:
matching_mirror = matches[0]["mirror_url"]
build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
@@ -538,8 +513,7 @@ def ci_rebuild(args):
# only want to keep the mirror being used by the current pipeline as its binary
# package destination. This ensures that when we rebuild everything, we only
# consume binary dependencies built in this pipeline.
# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and full_rebuild:
if full_rebuild:
spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())

# No hash match anywhere means we need to rebuild spec
@@ -605,9 +579,7 @@ def ci_rebuild(args):
"SPACK_COLOR=always",
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(
ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}")
),
"install-deps/{}".format(job_spec.format("{name}-{version}-{hash}")),
],
spack_cmd + ["install"] + root_install_args,
]
@@ -704,25 +676,21 @@ def ci_rebuild(args):
# print out some instructions on how to reproduce this build failure
# outside of the pipeline environment.
if install_exit_code == 0:
mirror_urls = [buildcache_mirror_url]

# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
mirror_urls.append(pipeline_mirror_url)

for result in spack_ci.create_buildcache(
input_spec=job_spec,
destination_mirror_urls=mirror_urls,
sign_binaries=spack_ci.can_sign_binaries(),
):
msg = tty.msg if result.success else tty.warn
msg(
"{} {} to {}".format(
"Pushed" if result.success else "Failed to push",
job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
result.url,
if buildcache_mirror_url or pipeline_mirror_url:
for result in spack_ci.create_buildcache(
input_spec=job_spec,
buildcache_mirror_url=buildcache_mirror_url,
pipeline_mirror_url=pipeline_mirror_url,
sign_binaries=spack_ci.can_sign_binaries(),
):
msg = tty.msg if result.success else tty.warn
msg(
"{} {} to {}".format(
"Pushed" if result.success else "Failed to push",
job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
result.url,
)
)
)

# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.

@@ -7,6 +7,7 @@

import llnl.util.tty as tty

import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
@@ -14,8 +15,7 @@
import spack.paths
import spack.spec
import spack.store
from spack import build_environment, traverse
from spack.context import Context
from spack import traverse
from spack.util.environment import dump_environment, pickle_environment


@@ -42,14 +42,14 @@ def setup_parser(subparser):


class AreDepsInstalledVisitor:
def __init__(self, context: Context = Context.BUILD):
if context == Context.BUILD:
# TODO: run deps shouldn't be required for build env.
def __init__(self, context="build"):
if context not in ("build", "test"):
raise ValueError("context can only be build or test")

if context == "build":
self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
elif context == Context.TEST:
self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
else:
raise ValueError("context can only be Context.BUILD or Context.TEST")
self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN

self.has_uninstalled_deps = False

@@ -76,7 +76,7 @@ def neighbors(self, item):
return item.edge.spec.edges_to_dependencies(depflag=depflag)


def emulate_env_utility(cmd_name, context: Context, args):
def emulate_env_utility(cmd_name, context, args):
if not args.spec:
tty.die("spack %s requires a spec." % cmd_name)

@@ -120,7 +120,7 @@ def emulate_env_utility(cmd_name, context: Context, args):
hashes=True,
# This shows more than necessary, but we cannot dynamically change deptypes
# in Spec.tree(...).
deptypes="all" if context == Context.BUILD else ("build", "test", "link", "run"),
deptypes="all" if context == "build" else ("build", "test", "link", "run"),
),
)

@@ -5,7 +5,6 @@

import os
import re
import sys
import urllib.parse

import llnl.util.tty as tty
@@ -63,9 +62,6 @@ class {class_name}({base_class_name}):
# notify when the package is updated.
# maintainers("github_user1", "github_user2")

# FIXME: Add the SPDX identifier of the project's license below.
license("UNKNOWN")

{versions}

{dependencies}
@@ -827,11 +823,6 @@ def get_versions(args, name):
# Find available versions
try:
url_dict = spack.url.find_versions_of_archive(args.url)
if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
if url_dict_filtered is None:
exit(0)
url_dict = url_dict_filtered
except UndetectableVersionError:
# Use fake versions
tty.warn("Couldn't detect version in: {0}".format(args.url))
@@ -843,7 +834,11 @@ def get_versions(args, name):
url_dict = {version: args.url}

version_hashes = spack.stage.get_checksums_for_versions(
url_dict, name, first_stage_function=guesser, keep_stage=args.keep_stage
url_dict,
name,
first_stage_function=guesser,
keep_stage=args.keep_stage,
batch=(args.batch or len(url_dict) == 1),
)

versions = get_version_lines(version_hashes, url_dict)

@@ -8,7 +8,6 @@
import shutil
import sys
import tempfile
from typing import Optional

import llnl.string as string
import llnl.util.filesystem as fs
@@ -97,16 +96,22 @@ def env_activate_setup_parser(subparser):

view_options = subparser.add_mutually_exclusive_group()
view_options.add_argument(
"--with-view",
"-v",
metavar="name",
help="set runtime environment variables for specific view",
"--with-view",
action="store_const",
dest="with_view",
const=True,
default=True,
help="update PATH, etc., with associated view",
)
view_options.add_argument(
"--without-view",
"-V",
action="store_true",
help="do not set runtime environment variables for any view",
"--without-view",
action="store_const",
dest="with_view",
const=False,
default=True,
help="do not update PATH, etc., with associated view",
)

subparser.add_argument(
@@ -192,20 +197,10 @@ def env_activate(args):

# Activate new environment
active_env = ev.Environment(env_path)

# Check if runtime environment variables are requested, and if so, for what view.
view: Optional[str] = None
if args.with_view:
view = args.with_view
if not active_env.has_view(view):
tty.die(f"The environment does not have a view named '{view}'")
elif not args.without_view and active_env.has_view(ev.default_view_name):
view = ev.default_view_name

cmds += spack.environment.shell.activate_header(
env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None, view=view
env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None
)
env_mods.extend(spack.environment.shell.activate(env=active_env, view=view))
env_mods.extend(spack.environment.shell.activate(env=active_env, add_view=args.with_view))
cmds += env_mods.shell_modifications(args.shell)
sys.stdout.write(cmds)

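For orientation (illustrative usage, not part of the diff): the named-view parser being removed supported `spack env activate --with-view <name> <env>`, selecting which view's runtime variables to set, while the restored boolean form only distinguishes `spack env activate <env>` from `spack env activate -V <env>` (no view at all).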
@@ -72,10 +72,6 @@ def variant(s):
return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


def license(s):
return spack.spec.VERSION_COLOR + s + plain_format


class VariantFormatter:
def __init__(self, variants):
self.variants = variants
@@ -352,22 +348,6 @@ def print_virtuals(pkg):
color.cprint(" None")


def print_licenses(pkg):
"""Output the licenses of the project."""

color.cprint("")
color.cprint(section_title("Licenses: "))

if len(pkg.licenses) == 0:
color.cprint(" None")
else:
pad = padder(pkg.licenses, 4)
for when_spec in pkg.licenses:
license_identifier = pkg.licenses[when_spec]
line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec)
color.cprint(line)


def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
@@ -397,7 +377,6 @@ def info(parser, args):
(args.all or not args.no_dependencies, print_dependencies),
(args.all or args.virtuals, print_virtuals),
(args.all or args.tests, print_tests),
(args.all or True, print_licenses),
]
for print_it, func in sections:
if print_it:

@@ -240,7 +240,8 @@ def default_log_file(spec):
"""Computes the default filename for the log file and creates
the corresponding directory if not present
"""
basename = spec.format_path("test-{name}-{version}-{hash}.xml")
fmt = "test-{x.name}-{x.version}-{hash}.xml"
basename = fmt.format(x=spec, hash=spec.dag_hash())
dirname = fs.os.path.join(spack.paths.reports_path, "junit")
fs.mkdirp(dirname)
return fs.os.path.join(dirname, basename)

@@ -5,8 +5,6 @@

import sys

import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.find
@@ -110,14 +108,16 @@ def load(parser, args):
)
return 1

if args.things_to_load != "package,dependencies":
tty.warn(
"The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
)

with spack.store.STORE.db.read_transaction():
env_mod = uenv.environment_modifications_for_specs(*specs)
if "dependencies" in args.things_to_load:
include_roots = "package" in args.things_to_load
specs = [
dep for spec in specs for dep in spec.traverse(root=include_roots, order="post")
]

env_mod = spack.util.environment.EnvironmentModifications()
for spec in specs:
env_mod.extend(uenv.environment_modifications_for_spec(spec))
env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
cmds = env_mod.shell_modifications(args.shell)


@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
from spack.context import Context

description = (
"run a command in a spec's test environment, or dump its environment to screen or file"
@@ -15,4 +14,4 @@


def test_env(parser, args):
env_utility.emulate_env_utility("test-env", Context.TEST, args)
env_utility.emulate_env_utility("test-env", "test", args)

@@ -88,8 +88,9 @@ def unload(parser, args):
)
return 1

env_mod = uenv.environment_modifications_for_specs(*specs).reversed()
env_mod = spack.util.environment.EnvironmentModifications()
for spec in specs:
env_mod.extend(uenv.environment_modifications_for_spec(spec).reversed())
env_mod.remove_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
cmds = env_mod.shell_modifications(args.shell)


@@ -155,7 +155,7 @@ def _valid_virtuals_and_externals(self, spec):
),
)

def choose_virtual_or_external(self, spec: spack.spec.Spec):
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
"""

@@ -272,6 +272,13 @@ def _os_pkg_manager(self):
raise spack.error.SpackError(msg)
return os_pkg_manager

@tengine.context_property
def extra_instructions(self):
Extras = namedtuple("Extra", ["build", "final"])
extras = self.container_config.get("extra_instructions", {})
build, final = extras.get("build", None), extras.get("final", None)
return Extras(build=build, final=final)

@tengine.context_property
def labels(self):
return self.container_config.get("labels", {})

@@ -1,29 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module provides classes used in user and build environment"""

from enum import Enum


class Context(Enum):
"""Enum used to indicate the context in which an environment has to be setup: build,
run or test."""

BUILD = 1
RUN = 2
TEST = 3

def __str__(self):
return ("build", "run", "test")[self.value - 1]

@classmethod
def from_string(cls, s: str):
if s == "build":
return Context.BUILD
elif s == "run":
return Context.RUN
elif s == "test":
return Context.TEST
raise ValueError(f"context should be one of 'build', 'run', 'test', got {s}")
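For reference, the deleted enum's observable behavior, derived entirely from the class body above (this round-tripping with string names is what the string-based call sites in the command files replace):

str(Context.BUILD) == "build"
Context.from_string("test") is Context.TEST
Context.from_string("deploy")  # raises ValueError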
@@ -64,7 +64,6 @@ class OpenMpi(Package):
"depends_on",
"extends",
"maintainers",
"license",
"provides",
"patch",
"variant",
@@ -863,44 +862,6 @@ def _execute_maintainer(pkg):
return _execute_maintainer


def _execute_license(pkg, license_identifier: str, when):
# If when is not specified the license always holds
when_spec = make_when_spec(when)
if not when_spec:
return

for other_when_spec in pkg.licenses:
if when_spec.intersects(other_when_spec):
when_message = ""
if when_spec != make_when_spec(None):
when_message = f"when {when_spec}"
other_when_message = ""
if other_when_spec != make_when_spec(None):
other_when_message = f"when {other_when_spec}"
err_msg = (
f"{pkg.name} is specified as being licensed as {license_identifier} "
f"{when_message}, but it is also specified as being licensed under "
f"{pkg.licenses[other_when_spec]} {other_when_message}, which conflict."
)
raise OverlappingLicenseError(err_msg)

pkg.licenses[when_spec] = license_identifier


@directive("licenses")
def license(license_identifier: str, when=None):
"""Add a new license directive, to specify the SPDX identifier the software is
distributed under.

Args:
license_identifier: SPDX identifier of the license the software is distributed under.
when: A spec specifying when the license applies.
"""

return lambda pkg: _execute_license(pkg, license_identifier, when)

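Before this removal, the directive was written in a package.py roughly as follows (hypothetical package and identifiers; the signature is the one shown above). The when-specs must be disjoint, since _execute_license raises OverlappingLicenseError when two declarations intersect:

class Example(Package):  # hypothetical package
    license("MIT", when="@:1.0")         # versions up to and including 1.0
    license("Apache-2.0", when="@1.1:")  # later versions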
@directive("requirements")
def requires(*requirement_specs, policy="one_of", when=None, msg=None):
"""Allows a package to request a configuration to be present in all valid solutions.
@@ -959,7 +920,3 @@ class DependencyPatchError(DirectiveError):

class UnsupportedPackageDirective(DirectiveError):
"""Raised when an invalid or unsupported package directive is specified."""


class OverlappingLicenseError(DirectiveError):
"""Raised when two licenses are declared that apply on overlapping specs."""

@@ -104,7 +104,7 @@ def relative_path_for_spec(self, spec):
_check_concrete(spec)

projection = spack.projections.get_projection(self.projections, spec)
path = spec.format_path(projection)
path = spec.format(projection)
return str(Path(path))

def write_spec(self, spec, path):

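This and several later hunks swap spec.format_path back to spec.format. My understanding (an assumption about the newer API, not stated in this diff) is that format_path renders the same tokens as format but sanitizes the result for use as a filesystem path; for simple projections the two produce the same string:

# Illustrative only: `spec` is a concrete Spec, the projection string is hypothetical.
path = spec.format("{name}-{version}")  # e.g. "zlib-1.2.13"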
@@ -365,7 +365,6 @@
read,
root,
spack_env_var,
spack_env_view_var,
update_yaml,
)

@@ -398,6 +397,5 @@
"read",
"root",
"spack_env_var",
"spack_env_view_var",
"update_yaml",
]

@@ -64,8 +64,6 @@
#: environment variable used to indicate the active environment
spack_env_var = "SPACK_ENV"

#: environment variable used to indicate the active environment view
spack_env_view_var = "SPACK_ENV_VIEW"

#: currently activated environment
_active_environment: Optional["Environment"] = None
@@ -1480,12 +1478,11 @@ def _concretize_separately(self, tests=False):
self._add_concrete_spec(s, concrete, new=False)

# Concretize any new user specs that we haven't concretized yet
args, root_specs, i = [], [], 0
arguments, root_specs = [], []
for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
if uspec not in old_concretized_user_specs:
root_specs.append(uspec)
args.append((i, uspec_constraints, tests))
i += 1
arguments.append((uspec_constraints, tests))

# Ensure we don't try to bootstrap clingo in parallel
if spack.config.get("config:concretizer", "clingo") == "clingo":
@@ -1504,39 +1501,34 @@ def _concretize_separately(self, tests=False):
_ = spack.compilers.get_compiler_config()

# Early return if there is nothing to do
if len(args) == 0:
if len(arguments) == 0:
return []

# Solve the environment in parallel on Linux
start = time.time()
num_procs = min(len(args), spack.util.cpus.determine_number_of_jobs(parallel=True))
max_processes = min(
len(arguments),  # Number of specs
spack.util.cpus.determine_number_of_jobs(parallel=True),
)

# TODO: support parallel concretization on macOS and Windows
# TODO: revisit this print as soon as darwin is parallel too
msg = "Starting concretization"
if sys.platform not in ("darwin", "win32") and num_procs > 1:
msg += f" pool with {num_procs} processes"
if sys.platform != "darwin":
pool_size = spack.util.parallel.num_processes(max_processes=max_processes)
if pool_size > 1:
msg = msg + " pool with {0} processes".format(pool_size)
tty.msg(msg)

batch = []
for j, (i, concrete, duration) in enumerate(
spack.util.parallel.imap_unordered(
_concretize_task, args, processes=num_procs, debug=tty.is_debug()
)
):
batch.append((i, concrete))
percentage = (j + 1) / len(args) * 100
tty.verbose(f"{duration:6.1f}s [{percentage:3.0f}%] {root_specs[i]}")
sys.stdout.flush()

# Add specs in original order
batch.sort(key=lambda x: x[0])
by_hash = {}  # for attaching information on test dependencies
for root, (_, concrete) in zip(root_specs, batch):
self._add_concrete_spec(root, concrete)
by_hash[concrete.dag_hash()] = concrete
concretized_root_specs = spack.util.parallel.parallel_map(
_concretize_task, arguments, max_processes=max_processes, debug=tty.is_debug()
)

finish = time.time()
tty.msg(f"Environment concretized in {finish - start:.2f} seconds")
tty.msg("Environment concretized in %.2f seconds." % (finish - start))
by_hash = {}
for abstract, concrete in zip(root_specs, concretized_root_specs):
self._add_concrete_spec(abstract, concrete)
by_hash[concrete.dag_hash()] = concrete

# Unify the specs objects, so we get correct references to all parents
self._read_lockfile_dict(self._to_lockfile_dict())
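The version being removed tags each task with its submission index, consumes results as they finish via imap_unordered, then sorts to restore the original order. A minimal stdlib sketch of that pattern (illustrative; Spack's own helper is spack.util.parallel.imap_unordered, shown above):

import multiprocessing

def _double(packed):
    index, payload = packed       # carry the submission index through the pool
    return index, payload * 2     # stand-in for the real concretization work

if __name__ == "__main__":
    tasks = list(enumerate([3, 1, 2]))
    with multiprocessing.Pool(processes=2) as pool:
        results = list(pool.imap_unordered(_double, tasks))  # arrives in any order
    results.sort(key=lambda item: item[0])  # restore submission order by index
    ordered = [payload for _, payload in results]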
@@ -1603,14 +1595,16 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):

@property
def default_view(self):
if not self.has_view(default_view_name):
raise SpackEnvironmentError(f"{self.name} does not have a default view enabled")
if not self.views:
raise SpackEnvironmentError("{0} does not have a view enabled".format(self.name))

if default_view_name not in self.views:
raise SpackEnvironmentError(
"{0} does not have a default view enabled".format(self.name)
)

return self.views[default_view_name]

def has_view(self, view_name: str) -> bool:
return view_name in self.views

def update_default_view(self, path_or_bool: Union[str, bool]) -> None:
"""Updates the path of the default view.

@@ -1696,34 +1690,62 @@ def check_views(self):
"Loading the environment view will require reconcretization." % self.name
)

def _env_modifications_for_view(
self, view: ViewDescriptor, reverse: bool = False
) -> spack.util.environment.EnvironmentModifications:
try:
mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view)
except Exception as e:
# Failing to setup spec-specific changes shouldn't be a hard error.
tty.warn(
"couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e)
)
return spack.util.environment.EnvironmentModifications()
return mods.reversed() if reverse else mods
def _env_modifications_for_default_view(self, reverse=False):
all_mods = spack.util.environment.EnvironmentModifications()

def add_view_to_env(
self, env_mod: spack.util.environment.EnvironmentModifications, view: str
) -> spack.util.environment.EnvironmentModifications:
"""Collect the environment modifications to activate an environment using the provided
view. Removes duplicate paths.
visited = set()

errors = []
for root_spec in self.concrete_roots():
if root_spec in self.default_view and root_spec.installed and root_spec.package:
for spec in root_spec.traverse(deptype="run", root=True):
if spec.name in visited:
# It is expected that only one instance of the package
# can be added to the environment - do not attempt to
# add multiple.
tty.debug(
"Not adding {0} to shell modifications: "
"this package has already been added".format(
spec.format("{name}/{hash:7}")
)
)
continue
else:
visited.add(spec.name)

try:
mods = uenv.environment_modifications_for_spec(spec, self.default_view)
except Exception as e:
msg = "couldn't get environment settings for %s" % spec.format(
"{name}@{version} /{hash:7}"
)
errors.append((msg, str(e)))
continue

all_mods.extend(mods.reversed() if reverse else mods)

return all_mods, errors

def add_default_view_to_env(self, env_mod):
"""
Collect the environment modifications to activate an environment using the
default view. Removes duplicate paths.

Args:
env_mod: the environment modifications object that is modified.
view: the name of the view to activate."""
descriptor = self.views.get(view)
if not descriptor:
env_mod (spack.util.environment.EnvironmentModifications): the environment
modifications object that is modified.
"""
if default_view_name not in self.views:
# No default view to add to shell
return env_mod

env_mod.extend(uenv.unconditional_environment_modifications(descriptor))
env_mod.extend(self._env_modifications_for_view(descriptor))
env_mod.extend(uenv.unconditional_environment_modifications(self.default_view))

mods, errors = self._env_modifications_for_default_view()
env_mod.extend(mods)
if errors:
for err in errors:
tty.warn(*err)

# deduplicate paths from specs mapped to the same location
for env_var in env_mod.group_by_name():
@@ -1731,21 +1753,23 @@ def add_view_to_env(

return env_mod

def rm_view_from_env(
self, env_mod: spack.util.environment.EnvironmentModifications, view: str
) -> spack.util.environment.EnvironmentModifications:
"""Collect the environment modifications to deactivate an environment using the provided
view. Reverses the action of ``add_view_to_env``.
def rm_default_view_from_env(self, env_mod):
"""
Collect the environment modifications to deactivate an environment using the
default view. Reverses the action of ``add_default_view_to_env``.

Args:
env_mod: the environment modifications object that is modified.
view: the name of the view to deactivate."""
descriptor = self.views.get(view)
if not descriptor:
env_mod (spack.util.environment.EnvironmentModifications): the environment
modifications object that is modified.
"""
if default_view_name not in self.views:
# No default view to add to shell
return env_mod

env_mod.extend(uenv.unconditional_environment_modifications(descriptor).reversed())
env_mod.extend(self._env_modifications_for_view(descriptor, reverse=True))
env_mod.extend(uenv.unconditional_environment_modifications(self.default_view).reversed())

mods, _ = self._env_modifications_for_default_view(reverse=True)
env_mod.extend(mods)

return env_mod
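Grounded in the reverted method pair above: whatever add_default_view_to_env appends, rm_default_view_from_env appends reversed, so activation and deactivation cancel out. A minimal sketch (`env` is a hypothetical spack.environment.Environment with a default view):

from spack.util.environment import EnvironmentModifications

on = env.add_default_view_to_env(EnvironmentModifications())
off = env.rm_default_view_from_env(EnvironmentModifications())
# Applying `on` then `off` to a shell should leave PATH-like variables unchanged.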
@@ -2398,12 +2422,10 @@ def _concretize_from_constraints(spec_constraints, tests=False):
invalid_constraints.extend(inv_variant_constraints)


def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
index, spec_constraints, tests = packed_arguments
def _concretize_task(packed_arguments):
spec_constraints, tests = packed_arguments
with tty.SuppressOutput(msg_enabled=False):
start = time.time()
spec = _concretize_from_constraints(spec_constraints, tests)
return index, spec, time.time() - start
return _concretize_from_constraints(spec_constraints, tests)


def make_repo_path(root):

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from typing import Optional

import llnl.util.tty as tty
from llnl.util.tty.color import colorize
@@ -14,14 +13,12 @@
from spack.util.environment import EnvironmentModifications


def activate_header(env, shell, prompt=None, view: Optional[str] = None):
def activate_header(env, shell, prompt=None):
# Construct the commands to run
cmds = ""
if shell == "csh":
# TODO: figure out how to make color work for csh
cmds += "setenv SPACK_ENV %s;\n" % env.path
if view:
cmds += "setenv SPACK_ENV_VIEW %s;\n" % view
cmds += 'alias despacktivate "spack env deactivate";\n'
if prompt:
cmds += "if (! $?SPACK_OLD_PROMPT ) "
@@ -32,8 +29,6 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
prompt = colorize("@G{%s} " % prompt, color=True)

cmds += "set -gx SPACK_ENV %s;\n" % env.path
if view:
cmds += "set -gx SPACK_ENV_VIEW %s;\n" % view
cmds += "function despacktivate;\n"
cmds += " spack env deactivate;\n"
cmds += "end;\n"
@@ -45,21 +40,15 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
elif shell == "bat":
# TODO: Color
cmds += 'set "SPACK_ENV=%s"\n' % env.path
if view:
cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "$Env:SPACK_ENV='%s'\n" % env.path
if view:
cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view
else:
if "color" in os.getenv("TERM", "") and prompt:
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)

cmds += "export SPACK_ENV=%s;\n" % env.path
if view:
cmds += "export SPACK_ENV_VIEW=%s;\n" % view
cmds += "alias despacktivate='spack env deactivate';\n"
if prompt:
cmds += "if [ -z ${SPACK_OLD_PS1+x} ]; then\n"
@@ -77,14 +66,12 @@ def deactivate_header(shell):
cmds = ""
if shell == "csh":
cmds += "unsetenv SPACK_ENV;\n"
cmds += "unsetenv SPACK_ENV_VIEW;\n"
cmds += "if ( $?SPACK_OLD_PROMPT ) "
cmds += ' eval \'set prompt="$SPACK_OLD_PROMPT" &&'
cmds += " unsetenv SPACK_OLD_PROMPT';\n"
cmds += "unalias despacktivate;\n"
elif shell == "fish":
cmds += "set -e SPACK_ENV;\n"
cmds += "set -e SPACK_ENV_VIEW;\n"
cmds += "functions -e despacktivate;\n"
#
# NOTE: Not changing fish_prompt (above) => no need to restore it here.
@@ -92,19 +79,14 @@ def deactivate_header(shell):
elif shell == "bat":
# TODO: Color
cmds += 'set "SPACK_ENV="\n'
cmds += 'set "SPACK_ENV_VIEW="\n'
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "Set-Item -Path Env:SPACK_ENV\n"
cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n"
else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
cmds += "fi;\n"
cmds += "if [ ! -z ${SPACK_ENV_VIEW+x} ]; then\n"
cmds += "unset SPACK_ENV_VIEW; export SPACK_ENV_VIEW;\n"
cmds += "fi;\n"
cmds += "alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n"
cmds += "if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n"
cmds += " if [ \"$SPACK_OLD_PS1\" = '$$$$' ]; then\n"
@@ -118,23 +100,24 @@ def deactivate_header(shell):
return cmds

def activate(
env: ev.Environment, use_env_repo=False, view: Optional[str] = "default"
) -> EnvironmentModifications:
"""Activate an environment and append environment modifications
def activate(env, use_env_repo=False, add_view=True):
"""
Activate an environment and append environment modifications

To activate an environment, we add its configuration scope to the
existing Spack configuration, and we set active to the current
environment.

Arguments:
env: the environment to activate
use_env_repo: use the packages exactly as they appear in the environment's repository
view: generate commands to add runtime environment variables for named view
env (spack.environment.Environment): the environment to activate
use_env_repo (bool): use the packages exactly as they appear in the
environment's repository
add_view (bool): generate commands to add view to path variables

Returns:
spack.util.environment.EnvironmentModifications: Environment variables
modifications to activate environment."""
modifications to activate environment.
"""
ev.activate(env, use_env_repo=use_env_repo)

env_mods = EnvironmentModifications()
@@ -146,9 +129,9 @@ def activate(
# become PATH variables.
#
try:
if view and env.has_view(view):
if add_view and ev.default_view_name in env.views:
with spack.store.STORE.db.read_transaction():
env.add_view_to_env(env_mods, view)
env.add_default_view_to_env(env_mods)
except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.error(e)
tty.die(
@@ -162,15 +145,17 @@ def activate(
return env_mods

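A sketch of driving the reverted pair programmatically (`env` is a hypothetical spack.environment.Environment; the module path matches the call sites in the env command hunks above):

import spack.environment.shell as env_shell

mods = env_shell.activate(env, add_view=True)
print(mods.shell_modifications("sh"))  # emits `export SPACK_ENV=...` and view paths
print(env_shell.deactivate().shell_modifications("sh"))  # the inverse script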
def deactivate() -> EnvironmentModifications:
"""Deactivate an environment and collect corresponding environment modifications.
def deactivate():
"""
Deactivate an environment and collect corresponding environment modifications.

Note: unloads the environment in its current state, not in the state it was
loaded in, meaning that specs that were removed from the spack environment
after activation are not unloaded.

Returns:
Environment variables modifications to deactivate the environment.
spack.util.environment.EnvironmentModifications: Environment variables
modifications to deactivate the environment.
"""
env_mods = EnvironmentModifications()
active = ev.active_environment()
@@ -178,12 +163,10 @@ def deactivate() -> EnvironmentModifications:
if active is None:
return env_mods

active_view = os.getenv(ev.spack_env_view_var)

if active_view and active.has_view(active_view):
if ev.default_view_name in active.views:
try:
with spack.store.STORE.db.read_transaction():
active.rm_view_from_env(env_mods, active_view)
active.rm_default_view_from_env(env_mods)
except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn(

@@ -500,7 +500,7 @@ def get_projection_for_spec(self, spec):

proj = spack.projections.get_projection(self.projections, locator_spec)
if proj:
return os.path.join(self._root, locator_spec.format_path(proj))
return os.path.join(self._root, locator_spec.format(proj))
return self._root

def get_all_specs(self):
@@ -776,7 +776,7 @@ def get_relative_projection_for_spec(self, spec):
spec = spec.package.extendee_spec

p = spack.projections.get_projection(self.projections, spec)
return spec.format_path(p) if p else ""
return spec.format(p) if p else ""

def get_projection_for_spec(self, spec):
"""
@@ -791,7 +791,7 @@ def get_projection_for_spec(self, spec):

proj = spack.projections.get_projection(self.projections, spec)
if proj:
return os.path.join(self._root, spec.format_path(proj))
return os.path.join(self._root, spec.format(proj))
return self._root

@@ -1039,7 +1039,7 @@ def test_pkg_id(cls, spec):
Returns:
str: the install test package identifier
"""
return spec.format_path("{name}-{version}-{hash:7}")
return spec.format("{name}-{version}-{hash:7}")

@classmethod
def test_log_name(cls, spec):

@@ -131,12 +131,12 @@ def set_term_title(self, text: str):
if not sys.stdout.isatty():
return

status = f"{text} {self.get_progress()}"
sys.stdout.write(f"\x1b]0;Spack: {status}\x07")
status = "{0} {1}".format(text, self.get_progress())
sys.stdout.write("\033]0;Spack: {0}\007".format(status))
sys.stdout.flush()

def get_progress(self) -> str:
return f"[{self.pkg_num}/{self.pkg_count}]"
return "[{0}/{1}]".format(self.pkg_num, self.pkg_count)


class TermStatusLine:
@@ -175,7 +175,7 @@ def clear(self):

# Move the cursor to the beginning of the first "Waiting for" message and clear
# everything after it.
sys.stdout.write(f"\x1b[{lines}F\x1b[J")
sys.stdout.write("\x1b[%sF\x1b[J" % lines)
sys.stdout.flush()

@@ -220,13 +220,14 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
# consists in module file generation and registration in the DB.
if pkg.spec.external:
_process_external_package(pkg, explicit)
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg)})")
_print_installed_pkg("{0} (external {1})".format(pkg.prefix, package_id(pkg)))
return True

if pkg.spec.installed_upstream:
tty.verbose(
f"{package_id(pkg)} is installed in an upstream Spack instance at "
f"{pkg.spec.prefix}"
"{0} is installed in an upstream Spack instance at {1}".format(
package_id(pkg), pkg.spec.prefix
)
)
_print_installed_pkg(pkg.prefix)

@@ -295,7 +296,7 @@ def _packages_needed_to_bootstrap_compiler(
package is the bootstrap compiler (``True``) or one of its dependencies
(``False``). The list will be empty if there are no compilers.
"""
tty.debug(f"Bootstrapping {compiler} compiler")
tty.debug("Bootstrapping {0} compiler".format(compiler))
compilers = spack.compilers.compilers_for_spec(compiler, arch_spec=architecture)
if compilers:
return []
@@ -304,9 +305,9 @@ def _packages_needed_to_bootstrap_compiler(

# Set the architecture for the compiler package in a way that allows the
# concretizer to back off if needed for the older bootstrapping compiler
dep.constrain(f"platform={str(architecture.platform)}")
dep.constrain(f"os={str(architecture.os)}")
dep.constrain(f"target={architecture.target.microarchitecture.family.name}:")
dep.constrain("platform=%s" % str(architecture.platform))
dep.constrain("os=%s" % str(architecture.os))
dep.constrain("target=%s:" % architecture.target.microarchitecture.family.name)
# concrete CompilerSpec has less info than concrete Spec
# concretize as Spec to add that information
dep.concretize()
@@ -339,15 +340,15 @@ def _hms(seconds: int) -> str:
if m:
parts.append("%dm" % m)
if s:
parts.append(f"{s:.2f}s")
parts.append("%.2fs" % s)
return " ".join(parts)

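Both variants of _hms format durations identically; only f-string vs %-formatting changes. Sample outputs (derived from the branches shown, assuming the hours/minutes branches above this hunk follow the same pattern):

_hms(125.0)  # -> "2m 5.00s"
_hms(0.25)   # -> "0.25s"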
def _log_prefix(pkg_name) -> str:
"""Prefix of the form "[pid]: [pkg name]: ..." when printing a status update during
the build."""
pid = f"{os.getpid()}: " if tty.show_pid() else ""
return f"{pid}{pkg_name}:"
pid = "{0}: ".format(os.getpid()) if tty.show_pid() else ""
return "{0}{1}:".format(pid, pkg_name)


def _print_installed_pkg(message: str) -> None:
@@ -374,9 +375,9 @@ def print_install_test_log(pkg: "spack.package_base.PackageBase") -> None:


def _print_timer(pre: str, pkg_id: str, timer: timer.BaseTimer) -> None:
phases = [f"{p.capitalize()}: {_hms(timer.duration(p))}." for p in timer.phases]
phases.append(f"Total: {_hms(timer.duration())}")
tty.msg(f"{pre} Successfully installed {pkg_id}", " ".join(phases))
phases = ["{}: {}.".format(p.capitalize(), _hms(timer.duration(p))) for p in timer.phases]
phases.append("Total: {}".format(_hms(timer.duration())))
tty.msg("{0} Successfully installed {1}".format(pre, pkg_id), " ".join(phases))


def _install_from_cache(
@@ -401,14 +402,14 @@ def _install_from_cache(
)
pkg_id = package_id(pkg)
if not installed_from_cache:
pre = f"No binary for {pkg_id} found"
pre = "No binary for {0} found".format(pkg_id)
if cache_only:
tty.die(f"{pre} when cache-only specified")
tty.die("{0} when cache-only specified".format(pre))

tty.msg(f"{pre}: installing from source")
tty.msg("{0}: installing from source".format(pre))
return False
t.stop()
tty.debug(f"Successfully extracted {pkg_id} from binary cache")
tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))

_write_timer_json(pkg, t, True)
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
@@ -429,19 +430,19 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
"""
assert pkg.spec.external, "Expected to post-install/register an external package."

pre = f"{pkg.spec.name}@{pkg.spec.version} :"
pre = "{s.name}@{s.version} :".format(s=pkg.spec)
spec = pkg.spec

if spec.external_modules:
tty.msg(f"{pre} has external module in {spec.external_modules}")
tty.debug(f"{pre} is actually installed in {spec.external_path}")
tty.msg("{0} has external module in {1}".format(pre, spec.external_modules))
tty.debug("{0} is actually installed in {1}".format(pre, spec.external_path))
else:
tty.debug(f"{pre} externally installed in {spec.external_path}")
tty.debug("{0} externally installed in {1}".format(pre, spec.external_path))

try:
# Check if the package was already registered in the DB.
# If this is the case, then only make explicit if required.
tty.debug(f"{pre} already registered in DB")
tty.debug("{0} already registered in DB".format(pre))
record = spack.store.STORE.db.get_record(spec)
if explicit and not record.explicit:
spack.store.STORE.db.update_explicit(spec, explicit)
@@ -450,11 +451,11 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
# If not, register it and generate the module file.
# For external packages we just need to run
# post-install hooks to generate module files.
tty.debug(f"{pre} generating module file")
tty.debug("{0} generating module file".format(pre))
spack.hooks.post_install(spec, explicit)

# Add to the DB
tty.debug(f"{pre} registering into DB")
tty.debug("{0} registering into DB".format(pre))
spack.store.STORE.db.add(spec, None, explicit=explicit)

@@ -489,7 +490,7 @@ def _process_binary_cache_tarball(
|
||||
if download_result is None:
|
||||
return False
|
||||
|
||||
tty.msg(f"Extracting {package_id(pkg)} from binary cache")
|
||||
tty.msg("Extracting {0} from binary cache".format(package_id(pkg)))
|
||||
|
||||
with timer.measure("install"), spack.util.path.filter_padding():
|
||||
binary_distribution.extract_tarball(
|
||||
@@ -521,7 +522,7 @@ def _try_install_from_binary_cache(
|
||||
if not spack.mirror.MirrorCollection(binary=True):
|
||||
return False
|
||||
|
||||
tty.debug(f"Searching for binary cache of {package_id(pkg)}")
|
||||
tty.debug("Searching for binary cache of {0}".format(package_id(pkg)))
|
||||
|
||||
with timer.measure("search"):
|
||||
matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
|
||||
@@ -589,9 +590,9 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
|
||||
source_repo = spack.repo.Repo(source_repo_root)
|
||||
source_pkg_dir = source_repo.dirname_for_package_name(node.name)
|
||||
except spack.repo.RepoError as err:
|
||||
tty.debug(f"Failed to create source repo for {node.name}: {str(err)}")
|
||||
tty.debug("Failed to create source repo for {0}: {1}".format(node.name, str(err)))
|
||||
source_pkg_dir = None
|
||||
tty.warn(f"Warning: Couldn't copy in provenance for {node.name}")
|
||||
tty.warn("Warning: Couldn't copy in provenance for {0}".format(node.name))
|
||||
|
||||
# Create a destination repository
|
||||
dest_repo_root = os.path.join(path, node.namespace)
|
||||
@@ -631,7 +632,7 @@ def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:
|
||||
|
||||
Return: Colorized installing message
|
||||
"""
|
||||
pre = f"{pid}: " if tty.show_pid() else ""
|
||||
pre = "{0}: ".format(pid) if tty.show_pid() else ""
|
||||
post = (
|
||||
" @*{%s}" % install_status.get_progress()
|
||||
if install_status and spack.config.get("config:install_status", True)
|
||||
@@ -697,7 +698,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
|
||||
# in the stage tree (not arbitrary files)
|
||||
abs_expr = os.path.realpath(glob_expr)
|
||||
if os.path.realpath(pkg.stage.path) not in abs_expr:
|
||||
errors.write(f"[OUTSIDE SOURCE PATH]: {glob_expr}\n")
|
||||
errors.write("[OUTSIDE SOURCE PATH]: {0}\n".format(glob_expr))
|
||||
continue
|
||||
# Now that we are sure that the path is within the correct
|
||||
# folder, make it relative and check for matches
|
||||
@@ -717,14 +718,14 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
|
||||
# Here try to be conservative, and avoid discarding
|
||||
# the whole install procedure because of copying a
|
||||
# single file failed
|
||||
errors.write(f"[FAILED TO ARCHIVE]: {f}")
|
||||
errors.write("[FAILED TO ARCHIVE]: {0}".format(f))
|
||||
|
||||
if errors.getvalue():
|
||||
error_file = os.path.join(target_dir, "errors.txt")
|
||||
fs.mkdirp(target_dir)
|
||||
with open(error_file, "w") as err:
|
||||
err.write(errors.getvalue())
|
||||
tty.warn(f"Errors occurred when archiving files.\n\tSee: {error_file}")
|
||||
tty.warn("Errors occurred when archiving files.\n\t" "See: {0}".format(error_file))
|
||||
|
||||
dump_packages(pkg.spec, packages_dir)
|
||||
|
||||
@@ -760,11 +761,11 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
|
||||
"""
|
||||
# Ensure dealing with a package that has a concrete spec
|
||||
if not isinstance(pkg, spack.package_base.PackageBase):
|
||||
raise ValueError(f"{str(pkg)} must be a package")
|
||||
raise ValueError("{0} must be a package".format(str(pkg)))
|
||||
|
||||
self.pkg = pkg
|
||||
if not self.pkg.spec.concrete:
|
||||
raise ValueError(f"{self.pkg.name} must have a concrete spec")
|
||||
raise ValueError("{0} must have a concrete spec".format(self.pkg.name))
|
||||
|
||||
# Cache the package phase options with the explicit package,
|
||||
# popping the options to ensure installation of associated
|
||||
@@ -796,14 +797,14 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Returns a formal representation of the build request."""
|
||||
rep = f"{self.__class__.__name__}("
|
||||
rep = "{0}(".format(self.__class__.__name__)
|
||||
for attr, value in self.__dict__.items():
|
||||
rep += f"{attr}={value.__repr__()}, "
|
||||
return f"{rep.strip(', ')})"
|
||||
rep += "{0}={1}, ".format(attr, value.__repr__())
|
||||
return "{0})".format(rep.strip(", "))
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Returns a printable version of the build request."""
|
||||
return f"package={self.pkg.name}, install_args={self.install_args}"
|
||||
return "package={0}, install_args={1}".format(self.pkg.name, self.install_args)
|
||||
|
||||
def _add_default_args(self) -> None:
|
||||
"""Ensure standard install options are set to at least the default."""
|
||||
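The two sides of these hunks differ only in string-formatting style; the f-string and str.format variants render identical text. A minimal standalone check (the `Demo` class below is hypothetical, only mirroring the `__repr__` pattern shown above):

```python
class Demo:
    """Hypothetical stand-in for BuildRequest, only to compare styles."""

    def __init__(self):
        self.pkg = "zlib"
        self.install_args = {"verbose": False}

    def repr_fstring(self):
        rep = f"{self.__class__.__name__}("
        for attr, value in self.__dict__.items():
            rep += f"{attr}={value.__repr__()}, "
        return f"{rep.strip(', ')})"

    def repr_format(self):
        rep = "{0}(".format(self.__class__.__name__)
        for attr, value in self.__dict__.items():
            rep += "{0}={1}, ".format(attr, value.__repr__())
        return "{0})".format(rep.strip(", "))


d = Demo()
assert d.repr_fstring() == d.repr_format()  # both styles yield the same text
```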
@@ -929,18 +930,18 @@ def __init__(

# Ensure dealing with a package that has a concrete spec
if not isinstance(pkg, spack.package_base.PackageBase):
raise ValueError(f"{str(pkg)} must be a package")
raise ValueError("{0} must be a package".format(str(pkg)))

self.pkg = pkg
if not self.pkg.spec.concrete:
raise ValueError(f"{self.pkg.name} must have a concrete spec")
raise ValueError("{0} must have a concrete spec".format(self.pkg.name))

# The "unique" identifier for the task's package
self.pkg_id = package_id(self.pkg)

# The explicit build request associated with the package
if not isinstance(request, BuildRequest):
raise ValueError(f"{str(pkg)} must have a build request")
raise ValueError("{0} must have a build request".format(str(pkg)))

self.request = request

@@ -948,9 +949,8 @@ def __init__(
# ensure priority queue invariants when tasks are "removed" from the
# queue.
if status == STATUS_REMOVED:
raise InstallError(
f"Cannot create a build task for {self.pkg_id} with status '{status}'", pkg=pkg
)
msg = "Cannot create a build task for {0} with status '{1}'"
raise InstallError(msg.format(self.pkg_id, status), pkg=pkg)

self.status = status

@@ -964,9 +964,9 @@ def __init__(
# to support tracking of parallel, multi-spec, environment installs.
self.dependents = set(get_dependent_ids(self.pkg.spec))

tty.debug(f"Pkg id {self.pkg_id} has the following dependents:")
tty.debug("Pkg id {0} has the following dependents:".format(self.pkg_id))
for dep_id in self.dependents:
tty.debug(f"- {dep_id}")
tty.debug("- {0}".format(dep_id))

# Set of dependencies
#
@@ -988,9 +988,9 @@ def __init__(
if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
# The compiler is in the queue, identify it as dependency
dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
dep.constrain(f"platform={str(arch_spec.platform)}")
dep.constrain(f"os={str(arch_spec.os)}")
dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
dep.constrain("platform=%s" % str(arch_spec.platform))
dep.constrain("os=%s" % str(arch_spec.os))
dep.constrain("target=%s:" % arch_spec.target.microarchitecture.family.name)
dep.concretize()
dep_id = package_id(dep.package)
self.dependencies.add(dep_id)
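Likewise, the %-interpolation on one side and the f-strings on the other build the same constraint text. A quick sketch with stand-in values (the platform/os/target values below are made up):

```python
platform, os_name, family = "linux", "ubuntu22.04", "x86_64"

# %-interpolation, as on one side of the hunk
old_style = ["platform=%s" % platform, "os=%s" % os_name, "target=%s:" % family]

# f-strings, as on the other side
new_style = [f"platform={platform}", f"os={os_name}", f"target={family}:"]

assert old_style == new_style
```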
@@ -1026,14 +1026,14 @@ def __ne__(self, other):

def __repr__(self) -> str:
"""Returns a formal representation of the build task."""
rep = f"{self.__class__.__name__}("
rep = "{0}(".format(self.__class__.__name__)
for attr, value in self.__dict__.items():
rep += f"{attr}={value.__repr__()}, "
return f"{rep.strip(', ')})"
rep += "{0}={1}, ".format(attr, value.__repr__())
return "{0})".format(rep.strip(", "))

def __str__(self) -> str:
"""Returns a printable version of the build task."""
dependencies = f"#dependencies={len(self.dependencies)}"
dependencies = "#dependencies={0}".format(len(self.dependencies))
return "priority={0}, status={1}, start={2}, {3}".format(
self.priority, self.status, self.start, dependencies
)
@@ -1056,7 +1056,7 @@ def add_dependent(self, pkg_id: str) -> None:
pkg_id: package identifier of the dependent package
"""
if pkg_id != self.pkg_id and pkg_id not in self.dependents:
tty.debug(f"Adding {pkg_id} as a dependent of {self.pkg_id}")
tty.debug("Adding {0} as a dependent of {1}".format(pkg_id, self.pkg_id))
self.dependents.add(pkg_id)

def flag_installed(self, installed: List[str]) -> None:
@@ -1070,8 +1070,9 @@ def flag_installed(self, installed: List[str]) -> None:
for pkg_id in now_installed:
self.uninstalled_deps.remove(pkg_id)
tty.debug(
f"{self.pkg_id}: Removed {pkg_id} from uninstalled deps list: "
f"{self.uninstalled_deps}",
"{0}: Removed {1} from uninstalled deps list: {2}".format(
self.pkg_id, pkg_id, self.uninstalled_deps
),
level=2,
)

@@ -1169,18 +1170,18 @@ def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]]

def __repr__(self) -> str:
"""Returns a formal representation of the package installer."""
rep = f"{self.__class__.__name__}("
rep = "{0}(".format(self.__class__.__name__)
for attr, value in self.__dict__.items():
rep += f"{attr}={value.__repr__()}, "
return f"{rep.strip(', ')})"
rep += "{0}={1}, ".format(attr, value.__repr__())
return "{0})".format(rep.strip(", "))

def __str__(self) -> str:
"""Returns a printable version of the package installer."""
requests = f"#requests={len(self.build_requests)}"
tasks = f"#tasks={len(self.build_tasks)}"
failed = f"failed ({len(self.failed)}) = {self.failed}"
installed = f"installed ({len(self.installed)}) = {self.installed}"
return f"{self.pid}: {requests}; {tasks}; {installed}; {failed}"
requests = "#requests={0}".format(len(self.build_requests))
tasks = "#tasks={0}".format(len(self.build_tasks))
failed = "failed ({0}) = {1}".format(len(self.failed), self.failed)
installed = "installed ({0}) = {1}".format(len(self.installed), self.installed)
return "{0}: {1}; {2}; {3}; {4}".format(self.pid, requests, tasks, installed, failed)

def _add_bootstrap_compilers(
self,
@@ -1225,7 +1226,9 @@ def _modify_existing_task(self, pkgid: str, attr, value) -> None:
for i, tup in enumerate(self.build_pq):
key, task = tup
if task.pkg_id == pkgid:
tty.debug(f"Modifying task for {pkgid} to treat it as a compiler", level=2)
tty.debug(
"Modifying task for {0} to treat it as a compiler".format(pkgid), level=2
)
setattr(task, attr, value)
self.build_pq[i] = (key, task)

@@ -1290,7 +1293,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
# Check for failure since a prefix lock is not required
if spack.store.STORE.failure_tracker.has_failed(dep):
action = "'spack install' the dependency"
msg = f"{dep_id} is marked as an install failure: {action}"
msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)

# Attempt to get a read lock to ensure another process does not
@@ -1298,7 +1301,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
# installed
ltype, lock = self._ensure_locked("read", dep_pkg)
if lock is None:
msg = f"{dep_id} is write locked by another process"
msg = "{0} is write locked by another process".format(dep_id)
raise InstallError(err.format(request.pkg_id, msg), pkg=request.pkg)

# Flag external and upstream packages as being installed
@@ -1317,7 +1320,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
or rec.installation_time > request.overwrite_time
)
):
tty.debug(f"Flagging {dep_id} as installed per the database")
tty.debug("Flagging {0} as installed per the database".format(dep_id))
self._flag_installed(dep_pkg)
else:
lock.release_read()
@@ -1353,9 +1356,9 @@ def _prepare_for_install(self, task: BuildTask) -> None:
# Ensure there is no other installed spec with the same prefix dir
if spack.store.STORE.db.is_occupied_install_prefix(task.pkg.spec.prefix):
raise InstallError(
f"Install prefix collision for {task.pkg_id}",
long_msg=f"Prefix directory {task.pkg.spec.prefix} already "
"used by another installed spec.",
"Install prefix collision for {0}".format(task.pkg_id),
long_msg="Prefix directory {0} already used by another "
"installed spec.".format(task.pkg.spec.prefix),
pkg=task.pkg,
)

@@ -1365,7 +1368,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
if not keep_prefix:
task.pkg.remove_prefix()
else:
tty.debug(f"{task.pkg_id} is partially installed")
tty.debug("{0} is partially installed".format(task.pkg_id))

# Destroy the stage for a locally installed, non-DIYStage, package
if restage and task.pkg.stage.managed_by_spack:
@@ -1410,8 +1413,9 @@ def _cleanup_failed(self, pkg_id: str) -> None:
lock = self.failed.get(pkg_id, None)
if lock is not None:
err = "{0} exception when removing failure tracking for {1}: {2}"
msg = "Removing failure mark on {0}"
try:
tty.verbose(f"Removing failure mark on {pkg_id}")
tty.verbose(msg.format(pkg_id))
lock.release_write()
except Exception as exc:
tty.warn(err.format(exc.__class__.__name__, pkg_id, str(exc)))
@@ -1438,19 +1442,19 @@ def _ensure_install_ready(self, pkg: "spack.package_base.PackageBase") -> None:
pkg: the package being locally installed
"""
pkg_id = package_id(pkg)
pre = f"{pkg_id} cannot be installed locally:"
pre = "{0} cannot be installed locally:".format(pkg_id)

# External packages cannot be installed locally.
if pkg.spec.external:
raise ExternalPackageError(f"{pre} is external")
raise ExternalPackageError("{0} {1}".format(pre, "is external"))

# Upstream packages cannot be installed locally.
if pkg.spec.installed_upstream:
raise UpstreamPackageError(f"{pre} is upstream")
raise UpstreamPackageError("{0} {1}".format(pre, "is upstream"))

# The package must have a prefix lock at this stage.
if pkg_id not in self.locks:
raise InstallLockError(f"{pre} not locked")
raise InstallLockError("{0} {1}".format(pre, "not locked"))

def _ensure_locked(
self, lock_type: str, pkg: "spack.package_base.PackageBase"
@@ -1477,14 +1481,14 @@ def _ensure_locked(
assert lock_type in [
"read",
"write",
], f'"{lock_type}" is not a supported package management lock type'
], '"{0}" is not a supported package management lock type'.format(lock_type)

pkg_id = package_id(pkg)
ltype, lock = self.locks.get(pkg_id, (lock_type, None))
if lock and ltype == lock_type:
return ltype, lock

desc = f"{lock_type} lock"
desc = "{0} lock".format(lock_type)
msg = "{0} a {1} on {2} with timeout {3}"
err = "Failed to {0} a {1} for {2} due to {3}: {4}"

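The hunks around `_ensure_locked` trace a lock lifecycle: acquire a read or write prefix lock, upgrade read to write when the process decides it must install, and release on failure paths. A toy sketch of that state machine, using a hypothetical lock object with the same method names as the calls shown in these hunks:

```python
class ToyPrefixLock:
    """Hypothetical lock mirroring the acquire/upgrade/release calls above."""

    def __init__(self):
        self.state = None

    def acquire_read(self):
        self.state = "read"

    def acquire_write(self):
        self.state = "write"

    def upgrade_read_to_write(self, timeout=None):
        assert self.state == "read", "can only upgrade an acquired read lock"
        self.state = "write"

    def release_read(self):
        assert self.state == "read"
        self.state = None


lock = ToyPrefixLock()
lock.acquire_read()           # reader checking whether the spec is installed
lock.upgrade_read_to_write()  # same process decides it must install
assert lock.state == "write"
```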
@@ -1503,7 +1507,11 @@ def _ensure_locked(
op = "acquire"
lock = spack.store.STORE.prefix_locker.lock(pkg.spec, timeout)
if timeout != lock.default_timeout:
tty.warn(f"Expected prefix lock timeout {timeout}, not {lock.default_timeout}")
tty.warn(
"Expected prefix lock timeout {0}, not {1}".format(
timeout, lock.default_timeout
)
)
if lock_type == "read":
lock.acquire_read()
else:
@@ -1528,7 +1536,7 @@ def _ensure_locked(
tty.debug(msg.format("Upgrading to", desc, pkg_id, pretty_seconds(timeout or 0)))
op = "upgrade to"
lock.upgrade_read_to_write(timeout)
tty.debug(f"{pkg_id} is now {lock_type} locked")
tty.debug("{0} is now {1} locked".format(pkg_id, lock_type))

except (lk.LockDowngradeError, lk.LockTimeoutError) as exc:
tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__, str(exc)))
@@ -1553,14 +1561,14 @@ def _add_tasks(self, request: BuildRequest, all_deps):
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
"""
tty.debug(f"Initializing the build queue for {request.pkg.name}")
tty.debug("Initializing the build queue for {0}".format(request.pkg.name))

# Ensure not attempting to perform an installation when user didn't
# want to go that far for the requested package.
try:
_check_last_phase(request.pkg)
except BadInstallPhase as err:
tty.warn(f"Installation request refused: {str(err)}")
tty.warn("Installation request refused: {0}".format(str(err)))
return

# Skip out early if the spec is not being installed locally (i.e., if
@@ -1711,9 +1719,9 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
spack.hooks.on_install_failure(task.request.pkg.spec)
pid = f"{self.pid}: " if tty.show_pid() else ""
tty.debug(f"{pid}{str(e)}")
tty.debug(f"Package stage directory: {pkg.stage.source_path}")
pid = "{0}: ".format(self.pid) if tty.show_pid() else ""
tty.debug("{0}{1}".format(pid, str(e)))
tty.debug("Package stage directory: {0}".format(pkg.stage.source_path))

def _next_is_pri0(self) -> bool:
"""
@@ -1808,7 +1816,7 @@ def _remove_task(self, pkg_id: str) -> Optional[BuildTask]:
pkg_id: identifier for the package to be removed
"""
if pkg_id in self.build_tasks:
tty.debug(f"Removing build task for {pkg_id} from list")
tty.debug("Removing build task for {0} from list".format(pkg_id))
task = self.build_tasks.pop(pkg_id)
task.status = STATUS_REMOVED
return task
@@ -1824,8 +1832,10 @@ def _requeue_task(self, task: BuildTask, install_status: InstallStatus) -> None:
"""
if task.status not in [STATUS_INSTALLED, STATUS_INSTALLING]:
tty.debug(
f"{install_msg(task.pkg_id, self.pid, install_status)} "
"in progress by another process"
"{0} {1}".format(
install_msg(task.pkg_id, self.pid, install_status),
"in progress by another process",
)
)

new_task = task.next_attempt(self.installed)
@@ -1842,7 +1852,7 @@ def _setup_install_dir(self, pkg: "spack.package_base.PackageBase") -> None:
"""
if not os.path.exists(pkg.spec.prefix):
path = spack.util.path.debug_padded_filter(pkg.spec.prefix)
tty.debug(f"Creating the installation directory {path}")
tty.debug("Creating the installation directory {0}".format(path))
spack.store.STORE.layout.create_install_directory(pkg.spec)
else:
# Set the proper group for the prefix
@@ -1878,8 +1888,8 @@ def _update_failed(
exc: optional exception if associated with the failure
"""
pkg_id = task.pkg_id
err = "" if exc is None else f": {str(exc)}"
tty.debug(f"Flagging {pkg_id} as failed{err}")
err = "" if exc is None else ": {0}".format(str(exc))
tty.debug("Flagging {0} as failed{1}".format(pkg_id, err))
if mark:
self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec)
else:
@@ -1888,14 +1898,14 @@ def _update_failed(

for dep_id in task.dependents:
if dep_id in self.build_tasks:
tty.warn(f"Skipping build of {dep_id} since {pkg_id} failed")
tty.warn("Skipping build of {0} since {1} failed".format(dep_id, pkg_id))
# Ensure the dependent's uninstalled dependents are
# up-to-date and their build tasks removed.
dep_task = self.build_tasks[dep_id]
self._update_failed(dep_task, mark)
self._remove_task(dep_id)
else:
tty.debug(f"No build task for {dep_id} to skip since {pkg_id} failed")
tty.debug("No build task for {0} to skip since {1} failed".format(dep_id, pkg_id))

def _update_installed(self, task: BuildTask) -> None:
"""
@@ -1925,21 +1935,23 @@ def _flag_installed(
# Already determined the package has been installed
return

tty.debug(f"Flagging {pkg_id} as installed")
tty.debug("Flagging {0} as installed".format(pkg_id))

self.installed.add(pkg_id)

# Update affected dependents
dependent_ids = dependent_ids or get_dependent_ids(pkg.spec)
for dep_id in set(dependent_ids):
tty.debug(f"Removing {pkg_id} from {dep_id}'s uninstalled dependencies.")
tty.debug("Removing {0} from {1}'s uninstalled dependencies.".format(pkg_id, dep_id))
if dep_id in self.build_tasks:
# Ensure the dependent's uninstalled dependencies are
# up-to-date. This will require requeueing the task.
dep_task = self.build_tasks[dep_id]
self._push_task(dep_task.next_attempt(self.installed))
else:
tty.debug(f"{dep_id} has no build task to update for {pkg_id}'s success")
tty.debug(
"{0} has no build task to update for {1}'s success".format(dep_id, pkg_id)
)

def _init_queue(self) -> None:
"""Initialize the build queue from the list of build requests."""
@@ -2020,8 +2032,8 @@ def install(self) -> None:

pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
install_status.next_pkg(pkg)
install_status.set_term_title(f"Processing {pkg.name}")
tty.debug(f"Processing {pkg_id}: task={task}")
install_status.set_term_title("Processing {0}".format(pkg.name))
tty.debug("Processing {0}: task={1}".format(pkg_id, task))
# Ensure that the current spec has NO uninstalled dependencies,
# which is assumed to be reflected directly in its priority.
#
@@ -2033,19 +2045,24 @@ def install(self) -> None:
if task.priority != 0:
term_status.clear()
tty.error(
f"Detected uninstalled dependencies for {pkg_id}: " f"{task.uninstalled_deps}"
"Detected uninstalled dependencies for {0}: {1}".format(
pkg_id, task.uninstalled_deps
)
)
left = [dep_id for dep_id in task.uninstalled_deps if dep_id not in self.installed]
if not left:
tty.warn(f"{pkg_id} does NOT actually have any uninstalled deps left")
tty.warn(
"{0} does NOT actually have any uninstalled deps" " left".format(pkg_id)
)
dep_str = "dependencies" if task.priority > 1 else "dependency"

# Hook to indicate task failure, but without an exception
spack.hooks.on_install_failure(task.request.pkg.spec)

raise InstallError(
f"Cannot proceed with {pkg_id}: {task.priority} uninstalled "
f"{dep_str}: {','.join(task.uninstalled_deps)}",
"Cannot proceed with {0}: {1} uninstalled {2}: {3}".format(
pkg_id, task.priority, dep_str, ",".join(task.uninstalled_deps)
),
pkg=pkg,
)

@@ -2062,7 +2079,7 @@ def install(self) -> None:
# assume using a separate (failed) prefix lock file.
if pkg_id in self.failed or spack.store.STORE.failure_tracker.has_failed(spec):
term_status.clear()
tty.warn(f"{pkg_id} failed to install")
tty.warn("{0} failed to install".format(pkg_id))
self._update_failed(task)

# Mark that the package failed
@@ -2079,7 +2096,7 @@ def install(self) -> None:
# another process is likely (un)installing the spec or has
# determined the spec has already been installed (though the
# other process may be hung).
install_status.set_term_title(f"Acquiring lock for {pkg.name}")
install_status.set_term_title("Acquiring lock for {0}".format(pkg.name))
term_status.add(pkg_id)
ltype, lock = self._ensure_locked("write", pkg)
if lock is None:
@@ -2102,7 +2119,7 @@ def install(self) -> None:
task.request.overwrite_time = time.time()

# Determine state of installation artifacts and adjust accordingly.
install_status.set_term_title(f"Preparing {pkg.name}")
install_status.set_term_title("Preparing {0}".format(pkg.name))
self._prepare_for_install(task)

# Flag an already installed package
@@ -2148,7 +2165,7 @@ def install(self) -> None:

# Proceed with the installation since we have an exclusive write
# lock on the package.
install_status.set_term_title(f"Installing {pkg.name}")
install_status.set_term_title("Installing {0}".format(pkg.name))
try:
action = self._install_action(task)

@@ -2169,9 +2186,8 @@ def install(self) -> None:
except KeyboardInterrupt as exc:
# The build has been terminated with a Ctrl-C so terminate
# regardless of the number of remaining specs.
tty.error(
f"Failed to install {pkg.name} due to " f"{exc.__class__.__name__}: {str(exc)}"
)
err = "Failed to install {0} due to {1}: {2}"
tty.error(err.format(pkg.name, exc.__class__.__name__, str(exc)))
spack.hooks.on_install_cancel(task.request.pkg.spec)
raise

@@ -2180,10 +2196,9 @@ def install(self) -> None:
raise

# Checking hash on downloaded binary failed.
tty.error(
f"Failed to install {pkg.name} from binary cache due "
f"to {str(exc)}: Requeueing to install from source."
)
err = "Failed to install {0} from binary cache due to {1}:"
err += " Requeueing to install from source."
tty.error(err.format(pkg.name, str(exc)))
# this overrides a full method, which is ugly.
task.use_cache = False # type: ignore[misc]
self._requeue_task(task, install_status)
@@ -2201,12 +2216,13 @@ def install(self) -> None:
# lower levels -- skip printing if already printed.
# TODO: sort out this and SpackError.print_context()
tty.error(
f"Failed to install {pkg.name} due to "
f"{exc.__class__.__name__}: {str(exc)}"
"Failed to install {0} due to {1}: {2}".format(
pkg.name, exc.__class__.__name__, str(exc)
)
)
# Terminate if requested to do so on the first failure.
if self.fail_fast:
raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)
raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)), pkg=pkg)

# Terminate at this point if the single explicit spec has
# failed to install.
@@ -2245,17 +2261,17 @@ def install(self) -> None:

if failed_explicits or missing:
for _, pkg_id, err in failed_explicits:
tty.error(f"{pkg_id}: {err}")
tty.error("{0}: {1}".format(pkg_id, err))

for _, pkg_id in missing:
tty.error(f"{pkg_id}: Package was not installed")
tty.error("{0}: Package was not installed".format(pkg_id))

if len(failed_explicits) > 0:
pkg = failed_explicits[0][0]
ids = [pkg_id for _, pkg_id, _ in failed_explicits]
tty.debug(
"Associating installation failure with first failed "
f"explicit package ({ids[0]}) from {', '.join(ids)}"
"explicit package ({0}) from {1}".format(ids[0], ", ".join(ids))
)

elif len(missing) > 0:
@@ -2263,7 +2279,7 @@ def install(self) -> None:
ids = [pkg_id for _, pkg_id in missing]
tty.debug(
"Associating installation failure with first "
f"missing package ({ids[0]}) from {', '.join(ids)}"
"missing package ({0}) from {1}".format(ids[0], ", ".join(ids))
)

raise InstallError(
@@ -2341,7 +2357,7 @@ def run(self) -> bool:
self.timer.stop("stage")

tty.debug(
f"{self.pre} Building {self.pkg_id} [{self.pkg.build_system_class}]" # type: ignore[attr-defined] # noqa: E501
"{0} Building {1} [{2}]".format(self.pre, self.pkg_id, self.pkg.build_system_class) # type: ignore[attr-defined] # noqa: E501
)

# get verbosity from do_install() parameter or saved value
@@ -2386,7 +2402,7 @@ def _install_source(self) -> None:
return

src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
tty.debug(f"{self.pre} Copying source to {src_target}")
tty.debug("{0} Copying source to {1}".format(self.pre, src_target))

fs.install_tree(
pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
@@ -2448,7 +2464,8 @@ def _real_install(self) -> None:
with logger.force_echo():
inner_debug_level = tty.debug_level()
tty.set_debug(debug_level)
tty.msg(f"{self.pre} Executing phase: '{phase_fn.name}'")
msg = "{0} Executing phase: '{1}'"
tty.msg(msg.format(self.pre, phase_fn.name))
tty.set_debug(inner_debug_level)

# Catch any errors to report to logging
@@ -2522,9 +2539,12 @@ def install(self):
except fs.CouldNotRestoreDirectoryBackup as e:
self.database.remove(self.task.pkg.spec)
tty.error(
f"Recovery of install dir of {self.task.pkg.name} failed due to "
f"{e.outer_exception.__class__.__name__}: {str(e.outer_exception)}. "
"The spec is now uninstalled."
"Recovery of install dir of {0} failed due to "
"{1}: {2}. The spec is now uninstalled.".format(
self.task.pkg.name,
e.outer_exception.__class__.__name__,
str(e.outer_exception),
)
)

# Unwrap the actual installation exception.
@@ -2547,7 +2567,7 @@ class BadInstallPhase(InstallError):
"""Raised for an install phase option is not allowed for a package."""

def __init__(self, pkg_name, phase):
super().__init__(f"'{phase}' is not a valid phase for package {pkg_name}")
super().__init__("'{0}' is not a valid phase for package {1}".format(phase, pkg_name))


class ExternalPackageError(InstallError):

@@ -56,7 +56,6 @@
import spack.util.file_permissions as fp
import spack.util.path
import spack.util.spack_yaml as syaml
from spack.context import Context


#: config section for this file
@@ -587,7 +586,7 @@ def use_name(self):
if not projection:
projection = self.conf.default_projections["all"]

name = self.spec.format_path(projection)
name = self.spec.format(projection)
# Not everybody is working on linux...
parts = name.split("/")
name = os.path.join(*parts)
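Both sides of the `use_name` hunk keep the same portability trick: split the formatted projection on "/" and re-join with `os.path.join` so module names use the native separator. Roughly (the projection value is an example):

```python
import os

name = "zlib/1.2.13-gcc-12"    # example output of spec.format(projection)
parts = name.split("/")        # projections always use "/" in config files
native = os.path.join(*parts)  # "zlib\\1.2.13-gcc-12" on Windows, unchanged on POSIX
print(native)
```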
@@ -718,16 +717,10 @@ def environment_modifications(self):
)

# Let the extendee/dependency modify their extensions/dependencies

# The only thing we care about is `setup_dependent_run_environment`, but
# for that to work, globals have to be set on the package modules, and the
# whole chain of setup_dependent_package has to be followed from leaf to spec.
# So: just run it here, but don't collect env mods.
spack.build_environment.SetupContext(context=Context.RUN).set_all_package_py_globals()

# Then run setup_dependent_run_environment before setup_run_environment.
for dep in spec.dependencies(deptype=("link", "run")):
dep.package.setup_dependent_run_environment(env, spec)
# before asking for package-specific modifications
env.extend(spack.build_environment.modifications_from_dependencies(spec, context="run"))
# Package specific modifications
spack.build_environment.set_module_variables_for_package(spec.package)
spec.package.setup_run_environment(env)

# Modifications required from modules.yaml

@@ -9,7 +9,6 @@
import posixpath
from typing import Any, Dict, List

import llnl.util.filesystem as fs
import llnl.util.lang as lang

import spack.compilers
@@ -284,10 +283,8 @@ def token_to_path(self, name, value):
Returns:
str: part of the path associated with the service
"""

# General format for the path part
def path_part_fmt(token):
return fs.polite_path([f"{token.name}", f"{token.version}"])
path_part_fmt = os.path.join("{token.name}", "{token.version}")

# If we are dealing with a core compiler, return 'Core'
core_compilers = self.conf.core_compilers
@@ -299,13 +296,13 @@ def path_part_fmt(token):
# CompilerSpec does not have a hash, as we are not allowed to
# use different flavors of the same compiler
if name == "compiler":
return path_part_fmt(token=value)
return path_part_fmt.format(token=value)

# In case the hierarchy token refers to a virtual provider
# we need to append a hash to the version to distinguish
# among flavors of the same library (e.g. openblas~openmp vs.
# openblas+openmp)
path = path_part_fmt(token=value)
path = path_part_fmt.format(token=value)
path = "-".join([path, value.dag_hash(length=7)])
return path


|
||||
return None
|
||||
|
||||
def _make_resource_stage(self, root_stage, resource):
|
||||
pretty_resource_name = fsys.polite_filename(f"{resource.name}-{self.version}")
|
||||
return ResourceStage(
|
||||
resource.fetcher,
|
||||
root=root_stage,
|
||||
resource=resource,
|
||||
name=self._resource_stage(resource),
|
||||
mirror_paths=spack.mirror.mirror_archive_paths(
|
||||
resource.fetcher, os.path.join(self.name, pretty_resource_name)
|
||||
resource.fetcher, os.path.join(self.name, f"{resource.name}-{self.version}")
|
||||
),
|
||||
path=self.path,
|
||||
)
|
||||
@@ -1009,10 +1008,8 @@ def _download_search(self):
|
||||
|
||||
def _make_root_stage(self, fetcher):
|
||||
# Construct a mirror path (TODO: get this out of package.py)
|
||||
format_string = "{name}-{version}"
|
||||
pretty_name = self.spec.format_path(format_string)
|
||||
mirror_paths = spack.mirror.mirror_archive_paths(
|
||||
fetcher, os.path.join(self.name, pretty_name), self.spec
|
||||
fetcher, os.path.join(self.name, f"{self.name}-{self.version}"), self.spec
|
||||
)
|
||||
# Construct a path where the stage should build..
|
||||
s = self.spec
|
||||
@@ -1157,7 +1154,7 @@ def install_test_root(self):
|
||||
"""Return the install test root directory."""
|
||||
tty.warn(
|
||||
"The 'pkg.install_test_root' property is deprecated with removal "
|
||||
"expected v0.22. Use 'install_test_root(pkg)' instead."
|
||||
"expected v0.21. Use 'install_test_root(pkg)' instead."
|
||||
)
|
||||
return install_test_root(self)
|
||||
|
||||
@@ -1808,7 +1805,14 @@ def do_install(self, **kwargs):
|
||||
verbose (bool): Display verbose build output (by default,
|
||||
suppresses it)
|
||||
"""
|
||||
PackageInstaller([(self, kwargs)]).install()
|
||||
# Non-transitive dev specs need to keep the dev stage and be built from
|
||||
# source every time. Transitive ones just need to be built from source.
|
||||
dev_path_var = self.spec.variants.get("dev_path", None)
|
||||
if dev_path_var:
|
||||
kwargs["keep_stage"] = True
|
||||
|
||||
builder = PackageInstaller([(self, kwargs)])
|
||||
builder.install()
|
||||
|
||||
# TODO (post-34236): Update tests and all packages that use this as a
|
||||
# TODO (post-34236): package method to the routine made available to
|
||||
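The longer `do_install` body inspects the spec's variants before constructing the installer so dev specs keep their stage. A reduced sketch of that control flow (the `variants` dict here is a placeholder for the spec's variant mapping):

```python
def do_install_sketch(spec_variants: dict, **kwargs):
    """Mimic the dev_path check shown above; returns the final kwargs."""
    # Non-transitive dev specs need to keep the dev stage and be built
    # from source every time.
    if spec_variants.get("dev_path") is not None:
        kwargs["keep_stage"] = True
    return kwargs


assert do_install_sketch({"dev_path": "/home/me/src"})["keep_stage"] is True
assert "keep_stage" not in do_install_sketch({})
```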
@@ -1829,7 +1833,7 @@ def cache_extra_test_sources(self, srcs):
"""
msg = (
"'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
"expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
"expected in v0.21. Use 'cache_extra_test_sources(pkg, srcs)' "
"instead."
)
warnings.warn(msg)

@@ -76,9 +76,7 @@
IDENTIFIER = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9\-]*)"
DOTTED_IDENTIFIER = rf"(?:{IDENTIFIER}(?:\.{IDENTIFIER})+)"
GIT_HASH = r"(?:[A-Fa-f0-9]{40})"
#: Git refs include branch names, and can contain "." and "/"
GIT_REF = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9./\-]*)"
GIT_VERSION_PATTERN = rf"(?:(?:git\.(?:{GIT_REF}))|(?:{GIT_HASH}))"
GIT_VERSION = rf"(?:(?:git\.(?:{DOTTED_IDENTIFIER}|{IDENTIFIER}))|(?:{GIT_HASH}))"

NAME = r"[a-zA-Z_0-9][a-zA-Z_0-9\-.]*"

@@ -129,8 +127,7 @@ class TokenType(TokenBase):
# Dependency
DEPENDENCY = r"(?:\^)"
# Version
VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION_PATTERN})=(?:{VERSION}))"
GIT_VERSION = rf"@(?:{GIT_VERSION_PATTERN})"
VERSION_HASH_PAIR = rf"(?:@(?:{GIT_VERSION})=(?:{VERSION}))"
VERSION = rf"(?:@\s*(?:{VERSION_LIST}))"
# Variants
PROPAGATED_BOOL_VARIANT = rf"(?:(?:\+\+|~~|--)\s*{NAME})"
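These token patterns compose as plain strings, so they can be exercised directly. A quick check that `GIT_VERSION_PATTERN` accepts both a `git.<ref>` form and a bare 40-character hash:

```python
import re

IDENTIFIER = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9\-]*)"
GIT_HASH = r"(?:[A-Fa-f0-9]{40})"
GIT_REF = r"(?:[a-zA-Z_0-9][a-zA-Z_0-9./\-]*)"
GIT_VERSION_PATTERN = rf"(?:(?:git\.(?:{GIT_REF}))|(?:{GIT_HASH}))"

pat = re.compile(rf"^{GIT_VERSION_PATTERN}$")
assert pat.match("git.releases/v0.21")  # branch-like ref with "." and "/"
assert pat.match("0123456789abcdef0123456789abcdef01234567")  # 40-hex hash
assert not pat.match("not a ref with spaces")
```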
@@ -361,10 +358,8 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
compiler_name.strip(), compiler_version
)
self.has_compiler = True
elif (
self.ctx.accept(TokenType.VERSION_HASH_PAIR)
or self.ctx.accept(TokenType.GIT_VERSION)
or self.ctx.accept(TokenType.VERSION)
elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
TokenType.VERSION_HASH_PAIR
):
if self.has_version:
raise spack.spec.MultipleVersionError(

@@ -7,13 +7,12 @@
import inspect
import os
import os.path
import pathlib
import sys

import llnl.util.filesystem
@@ -37,12 +36,10 @@ def apply_patch(stage, patch_path, level=1, working_dir="."):
"""
git_utils_path = os.environ.get("PATH", "")
if sys.platform == "win32":
git = which_string("git")
if git:
git = pathlib.Path(git)
git_root = git.parent.parent
git_root = git_root / "usr" / "bin"
git_utils_path = os.pathsep.join([str(git_root), git_utils_path])
git = which_string("git", required=True)
git_root = git.split("\\")[:-2]
git_root.extend(["usr", "bin"])
git_utils_path = os.sep.join(git_root)

# TODO: Decouple Spack's patch support on Windows from Git
# for Windows, and instead have Spack directly fetch, install, and

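The `pathlib` side of this hunk derives Git's bundled `usr/bin` directory without splitting on backslashes, and it also behaves sensibly when `which_string` returns nothing. Standalone (the git path is an example; `PureWindowsPath` is used so the sketch runs on any platform):

```python
import os
import pathlib

git = "C:\\Program Files\\Git\\cmd\\git.exe"  # example result of which_string("git")
git_utils_path = os.environ.get("PATH", "")

git_path = pathlib.PureWindowsPath(git)
git_root = git_path.parent.parent / "usr" / "bin"  # two levels up, then usr/bin
git_utils_path = os.pathsep.join([str(git_root), git_utils_path])
print(git_root)  # C:\Program Files\Git\usr\bin
```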
@@ -141,7 +141,6 @@
}
)

# TODO: Remove in Spack 0.23
ci_properties = {
"anyOf": [
{
@@ -167,7 +166,6 @@
properties = {
"ci": {
"oneOf": [
# TODO: Replace with core-shared-properties in Spack 0.23
ci_properties,
# Allow legacy format under `ci` for `config update ci`
spack.schema.gitlab_ci.gitlab_ci_properties,

@@ -14,61 +14,63 @@
properties = {
"compilers": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": False,
"properties": {
"compiler": {
"type": "object",
"additionalProperties": False,
"required": ["paths", "spec", "modules", "operating_system"],
"properties": {
"paths": {
"type": "object",
"required": ["cc", "cxx", "f77", "fc"],
"additionalProperties": False,
"properties": {
"cc": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cxx": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"f77": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"fc": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"items": [
{
"type": "object",
"additionalProperties": False,
"properties": {
"compiler": {
"type": "object",
"additionalProperties": False,
"required": ["paths", "spec", "modules", "operating_system"],
"properties": {
"paths": {
"type": "object",
"required": ["cc", "cxx", "f77", "fc"],
"additionalProperties": False,
"properties": {
"cc": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cxx": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"f77": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"fc": {"anyOf": [{"type": "string"}, {"type": "null"}]},
},
},
"flags": {
"type": "object",
"additionalProperties": False,
"properties": {
"cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]},
},
},
"spec": {"type": "string"},
"operating_system": {"type": "string"},
"target": {"type": "string"},
"alias": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"modules": {
"anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
},
"implicit_rpaths": {
"anyOf": [
{"type": "array", "items": {"type": "string"}},
{"type": "boolean"},
]
},
"environment": spack.schema.environment.definition,
"extra_rpaths": {
"type": "array",
"default": [],
"items": {"type": "string"},
},
},
"flags": {
"type": "object",
"additionalProperties": False,
"properties": {
"cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]},
},
},
"spec": {"type": "string"},
"operating_system": {"type": "string"},
"target": {"type": "string"},
"alias": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"modules": {
"anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
},
"implicit_rpaths": {
"anyOf": [
{"type": "array", "items": {"type": "string"}},
{"type": "boolean"},
]
},
"environment": spack.schema.environment.definition,
"extra_rpaths": {
"type": "array",
"default": [],
"items": {"type": "string"},
},
},
}
},
},
}
},
}
],
}
}

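The substantive change in this schema is `"items": {...}` becoming `"items": [{...}]`. In draft-07 JSON Schema, a schema object under `items` applies to every array element, while a list of schemas validates positionally (tuple validation), leaving extra elements unconstrained unless `additionalItems` is set. A minimal demonstration with the third-party `jsonschema` package (assuming it is installed):

```python
import jsonschema

per_element = {"type": "array", "items": {"type": "string"}}
positional = {"type": "array", "items": [{"type": "string"}]}

jsonschema.validate(["a", "b"], per_element)  # every element is checked

jsonschema.validate(["a", 1], positional)     # only element 0 is checked
try:
    jsonschema.validate([1, "b"], per_element)
except jsonschema.ValidationError:
    print("per-element schema rejects the non-string entry")
```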
@@ -68,6 +68,12 @@
"labels": {"type": "object"},
# Use a custom template to render the recipe
"template": {"type": "string", "default": None},
# Add a custom extra section at the bottom of a stage
"extra_instructions": {
"type": "object",
"additionalProperties": False,
"properties": {"build": {"type": "string"}, "final": {"type": "string"}},
},
# Reserved for properties that are specific to each format
"singularity": {
"type": "object",
@@ -83,6 +89,15 @@
"docker": {"type": "object", "additionalProperties": False, "default": {}},
"depfile": {"type": "boolean", "default": False},
},
"deprecatedProperties": {
"properties": ["extra_instructions"],
"message": (
"container:extra_instructions has been deprecated and will be removed "
"in Spack v0.21. Set container:template appropriately to use custom Jinja2 "
"templates instead."
),
"error": False,
},
}

properties = {"container": container_schema}

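`deprecatedProperties` is not standard JSON Schema; it is an extra key that Spack's own validator appears to consume to warn (or error) on legacy settings. A rough sketch of how such a check can be layered on top of ordinary validation (the function below is illustrative, not Spack's actual implementation):

```python
import warnings


def check_deprecated(config: dict, schema: dict) -> None:
    """Warn or raise for keys listed under a schema's deprecatedProperties."""
    dep = schema.get("deprecatedProperties")
    if not dep:
        return
    hit = [k for k in dep["properties"] if k in config]
    if not hit:
        return
    if dep.get("error"):
        raise ValueError(dep["message"])
    warnings.warn(dep["message"])


schema = {
    "deprecatedProperties": {
        "properties": ["extra_instructions"],
        "message": "container:extra_instructions has been deprecated",
        "error": False,
    }
}
check_deprecated({"extra_instructions": {"build": "RUN true"}}, schema)  # warns
```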
@@ -13,7 +13,7 @@
import re
import types
import warnings
from typing import Dict, List, NamedTuple, Optional, Sequence, Tuple, Union
from typing import List, NamedTuple, Optional, Sequence, Tuple, Union

import archspec.cpu

@@ -971,70 +971,6 @@ def _model_has_cycles(self, models):
return cycle_result.unsatisfiable


class ConcreteSpecsByHash(collections.abc.Mapping):
"""Mapping containing concrete specs keyed by DAG hash.

The mapping is ensured to be consistent, i.e. if a spec in the mapping has a dependency with
hash X, it is ensured to be the same object in memory as the spec keyed by X.
"""

def __init__(self) -> None:
self.data: Dict[str, spack.spec.Spec] = {}

def __getitem__(self, dag_hash: str) -> spack.spec.Spec:
return self.data[dag_hash]

def add(self, spec: spack.spec.Spec) -> bool:
"""Adds a new concrete spec to the mapping. Returns True if the spec was just added,
False if the spec was already in the mapping.

Args:
spec: spec to be added

Raises:
ValueError: if the spec is not concrete
"""
if not spec.concrete:
msg = (
f"trying to store the non-concrete spec '{spec}' in a container "
f"that only accepts concrete"
)
raise ValueError(msg)

dag_hash = spec.dag_hash()
if dag_hash in self.data:
return False

# Here we need to iterate on the input and rewire the copy.
self.data[spec.dag_hash()] = spec.copy(deps=False)
nodes_to_reconstruct = [spec]

while nodes_to_reconstruct:
input_parent = nodes_to_reconstruct.pop()
container_parent = self.data[input_parent.dag_hash()]

for edge in input_parent.edges_to_dependencies():
input_child = edge.spec
container_child = self.data.get(input_child.dag_hash())
# Copy children that don't exist yet
if container_child is None:
container_child = input_child.copy(deps=False)
self.data[input_child.dag_hash()] = container_child
nodes_to_reconstruct.append(input_child)

# Rewire edges
container_parent.add_dependency_edge(
dependency_spec=container_child, depflag=edge.depflag, virtuals=edge.virtuals
)
return True

def __len__(self) -> int:
return len(self.data)

def __iter__(self):
return iter(self.data)


class SpackSolverSetup:
"""Class to set up and run a Spack concretization solve."""

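`ConcreteSpecsByHash.add` is a copy-nodes-then-rewire-edges deduplication: every node is copied without its edges, then edges are re-added between the copies, so any two specs sharing a dependency hash end up pointing at one shared object. The same idea on a toy DAG (the `Node` class is a stand-in for `Spec`):

```python
class Node:
    """Stand-in for Spec: a name, a toy "dag_hash", and dependency edges."""

    def __init__(self, name):
        self.name = name
        self.deps = []

    def key(self):
        return self.name  # toy dag_hash


def add_deduplicated(root, data):
    """Copy root's DAG into data, reusing nodes already present."""
    if root.key() in data:
        return False
    data[root.key()] = Node(root.name)  # copy without edges
    todo = [root]
    while todo:
        parent = todo.pop()
        copy_parent = data[parent.key()]
        for child in parent.deps:
            copy_child = data.get(child.key())
            if copy_child is None:  # copy children seen for the first time
                copy_child = Node(child.name)
                data[child.key()] = copy_child
                todo.append(child)
            copy_parent.deps.append(copy_child)  # rewire edge between copies
    return True


zlib = Node("zlib")
a, b = Node("a"), Node("b")
a.deps.append(zlib)
b.deps.append(Node("zlib"))  # a *different* object with the same hash

store = {}
add_deduplicated(a, store)
add_deduplicated(b, store)
assert store["a"].deps[0] is store["b"].deps[0]  # one shared zlib node
```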
@@ -1058,7 +994,9 @@ def __init__(self, tests=False):
# (ID, CompilerSpec) -> dictionary of attributes
self.compiler_info = collections.defaultdict(dict)

self.reusable_and_possible = ConcreteSpecsByHash()
# hashes we've already added facts for
self.seen_hashes = set()
self.reusable_and_possible = {}

# id for dummy variables
self._condition_id_counter = itertools.count()
@@ -1945,11 +1883,7 @@ class Body:
continue
# skip build dependencies of already-installed specs
if concrete_build_deps or dtype != dt.BUILD:
clauses.append(
fn.attr(
"depends_on", spec.name, dep.name, dt.flag_to_string(dtype)
)
)
clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))
for virtual_name in dspec.virtuals:
clauses.append(
fn.attr("virtual_on_edge", spec.name, dep.name, virtual_name)
@@ -2380,29 +2314,25 @@ def define_variant_values(self):
for pkg, variant, value in self.variant_values_from_specs:
self.gen.fact(fn.pkg_fact(pkg, fn.variant_possible_value(variant, value)))

def register_concrete_spec(self, spec, possible):
def _facts_from_concrete_spec(self, spec, possible):
# tell the solver about any installed packages that could
# be dependencies (don't tell it about the others)
if spec.name not in possible:
return
h = spec.dag_hash()
if spec.name in possible and h not in self.seen_hashes:
self.reusable_and_possible[h] = spec
try:
# Only consider installed packages for repo we know
spack.repo.PATH.get(spec)
except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError):
return

try:
# Only consider installed packages for repo we know
spack.repo.PATH.get(spec)
except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError) as e:
tty.debug(f"[REUSE] Issues when trying to reuse {spec.short_spec}: {str(e)}")
return

self.reusable_and_possible.add(spec)

def concrete_specs(self):
"""Emit facts for reusable specs"""
for h, spec in self.reusable_and_possible.items():
# this indicates that there is a spec like this installed
self.gen.fact(fn.installed_hash(spec.name, h))

# this describes what constraints it imposes on the solve
self.impose(h, spec, body=True)
self.gen.newline()

# Declare as possible parts of specs that are not in package.py
# - Add versions to possible versions
# - Add OS to possible OS's
@@ -2413,12 +2343,15 @@ def concrete_specs(self):
)
self.possible_oses.add(dep.os)

# add the hash to the one seen so far
self.seen_hashes.add(h)

def define_concrete_input_specs(self, specs, possible):
# any concrete specs in the input spec list
for input_spec in specs:
for spec in input_spec.traverse():
if spec.concrete:
self.register_concrete_spec(spec, possible)
self._facts_from_concrete_spec(spec, possible)

def setup(
self,
@@ -2485,13 +2418,14 @@ def setup(
# get possible compilers
self.possible_compilers = self.generate_possible_compilers(specs)

self.gen.h1("Reusable concrete specs")
self.gen.h1("Concrete input spec definitions")
self.define_concrete_input_specs(specs, self.pkgs)

if reuse:
self.gen.h1("Reusable specs")
self.gen.fact(fn.optimize_for_reuse())
for reusable_spec in reuse:
self.register_concrete_spec(reusable_spec, self.pkgs)
self.concrete_specs()
self._facts_from_concrete_spec(reusable_spec, self.pkgs)

self.gen.h1("Generic statements on possible packages")
node_counter.possible_packages_facts(self.gen, fn)
@@ -2682,6 +2616,7 @@ def __init__(self, specs, hash_lookup=None):
self._specs = {}
self._result = None
self._command_line_specs = specs
self._hash_specs = []
self._flag_sources = collections.defaultdict(lambda: set())
self._flag_compiler_defaults = set()

@@ -2692,6 +2627,7 @@ def __init__(self, specs, hash_lookup=None):
def hash(self, node, h):
if node not in self._specs:
self._specs[node] = self._hash_lookup[h]
self._hash_specs.append(node)

def node(self, node):
if node not in self._specs:
@@ -2929,10 +2865,12 @@ def build_specs(self, function_tuples):
# fix flags after all specs are constructed
self.reorder_flags()

# cycle detection
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]

# inject patches -- note that we can't use set() to unique the
# roots here, because the specs aren't complete, and the hash
# function will loop forever.
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
roots = dict((id(r), r) for r in roots)
for root in roots.values():
spack.spec.Spec.inject_patches_variant(root)

@@ -923,8 +923,7 @@ pkg_fact(Package, variant_single_value("dev_path"))
%-----------------------------------------------------------------------------

% if no platform is set, fall back to the default
error(100, "platform '{0}' is not allowed on the current host", Platform)
:- attr("node_platform", _, Platform), not allowed_platform(Platform).
:- attr("node_platform", _, Platform), not allowed_platform(Platform).

attr("node_platform", PackageNode, Platform)
:- attr("node", PackageNode),
@@ -1535,17 +1534,6 @@ opt_criterion(5, "non-preferred targets").
build_priority(PackageNode, Priority)
}.

% Choose more recent versions for nodes
opt_criterion(1, "edge wiring").
#minimize{ 0@201: #true }.
#minimize{ 0@1: #true }.
#minimize{
Weight@1,ParentNode,PackageNode
: version_weight(PackageNode, Weight),
not attr("root", PackageNode),
depends_on(ParentNode, PackageNode)
}.

%-----------
% Notes
%-----------

@@ -54,7 +54,6 @@
import io
import itertools
import os
import pathlib
import platform
import re
import socket
@@ -3673,7 +3672,7 @@ def _autospec(self, spec_like):
return spec_like
return Spec(spec_like)

def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
def intersects(self, other: "Spec", deps: bool = True) -> bool:
"""Return True if there exists at least one concrete spec that matches both
self and other, otherwise False.

@@ -3796,7 +3795,7 @@ def _intersects_dependencies(self, other):

return True

def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
def satisfies(self, other: "Spec", deps: bool = True) -> bool:
"""Return True if all concrete specs matching self also match other, otherwise False.

Args:
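The widened `Union[str, "Spec"]` annotations match the `_autospec` coercion shown above: a string argument is converted to a `Spec` before comparison. The pattern in isolation, with a trivial stand-in class:

```python
from typing import Union


class Spec:
    """Trivial stand-in for spack.spec.Spec."""

    def __init__(self, text: str):
        self.text = text


def _autospec(spec_like: Union[str, Spec]) -> Spec:
    # Accept either a Spec or a string; coerce strings to Spec.
    if isinstance(spec_like, Spec):
        return spec_like
    return Spec(spec_like)


assert _autospec("zlib@1.3").text == "zlib@1.3"
assert isinstance(_autospec(Spec("zlib")), Spec)
```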
@@ -4454,42 +4453,6 @@ def cformat(self, *args, **kwargs):
kwargs.setdefault("color", None)
return self.format(*args, **kwargs)

def format_path(
# self, format_string: str, _path_ctor: Optional[pathlib.PurePath] = None
self,
format_string: str,
_path_ctor: Optional[Callable[[Any], pathlib.PurePath]] = None,
) -> str:
"""Given a `format_string` that is intended as a path, generate a string
like from `Spec.format`, but eliminate extra path separators introduced by
formatting of Spec properties.

Path separators explicitly added to the string are preserved, so for example
"{name}/{version}" would generate a directory based on the Spec's name, and
a subdirectory based on its version; this function guarantees though that
the resulting string would only have two directories (i.e. that if under
normal circumstances that `str(Spec.version)` would contain a path
separator, it would not in this case).
"""
format_component_with_sep = r"\{[^}]*[/\\][^}]*}"
if re.search(format_component_with_sep, format_string):
raise SpecFormatPathError(
f"Invalid path format string: cannot contain {{/...}}\n\t{format_string}"
)

path_ctor = _path_ctor or pathlib.PurePath
format_string_as_path = path_ctor(format_string)
if format_string_as_path.is_absolute():
output_path_components = [format_string_as_path.parts[0]]
input_path_components = list(format_string_as_path.parts[1:])
else:
output_path_components = []
input_path_components = list(format_string_as_path.parts)
output_path_components += [
fs.polite_filename(self.format(x)) for x in input_path_components
]
return str(path_ctor(*output_path_components))

def __str__(self):
sorted_nodes = [self] + sorted(
self.traverse(root=False), key=lambda x: x.name or x.abstract_hash
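`format_path` splits the template into path components first and sanitizes each formatted component, so a property whose rendered value contains a separator cannot add extra directory levels. The core idea of that algorithm, with a plain `str.replace` standing in for `fs.polite_filename` and for `Spec.format`:

```python
import pathlib


def format_path_sketch(format_string: str, render) -> str:
    """Format each path component separately so rendered values
    cannot introduce new separators (relative paths only)."""
    parts = pathlib.PurePath(format_string).parts
    safe = [render(p).replace("/", "-").replace("\\", "-") for p in parts]
    return str(pathlib.PurePath(*safe))


# A fake renderer whose "{version}" expands to something containing "/":
render = lambda s: s.replace("{name}", "zlib").replace("{version}", "git/abc123")
print(format_path_sketch("{name}/{version}", render))  # zlib/git-abc123
```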
@@ -5416,10 +5379,6 @@ class SpecFormatStringError(spack.error.SpecError):
|
||||
"""Called for errors in Spec format strings."""
|
||||
|
||||
|
||||
class SpecFormatPathError(spack.error.SpecError):
|
||||
"""Called for errors in Spec path-format strings."""
|
||||
|
||||
|
||||
class SpecFormatSigilError(SpecFormatStringError):
|
||||
"""Called for mismatched sigils and attributes in format strings"""
|
||||
|
||||
|
||||
@@ -7,13 +7,12 @@
|
||||
import getpass
|
||||
import glob
|
||||
import hashlib
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Callable, Dict, Iterable, Optional, Set
|
||||
from typing import Callable, Dict, Iterable, Optional
|
||||
|
||||
import llnl.string
|
||||
import llnl.util.lang
|
||||
@@ -28,8 +27,6 @@
|
||||
partition_path,
|
||||
remove_linked_tree,
|
||||
)
|
||||
from llnl.util.tty.colify import colify
|
||||
from llnl.util.tty.color import colorize
|
||||
|
||||
import spack.caches
|
||||
import spack.config
|
||||
@@ -38,14 +35,11 @@
|
||||
import spack.mirror
|
||||
import spack.paths
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.util.lock
|
||||
import spack.util.path as sup
|
||||
import spack.util.pattern as pattern
|
||||
import spack.util.url as url_util
|
||||
from spack.util.crypto import bit_length, prefix_bits
|
||||
from spack.util.editor import editor, executable
|
||||
from spack.version import StandardVersion, VersionList
|
||||
|
||||
# The well-known stage source subdirectory name.
|
||||
_source_path_subdir = "spack-src"
|
||||
@@ -58,7 +52,7 @@ def compute_stage_name(spec):
|
||||
"""Determine stage name given a spec"""
|
||||
default_stage_structure = stage_prefix + "{name}-{version}-{hash}"
|
||||
stage_name_structure = spack.config.get("config:stage_name", default=default_stage_structure)
|
||||
return spec.format_path(format_string=stage_name_structure)
|
||||
return spec.format(format_string=stage_name_structure)
|
||||
|
||||
|
||||
def create_stage_root(path: str) -> None:
|
@@ -866,200 +860,11 @@ def purge():
        os.remove(stage_path)


def interactive_version_filter(
    url_dict: Dict[StandardVersion, str],
    known_versions: Iterable[StandardVersion] = (),
    *,
    url_changes: Set[StandardVersion] = set(),
    input: Callable[..., str] = input,
) -> Optional[Dict[StandardVersion, str]]:
    """Interactively filter the list of spidered versions.

    Args:
        url_dict: Dictionary of versions to URLs
        known_versions: Versions that can be skipped because they are already known

    Returns:
        Filtered dictionary of versions to URLs or None if the user wants to quit
    """
    # Find length of longest string in the list for padding
    sorted_and_filtered = sorted(url_dict.keys(), reverse=True)
    version_filter = VersionList([":"])
    max_len = max(len(str(v)) for v in sorted_and_filtered)
    orig_url_dict = url_dict  # only copy when using editor to modify
    print_header = True
    VERSION_COLOR = spack.spec.VERSION_COLOR
    while True:
        if print_header:
            has_filter = version_filter != VersionList([":"])
            header = []
            if not sorted_and_filtered:
                header.append("No versions selected")
            elif len(sorted_and_filtered) == len(orig_url_dict):
                header.append(
                    f"Selected {llnl.string.plural(len(sorted_and_filtered), 'version')}"
                )
            else:
                header.append(
                    f"Selected {len(sorted_and_filtered)} of {len(orig_url_dict)} versions"
                )
            if sorted_and_filtered and known_versions:
                num_new = sum(1 for v in sorted_and_filtered if v not in known_versions)
                header.append(f"{llnl.string.plural(num_new, 'new version')}")
            if has_filter:
                header.append(colorize(f"Filtered by {VERSION_COLOR}{version_filter}@."))

            version_with_url = [
                colorize(
                    f"{VERSION_COLOR}{str(v):{max_len}}@. {url_dict[v]}"
                    f"{' @K{# NOTE: change of URL}' if v in url_changes else ''}"
                )
                for v in sorted_and_filtered
            ]
            tty.msg(". ".join(header), *llnl.util.lang.elide_list(version_with_url))
            print()

        print_header = True

        tty.info(colorize("Enter @*{number} of versions to take, or use a @*{command}:"))
        commands = (
            "@*b{[c]}hecksum",
            "@*b{[e]}dit",
            "@*b{[f]}ilter",
            "@*b{[a]}sk each",
            "@*b{[n]}ew only",
            "@*b{[r]}estart",
            "@*b{[q]}uit",
        )
        colify(list(map(colorize, commands)), indent=4)

        try:
            command = input(colorize("@*g{action>} ")).strip().lower()
        except EOFError:
            print()
            command = "q"

        if command == "c":
            break
        elif command == "e":
            # Create a temporary file in the stage dir with lines of the form
            # <version> <url>
            # which the user can modify. Once the editor is closed, the file is
            # read back in and the versions to url dict is updated.

            # Create a temporary file by hashing its contents.
            buffer = io.StringIO()
            buffer.write("# Edit this file to change the versions and urls to fetch\n")
            for v in sorted_and_filtered:
                buffer.write(f"{str(v):{max_len}} {url_dict[v]}\n")
            data = buffer.getvalue().encode("utf-8")

            short_hash = hashlib.sha1(data).hexdigest()[:7]
            filename = f"{spack.stage.stage_prefix}versions-{short_hash}.txt"
            filepath = os.path.join(spack.stage.get_stage_root(), filename)

            # Write contents
            with open(filepath, "wb") as f:
                f.write(data)

            # Open editor
            editor(filepath, exec_fn=executable)

            # Read back in
            with open(filepath, "r") as f:
                orig_url_dict, url_dict = url_dict, {}
                for line in f:
                    line = line.strip()
                    # Skip empty lines and comments
                    if not line or line.startswith("#"):
                        continue
                    try:
                        version, url = line.split(None, 1)
                    except ValueError:
                        tty.warn(f"Couldn't parse: {line}")
                        continue
                    try:
                        url_dict[StandardVersion.from_string(version)] = url
                    except ValueError:
                        tty.warn(f"Invalid version: {version}")
                        continue
                sorted_and_filtered = sorted(url_dict.keys(), reverse=True)

            os.unlink(filepath)
        elif command == "f":
            tty.msg(
                colorize(
                    f"Example filters: {VERSION_COLOR}1.2@. "
                    f"or {VERSION_COLOR}1.1:1.3@. "
                    f"or {VERSION_COLOR}=1.2, 1.2.2:@."
                )
            )
            try:
                # Allow a leading @ version specifier
                filter_spec = input(colorize("@*g{filter>} ")).strip().lstrip("@")
            except EOFError:
                print()
                continue
            try:
                version_filter.intersect(VersionList([filter_spec]))
            except ValueError:
                tty.warn(f"Invalid version specifier: {filter_spec}")
                continue
            # Apply filter
            sorted_and_filtered = [v for v in sorted_and_filtered if v.satisfies(version_filter)]
        elif command == "a":
            i = 0
            while i < len(sorted_and_filtered):
                v = sorted_and_filtered[i]
                try:
                    answer = input(f" {str(v):{max_len}} {url_dict[v]} [Y/n]? ").strip().lower()
                except EOFError:
                    # If ^D, don't fully exit, but go back to the command prompt, now with possibly
                    # fewer versions
                    print()
                    break
                if answer in ("n", "no"):
                    del sorted_and_filtered[i]
                elif answer in ("y", "yes", ""):
                    i += 1
            else:
                # Went over each version, so go to checksumming
                break
        elif command == "n":
            sorted_and_filtered = [v for v in sorted_and_filtered if v not in known_versions]
        elif command == "r":
            url_dict = orig_url_dict
            sorted_and_filtered = sorted(url_dict.keys(), reverse=True)
            version_filter = VersionList([":"])
        elif command == "q":
            try:
                if input("Really quit [y/N]? ").strip().lower() in ("y", "yes"):
                    return None
            except EOFError:
                print()
                return None
        else:
            # Last resort: filter the top N versions
            try:
                n = int(command)
                invalid_command = n < 1
            except ValueError:
                invalid_command = True

            if invalid_command:
                tty.warn(f"Ignoring invalid command: {command}")
                print_header = False
                continue

            sorted_and_filtered = sorted_and_filtered[:n]

    return {v: url_dict[v] for v in sorted_and_filtered}
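Because the interactive loop reads through the injectable input callable, it can be driven non-interactively; the checksum tests later in this diff do exactly that. A short usage sketch (the URL and version are illustrative):

from spack.version import Version

answers = iter(["f", "@1:", "c"])  # filter to 1.x, then [c]hecksum
result = interactive_version_filter(
    {Version("1.1"): "https://example.com/pkg-1.1.tar.gz"},
    input=lambda prompt: next(answers),  # scripted replacement for input()
)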


def get_checksums_for_versions(
    url_by_version: Dict[str, str],
    package_name: str,
    *,
    batch: bool = False,
    first_stage_function: Optional[Callable[[Stage, str], None]] = None,
    keep_stage: bool = False,
    concurrency: Optional[int] = None,
@@ -1085,7 +890,32 @@ def get_checksums_for_versions(
    Returns:
        A dictionary mapping each version to the corresponding checksum
    """
    versions = sorted(url_by_version.keys(), reverse=True)
    sorted_versions = sorted(url_by_version.keys(), reverse=True)

    # Find length of longest string in the list for padding
    max_len = max(len(str(v)) for v in sorted_versions)
    num_ver = len(sorted_versions)

    tty.msg(
        f"Found {llnl.string.plural(num_ver, 'version')} of {package_name}:",
        "",
        *llnl.util.lang.elide_list(
            ["{0:{1}} {2}".format(str(v), max_len, url_by_version[v]) for v in sorted_versions]
        ),
    )
    print()

    if batch:
        archives_to_fetch = len(sorted_versions)
    else:
        archives_to_fetch = tty.get_number(
            "How many would you like to checksum?", default=1, abort="q"
        )

    if not archives_to_fetch:
        tty.die("Aborted.")

    versions = sorted_versions[:archives_to_fetch]
    search_arguments = [(url_by_version[v], v) for v in versions]

    version_hashes, errors = {}, []
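A hedged sketch of calling the function with the signature shown above (the package name and URL are made up); batch=True skips the interactive "how many" prompt:

checksums = get_checksums_for_versions(
    {"1.0": "https://example.com/pkg-1.0.tar.gz"}, "pkg", batch=True
)
# Expected shape per the docstring: {"1.0": "<checksum of the fetched archive>"}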

@@ -13,8 +13,8 @@
import spack.concretize
import spack.operating_systems
import spack.platforms
import spack.spec
import spack.target
from spack.spec import ArchSpec, CompilerSpec, Spec


@pytest.fixture(scope="module")
@@ -64,7 +64,7 @@ def test_user_input_combination(config, target_str, os_str):
    the operating system match.
    """
    spec_str = "libelf os={} target={}".format(os_str, target_str)
    spec = Spec(spec_str)
    spec = spack.spec.Spec(spec_str)
    assert spec.architecture.os == str(TEST_PLATFORM.operating_system(os_str))
    assert spec.architecture.target == TEST_PLATFORM.target(target_str)

@@ -114,7 +114,7 @@ def test_target_container_semantic(cpu_flag, target_name):
    ],
)
def test_arch_spec_container_semantic(item, architecture_str):
    architecture = ArchSpec(architecture_str)
    architecture = spack.spec.ArchSpec(architecture_str)
    assert item in architecture


@@ -141,24 +141,24 @@ def test_optimization_flags(compiler_spec, target_name, expected_flags, config):
@pytest.mark.parametrize(
    "compiler,real_version,target_str,expected_flags",
    [
        (CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"),
        (spack.spec.CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"),
        # Check that custom string versions are accepted
        (
            CompilerSpec("gcc@=10foo"),
            spack.spec.CompilerSpec("gcc@=10foo"),
            "9.2.0",
            "icelake",
            "-march=icelake-client -mtune=icelake-client",
        ),
        # Check that we run version detection (4.4.0 doesn't support icelake)
        (
            CompilerSpec("gcc@=4.4.0-special"),
            spack.spec.CompilerSpec("gcc@=4.4.0-special"),
            "9.2.0",
            "icelake",
            "-march=icelake-client -mtune=icelake-client",
        ),
        # Check that the special case for Apple's clang is treated correctly
        # i.e. it won't try to detect the version again
        (CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"),
        (spack.spec.CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"),
    ],
)
def test_optimization_flags_with_custom_versions(
@@ -180,8 +180,8 @@ def test_optimization_flags_with_custom_versions(
    ],
)
def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constraint_tuple):
    architecture = ArchSpec(architecture_tuple)
    constraint = ArchSpec(constraint_tuple)
    architecture = spack.spec.ArchSpec(architecture_tuple)
    constraint = spack.spec.ArchSpec(constraint_tuple)
    assert not architecture.satisfies(constraint)


@@ -204,10 +204,16 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
    # Monkeypatch so that all concretization is done as if the machine is core2
    monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")
    spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}")

    spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % (
        root_target_range,
        dep_target_range,
    )
    spec = spack.spec.Spec(spec_str)
    with spack.concretize.disable_compiler_existence_check():
        spec.concretize()
    assert spec.target == spec["b"].target == result

    assert str(spec).count("arch=test-debian6-%s" % result) == 2
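The replacement code above switches to %-style formatting, where a literal "%" (Spack's compiler sigil) must be written as "%%", hence "a %%gcc@10 ...". A one-line illustration:

assert "a %%gcc@10 target=%s" % "core2" == "a %gcc@10 target=core2"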


@pytest.mark.parametrize(

@@ -21,10 +21,6 @@
        (["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # This package has a GitHub patch URL without full_index=1
        (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # This package has invalid GitLab patch URLs
        (["invalid-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # This package has invalid GitLab patch URLs
        (["invalid-selfhosted-gitlab-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # This package has a stand-alone 'test*' method in build-time callbacks
        (["fail-test-audit"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
        # This package has no issues

@@ -17,8 +17,7 @@
import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml
from spack.build_environment import UseMode, _static_to_shared_library, dso_suffix
from spack.context import Context
from spack.build_environment import _static_to_shared_library, dso_suffix
from spack.paths import build_env_path
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import EnvironmentModifications
@@ -439,10 +438,10 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
    # b (parallel =True)
    s = default_mock_concretization("a foobar=bar")

    spack.build_environment.set_package_py_globals(s.package)
    spack.build_environment.set_module_variables_for_package(s.package)
    assert s["a"].package.module.make_jobs == 1

    spack.build_environment.set_package_py_globals(s["b"].package)
    spack.build_environment.set_module_variables_for_package(s["b"].package)
    assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
        parallel=s["b"].package.parallel
    )
@@ -576,69 +575,3 @@ def test_setting_attributes(self, default_mock_concretization):
        if current_module == spack.package_base:
            break
        assert current_module.SOME_ATTRIBUTE == 1


def test_effective_deptype_build_environment(default_mock_concretization):
    s = default_mock_concretization("dttop")

    # [ ] dttop@1.0 #
    # [b ] ^dtbuild1@1.0 # <- direct build dep
    # [b ] ^dtbuild2@1.0 # <- indirect build-only dep is dropped
    # [bl ] ^dtlink2@1.0 # <- linkable, and runtime dep of build dep
    # [ r ] ^dtrun2@1.0 # <- non-linkable, executable runtime dep of build dep
    # [bl ] ^dtlink1@1.0 # <- direct build dep
    # [bl ] ^dtlink3@1.0 # <- linkable, and runtime dep of build dep
    # [b ] ^dtbuild2@1.0 # <- indirect build-only dep is dropped
    # [bl ] ^dtlink4@1.0 # <- linkable, and runtime dep of build dep
    # [ r ] ^dtrun1@1.0 # <- run-only dep is pruned (should it be in PATH?)
    # [bl ] ^dtlink5@1.0 # <- children too
    # [ r ] ^dtrun3@1.0 # <- children too
    # [b ] ^dtbuild3@1.0 # <- children too

    expected_flags = {
        "dttop": UseMode.ROOT,
        "dtbuild1": UseMode.BUILDTIME_DIRECT,
        "dtlink1": UseMode.BUILDTIME_DIRECT | UseMode.BUILDTIME,
        "dtlink3": UseMode.BUILDTIME | UseMode.RUNTIME,
        "dtlink4": UseMode.BUILDTIME | UseMode.RUNTIME,
        "dtrun2": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE,
        "dtlink2": UseMode.RUNTIME,
    }

    for spec, effective_type in spack.build_environment.effective_deptypes(
        s, context=Context.BUILD
    ):
        assert effective_type & expected_flags.pop(spec.name) == effective_type
    assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes"


def test_effective_deptype_run_environment(default_mock_concretization):
    s = default_mock_concretization("dttop")

    # [ ] dttop@1.0 #
    # [b ] ^dtbuild1@1.0 # <- direct build-only dep is pruned
    # [b ] ^dtbuild2@1.0 # <- children too
    # [bl ] ^dtlink2@1.0 # <- children too
    # [ r ] ^dtrun2@1.0 # <- children too
    # [bl ] ^dtlink1@1.0 # <- runtime, not executable
    # [bl ] ^dtlink3@1.0 # <- runtime, not executable
    # [b ] ^dtbuild2@1.0 # <- indirect build only dep is pruned
    # [bl ] ^dtlink4@1.0 # <- runtime, not executable
    # [ r ] ^dtrun1@1.0 # <- runtime and executable
    # [bl ] ^dtlink5@1.0 # <- runtime, not executable
    # [ r ] ^dtrun3@1.0 # <- runtime and executable
    # [b ] ^dtbuild3@1.0 # <- indirect build-only dep is pruned

    expected_flags = {
        "dttop": UseMode.ROOT,
        "dtlink1": UseMode.RUNTIME,
        "dtlink3": UseMode.BUILDTIME | UseMode.RUNTIME,
        "dtlink4": UseMode.BUILDTIME | UseMode.RUNTIME,
        "dtrun1": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE,
        "dtlink5": UseMode.RUNTIME,
        "dtrun3": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE,
    }

    for spec, effective_type in spack.build_environment.effective_deptypes(s, context=Context.RUN):
        assert effective_type & expected_flags.pop(spec.name) == effective_type
    assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes"
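The assertions above use the flag-enum subset idiom: "effective & expected == effective" holds exactly when every bit of effective is contained in expected. A self-contained sketch of the pattern (the real UseMode lives in spack.build_environment; the members below are illustrative):

import enum

class UseMode(enum.Flag):
    ROOT = enum.auto()
    BUILDTIME_DIRECT = enum.auto()
    BUILDTIME = enum.auto()
    RUNTIME = enum.auto()
    RUNTIME_EXECUTABLE = enum.auto()

effective = UseMode.BUILDTIME | UseMode.RUNTIME
# Subset check: every flag in `effective` also appears in the expected mask.
assert effective & (UseMode.BUILDTIME | UseMode.RUNTIME) == effective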

@@ -451,7 +451,9 @@ def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkey
    monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c: True)

    results = ci.create_buildcache(
        None, destination_mirror_urls=["file:///fake-url-one", "file:///fake-url-two"]
        None,
        buildcache_mirror_url="file:///fake-url-one",
        pipeline_mirror_url="file:///fake-url-two",
    )

    assert len(results) == 2
@@ -461,7 +463,7 @@ def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkey
    assert result2.success
    assert result2.url == "file:///fake-url-two"

    results = ci.create_buildcache(None, destination_mirror_urls=["file:///fake-url-one"])
    results = ci.create_buildcache(None, buildcache_mirror_url="file:///fake-url-one")

    assert len(results) == 1
    assert results[0].success

@@ -169,7 +169,7 @@ def test_remove_and_add_a_source(mutable_config):
    assert not sources

    # Add it back and check we restored the initial state
    _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.5")
    _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.3")
    sources = spack.bootstrap.core.bootstrapping_sources()
    assert len(sources) == 1


@@ -7,12 +7,12 @@

import pytest

import llnl.util.tty as tty

import spack.cmd.checksum
import spack.repo
import spack.spec
from spack.main import SpackCommand
from spack.stage import interactive_version_filter
from spack.version import Version

spack_checksum = SpackCommand("checksum")

@@ -56,173 +56,18 @@ def test_checksum(arguments, expected, mock_packages, mock_clone_repo, mock_stag
    assert "version(" in output


def input_from_commands(*commands):
    """Create a function that returns the next command from a list of inputs for interactive spack
    checksum. If None is encountered, this is equivalent to EOF / ^D."""
    commands = iter(commands)
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
def test_checksum_interactive(mock_packages, mock_fetch, mock_stage, monkeypatch):
    # TODO: mock_fetch doesn't actually work with stage, working around with ignoring
    # fail_on_error for now
    def _get_number(*args, **kwargs):
        return 1

    def _input(prompt):
        cmd = next(commands)
        if cmd is None:
            raise EOFError
        assert isinstance(cmd, str)
        return cmd
    monkeypatch.setattr(tty, "get_number", _get_number)

    return _input
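The helper above turns a fixed command list into a stand-in for input(); a None entry raises EOFError to simulate ^D. A short usage sketch with a made-up url_dict, showing the EOF path through the quit confirmation prompt:

# "q" enters the quit branch; None raises EOFError there, which also quits.
input = input_from_commands("q", None)
assert interactive_version_filter(
    {Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz"}, input=input
) is None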


def test_checksum_interactive_filter():
    # Filter effectively by 1:1.0, then checksum.
    input = input_from_commands("f", "@1:", "f", "@:1.0", "c")
    assert interactive_version_filter(
        {
            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
            Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz",
            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
        },
        input=input,
    ) == {
        Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz",
        Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
    }


def test_checksum_interactive_return_from_filter_prompt():
    # Enter and then exit filter subcommand.
    input = input_from_commands("f", None, "c")
    assert interactive_version_filter(
        {
            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
            Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz",
            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
        },
        input=input,
    ) == {
        Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
        Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz",
        Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
        Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
    }


def test_checksum_interactive_quit_returns_none():
    # Quit after filtering something out (y to confirm quit)
    input = input_from_commands("f", "@1:", "q", "y")
    assert (
        interactive_version_filter(
            {
                Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
                Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
                Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
            },
            input=input,
        )
        is None
    )


def test_checksum_interactive_reset_resets():
    # Filter 1:, then reset, then filter :0, should just give 0.9 (it was filtered out
    # before reset)
    input = input_from_commands("f", "@1:", "r", "f", ":0", "c")
    assert interactive_version_filter(
        {
            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
        },
        input=input,
    ) == {Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz"}


def test_checksum_interactive_ask_each():
    # Ask each should run on the filtered list. First select 1.x, then select only the second
    # entry, which is 1.0.1.
    input = input_from_commands("f", "@1:", "a", "n", "y", "n")
    assert interactive_version_filter(
        {
            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
            Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz",
            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
        },
        input=input,
    ) == {Version("1.0.1"): "https://www.example.com/pkg-1.0.1.tar.gz"}


def test_checksum_interactive_quit_from_ask_each():
    # Enter ask each mode, select the second item, then quit from submenu, then checksum, which
    # should still include the last item at which ask each stopped.
    input = input_from_commands("a", "n", "y", None, "c")
    assert interactive_version_filter(
        {
            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
        },
        input=input,
    ) == {
        Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
        Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
    }


def test_checksum_interactive_nothing_left():
    """If nothing is left after interactive filtering, return an empty dict."""
    input = input_from_commands("f", "@2", "c")
    assert (
        interactive_version_filter(
            {
                Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
                Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
                Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
            },
            input=input,
        )
        == {}
    )


def test_checksum_interactive_new_only():
    # The 1.0 version is known already, and should be dropped on `n`.
    input = input_from_commands("n", "c")
    assert interactive_version_filter(
        {
            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
        },
        known_versions=[Version("1.0")],
        input=input,
    ) == {
        Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
        Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
    }


def test_checksum_interactive_top_n():
    """Test integers select top n versions"""
    input = input_from_commands("2", "c")
    assert interactive_version_filter(
        {
            Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
            Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
            Version("0.9"): "https://www.example.com/pkg-0.9.tar.gz",
        },
        input=input,
    ) == {
        Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz",
        Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz",
    }


def test_checksum_interactive_unrecognized_command():
    """Unrecognized commands should be ignored"""
    input = input_from_commands("-1", "0", "hello", "c")
    v = {Version("1.1"): "https://www.example.com/pkg-1.1.tar.gz"}
    assert interactive_version_filter(v.copy(), input=input) == v
    output = spack_checksum("preferred-test", fail_on_error=False)
    assert "version of preferred-test" in output
    assert "version(" in output


def test_checksum_versions(mock_packages, mock_clone_repo, mock_fetch, mock_stage):

@@ -1080,17 +1080,14 @@ def test_push_mirror_contents(

    ci.import_signing_key(_signing_key())

    with tmpdir.as_cwd():
        with open("spack.yaml", "w") as f:
            f.write(
                f"""\
    spack_yaml_contents = """
spack:
  definitions:
    - packages: [patchelf]
  specs:
    - $packages
  mirrors:
    test-mirror: {mirror_url}
    test-mirror: {0}
  ci:
    enable-artifacts-buildcache: True
    pipeline-gen:
@@ -1110,8 +1107,15 @@ def test_push_mirror_contents(
      - nonbuildtag
      image: basicimage
      custom_attribute: custom!
"""
            )
""".format(
        mirror_url
    )

    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:
        f.write(spack_yaml_contents)
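Why the refactor above switches from an f-string to str.format: with str.format, the {0} placeholder is substituted once and no other braces in the YAML template need escaping, whereas an f-string would require doubling every literal brace. A one-line illustration:

assert "test-mirror: {0}".format("file:///mirror") == "test-mirror: file:///mirror"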

    with tmpdir.as_cwd():
        env_cmd("create", "test", "./spack.yaml")
        with ev.read("test"):
            concrete_spec = Spec("patchelf").concretized()
@@ -1122,8 +1126,7 @@ def test_push_mirror_contents(

            install_cmd("--add", "--keep-stage", json_path)

            for s in concrete_spec.traverse():
                ci.push_mirror_contents(s, mirror_url, True)
            ci.push_mirror_contents(concrete_spec, mirror_url, True)

            buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")

@@ -2209,50 +2212,3 @@ def test_gitlab_config_scopes(
    assert all([t in rebuild_tags for t in ["spack", "service"]])
    expected_vars = ["CI_JOB_SIZE", "KUBERNETES_CPU_REQUEST", "KUBERNETES_MEMORY_REQUEST"]
    assert all([v in rebuild_vars for v in expected_vars])


def test_ci_generate_mirror_config(
    tmpdir,
    mutable_mock_env_path,
    install_mockery,
    mock_packages,
    monkeypatch,
    ci_base_environment,
    mock_binary_index,
):
    """Make sure the correct mirror gets used as the buildcache destination"""
    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:
        f.write(
            """\
spack:
  specs:
    - archive-files
  mirrors:
    some-mirror: file:///this/is/a/source/mirror
    buildcache-destination: file:///push/binaries/here
  ci:
    pipeline-gen:
    - submapping:
      - match:
        - archive-files
        build-job:
          tags:
          - donotcare
          image: donotcare
"""
        )

    with tmpdir.as_cwd():
        env_cmd("create", "test", "./spack.yaml")
        outputfile = str(tmpdir.join(".gitlab-ci.yml"))

        with ev.read("test"):
            ci_cmd("generate", "--output-file", outputfile)
            with open(outputfile) as of:
                pipeline_doc = syaml.load(of.read())
                assert "rebuild-index" in pipeline_doc
                reindex_job = pipeline_doc["rebuild-index"]
                assert "script" in reindex_job
                reindex_step = reindex_job["script"][0]
                assert "file:///push/binaries/here" in reindex_step

@@ -27,7 +27,6 @@
        [r"TestNamedPackage(Package)", r"def install(self"],
    ),
    (["file://example.tar.gz"], "example", [r"Example(Package)", r"def install(self"]),
    (["-n", "test-license"], "test-license", [r'license("UNKNOWN")']),
    # Template-specific cases
    (
        ["-t", "autoreconf", "/test-autoreconf"],

@@ -9,11 +9,8 @@

import llnl.util.filesystem as fs

import spack.build_environment
import spack.environment as ev
import spack.error
import spack.spec
import spack.store
from spack.main import SpackCommand

dev_build = SpackCommand("dev-build")
@@ -23,8 +20,9 @@
pytestmark = pytest.mark.not_on_windows("does not run on windows")


def test_dev_build_basics(tmpdir, install_mockery):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
def test_dev_build_basics(tmpdir, mock_packages, install_mockery):
    spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
    spec.concretize()

    assert "dev_path" in spec.variants

@@ -41,8 +39,9 @@ def test_dev_build_basics(tmpdir, install_mockery):
    assert os.path.exists(str(tmpdir))


def test_dev_build_before(tmpdir, install_mockery):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
def test_dev_build_before(tmpdir, mock_packages, install_mockery):
    spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
    spec.concretize()

    with tmpdir.as_cwd():
        with open(spec.package.filename, "w") as f:
@@ -57,8 +56,9 @@ def test_dev_build_before(tmpdir, install_mockery):
    assert not os.path.exists(spec.prefix)


def test_dev_build_until(tmpdir, install_mockery):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
def test_dev_build_until(tmpdir, mock_packages, install_mockery):
    spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
    spec.concretize()

    with tmpdir.as_cwd():
        with open(spec.package.filename, "w") as f:
@@ -74,9 +74,10 @@ def test_dev_build_until(tmpdir, install_mockery):
    assert not spack.store.STORE.db.query(spec, installed=True)


def test_dev_build_until_last_phase(tmpdir, install_mockery):
def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery):
    # Test that we ignore the last_phase argument if it is already last
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
    spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
    spec.concretize()

    with tmpdir.as_cwd():
        with open(spec.package.filename, "w") as f:
@@ -93,8 +94,9 @@ def test_dev_build_until_last_phase(tmpdir, install_mockery):
    assert os.path.exists(str(tmpdir))


def test_dev_build_before_until(tmpdir, install_mockery, capsys):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
def test_dev_build_before_until(tmpdir, mock_packages, install_mockery, capsys):
    spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
    spec.concretize()

    with tmpdir.as_cwd():
        with open(spec.package.filename, "w") as f:
@@ -132,6 +134,7 @@ def mock_module_noop(*args):

def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
    monkeypatch.setattr(os, "execvp", print_spack_cc)

    monkeypatch.setattr(spack.build_environment, "module", mock_module_noop)

    with tmpdir.as_cwd():
@@ -139,7 +142,7 @@ def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery,
    assert "lib/spack/env" in output


def test_dev_build_fails_already_installed(tmpdir, install_mockery):
def test_dev_build_fails_already_installed(tmpdir, mock_packages, install_mockery):
    spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
    spec.concretize()

@@ -172,7 +175,7 @@ def test_dev_build_fails_no_version(mock_packages):
    assert "dev-build spec must have a single, concrete version" in output


def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path):
def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_path):
    """Test Spack does dev builds for packages in develop section of env."""
    # setup dev-build-test-install package for dev build
    build_dir = tmpdir.mkdir("build")
@@ -188,7 +191,7 @@ def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path):
    with envdir.as_cwd():
        with open("spack.yaml", "w") as f:
            f.write(
                f"""\
                """\
spack:
  specs:
  - dev-build-test-install@0.0.0
@@ -196,9 +199,11 @@ def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path):
  develop:
    dev-build-test-install:
      spec: dev-build-test-install@0.0.0
      path: {os.path.relpath(str(build_dir), start=str(envdir))}
      path: %s
"""
                % os.path.relpath(str(build_dir), start=str(envdir))
            )

        env("create", "test", "./spack.yaml")
        with ev.read("test"):
            install()
@@ -208,7 +213,9 @@ def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path):
        assert f.read() == spec.package.replacement_string


def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_env_path):
def test_dev_build_env_version_mismatch(
    tmpdir, mock_packages, install_mockery, mutable_mock_env_path
):
    """Test Spack constraints concretization by develop specs."""
    # setup dev-build-test-install package for dev build
    build_dir = tmpdir.mkdir("build")
@@ -224,7 +231,7 @@ def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_en
    with envdir.as_cwd():
        with open("spack.yaml", "w") as f:
            f.write(
                f"""\
                """\
spack:
  specs:
  - dev-build-test-install@0.0.0
@@ -232,17 +239,20 @@ def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_en
  develop:
    dev-build-test-install:
      spec: dev-build-test-install@1.1.1
      path: {build_dir}
      path: %s
"""
                % build_dir
            )

        env("create", "test", "./spack.yaml")
        with ev.read("test"):
            with pytest.raises((RuntimeError, spack.error.UnsatisfiableSpecError)):
            with pytest.raises(RuntimeError):
                install()


def test_dev_build_multiple(tmpdir, install_mockery, mutable_mock_env_path, mock_fetch):
def test_dev_build_multiple(
    tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch
):
    """Test spack install with multiple developer builds

    Test that only the root needs to be specified in the environment
@@ -274,19 +284,20 @@ def test_dev_build_multiple(tmpdir, install_mockery, mutable_mock_env_path, mock
    with envdir.as_cwd():
        with open("spack.yaml", "w") as f:
            f.write(
                f"""\
                """\
spack:
  specs:
  - dev-build-test-dependent@0.0.0

  develop:
    dev-build-test-install:
      path: {leaf_dir}
      path: %s
      spec: dev-build-test-install@=1.0.0
    dev-build-test-dependent:
      spec: dev-build-test-dependent@0.0.0
      path: {root_dir}
      path: %s
"""
                % (leaf_dir, root_dir)
            )

        env("create", "test", "./spack.yaml")
@@ -305,7 +316,9 @@ def test_dev_build_multiple(tmpdir, install_mockery, mutable_mock_env_path, mock
        assert f.read() == spec.package.replacement_string


def test_dev_build_env_dependency(tmpdir, install_mockery, mock_fetch, mutable_mock_env_path):
def test_dev_build_env_dependency(
    tmpdir, mock_packages, install_mockery, mock_fetch, mutable_mock_env_path
):
    """
    Test non-root specs in an environment are properly marked for dev builds.
    """
@@ -324,7 +337,7 @@ def test_dev_build_env_dependency(tmpdir, install_mockery, mock_fetch, mutable_m
    with envdir.as_cwd():
        with open("spack.yaml", "w") as f:
            f.write(
                f"""\
                """\
spack:
  specs:
  - dependent-of-dev-build@0.0.0
@@ -332,9 +345,11 @@ def test_dev_build_env_dependency(tmpdir, install_mockery, mock_fetch, mutable_m
  develop:
    dev-build-test-install:
      spec: dev-build-test-install@0.0.0
      path: {os.path.relpath(str(build_dir), start=str(envdir))}
      path: %s
"""
                % os.path.relpath(str(build_dir), start=str(envdir))
            )

        env("create", "test", "./spack.yaml")
        with ev.read("test"):
            # concretize in the environment to get the dev build info
@@ -356,7 +371,7 @@ def test_dev_build_env_dependency(tmpdir, install_mockery, mock_fetch, mutable_m

@pytest.mark.parametrize("test_spec", ["dev-build-test-install", "dependent-of-dev-build"])
def test_dev_build_rebuild_on_source_changes(
    test_spec, tmpdir, install_mockery, mutable_mock_env_path, mock_fetch
    test_spec, tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch
):
    """Test dev builds rebuild on changes to source code.

@@ -401,4 +416,4 @@ def reset_string():
    fs.touch(os.path.join(str(build_dir), "test"))
    output = install()

    assert f"Installing {test_spec}" in output
    assert "Installing %s" % test_spec in output

@@ -168,7 +168,7 @@ def test_env_remove(capfd):

    foo = ev.read("foo")
    with foo:
        with pytest.raises(SpackCommandError):
        with pytest.raises(spack.main.SpackCommandError):
            with capfd.disabled():
                env("remove", "-y", "foo")
    assert "foo" in env("list")
@@ -283,7 +283,7 @@ def setup_error(pkg, env):

    _, err = capfd.readouterr()
    assert "cmake-client had issues!" in err
    assert "Warning: couldn't load runtime environment" in err
    assert "Warning: couldn't get environment settings" in err


def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, monkeypatch):
@@ -500,14 +500,11 @@ def test_env_activate_broken_view(
    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and fails gracefully
    with spack.repo.use_repositories(mock_custom_repository):
        wrong_repo = env("activate", "--sh", "test")
        assert "Warning: couldn't load runtime environment" in wrong_repo
        assert "Unknown namespace: builtin.mock" in wrong_repo
        with pytest.raises(SpackCommandError):
            env("activate", "--sh", "test")

    # test replacing repo fixes it
    normal_repo = env("activate", "--sh", "test")
    assert "Warning: couldn't load runtime environment" not in normal_repo
    assert "Unknown namespace: builtin.mock" not in normal_repo
    env("activate", "--sh", "test")


def test_to_lockfile_dict():
@@ -666,7 +663,7 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
        e.write()

        env_mod = spack.util.environment.EnvironmentModifications()
        e.add_view_to_env(env_mod, "default")
        e.add_default_view_to_env(env_mod)
        env_variables = {}
        env_mod.apply_modifications(env_variables)
        assert str(fake_bin) in env_variables["PATH"]
@@ -1047,7 +1044,7 @@ def test_env_commands_die_with_no_env_arg():
        env("remove")

    # these have an optional env arg and raise errors via tty.die
    with pytest.raises(SpackCommandError):
    with pytest.raises(spack.main.SpackCommandError):
        env("loads")

    # This should NOT raise an error with no environment
@@ -2359,7 +2356,7 @@ def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, ins
    This is a cursory check; ``share/spack/qa/setup-env-test.sh`` checks
    for correctness.
    """
    env("create", "test")
    env("create", "test", add_view=True)

    out = env("activate", "--sh", "test")
    assert "export SPACK_ENV=" in out
@@ -2374,7 +2371,7 @@ def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, ins

def test_env_activate_csh_prints_shell_output(tmpdir, mock_stage, mock_fetch, install_mockery):
    """Check the shell commands output by ``spack env activate --csh``."""
    env("create", "test")
    env("create", "test", add_view=True)

    out = env("activate", "--csh", "test")
    assert "setenv SPACK_ENV" in out
@@ -2391,7 +2388,7 @@ def test_env_activate_csh_prints_shell_output(tmpdir, mock_stage, mock_fetch, in
def test_env_activate_default_view_root_unconditional(mutable_mock_env_path):
    """Check that the root of the default view in the environment is added
    to the shell unconditionally."""
    env("create", "test")
    env("create", "test", add_view=True)

    with ev.read("test") as e:
        viewdir = e.default_view.root
@@ -2406,27 +2403,6 @@ def test_env_activate_default_view_root_unconditional(mutable_mock_env_path):
    )


def test_env_activate_custom_view(tmp_path: pathlib.Path, mock_packages):
    """Check that an environment can be activated with a non-default view."""
    env_template = tmp_path / "spack.yaml"
    default_dir = tmp_path / "defaultdir"
    nondefaultdir = tmp_path / "nondefaultdir"
    with open(env_template, "w") as f:
        f.write(
            f"""\
spack:
  specs: [a]
  view:
    default:
      root: {default_dir}
    nondefault:
      root: {nondefaultdir}"""
        )
    env("create", "test", str(env_template))
    shell = env("activate", "--sh", "--with-view", "nondefault", "test")
    assert os.path.join(nondefaultdir, "bin") in shell


def test_concretize_user_specs_together():
    e = ev.create("coconcretization")
    e.unify = True

@@ -88,7 +88,6 @@ def test_info_fields(pkg_query, parser, print_buffer):
        "Installation Phases:",
        "Virtual Packages:",
        "Tags:",
        "Licenses:",
    )

    args = parser.parse_args(["--all", pkg_query])

@@ -9,7 +9,6 @@

import spack.spec
import spack.user_environment as uenv
import spack.util.environment
from spack.main import SpackCommand

load = SpackCommand("load")
@@ -28,63 +27,74 @@ def test_manpath_trailing_colon(
    manpath search path via a trailing colon"""
    install("mpileaks")

    sh_out = load("--sh", "mpileaks")
    sh_out = load("--sh", "--only", "package", "mpileaks")
    lines = sh_out.split("\n")
    assert any(re.match(r"export MANPATH=.*:;", ln) for ln in lines)

    os.environ["MANPATH"] = "/tmp/man:"

    sh_out = load("--sh", "mpileaks")
    sh_out = load("--sh", "--only", "package", "mpileaks")
    lines = sh_out.split("\n")
    assert any(re.match(r"export MANPATH=.*:/tmp/man:;", ln) for ln in lines)


def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages, working_env):
    """Test that `spack load` applies prefix inspections of its required runtime deps in
    topo-order"""
    install("mpileaks")
def test_load(install_mockery, mock_fetch, mock_archive, mock_packages):
    """Test that the commands generated by load add the specified prefix
    inspections. Also test that Spack records loaded specs by hash in the
    user environment.

    CMAKE_PREFIX_PATH is the only prefix inspection guaranteed for fake
    packages, since it keys on the prefix instead of a subdir."""
    install_out = install("mpileaks", output=str, fail_on_error=False)
    print("spack install mpileaks")
    print(install_out)
    mpileaks_spec = spack.spec.Spec("mpileaks").concretized()

    # Ensure our reference variable is clean.
    os.environ["CMAKE_PREFIX_PATH"] = "/hello:/world"
    sh_out = load("--sh", "--only", "package", "mpileaks")
    csh_out = load("--csh", "--only", "package", "mpileaks")

    # Test prefix inspections
    sh_out_test = "export CMAKE_PREFIX_PATH=%s" % mpileaks_spec.prefix
    csh_out_test = "setenv CMAKE_PREFIX_PATH %s" % mpileaks_spec.prefix
    assert sh_out_test in sh_out
    assert csh_out_test in csh_out

    # Test hashes recorded properly
    hash_test_replacements = (uenv.spack_loaded_hashes_var, mpileaks_spec.dag_hash())
    sh_hash_test = "export %s=%s" % hash_test_replacements
    csh_hash_test = "setenv %s %s" % hash_test_replacements
    assert sh_hash_test in sh_out
    assert csh_hash_test in csh_out
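The checks above are plain string-containment tests against the shell code that spack load emits. Schematically (the prefix value depends on the install layout, so this is a sketch rather than exact output):

mpileaks_spec = spack.spec.Spec("mpileaks").concretized()
sh_out = load("--sh", "--only", "package", "mpileaks")
# The generated sh code must export the package prefix for CMake consumers.
assert f"export CMAKE_PREFIX_PATH={mpileaks_spec.prefix}" in sh_out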


def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages):
    """Test that the '-r' option to the load command prepends dependency prefix
    inspections in post-order"""
    install("mpileaks")
    mpileaks_spec = spack.spec.Spec("mpileaks").concretized()

    sh_out = load("--sh", "mpileaks")
    csh_out = load("--csh", "mpileaks")

    def extract_cmake_prefix_path(output, prefix):
        return next(cmd for cmd in output.split(";") if cmd.startswith(prefix))[
            len(prefix) :
        ].split(":")

    # Map a prefix found in CMAKE_PREFIX_PATH back to a package name in mpileaks' DAG.
    prefix_to_pkg = lambda prefix: next(
        s.name for s in mpileaks_spec.traverse() if s.prefix == prefix
    # Test prefix inspections
    prefix_test_replacement = ":".join(
        reversed([s.prefix for s in mpileaks_spec.traverse(order="post")])
    )

    paths_sh = extract_cmake_prefix_path(sh_out, prefix="export CMAKE_PREFIX_PATH=")
    paths_csh = extract_cmake_prefix_path(csh_out, prefix="setenv CMAKE_PREFIX_PATH ")
    sh_prefix_test = "export CMAKE_PREFIX_PATH=%s" % prefix_test_replacement
    csh_prefix_test = "setenv CMAKE_PREFIX_PATH %s" % prefix_test_replacement
    assert sh_prefix_test in sh_out
    assert csh_prefix_test in csh_out

    # Shouldn't be a difference between loading csh / sh, so check they're the same.
    assert paths_sh == paths_csh

    # We should've prepended new paths, and keep old ones.
    assert paths_sh[-2:] == ["/hello", "/world"]

    # All but the last two paths are added by spack load; lookup what packages they're from.
    pkgs = [prefix_to_pkg(p) for p in paths_sh[:-2]]

    # Do we have all the runtime packages?
    assert set(pkgs) == set(
        s.name for s in mpileaks_spec.traverse(deptype=("link", "run"), root=True)
    # Test spack records loaded hashes properly
    hash_test_replacement = (
        uenv.spack_loaded_hashes_var,
        ":".join(reversed([s.dag_hash() for s in mpileaks_spec.traverse(order="post")])),
    )

    # Finally, do we list them in topo order?
    for i, pkg in enumerate(pkgs):
        set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i])

    # Lastly, do we keep track that mpileaks was loaded?
    assert f"export {uenv.spack_loaded_hashes_var}={mpileaks_spec.dag_hash()}" in sh_out
    assert f"setenv {uenv.spack_loaded_hashes_var} {mpileaks_spec.dag_hash()}" in csh_out
    sh_hash_test = "export %s=%s" % hash_test_replacement
    csh_hash_test = "setenv %s %s" % hash_test_replacement
    assert sh_hash_test in sh_out
    assert csh_hash_test in csh_out


def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive, mock_packages):

@@ -1170,7 +1170,7 @@ def test_external_package_versions(self, spec_str, is_external, expected):
    )
    @pytest.mark.parametrize("mock_db", [True, False])
    def test_reuse_does_not_overwrite_dev_specs(
        self, dev_first, spec, mock_db, tmpdir, temporary_store, monkeypatch
        self, dev_first, spec, mock_db, tmpdir, monkeypatch
    ):
        """Test that reuse does not mix dev specs with non-dev specs.

@@ -1182,7 +1182,8 @@ def test_reuse_does_not_overwrite_dev_specs(
        # dev and non-dev specs that are otherwise identical
        spec = Spec(spec)
        dev_spec = spec.copy()
        dev_spec["dev-build-test-install"].constrain(f"dev_path={tmpdir.strpath}")
        dev_constraint = "dev_path=%s" % tmpdir.strpath
        dev_spec["dev-build-test-install"].constrain(dev_constraint)

        # run the test in both orders
        first_spec = dev_spec if dev_first else spec
@@ -1195,7 +1196,7 @@ def mock_fn(*args, **kwargs):
            return [first_spec]

        if mock_db:
            temporary_store.db.add(first_spec, None)
            monkeypatch.setattr(spack.store.STORE.db, "query", mock_fn)
        else:
            monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", mock_fn)

@@ -2111,24 +2112,6 @@ def test_dont_define_new_version_from_input_if_checksum_required(self, working_e
        # when checksums are required
        Spec("a@=3.0").concretized()

    @pytest.mark.regression("39570")
    @pytest.mark.db
    def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database):
        """Tests that reusing python with an explicit request on the command line, when the spec
        also reuses a python extension from the DB, doesn't fail.
        """
        s = Spec("py-extension1").concretized()
        python_hash = s["python"].dag_hash()
        s.package.do_install(fake=True, explicit=True)

        with spack.config.override("concretizer:reuse", True):
            with_reuse = Spec(f"py-extension2 ^/{python_hash}").concretized()

        with spack.config.override("concretizer:reuse", False):
            without_reuse = Spec("py-extension2").concretized()

        assert with_reuse.dag_hash() == without_reuse.dag_hash()
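The ^/<hash> syntax used in the removed test pins a dependency to an already-installed spec by (a prefix of) its DAG hash. A minimal sketch, with an illustrative hash value:

python_hash = "abcdef1234567890"  # in the test this is s["python"].dag_hash()
pinned = Spec(f"py-extension2 ^/{python_hash}")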


@pytest.fixture()
def duplicates_test_repository():
@@ -2228,43 +2211,6 @@ def test_pure_build_virtual_dependency(self, strategy):
        s = Spec("virtual-build").concretized()
        assert s["pkgconfig"].name == "pkg-config"

    @pytest.mark.regression("40595")
    def test_no_multiple_solutions_with_different_edges_same_nodes(self):
        r"""Tests that the root node, which has a dependency on py-setuptools without constraint,
        doesn't randomly pick one of the two setuptools (@=59, @=60) needed by its dependency.

        o py-floating@1.25.0/3baitsp
        |\
        | |\
        | | |\
        | o | | py-shapely@1.25.0/4hep6my
        |/| | |
        | |\| |
        | | |/
        | |/|
        | | o py-setuptools@60/cwhbthc
        | |/
        |/|
        | o py-numpy@1.25.0/5q5fx4d
        |/|
        | |\
        | o | py-setuptools@59/jvsa7sd
        |/ /
        o | python@3.11.2/pdmjekv
        o | gmake@3.0/jv7k2bl
         /
        o gmake@4.1/uo6ot3d
        """
        spec_str = "py-floating"

        root = spack.spec.Spec(spec_str).concretized()
        assert root["py-shapely"].satisfies("^py-setuptools@=60")
        assert root["py-numpy"].satisfies("^py-setuptools@=59")

        edges = root.edges_to_dependencies("py-setuptools")
        assert len(edges) == 1
        assert edges[0].spec.satisfies("@=60")


@pytest.mark.parametrize(
    "v_str,v_opts,checksummed",
@@ -2300,23 +2246,3 @@ def test_no_multiple_solutions_with_different_edges_same_nodes(self):
def test_drop_moving_targets(v_str, v_opts, checksummed):
    v = Version(v_str)
    assert spack.solver.asp._is_checksummed_version((v, v_opts)) == checksummed


class TestConcreteSpecsByHash:
    """Tests the container of concrete specs"""

    @pytest.mark.parametrize("input_specs", [["a"], ["a foobar=bar", "b"], ["a foobar=baz", "b"]])
    def test_adding_specs(self, input_specs, default_mock_concretization):
        """Tests that concrete specs in the container are equivalent, but stored as different
        objects in memory.
        """
        container = spack.solver.asp.ConcreteSpecsByHash()
        input_specs = [Spec(s).concretized() for s in input_specs]
        for s in input_specs:
            container.add(s)

        for root in input_specs:
            for node in root.traverse(root=True):
                assert node == container[node.dag_hash()]
                assert node.dag_hash() in container
                assert node is not container[node.dag_hash()]

@@ -82,6 +82,23 @@ def test_strip_is_set_from_config(minimal_configuration):
    assert writer.strip is False


def test_extra_instructions_is_set_from_config(minimal_configuration):
    writer = writers.create(minimal_configuration)
    assert writer.extra_instructions == (None, None)

    test_line = "RUN echo Hello world!"
    e = minimal_configuration["spack"]["container"]
    e["extra_instructions"] = {}
    e["extra_instructions"]["build"] = test_line
    writer = writers.create(minimal_configuration)
    assert writer.extra_instructions == (test_line, None)

    e["extra_instructions"]["final"] = test_line
    del e["extra_instructions"]["build"]
    writer = writers.create(minimal_configuration)
    assert writer.extra_instructions == (None, test_line)
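The dictionary edits in the test above correspond to the container section of spack.yaml; a hedged reconstruction of the shape being exercised (values taken from the test itself):

minimal_configuration["spack"]["container"]["extra_instructions"] = {
    "build": "RUN echo Hello world!",  # appended to the build stage of the recipe
    "final": "RUN echo Hello world!",  # appended to the final stage of the recipe
}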
||||
|
||||
|
||||
def test_custom_base_images(minimal_configuration):
|
||||
"""Test setting custom base images from configuration file"""
|
||||
minimal_configuration["spack"]["container"]["images"] = {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
bootstrap:
|
||||
sources:
|
||||
- name: 'github-actions'
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.5
|
||||
metadata: $spack/share/spack/bootstrap/github-actions-v0.3
|
||||
trusted: {}
|
||||
|
||||
@@ -89,44 +89,6 @@ def test_maintainer_directive(config, mock_packages, package_name, expected_main
|
||||
assert pkg_cls.maintainers == expected_maintainers
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"package_name,expected_licenses", [("licenses-1", [("MIT", "+foo"), ("Apache-2.0", "~foo")])]
|
||||
)
|
||||
def test_license_directive(config, mock_packages, package_name, expected_licenses):
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(package_name)
|
||||
for license in expected_licenses:
|
||||
assert spack.spec.Spec(license[1]) in pkg_cls.licenses
|
||||
assert license[0] == pkg_cls.licenses[spack.spec.Spec(license[1])]
|
||||
|
||||
|
||||
def test_duplicate_exact_range_license():
|
||||
package = namedtuple("package", ["licenses", "name"])
|
||||
package.licenses = {spack.directives.make_when_spec("+foo"): "Apache-2.0"}
|
||||
package.name = "test_package"
|
||||
|
||||
msg = (
|
||||
r"test_package is specified as being licensed as MIT when \+foo, but it is also "
|
||||
r"specified as being licensed under Apache-2.0 when \+foo, which conflict."
|
||||
)
|
||||
|
||||
with pytest.raises(spack.directives.OverlappingLicenseError, match=msg):
|
||||
spack.directives._execute_license(package, "MIT", "+foo")
|
||||
|
||||
|
||||
def test_overlapping_duplicate_licenses():
|
||||
package = namedtuple("package", ["licenses", "name"])
|
||||
package.licenses = {spack.directives.make_when_spec("+foo"): "Apache-2.0"}
|
||||
package.name = "test_package"
|
||||
|
||||
msg = (
|
||||
r"test_package is specified as being licensed as MIT when \+bar, but it is also "
|
||||
r"specified as being licensed under Apache-2.0 when \+foo, which conflict."
|
||||
)
|
||||
|
||||
with pytest.raises(spack.directives.OverlappingLicenseError, match=msg):
|
||||
spack.directives._execute_license(package, "MIT", "+bar")
|
||||
|
||||
|
||||
def test_version_type_validation():
|
||||
# A version should be a string or an int, not a float, because it leads to subtle issues
|
||||
# such as 3.10 being interpreted as 3.1.
|
||||
|
||||
@@ -121,6 +121,7 @@ def test_ld_flags_cmake(self, temp_env):
            "-DCMAKE_EXE_LINKER_FLAGS=-mthreads",
            "-DCMAKE_MODULE_LINKER_FLAGS=-mthreads",
            "-DCMAKE_SHARED_LINKER_FLAGS=-mthreads",
            "-DCMAKE_STATIC_LINKER_FLAGS=-mthreads",
        }

    def test_ld_libs_cmake(self, temp_env):

@@ -719,12 +719,13 @@ def test_check_deps_status_external(install_mockery, monkeypatch):
    installer = create_installer(const_arg)
    request = installer.build_requests[0]

    # Mock the dependencies as external so assumed to be installed
    # Mock the known dependent, b, as external so assumed to be installed
    monkeypatch.setattr(spack.spec.Spec, "external", True)
    installer._check_deps_status(request)

    for dep in request.spec.traverse(root=False):
        assert inst.package_id(dep.package) in installer.installed
    # exotic architectures will add dependencies on gnuconfig, which we want to ignore
    installed = [x for x in installer.installed if not x.startswith("gnuconfig")]
    assert installed[0].startswith("b")


def test_check_deps_status_upstream(install_mockery, monkeypatch):
@@ -732,12 +733,13 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch):
    installer = create_installer(const_arg)
    request = installer.build_requests[0]

    # Mock the known dependencies as installed upstream
    # Mock the known dependent, b, as installed upstream
    monkeypatch.setattr(spack.spec.Spec, "installed_upstream", True)
    installer._check_deps_status(request)

    for dep in request.spec.traverse(root=False):
        assert inst.package_id(dep.package) in installer.installed
    # exotic architectures will add dependencies on gnuconfig, which we want to ignore
    installed = [x for x in installer.installed if not x.startswith("gnuconfig")]
    assert installed[0].startswith("b")


def test_add_bootstrap_compilers(install_mockery, monkeypatch):

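An aside on the mocking pattern used in both tests above (plain pytest; Dummy is a hypothetical stand-in class): patching the attribute on the class makes every instance, including every dependency Spec, report the patched value.

class Dummy:
    external = False

def test_patch_class_attribute(monkeypatch):
    # Patching the class attribute affects all instances at once.
    monkeypatch.setattr(Dummy, "external", True)
    assert Dummy().external
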
@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import pathlib

import pytest

import spack.directives

@@ -1007,84 +1005,6 @@ def test_spec_override(self):
        assert new_spec.compiler_flags["cxxflags"] == ["-O1"]


@pytest.mark.parametrize(
    "spec_str,format_str,expected",
    [
        ("zlib@git.foo/bar", "{name}-{version}", str(pathlib.Path("zlib-git.foo_bar"))),
        ("zlib@git.foo/bar", "{name}-{version}-{/hash}", None),
        ("zlib@git.foo/bar", "{name}/{version}", str(pathlib.Path("zlib", "git.foo_bar"))),
        (
            "zlib@{0}=1.0%gcc".format("a" * 40),
            "{name}/{version}/{compiler}",
            str(pathlib.Path("zlib", "{0}_1.0".format("a" * 40), "gcc")),
        ),
        (
            "zlib@git.foo/bar=1.0%gcc",
            "{name}/{version}/{compiler}",
            str(pathlib.Path("zlib", "git.foo_bar_1.0", "gcc")),
        ),
    ],
)
def test_spec_format_path(spec_str, format_str, expected):
    _check_spec_format_path(spec_str, format_str, expected)


def _check_spec_format_path(spec_str, format_str, expected, path_ctor=None):
    spec = Spec(spec_str)
    if not expected:
        with pytest.raises((spack.spec.SpecFormatPathError, spack.spec.SpecFormatStringError)):
            spec.format_path(format_str, _path_ctor=path_ctor)
    else:
        formatted = spec.format_path(format_str, _path_ctor=path_ctor)
        assert formatted == expected


@pytest.mark.parametrize(
    "spec_str,format_str,expected",
    [
        (
            "zlib@git.foo/bar",
            r"C:\\installroot\{name}\{version}",
            r"C:\installroot\zlib\git.foo_bar",
        ),
        (
            "zlib@git.foo/bar",
            r"\\hostname\sharename\{name}\{version}",
            r"\\hostname\sharename\zlib\git.foo_bar",
        ),
        # Windows doesn't attribute any significance to a leading
        # "/" so it is discarded
        ("zlib@git.foo/bar", r"/installroot/{name}/{version}", r"installroot\zlib\git.foo_bar"),
    ],
)
def test_spec_format_path_windows(spec_str, format_str, expected):
    _check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PureWindowsPath)


@pytest.mark.parametrize(
    "spec_str,format_str,expected",
    [
        ("zlib@git.foo/bar", r"/installroot/{name}/{version}", "/installroot/zlib/git.foo_bar"),
        ("zlib@git.foo/bar", r"//installroot/{name}/{version}", "//installroot/zlib/git.foo_bar"),
        # This is likely unintentional on Linux: Firstly, "\" is not a
        # path separator for POSIX, so this is treated as a single path
        # component (containing literal "\" characters); secondly,
        # Spec.format treats "\" as an escape character, so is
        # discarded (unless directly following another "\")
        (
            "zlib@git.foo/bar",
            r"C:\\installroot\package-{name}-{version}",
            r"C__installrootpackage-zlib-git.foo_bar",
        ),
        # "\" is not a POSIX separator, and Spec.format treats "\{" as a literal
        # "{", which means that the resulting format string is invalid
        ("zlib@git.foo/bar", r"package\{name}\{version}", None),
    ],
)
def test_spec_format_path_posix(spec_str, format_str, expected):
    _check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PurePosixPath)

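A standalone illustration of the separator semantics the comments above describe (stdlib pathlib only, no Spack specifics):

import pathlib

# "\" separates components on Windows but is a literal character on POSIX.
assert pathlib.PureWindowsPath(r"a\b").parts == ("a", "b")
assert pathlib.PurePosixPath(r"a\b").parts == ("a\\b",)

# UNC prefixes like \\hostname\sharename survive as the drive/anchor.
p = pathlib.PureWindowsPath(r"\\hostname\sharename\zlib")
assert p.drive == "\\\\hostname\\sharename"
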
@pytest.mark.regression("3887")
@pytest.mark.parametrize("spec_str", ["py-extension2", "extension1", "perl-extension"])
def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):

@@ -517,14 +517,6 @@ def _specfile_for(spec_str, filename):
            [Token(TokenType.VERSION, value="@:0.4"), Token(TokenType.COMPILER, value="% nvhpc")],
            "@:0.4%nvhpc",
        ),
        (
            "zlib@git.foo/bar",
            [
                Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
                Token(TokenType.GIT_VERSION, "@git.foo/bar"),
            ],
            "zlib@git.foo/bar",
        ),
    ],
)
def test_parse_single_spec(spec_str, tokens, expected_roundtrip):

@@ -675,25 +675,6 @@ def test_git_ref_comparisons(mock_git_version_info, install_mockery, mock_packag
    assert str(spec_branch.version) == "git.1.x=1.2"


def test_git_branch_with_slash():
    class MockLookup(object):
        def get(self, ref):
            assert ref == "feature/bar"
            return "1.2", 0

    v = spack.version.from_string("git.feature/bar")
    assert isinstance(v, GitVersion)
    v.attach_lookup(MockLookup())

    # Create a version range
    test_number_version = spack.version.from_string("1.2")
    v.satisfies(test_number_version)

    serialized = VersionList([v]).to_dict()
    v_deserialized = VersionList.from_dict(serialized)
    assert v_deserialized[0].ref == "feature/bar"


@pytest.mark.parametrize(
    "string,git",
    [

@@ -647,7 +647,7 @@ def find_versions_of_archive(
    list_urls |= additional_list_urls

    # Grab some web pages to scrape.
    _, links = spack.util.web.spider(list_urls, depth=list_depth, concurrency=concurrency)
    pages, links = spack.util.web.spider(list_urls, depth=list_depth, concurrency=concurrency)

    # Scrape them for archive URLs
    regexes = []

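For context (a sketch of the assumed call shape, not part of the diff): spider returns both the fetched pages and the links discovered in them, so unpacking pages instead of discarding it with _ keeps the page text available for the regex scraping that follows.

# Hedged sketch; the (pages, links) shape is assumed from the hunk above,
# with pages mapping each fetched URL to its raw text.
pages, links = spack.util.web.spider({"https://example.com/releases"}, depth=1, concurrency=4)
for url, text in pages.items():
    print(url, len(text))  # page bodies can now be scanned for version strings
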
@@ -4,18 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import sys
from contextlib import contextmanager
from typing import Callable

from llnl.util.lang import nullcontext

import spack.build_environment
import spack.config
import spack.spec
import spack.util.environment as environment
import spack.util.prefix as prefix
from spack import traverse
from spack.context import Context

#: Environment variable name Spack uses to track individually loaded packages
spack_loaded_hashes_var = "SPACK_LOADED_HASHES"

@@ -33,8 +26,8 @@ def prefix_inspections(platform):
        A dictionary mapping subdirectory names to lists of environment
        variables to modify with that directory if it exists.
    """
    inspections = spack.config.get("modules:prefix_inspections")
    if isinstance(inspections, dict):
    inspections = spack.config.get("modules:prefix_inspections", {})
    if inspections:
        return inspections

    inspections = {

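The hunk above swaps an isinstance check for a default value plus a truthiness test; a self-contained illustration of the pattern (cfg is a stand-in for Spack's configuration):

cfg = {}  # stand-in: the key may be missing, None, or a dict

inspections = cfg.get("modules:prefix_inspections", {})
if inspections:  # one branch now covers "missing", None, and {}
    result = inspections
else:
    result = {"bin": ["PATH"]}  # illustrative built-in default
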
@@ -69,58 +62,40 @@ def unconditional_environment_modifications(view):
    return env


@contextmanager
def projected_prefix(*specs: spack.spec.Spec, projection: Callable[[spack.spec.Spec], str]):
    """Temporarily replace every Spec's prefix with projection(s)"""
    prefixes = dict()
    for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()):
        if s.external:
            continue
        prefixes[s.dag_hash()] = s.prefix
        s.prefix = prefix.Prefix(projection(s))

    yield

    for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()):
        s.prefix = prefixes.get(s.dag_hash(), s.prefix)


def environment_modifications_for_specs(
    *specs: spack.spec.Spec, view=None, set_package_py_globals: bool = True
):
def environment_modifications_for_spec(spec, view=None, set_package_py_globals=True):
    """List of environment (shell) modifications to be processed for spec.

    This list is specific to the location of the spec or its projection in
    the view.

    Args:
        specs: spec(s) for which to list the environment modifications
        spec (spack.spec.Spec): spec for which to list the environment modifications
        view: view associated with the spec passed as first argument
        set_package_py_globals: whether or not to set the global variables in the
        set_package_py_globals (bool): whether or not to set the global variables in the
            package.py files (this may be problematic when using buildcaches that have
            been built on a different but compatible OS)
    """
    env = environment.EnvironmentModifications()
    topo_ordered = traverse.traverse_nodes(specs, root=True, deptype=("run", "link"), order="topo")
    spec = spec.copy()
    if view and not spec.external:
        spec.prefix = prefix.Prefix(view.get_projection_for_spec(spec))

    if view:
        maybe_projected = projected_prefix(*specs, projection=view.get_projection_for_spec)
    else:
        maybe_projected = nullcontext()
    # generic environment modifications determined by inspecting the spec
    # prefix
    env = environment.inspect_path(
        spec.prefix, prefix_inspections(spec.platform), exclude=environment.is_system_path
    )

    with maybe_projected:
        # Static environment changes (prefix inspections)
        for s in reversed(list(topo_ordered)):
            static = environment.inspect_path(
                s.prefix, prefix_inspections(s.platform), exclude=environment.is_system_path
            )
            env.extend(static)
    # Let the extendee/dependency modify their extensions/dependents
    # before asking for package-specific modifications
    env.extend(
        spack.build_environment.modifications_from_dependencies(
            spec, context="run", set_package_py_globals=set_package_py_globals
        )
    )

        # Dynamic environment changes (setup_run_environment etc)
        setup_context = spack.build_environment.SetupContext(*specs, context=Context.RUN)
        if set_package_py_globals:
            setup_context.set_all_package_py_globals()
        dynamic = setup_context.get_env_modifications()
        env.extend(dynamic)
    if set_package_py_globals:
        spack.build_environment.set_module_variables_for_package(spec.package)

    spec.package.setup_run_environment(env)

    return env

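A usage sketch for the function being rewritten above (module path and method names are assumptions based on the surrounding code, not confirmed by the diff):

# Hedged sketch: collect run-environment modifications for a concrete spec.
import spack.spec
import spack.user_environment as uenv  # assumed module for this function

spec = spack.spec.Spec("zlib").concretized()
env_mods = uenv.environment_modifications_for_spec(spec)
print(env_mods.shell_modifications())  # e.g. "export PATH=..." lines
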
@@ -61,7 +61,7 @@ def executable(exe: str, args: List[str]) -> int:
    return cmd.returncode


def editor(*args: str, exec_fn: Callable[[str, List[str]], int] = os.execv) -> bool:
def editor(*args: List[str], exec_fn: Callable[[str, List[str]], int] = os.execv) -> bool:
    """Invoke the user's editor.

    This will try to execute the following, in order:

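An aside on the annotation change above (plain typing, nothing Spack-specific): with *args: str each positional argument is one string, which is what editor receives; *args: List[str] would claim each argument is itself a list.

from typing import List

def f(*args: str) -> None:
    ...  # inside f, args is Tuple[str, ...]

def g(*args: List[str]) -> None:
    ...  # inside g, args is Tuple[List[str], ...] -- rarely what is meant

f("file.py", "+10")      # matches the str annotation
g(["file.py"], ["+10"])  # what the List[str] annotation would require
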
@@ -2,11 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import multiprocessing
import os
import sys
import traceback

from .cpus import cpus_available


class ErrorFromWorker:
    """Wrapper class to report an error from a worker process"""
@@ -53,25 +56,79 @@ def __call__(self, *args, **kwargs):
        return value


def imap_unordered(f, list_of_args, *, processes: int, debug=False):
    """Wrapper around multiprocessing.Pool.imap_unordered.
def raise_if_errors(*results, **kwargs):
    """Analyze results from worker Processes to search for ErrorFromWorker
    objects. If found print all of them and raise an exception.

    Args:
        f: function to apply
        list_of_args: list of tuples of args for the task
        processes: maximum number of processes allowed
        debug: if False, raise an exception containing just the error messages
        *results: results from worker processes
        debug: if True show complete stacktraces

    Raise:
        RuntimeError: if ErrorFromWorker objects are in the results
    """
    debug = kwargs.get("debug", False)  # This can be a keyword only arg in Python 3
    errors = [x for x in results if isinstance(x, ErrorFromWorker)]
    if not errors:
        return

    msg = "\n".join([error.stacktrace if debug else str(error) for error in errors])

    error_fmt = "{0}"
    if len(errors) > 1 and not debug:
        error_fmt = "errors occurred during concretization of the environment:\n{0}"

    raise RuntimeError(error_fmt.format(msg))


@contextlib.contextmanager
def pool(*args, **kwargs):
    """Context manager to start and terminate a pool of processes, similar to the
    default one provided in Python 3.X

    Arguments are forwarded to the multiprocessing.Pool.__init__ method.
    """
    try:
        p = multiprocessing.Pool(*args, **kwargs)
        yield p
    finally:
        p.terminate()
        p.join()


def num_processes(max_processes=None):
    """Return the number of processes in a pool.

    Currently the function return the minimum between the maximum number
    of processes and the cpus available.

    When a maximum number of processes is not specified return the cpus available.

    Args:
        max_processes (int or None): maximum number of processes allowed
    """
    max_processes or cpus_available()
    return min(cpus_available(), max_processes)


def parallel_map(func, arguments, max_processes=None, debug=False):
    """Map a task object to the list of arguments, return the list of results.

    Args:
        func (Task): user defined task object
        arguments (list): list of arguments for the task
        max_processes (int or None): maximum number of processes allowed
        debug (bool): if False, raise an exception containing just the error messages
            from workers, if True an exception with complete stacktraces

    Raises:
        RuntimeError: if any error occurred in the worker processes
    """
    if sys.platform in ("darwin", "win32") or len(list_of_args) == 1:
        yield from map(f, list_of_args)
        return

    with multiprocessing.Pool(processes) as p:
        for result in p.imap_unordered(Task(f), list_of_args):
            if isinstance(result, ErrorFromWorker):
                raise RuntimeError(result.stacktrace if debug else str(result))
            yield result
    task_wrapper = Task(func)
    if sys.platform != "darwin" and sys.platform != "win32":
        with pool(processes=num_processes(max_processes=max_processes)) as p:
            results = p.map(task_wrapper, arguments)
    else:
        results = list(map(task_wrapper, arguments))
    raise_if_errors(*results, debug=debug)
    return results

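A usage sketch for the restored API (the module path is assumed to be spack.util.parallel, inferred from the relative .cpus import; note that num_processes, as shown, never assigns its cpus_available() fallback, so callers should pass max_processes explicitly):

# Hedged sketch: map a picklable function over arguments in a process pool.
import math

from spack.util.parallel import parallel_map  # assumed module path

def work(n):
    return math.sqrt(n)

# Process pool on Linux; serial fallback on macOS and Windows.
results = parallel_map(work, [1, 4, 9, 16], max_processes=2)
print(results)  # [1.0, 2.0, 3.0, 4.0]
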
@@ -1083,10 +1083,6 @@ def visit_MatchOr(self, node):
    def visit_TypeAlias(self, node):
        self.fill("type ")
        self.dispatch(node.name)
        if node.type_params:
            self.write("[")
            interleave(lambda: self.write(", "), self.dispatch, node.type_params)
            self.write("]")
        self.write(" = ")
        self.dispatch(node.value)

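For context (standard library, Python 3.12+): the removed visitor unparses PEP 695 type-alias statements; the stdlib equivalent shows the expected output.

import ast  # the "type" statement requires Python 3.12+

tree = ast.parse("type Pair[T] = tuple[T, T]")
print(ast.unparse(tree))  # -> type Pair[T] = tuple[T, T]
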
@@ -110,28 +110,19 @@ def handle_starttag(self, tag, attrs):
                    self.links.append(val)


class ExtractMetadataParser(HTMLParser):
class IncludeFragmentParser(HTMLParser):
    """This parser takes an HTML page and selects the include-fragments,
    used on GitHub, https://github.github.io/include-fragment-element,
    as well as a possible base url."""
    used on GitHub, https://github.github.io/include-fragment-element."""

    def __init__(self):
        super().__init__()
        self.fragments = []
        self.base_url = None
        self.links = []

    def handle_starttag(self, tag, attrs):
        # <include-fragment src="..." />
        if tag == "include-fragment":
            for attr, val in attrs:
                if attr == "src":
                    self.fragments.append(val)

        # <base href="..." />
        elif tag == "base":
            for attr, val in attrs:
                if attr == "href":
                    self.base_url = val
                    self.links.append(val)


def read_from_url(url, accept_content_type=None):

@@ -634,15 +625,12 @@ def _spider(url: urllib.parse.ParseResult, collect_nested: bool, _visited: Set[s
        # Parse out the include-fragments in the page
        # https://github.github.io/include-fragment-element
        metadata_parser = ExtractMetadataParser()
        metadata_parser.feed(page)

        # Change of base URL due to <base href="..." /> tag
        response_url = metadata_parser.base_url or response_url
        include_fragment_parser = IncludeFragmentParser()
        include_fragment_parser.feed(page)

        fragments = set()
        while metadata_parser.fragments:
            raw_link = metadata_parser.fragments.pop()
        while include_fragment_parser.links:
            raw_link = include_fragment_parser.links.pop()
            abs_link = url_util.join(response_url, raw_link.strip(), resolve_href=True)

            try:

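A self-contained illustration of the parser being reinstated above (stdlib html.parser; simplified to the src-collecting branch):

from html.parser import HTMLParser

class IncludeFragmentParser(HTMLParser):
    """Collect src attributes of GitHub-style <include-fragment> tags."""

    def __init__(self):
        super().__init__()
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == "include-fragment":
            for attr, val in attrs:
                if attr == "src":
                    self.links.append(val)

parser = IncludeFragmentParser()
parser.feed('<include-fragment src="/releases/expanded"></include-fragment>')
print(parser.links)  # ['/releases/expanded']
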
268 share/spack/bootstrap/github-actions-v0.3/clingo.json Normal file
@@ -0,0 +1,268 @@
{
  "verified": [
    {"binaries": [["clingo-bootstrap", "i5rx6vbyw7cyg3snajcpnuozo7l3lcab", "c55d1c76adb82ac9fbe67725641ef7e4fe1ae11e2e8da0dc93a3efe362549127"]], "python": "python@3.10", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "xoxkdgo3n332ewhbh7pz2zuevrjxkrke", "b50e2fba026e85af3f99b3c412b4f0c88ec2fbce15b48eeb75072f1d3737f3cc"]], "python": "python@3.5", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "sgmirxbu3bpn4rdpfs6jlyycfrkfxl5i", "b0a574df6f5d59491a685a31a8ed99fb345c850a91df62ef232fbe0cca716ed1"]], "python": "python@3.6", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "5hn7hszlizeqq3leqi6lrdmyy5ssv6zs", "36e24bc3bd27b125fdeb30d51d2554e44288877c0ce6df5a878bb4e8a1d5847a"]], "python": "python@3.7", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "qk3ecxakadq4naakng6mhdfkwauef3dn", "9d974c0d2b546d18f0ec35e08d5ba114bf2867f7ff7c7ea990b79d019ece6380"]], "python": "python@3.8", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "2omdsvzshkn2u3l5vwvwoey4es5cowfu", "cbf72eb932ac847f87b1640f8e70e26f5261967288f7d6db19206ef352e36a88"]], "python": "python@3.9", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "ifgzrctoh2ibrmitp6ushrvrnaeqtkr7", "1c609df7351286fe09aa3452fa7ed7fedf903e9fa12cde89b916a0fc4c022949"]], "python": "python@3.10", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "esfzjhodgh5be22hvh3trg2ojzrmhzwt", "8d070cdb2a5103cde3e6f873b1eb11d25f60464f3059d8643f943e5c9a9ec76c"]], "python": "python@3.6", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "5b4uhkhrvtvdmsnctjx2isrxciy6v2o2", "336b8b1202a8a28a0e34a98e5780ae0e2b2370b342ce67434551009b1a7c8db9"]], "python": "python@3.7", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "czapgrrey6llnsu2m4qaamv3so2lybxm", "16bdfe4b08ee8da38f3e2c7d5cc44a38d87696cc2b6de0971a4de25efb8ad8e4"]], "python": "python@3.8", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "7za6vsetahbghs4d2qe4ajtf2iyiacwx", "730ae7e6096ec8b83a0fc9464dda62bd6c2fec1f8565bb291f4d1ffe7746703b"]], "python": "python@3.9", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "zulnxrmchldtasffqw6qacmgg4y2qumj", "8988325db53c0c650f64372c21571ac85e9ba4577975d14ae7dba8ab7728b5fc"]], "python": "python@3.10", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "lx54ebqzwtjpfgch7kagoxkmul56z7fa", "81d64229299e76f9dc81f88d286bc94725e7cbcbb29ad0d66aaeaff73dd6473a"]], "python": "python@3.6", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "isu2rjoicl4xzmbl3k2c4bg35gvejkgz", "fcc4b052832cfd327d11f657c2b7715d981b0894ed03bbce18b23a842c7d706d"]], "python": "python@3.7", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "ob3k3g2wjy7cw33lfobjar44sqmojyth", "f51fd6256bfd3afc8470614d87df61e5c9dd582fcc70f707ca66ba2b7255da12"]], "python": "python@3.8", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "norpsmparkl5dfuzdqj4537o77vjbgsl", "477c041857b60f29ff9d6c7d2982b7eb49a2e02ebbc98af11488c32e2fb24081"]], "python": "python@3.9", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "gypv5loj2ml73duq6sr76yg5rj25te2m", "c855d7d32aadec37c41e51f19b83558b32bc0b946a9565dba0e659c6820bd6c3"]], "python": "python@2.7+ucs4", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "rjopyx7hum3hqhgsdyw3st7frdfgrv3p", "0e555f9bc99b4e4152939b30b2257f4f353941d152659e716bf6123c0ce11a60"]], "python": "python@2.7~ucs4", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "2l45t4kw3cqqwj6vbxhfwhzlo6b3q2p4", "6cb90de5a3d123b7408cfef693a9a78bb69c66abbfed746c1e85aa0acb848d03"]], "python": "python@3.10", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "4psiezojm7dexequtbnav77wvgcajigq", "b3fc33b5482357613294becb54968bd74de638abeae69e27c6c4319046a7e352"]], "python": "python@3.5", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "dzhvhynye4z7oalowdcy5zt25lej3m2n", "61c5f3e80bcc7acfc65e335f1910762df2cc5ded9d7e1e5977380a24de553dd7"]], "python": "python@3.6", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "dtwevigmwgke4g6ee5byktpmzmrp2kvx", "636937244b58611ec2eedb4422a1076fcaf09f3998593befb5a6ff1a74e1d5f7"]], "python": "python@3.7", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "shqedxgvjnhiwdcdrvjhbd73jaevv7wt", "b3615b2a94a8a15fddaa74cf4d9f9b3a516467a843cdeab597f72dcf6be5e31d"]], "python": "python@3.8", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "z6v6zvc6awioeompbvo735b4flr3yuyz", "1389192bd74c1f7059d95c4a41500201cbc2905cbba553678613e0b7e3b96c71"]], "python": "python@3.9", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"}
  ]
}

204 share/spack/bootstrap/github-actions-v0.3/gnupg.json Normal file
@@ -0,0 +1,204 @@
{
  "verified": [
    {
      "binaries": [
        ["libiconv", "d6dhoguolmllbzy2h6pnvjm3tti6uy6f", "7fe765a87945991d4e57782ed67c4bf42a10f95582eecd6f57de80a545bde821"],
        ["npth", "x6fb7zx6n7mos5knvi6wlnaadd7r2szx", "fd1e5a62107339f45219c32ba20b5e82aa0880c31ac86d1b245d388ca4546990"],
        ["zlib", "c5wm3jilx6zsers3sfgdisjqusoza4wr", "7500a717c62736872aa65df4599f797ef67b21086dd6236b4c7712cfffac9bf3"],
        ["libassuan", "3qv4bprobfwes37clg764cfipdzjdbto", "d85cd9d2c63a296300d4dcbd667421956df241109daef5e12d3ca63fa241cb14"],
        ["libgcrypt", "3y4ubdgxvgpvhxr3bk4l5mkw4gv42n7e", "9dad7c2635344957c4db68378964d3af84ea052d45dbe8ded9a6e6e47211daa8"],
        ["libgpg-error", "doido34kfwsvwpj4c4jcocahjb5ltebw", "20e5c238bee91d2a841f0b4bd0358ded59a0bd665d7f251fd9cd42f83e0b283b"],
        ["libksba", "mttecm7gzdv544lbzcoahchnboxysrvi", "1c0ae64e828a597e4cf15dd997c66cd677e41f68c63db09b9551480a197052a2"],
        ["pinentry", "se7xgv7yf4ywpjnbv7voxgeuuvs77ahb", "2fd13fbee7ca2361dc5dd09708c72d0489611301b60635cb0206bc5b94add884"],
        ["gnupg", "yannph34bpaqkhsv5mz2icwhy3epiqxd", "1de8b4e119fa3455d0170466fa0fb8e04957fab740aec32535b4667279312b3f"]
      ],
      "spec": "gnupg@2.3: %apple-clang platform=darwin target=x86_64"
    },
    {
      "binaries": [
        ["zlib", "t2hjzsyf3txkg64e4bq3nihe26rzzdws", "171e720840a28af50b62141be77bc525e666cffd1fbbe2ee62673214e8b0280f"],
        ["libiconv", "yjdji2wj4njz72fyrg46jlz5f5wfbhfr", "94c773c3d0294cf248ec1f3e9862669dfa743fe1a76de580d9425c14c8f7dcd2"],
        ["npth", "kx3vzmpysee7jxwsudarthrmyop6hzgc", "f8cc6204fa449ce576d450396ec2cad40a75d5712c1381a61ed1681a54f9c79f"],
        ["libassuan", "e5n5l5ftzwxs4ego5furrdbegphb6hxp", "ef0428874aa81bcb9944deed88e1fc639f629fe3d522cab3c281235ae2a53db9"],
        ["libgcrypt", "wyncpahrpqsmpk4b7nlhg5ekkjzyjdzs", "2309548c51a17f580f036445b701feb85d2bc552b9c4404418c2f223666cfe3b"],
        ["libgpg-error", "vhcdd6jkbiday2seg3rlkbzpf6jzfdx7", "79dd719538d9223d6287c0bba07b981944ab6d3ab11e5060274f1b7c727daf55"],
        ["libksba", "azcgpgncynoox3dce45hkz46bp2tb5rr", "15d301f201a5162234261fcfccd579b0ff484131444a0b6f5c0006224bb155d6"],
        ["pinentry", "e3z5ekbv4jlsie4qooubcfvsk2sb6t7l", "5fd27b8e47934b06554e84f1374a90a93e71e60a14dbde672a8da414b27b97f4"],
        ["gnupg", "i5agfvsmzdokuooaqhlh6vro5giwei2t", "f1bde7a1f0c84c1bbcde5757a96cf7a3e9157c2cfa9907fde799aa8e04c0d51f"]
      ],
      "spec": "gnupg@2.3: %gcc platform=linux target=aarch64"
    },
    {
      "binaries": [
        ["zlib", "v5rr6ba37tudzfuv2jszwikgcl4wd3cd", "371ad4b277af7b97c7871b9931f2764c97362620c7990c5ad8fdb5c42a1d30dc"],
        ["libiconv", "bvcnx2e4bumjcgya4dczdhjb3fhqyass", "65a00b717b3a4ee1b5ab9f84163722bdfea8eb20a2eecc9cf657c0eaac0227e9"],
        ["npth", "dkb6ez6a4c3iyrv67llwf5mzmynqdmtj", "4d77351661d0e0130b1c89fb6c6a944aee41d701ef80d056d3fc0178a7f36075"],
        ["libassuan", "tuydcxdbb5jfvw3gri7y24b233kgotgd", "d8775e7c1dd252437c6fa0781675b1d2202cfc0c8190e60d248928b6fca8bc9f"],
        ["libgcrypt", "kgxmg4eukwx6nn3bdera3j7cf7hxfy6n", "6046523f10ed54be50b0211c27191b3422886984fc0c00aed1a85d1f121c42e6"],
        ["libgpg-error", "ewhrwnltlrzkpqyix2vbkf4ruq6b6ea3", "3f3bbbf1a3cb82d39313e39bcbe3dad94a176130fc0e9a8045417d6223fb4f31"],
        ["libksba", "onxt5ry2fotgwiognwmhxlgnekuvtviq", "3a4df13f8b880441d1df4b234a4ca01de7601d84a6627185c2b3191a34445d40"],
        ["pinentry", "fm3m4rsszzxxakcpssd34jbbe4ihrhac", "73afa46176a7ec8f02d01a2caad3e400dc18c3c8a53f92b88a9aa9e3653db3e6"],
        ["gnupg", "gwr65ovh4wbxjgniaoqlbt3yla6rdikj", "7a3f7afe69ca67797a339c04028ca45a9630933020b57cb56e28453197fe8a57"]
      ],
      "spec": "gnupg@2.3: %gcc platform=linux target=ppc64le"
    },
    {
      "binaries": [
        ["libiconv", "vec3ac6t4ag3lb7ycvisafthqmpci74b", "35d184218e525d8aaea60082fd2d0f1e80449ec32746cceda2ea0ca106e9a095"],
        ["npth", "jx3kmy3ilc66rgg5mqtbed5z6qwt3vrd", "74c2c1b087667661da3e24ac83bcecf1bc2d10d69e7678d1fd232875fe295135"],
        ["zlib", "wnpbp4pu7xca24goggcy773d2y4pobbd", "bcbd5310e8c5e75cbf33d8155448b212486dc543469d6df7e56dcecb6112ee88"],
        ["libassuan", "ynn33wutdtoo2lbjjoizgslintxst2zl", "ac3b060690c6da0c64dcf35da047b84cc81793118fb9ff29b993f3fb9efdc258"],
        ["libgcrypt", "zzofcjer43vsxwj27c3rxapjxhsz4hlx", "4b1977d815f657c2d6af540ea4b4ce80838cadcf4ada72a8ba142a7441e571ea"],
        ["libgpg-error", "gzr2ucybgks5jquvf4lv7iprxq5vx5le", "a12ecb5cfd083a29d042fd309ebb5ab8fd4ace0b68b27f89b857e9a84d75b5be"],
        ["libksba", "hw4u4pam6mp3henpw476axtqaahfdy64", "5424caf98a2d48e0ed0b9134353c242328ebeef6d2b31808d58969165e809b47"],
        ["pinentry", "hffsjitsewdgoijwgzvub6vpjwm33ywr", "8ed7504b5b2d13ab7e1f4a0e27a882c33c5a6ebfcb43c51269333c0d6d5e1448"],
        ["gnupg", "lge4h2kjgvssyspnvutq6t3q2xual5oc", "6080ce00fcc24185e4051a30f6d52982f86f46eee6d8a2dc4d83ab08d8195be8"]
      ],
      "spec": "gnupg@2.3: %gcc platform=linux target=x86_64"
    }
  ]
}

@@ -3,6 +3,6 @@ description: |
  Buildcache generated from a public workflow using Github Actions.
  The sha256 checksum of binaries is checked before installation.
info:
  url: https://mirror.spack.io/bootstrap/github-actions/v0.5
  url: https://mirror.spack.io/bootstrap/github-actions/v0.3
  homepage: https://github.com/spack/spack-bootstrap-mirrors
  releases: https://github.com/spack/spack-bootstrap-mirrors/releases

@@ -4,8 +4,8 @@
      "binaries": [
        [
          "patchelf",
          "4txke6ixd2zg2yzg33l3fqnjyassono7",
          "102800775f789cc293e244899f39a22f0b7a19373305ef0497ca3189223123f3"
          "cn4gsqzdnnffk7ynvbcai6wrt5ehqqrl",
          "8c6a28cbe8133d719be27ded11159f0aa2c97ed1d0881119ae0ebd71f8ccc755"
        ]
      ],
      "spec": "patchelf@0.13: %gcc platform=linux target=aarch64"

@@ -14,8 +14,8 @@
      "binaries": [
        [
          "patchelf",
          "tnbgxc22uebqsiwrhchf3nieatuqlsrr",
          "91cf0a9d4750c04575c5ed3bcdefc4754e1cf9d1cd1bf197eb1fe20ccaa869f1"
          "mgq6n2heyvcx2ebdpchkbknwwn3u63s6",
          "1d4ea9167fb8345a178c1352e0377cc37ef2b421935cf2b48fb6fa03a94fca3d"
        ]
      ],
      "spec": "patchelf@0.13: %gcc platform=linux target=ppc64le"

@@ -24,8 +24,8 @@
      "binaries": [
        [
          "patchelf",
          "afv7arjarb7nzmlh7c5slkfxykybuqce",
          "73f4bde46b843c96521e3f5c31ab94756491404c1ad6429c9f61dbafbbfa6470"
          "htk62k7efo2z22kh6kmhaselru7bfkuc",
          "833df21b20eaa7999ac4c5779ae26aa90397d9027aebaa686a428589befda693"
        ]
      ],
      "spec": "patchelf@0.13: %gcc platform=linux target=x86_64"

@@ -1,389 +0,0 @@
{
  "verified": [
    {"binaries": [["clingo-bootstrap", "riu2vekwzrloc3fktlf6v7kwv6fja7lp", "7527bc4d2d75671162fe0db3de04c5d3e1e6ab7991dfd85442c302c698febb45"]], "python": "python@3.10.13", "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64"},
    {"binaries": [["clingo-bootstrap", "sgf6pgn4ihfcbxutxhevp36n3orfpdkw", "958531adcb449094bca7703f8f08d0f55a18f9a4c0f10a175ae4190d20982891"]], "python": "python@3.11.5", "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64"},
    {"binaries": [["clingo-bootstrap", "ie4wlhhnb4snroymbnjksajwvoid6omx", "4af14c3375a211ead3d2b4a31b59683744adcb79b820cc0c6b168ab162a7d983"]], "python": "python@3.12.0", "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64"},
    {"binaries": [["clingo-bootstrap", "5ke32podcipzxxwrj6uzm324bxegbwca", "a4106c42ee68d07c3d954ab73fe305ca4204f44d90b58fd91a8f784d9b96e7e3"]], "python": "python@3.6", "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64"},
    {"binaries": [["clingo-bootstrap", "scu4cnnf5axmjgozqc7cccpqnj5nc5tj", "54de4ca141b92222c8f1729e9e336c8a71dad9efa641e76438fcfb79bb58fc7f"]], "python": "python@3.7.17", "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64"},
    {"binaries": [["clingo-bootstrap", "ajbswc25irhmhbc4qibdcr6ohsvpcdku", "8b9e7af163a4259256eca4b4a1a92b5d95463a5cf467be2a11c64ab536ca5b04"]], "python": "python@3.8.18", "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64"},
    {"binaries": [["clingo-bootstrap", "vwkuxa5z4pj7vviwsmrpw2r6kbbqej2p", "a3f10024ff859e15b79ccd06c970a5f0e6ba11b0eae423f096ec9a35863816d2"]], "python": "python@3.9.18", "spec": "clingo-bootstrap%apple-clang platform=darwin target=aarch64"},
    {"binaries": [["clingo-bootstrap", "attdjmyzpfnhoobadw55pgg4hwkyp7zk", "f3258af3a648b47f12285dd3f048b685ed652b2b55b53861ac9913926de0f1c3"]], "python": "python@3.10", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "w4vnbsxjgkhsmgwozudzcsqlvccjsec4", "19322c2c951fc80234963ac068c78442df57ac63055325b24a39ab705d27a5b9"]], "python": "python@3.11", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "dw7ez2xcx6e5dxo3n4jin7pdbo3ihwtw", "c368edda4b3c8fd767f5f0f098ea416864b088c767dc43135df49cf5f6ef4c93"]], "python": "python@3.12", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "audrlxaw3ny3kyjkf6kqywumhokcxh3p", "db2f44966ec104ffe57c0911f0b1e0d3d052753f4c46c30c0890dfb26d547b09"]], "python": "python@3.6", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "al7brxvvvhih5nlxvtfkavufqc3pe5t2", "4e09b6d50d42c898e075fd20f7c7eddf91cb80edfd2d1326d26fd779e4d1ffed"]], "python": "python@3.7", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "v3ctpkyogl542wjibng6m2h2426spjbb", "d9ceb4f9ca23ef1dcc33872e5410ccfef6ea0360247d3e8faedf1751fb1ae4ca"]], "python": "python@3.8", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "zxo5ih5ac6r7lj6miwyx36ot7s6a4dcw", "f8f5e124d0e7bada34ff687a05e80b2fe207ce4d26205dab09b144edb148f05e"]], "python": "python@3.9", "spec": "clingo-bootstrap%apple-clang platform=darwin target=x86_64"},
    {"binaries": [["clingo-bootstrap", "wki4qcy3wzpoxav3auxt2u7yb4sk3xcc", "f5b9251eb51c60a71f7a0359c252f48c1a1121c426e1e6f9181808c626cb5fef"]], "python": "python@3.10.13", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "gun6hbksmsecau5wjyrmxodq4hxievzx", "28839ec43db444d6725bde3fcff99adadf61a392d967041fb16f0ffc0afa2f9d"]], "python": "python@3.11.5", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "er73owosuqfmmkxvuw3f7sqnvvj6s4xp", "99264d48c290256bf16e202c155bf3f8c88fdbbe9894d901344d0db7258abce3"]], "python": "python@3.12.0", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "kv6l7qttuzk7zxkxi5fhff52qso3pj7m", "59aa052e89d3c698fdd35e30ac21a896c8e49bbcc2f589a8f777bd5dafff2af7"]], "python": "python@3.6", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "uw5o2z753otspa3lmmy2bdodh5munkir", "7a8b6359ce83463541ff68c221296fe9875adf28ea2b2c1416229750cf4935d2"]], "python": "python@3.7.17", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "d63pp2l453bfygh6q7afwdj5mw7lhsns", "425bef3a8605732b2fbe74cdd77ef6a359cbdb62800490bbd05620a57da35b0c"]], "python": "python@3.8.18", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "nap44jiznzwlma6n75uxbpznppazs7av", "316d940ca9af8c6b3bc50f8fdaadba02b0e955c4f24345a63a1a6715b01a752c"]], "python": "python@3.9.18", "spec": "clingo-bootstrap%gcc platform=linux target=aarch64"},
    {"binaries": [["clingo-bootstrap", "qhvnw4yowmk2tofg3u7a4uomisktgzw5", "d30ec81385377521dd2d1ac091546cc2dec6a852ad31f35c24c65919f94fbf64"]], "python": "python@3.10.13", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "b3y37ryfuhjq6ljbkq7piglsafg5stgw", "3c2f9cca3a6d37685fdf7d7dffb7a0505336c32562715069004631c446e46a7c"]], "python": "python@3.11.5", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "dbloojtq5kcfd3pjmj4pislgpzrcvjpn", "f8aeba80e6c106b769adba164702db94e077255fe1a22d6d265ccc3172b4ab1a"]], "python": "python@3.12.0", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "gtlngzdb7iggcjmaottob54qi3b24blt", "3efc534ba293ee51156971b8c19a597ebcb237b003c98e3c215a49a88064dfd1"]], "python": "python@3.6", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "4ab4wobwa7bvhlkrmhdp2dwgtcq5rpzo", "3dc6539a989701ec1d83d644a79953af912c11fe6046a8d720970faf8e477991"]], "python": "python@3.7.17", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "fgout3h4mt4i64xaovqrpcsdy3ly2aml", "ade67f0623e941b16f2dd531270b4863de8befd56a9a47bd87af85345bc8bed6"]], "python": "python@3.8.18", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "5fv2q4agg4b4g53f4zhnymrbv6ogiwpy", "18047d48538a770f014cce73756258c1a320d4ac143abef3c5d8bc09dd7a03cc"]], "python": "python@3.9.18", "spec": "clingo-bootstrap%gcc platform=linux target=ppc64le"},
    {"binaries": [["clingo-bootstrap", "smkmkb5xqz4v2f7tl22g4e2ghamglox5", "a850c80c7a48dab506f807cc936b9e54e6f5640fe96543ff58281c046140f112"]], "python": "python@3.10.13", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "55qeu52pkt5shpwd7ulugv7wzt5j7vqd", "e5e1a10b3b2d543b1555f5caef9ac1a9ccdcddb36a1278d3bf68bf0e9f490626"]], "python": "python@3.11.5", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "zcw5ieomfwwpzpzpabetix2plfqzpvwd", "ed409165109488d13afe8ef12edd3b373ed08967903dc802889523b5d3bccd14"]], "python": "python@3.12.0", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "t4yf34cuvquqp5xd66zybmcfyhwbdlsf", "b14e26e86bcfdac98b3a55109996265683f32910d3452e034ddc0d328bf62d67"]], "python": "python@3.6", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "grkrpj76lxsxa753uzndwfmrj3pwvyhp", "11a535d4a8a9dbb18c2f995e10bc90b27b6ebc61f7ac2090f15db9b4f9be1a64"]], "python": "python@3.7.17", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "zowwoarrf3hvo6i3iereolfujr42iyro", "154d3a725f02c1775644d99a0b74f9e2cdf6736989a264ccfd5d9a8bce77a16b"]], "python": "python@3.8.18", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"},
    {"binaries": [["clingo-bootstrap", "bhqgwuvef354fwuxq7heeighavunpber", "399dec8cb6b8cd1b03737e68ea32e6ed69030b57e5f05d983e8856024143ea78"]], "python": "python@3.9.18", "spec": "clingo-bootstrap%gcc platform=linux target=x86_64"}
  ]
}

Some files were not shown because too many files have changed in this diff.