Compare commits


2 Commits

1be0cf0fdf  Gregory Becker  apple clang package                         2023-05-30 16:10:59 -07:00
f60cb4090b  Gregory Becker  wip: apple-clang only and now parallelism   2023-05-23 19:57:02 +02:00

889 changed files with 5501 additions and 11132 deletions


@@ -5,8 +5,3 @@ updates:
directory: "/"
schedule:
interval: "daily"
# Requirements to build documentation
- package-ecosystem: "pip"
directory: "/lib/spack/docs"
schedule:
interval: "daily"


@@ -19,8 +19,8 @@ jobs:
package-audits:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
@@ -38,7 +38,7 @@ jobs:
run: |
. share/spack/setup-env.sh
$(which spack) audit packages
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,linux,audits


@@ -24,7 +24,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup repo
@@ -158,7 +158,7 @@ jobs:
run: |
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
run: |
brew install tree
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- name: Bootstrap clingo
run: |
set -ex
@@ -204,7 +204,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup repo
@@ -247,7 +247,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh


@@ -49,14 +49,14 @@ jobs:
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- name: Set Container Tag Normal (Nightly)
run: |
@@ -92,13 +92,13 @@ jobs:
path: dockerfiles
- name: Set up QEMU
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@ecf95283f03858871ff00b787d79c419715afc34 # @v1
uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1
- name: Log in to GitHub Container Registry
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}


@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0


@@ -1,31 +0,0 @@
name: Windows Paraview Nightly
on:
schedule:
- cron: '0 2 * * *' # Run at 2 am
defaults:
run:
shell:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip six pywin32 setuptools coverage
- name: Build Test
run: |
spack compiler find
spack external find cmake ninja win-sdk win-wdk wgl msmpi
spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
exit 0


@@ -47,10 +47,10 @@ jobs:
on_develop: false
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -87,17 +87,17 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
with:
flags: unittests,linux,${{ matrix.concretizer }}
# Test shell integration
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -118,7 +118,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
with:
flags: shelltests,linux
@@ -133,7 +133,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -152,10 +152,10 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -176,7 +176,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
with:
flags: unittests,linux,clingo
# Run unit tests on MacOS
@@ -186,10 +186,10 @@ jobs:
matrix:
python-version: ["3.10"]
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
@@ -211,6 +211,6 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
with:
flags: unittests,macos


@@ -18,8 +18,8 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
with:
python-version: '3.11'
cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
with:
python-version: '3.11'
cache: 'pip'
@@ -68,7 +68,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -81,7 +81,6 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack debug report
spack -d bootstrap now --dev
spack style -t black
spack unit-test -V


@@ -15,10 +15,10 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: 3.9
- name: Install Python packages
@@ -33,16 +33,16 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
with:
flags: unittests,windows
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: 3.9
- name: Install Python packages
@@ -57,16 +57,16 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
- uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
with:
flags: unittests,windows
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
- uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
with:
python-version: 3.9
- name: Install Python packages


@@ -1,16 +1,10 @@
version: 2
build:
os: "ubuntu-22.04"
apt_packages:
- graphviz
tools:
python: "3.11"
sphinx:
configuration: lib/spack/docs/conf.py
fail_on_warning: true
python:
version: 3.7
install:
- requirements: lib/spack/docs/requirements.txt


@@ -1,16 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.styles.default import DefaultStyle
from pygments.token import Generic


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"


@@ -149,6 +149,7 @@ def setup(sphinx):
# Get nice vector graphics
graphviz_output_format = "svg"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
@@ -232,8 +233,30 @@ def setup(sphinx):
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
sys.path.append("./_pygments")
pygments_style = "style.SpackStyle"

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.style import Style
from pygments.styles.default import DefaultStyle
from pygments.token import Comment, Generic, Text


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"


import pkg_resources

dist = pkg_resources.Distribution(__file__)
sys.path.append(".")  # make 'conf' module findable
ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
dist._ep_map = {"pygments.styles": {"plugin1": ep}}
pkg_resources.working_set.add(dist)

pygments_style = "spack"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -318,15 +341,16 @@ def setup(sphinx):
# Output file base name for HTML help builder.
htmlhelp_basename = "Spackdoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples


@@ -636,7 +636,7 @@ to customize the generation of container recipes:
- No
* - ``os_packages:command``
- Tool used to manage system packages
- ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
- ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
- Only with custom base images
* - ``os_packages:update``
- Whether or not to update the list of available packages
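For orientation, these settings live under the ``container`` section of a
``spack.yaml`` environment file. A minimal sketch, with illustrative (not
default) image names and package lists:

.. code-block:: yaml

   spack:
     specs:
     - zlib
     container:
       format: docker
       images:
         build: my-org/spack-builder:latest   # hypothetical custom base image
         final: ubuntu:22.04
       os_packages:
         command: apt     # tool used to manage system packages (see above)
         update: true     # refresh the package list before installing
         final:
         - libgomp1       # extra system package installed in the final image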


@@ -916,9 +916,9 @@ function, as shown in the example below:
.. code-block:: yaml
projections:
zlib: "{name}-{version}"
^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
all: "{name}-{version}/{compiler.name}-{compiler.version}"
zlib: {name}-{version}
^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
all: {name}-{version}/{compiler.name}-{compiler.version}
The entries in the projections configuration file must all be either
specs or the keyword ``all``. For each spec, the projection used will
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
example/push/%: example/install/%
@mkdir -p $(dir $@)
$(info About to push $(SPEC) to a buildcache)
$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
@touch $@
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
$(info Updating the buildcache index)
$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
$(info Done!)
@touch $@


@@ -317,7 +317,7 @@ installed, but you know that new compilers have been added to your
.. code-block:: console
$ module load gcc/4.9.0
$ module load gcc-4.9.0
$ spack compiler find
==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
gcc@4.9.0


@@ -76,7 +76,6 @@ or refer to the full manual below.
chain
extensions
pipelines
signing
.. toctree::
:maxdepth: 2


@@ -35,27 +35,27 @@ showing lots of installed packages:
$ module avail
--------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
autoconf/2.69-gcc-4.8-qextxkq hwloc/1.11.6-gcc-6.3.0-akcisez m4/1.4.18-gcc-4.8-ev2znoc openblas/0.2.19-gcc-6.3.0-dhkmed6 py-setuptools/34.2.0-gcc-6.3.0-fadur4s
automake/1.15-gcc-4.8-maqvukj isl/0.18-gcc-4.8-afi6taq m4/1.4.18-gcc-6.3.0-uppywnz openmpi/2.1.0-gcc-6.3.0-go2s4z5 py-six/1.10.0-gcc-6.3.0-p4dhkaw
binutils/2.28-gcc-4.8-5s7c6rs libiconv/1.15-gcc-4.8-at46wg3 mawk/1.3.4-gcc-4.8-acjez57 openssl/1.0.2k-gcc-4.8-dkls5tk python/2.7.13-gcc-6.3.0-tyehea7
bison/3.0.4-gcc-4.8-ek4luo5 libpciaccess/0.13.4-gcc-6.3.0-gmufnvh mawk/1.3.4-gcc-6.3.0-ostdoms openssl/1.0.2k-gcc-6.3.0-gxgr5or readline/7.0-gcc-4.8-xhufqhn
bzip2/1.0.6-gcc-4.8-iffrxzn libsigsegv/2.11-gcc-4.8-pp2cvte mpc/1.0.3-gcc-4.8-g5mztc5 pcre/8.40-gcc-4.8-r5pbrxb readline/7.0-gcc-6.3.0-zzcyicg
bzip2/1.0.6-gcc-6.3.0-bequudr libsigsegv/2.11-gcc-6.3.0-7enifnh mpfr/3.1.5-gcc-4.8-o7xm7az perl/5.24.1-gcc-4.8-dg5j65u sqlite/3.8.5-gcc-6.3.0-6zoruzj
cmake/3.7.2-gcc-6.3.0-fowuuby libtool/2.4.6-gcc-4.8-7a523za mpich/3.2-gcc-6.3.0-dmvd3aw perl/5.24.1-gcc-6.3.0-6uzkpt6 tar/1.29-gcc-4.8-wse2ass
curl/7.53.1-gcc-4.8-3fz46n6 libtool/2.4.6-gcc-6.3.0-n7zmbzt ncurses/6.0-gcc-4.8-dcpe7ia pkg-config/0.29.2-gcc-4.8-ib33t75 tcl/8.6.6-gcc-4.8-tfxzqbr
expat/2.2.0-gcc-4.8-mrv6bd4 libxml2/2.9.4-gcc-4.8-ryzxnsu ncurses/6.0-gcc-6.3.0-ucbhcdy pkg-config/0.29.2-gcc-6.3.0-jpgubk3 util-macros/1.19.1-gcc-6.3.0-xorz2x2
flex/2.6.3-gcc-4.8-yf345oo libxml2/2.9.4-gcc-6.3.0-rltzsdh netlib-lapack/3.6.1-gcc-6.3.0-js33dog py-appdirs/1.4.0-gcc-6.3.0-jxawmw7 xz/5.2.3-gcc-4.8-mew4log
gcc/6.3.0-gcc-4.8-24puqve lmod/7.4.1-gcc-4.8-je4srhr netlib-scalapack/2.0.2-gcc-6.3.0-5aidk4l py-numpy/1.12.0-gcc-6.3.0-oemmoeu xz/5.2.3-gcc-6.3.0-3vqeuvb
gettext/0.19.8.1-gcc-4.8-yymghlh lua/5.3.4-gcc-4.8-im75yaz netlib-scalapack/2.0.2-gcc-6.3.0-hjsemcn py-packaging/16.8-gcc-6.3.0-i2n3dtl zip/3.0-gcc-4.8-rwar22d
gmp/6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem/1_6_3-gcc-4.8-wkey3nl netlib-scalapack/2.0.2-gcc-6.3.0-jva724b py-pyparsing/2.1.10-gcc-6.3.0-tbo6gmw zlib/1.2.11-gcc-4.8-pgxsxv7
help2man/1.47.4-gcc-4.8-kcnqmau lua-luaposix/33.4.0-gcc-4.8-mdod2ry netlib-scalapack/2.0.2-gcc-6.3.0-rgqfr6d py-scipy/0.19.0-gcc-6.3.0-kr7nat4 zlib/1.2.11-gcc-6.3.0-7cqp6cj
autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
The names should look familiar, as they resemble the output from ``spack find``.
For example, you could type the following command to load the ``cmake`` module:
.. code-block:: console
$ module load cmake/3.7.2-gcc-6.3.0-fowuuby
$ module load cmake-3.7.2-gcc-6.3.0-fowuuby
Neither of these is particularly pretty, easy to remember, or easy to
type. Luckily, Spack offers many facilities for customizing the module
@@ -779,35 +779,35 @@ cut-and-pasted into a shell script. For example:
$ spack module tcl loads --dependencies py-numpy git
# bzip2@1.0.6%gcc@4.9.3=linux-x86_64
module load bzip2/1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
# ncurses@6.0%gcc@4.9.3=linux-x86_64
module load ncurses/6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
# zlib@1.2.8%gcc@4.9.3=linux-x86_64
module load zlib/1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
# sqlite@3.8.5%gcc@4.9.3=linux-x86_64
module load sqlite/3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
# readline@6.3%gcc@4.9.3=linux-x86_64
module load readline/6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
# python@3.5.1%gcc@4.9.3=linux-x86_64
module load python/3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
# py-setuptools@20.5%gcc@4.9.3=linux-x86_64
module load py-setuptools/20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
# py-nose@1.3.7%gcc@4.9.3=linux-x86_64
module load py-nose/1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
# openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
module load openblas/0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
# py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
module load py-numpy/1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
# curl@7.47.1%gcc@4.9.3=linux-x86_64
module load curl/7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
# autoconf@2.69%gcc@4.9.3=linux-x86_64
module load autoconf/2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
# cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
module load cmake/3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
# expat@2.1.0%gcc@4.9.3=linux-x86_64
module load expat/2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
# git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
module load git/2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
The script may be further edited by removing unnecessary modules.
@@ -826,12 +826,12 @@ For example, consider the following on one system:
.. code-block:: console
$ module avail
linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
$ spack module tcl loads antlr # WRONG!
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
module load antlr/2.7.7-gcc-5.3.0-bdpl46y
module load antlr-2.7.7-gcc-5.3.0-bdpl46y
$ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
# antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
module load linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y


@@ -3071,7 +3071,7 @@ follows:
# The library provided by the bar virtual package
@property
def bar_libs(self):
return find_libraries("libFooBar", root=self.home, recursive=True)
return find_libraries("libFooBar", root=sef.home, recursive=True)
# The baz virtual package home
@property


@@ -1,8 +1,13 @@
sphinx==6.2.1
sphinxcontrib-programoutput==0.17
sphinx_design==0.4.1
sphinx-rtd-theme==1.2.2
python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.15.1
urllib3==2.0.3
# These dependencies should be installed using pip in order
# to build the documentation.
sphinx>=3.4,!=4.1.2,!=5.1.0
sphinxcontrib-programoutput
sphinx-design
sphinx-rtd-theme
python-levenshtein
# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
# https://stackoverflow.com/questions/67542699
docutils <0.17
pygments <2.13
urllib3 <2


@@ -1,484 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _signing:
=====================
Spack Package Signing
=====================
The goal of package signing in Spack is to provide data integrity
assurances around official packages produced by the automated Spack CI
pipelines. These assurances directly address the security of Spack's
software supply chain by explaining why a security-conscious user can
be reasonably justified in the belief that packages installed via Spack
have an uninterrupted auditable trail back to change management
decisions judged to be appropriate by the Spack maintainers. This is
achieved through cryptographic signing of packages built by Spack CI
pipelines based on code that has been transparently reviewed and
approved on GitHub. This document describes the signing process for
interested users.
.. _risks:
------------------------------
Risks, Impact and Threat Model
------------------------------
This document addresses the approach taken to safeguard Spack's
reputation with regard to the integrity of the package data produced by
Spack's CI pipelines. It does not address issues of data confidentiality
(Spack is intended to be largely open source) or availability (efforts
are described elsewhere). With that said, the main reputational risk can
be broadly categorized as a loss of faith in the data integrity due to a
breach of the private key used to sign packages. Remediation of a
private key breach would require republishing the public key with a
revocation certificate, generating a new signing key, an assessment and
potential rebuild/resigning of all packages since the key was breached,
and finally direct intervention by every spack user to update their copy
of Spack's public keys used for local verification.
The primary threat model used in mitigating the risks of these stated
impacts is one of individual error, not malicious intent or insider
threat. The primary objective is to avoid the above impacts by making a
private key breach nearly impossible due to oversight or configuration
error. Obvious and straightforward measures are taken to mitigate issues
of malicious interference in data integrity and insider threats, but
these attack vectors are not systematically addressed. It should be hard
to exfiltrate the private key intentionally, and almost impossible to
leak the key by accident.
.. _overview:
-----------------
Pipeline Overview
-----------------
Spack pipelines build software through progressive stages where packages
in later stages nominally depend on packages built in earlier stages.
For both technical and design reasons these dependencies are not
implemented through the default GitLab artifacts mechanism; instead
built packages are uploaded to AWS S3 mirrors (buckets) where they are
retrieved by subsequent stages in the pipeline. Two broad categories of
pipelines exist: Pull Request (PR) pipelines and Develop/Release
pipelines.
- PR pipelines are launched in response to pull requests made by
trusted and untrusted users. Packages built on these pipelines upload
code to quarantined AWS S3 locations which cache the built packages
for the purposes of review and iteration on the changes proposed in
the pull request. Packages built on PR pipelines can come from
untrusted users, so signing of these pipelines is not implemented.
Jobs in these pipelines are executed via normal GitLab runners both
within the AWS GitLab infrastructure and at affiliated institutions.
- Develop and Release pipelines **sign** the packages they produce and carry
strong integrity assurances that trace back to auditable change management
decisions. These pipelines only run after members from a trusted group of
reviewers verify that the proposed changes in a pull request are appropriate.
Once the PR is merged, or a release is cut, a pipeline is run on protected
GitLab runners which provide access to the required signing keys within the
job. Intermediary keys are used to sign packages in each stage of the
pipeline as they are built, and a final job officially signs each package
external to any specific package's build environment. An intermediate key
exists in the AWS infrastructure and for each affiliated institution that
maintains protected runners. The runners that execute these pipelines
exclusively accept jobs from protected branches, meaning the intermediate keys
are never exposed to unreviewed code and the official keys are never exposed
to any specific build environment.
.. _key_architecture:
----------------
Key Architecture
----------------
Spack's CI process uses public-key infrastructure (PKI) based on GNU Privacy
Guard (gpg) keypairs to sign public releases of spack package metadata, also
called specs. Two classes of GPG keys are involved in the process to reduce the
impact of an individual private key compromise; these key classes are the
*Intermediate CI Key* and *Reputational Key*. Each of these keys has signing
sub-keys that are used exclusively for signing packages. This can be confusing,
so for the purpose of this explanation we'll refer to Root and Signing keys.
Each key has a private and a public component as well as one or more identities
and zero or more signatures.
-------------------
Intermediate CI Key
-------------------
The Intermediate key class is used to sign and verify packages between stages
within a develop or release pipeline. An intermediate key exists for the AWS
infrastructure as well as each affiliated institution that maintains protected
runners. These intermediate keys are made available to the GitLab execution
environment building the package so that the package's dependencies may be
verified by the Signing Intermediate CI Public Key and the final package may be
signed by the Signing Intermediate CI Private Key.
+---------------------------------------------------------------------------------------------------------+
| **Intermediate CI Key (GPG)** |
+==================================================+======================================================+
| Root Intermediate CI Private Key (RSA 4096)# | Root Intermediate CI Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Signing Intermediate CI Private Key (RSA 4096) | Signing Intermediate CI Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Identity: “Intermediate CI Key <maintainers@spack.io>” |
+---------------------------------------------------------------------------------------------------------+
| Signatures: None |
+---------------------------------------------------------------------------------------------------------+
The *Root Intermediate CI Private Key* is stripped out of the GPG key and
stored offline completely separate from Spack's infrastructure. This allows the
core development team to append revocation certificates to the GPG key and
issue new sub-keys for use in the pipeline. It is our expectation that this
will happen on a semi-regular basis. A corollary of this is that *this key
should not be used to verify package integrity outside the internal CI process.*
----------------
Reputational Key
----------------
The Reputational Key is the public facing key used to sign complete groups of
development and release packages. Only one key pair exists in this class of
keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
used to verify package integrity. At the end of a develop or release pipeline, a
final pipeline job pulls down all signed package metadata built by the pipeline,
verifies they were signed with an Intermediate CI Key, then strips the
Intermediate CI Key signature from each package and re-signs them with the
Signing Reputational Private Key. The officially signed packages are then
uploaded back to the AWS S3 mirror. Please note that separating use of the
reputational key into this final job is done to prevent leakage of the key in a
spack package. Because the Signing Reputational Private Key is never exposed to
a build job it cannot accidentally end up in any built package.
+---------------------------------------------------------------------------------------------------------+
| **Reputational Key (GPG)** |
+==================================================+======================================================+
| Root Reputational Private Key (RSA 4096)# | Root Reputational Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Signing Reputational Private Key (RSA 4096) | Signing Reputational Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Identity: “Spack Project <maintainers@spack.io>” |
+---------------------------------------------------------------------------------------------------------+
| Signatures: Signed by core development team [#f1]_ |
+---------------------------------------------------------------------------------------------------------+
The Root Reputational Private Key is stripped out of the GPG key and stored
offline completely separate from Spack's infrastructure. This allows the core
development team to append revocation certificates to the GPG key in the
unlikely event that the Signing Reputational Private Key is compromised. In
general, it is the expectation that rotating this key will happen infrequently if
at all. This should allow relatively transparent verification for the end-user
community without needing deep familiarity with GnuPG or Public Key
Infrastructure.
.. _build_cache_format:
------------------
Build Cache Format
------------------
A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built as well as the compiler
used to build it and the package's name and version. For example::
linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::
-----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA512
{
"spec": {
<concrete-spec-contents-omitted>
},
"buildcache_layout_version": 1,
"binary_cache_checksum": {
"hash_algorithm": "sha256",
"hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
},
"buildinfo": {
"relative_prefix":
"linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
"relative_rpaths": false
}
}
-----BEGIN PGP SIGNATURE-----
iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
j3DXty57
=3gvm
-----END PGP SIGNATURE-----
If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
gpg, as follows::
$ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
The metadata (regardless of whether it is signed or unsigned) contains the checksum
of the ``.spack`` file containing the actual installation. The checksum should
be compared to a checksum computed locally on the ``.spack`` file to ensure the
contents have not changed since the binary spec plus metadata were signed. The
``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::
build_cache/
# unsigned metadata (for indexing, contains sha256 of .spack file)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
# clearsigned metadata (same as above, but signed)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
<arch>/
<compiler>/
<name>-<ver>/
# tar.gz-compressed prefix (may support more compression formats later)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.
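To make the checksum comparison concrete, verifying the ``zlib`` example above
by hand would look roughly as follows (a sketch; directory prefixes under
``build_cache/`` are omitted and output is abbreviated)::

$ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
$ grep '"hash"' linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
"hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
$ sha256sum linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spack
4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423  linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spack

If the checksum reported by ``sha256sum`` matches the ``hash`` field recorded in
the (already verified) metadata, the archive is the one that was signed.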
.. _internal_implementation:
-----------------------
Internal Implementation
-----------------------
The technical implementation of the pipeline signing process includes components
defined in Amazon Web Services, the Kubernetes cluster, at affiliated
institutions, and the GitLab/GitLab Runner deployment. We present the technical
implementation in two interdependent sections. The first addresses how secrets
are managed through the lifecycle of a develop or release pipeline. The second
section describes how Gitlab Runner and pipelines are configured and managed to
support secure automated signing.
Secrets Management
^^^^^^^^^^^^^^^^^^
As stated above the Root Private Keys (intermediate and reputational)
are stripped from the GPG keys and stored outside Spack's
infrastructure.
.. warning::
**TODO**
- Explanation here about where and how access is handled for these keys.
- Both Root private keys are protected with strong passwords
- Who has access to these and how?
**Intermediate CI Key**
-----------------------
Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
run in AWS, and one key for each affiliated institution (e.g. University of
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:
The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
exported as an ASCII-armored file and stored in a Kubernetes secret called
``spack-intermediate-ci-signing-key``. For convenience's sake, this same secret
contains an ASCII-armored export of just the *public* components of the
Reputational Key. This secret also contains the *public* components of each of
the affiliated institutions' Intermediate CI Key. These are potentially needed
to verify dependent packages which may have been found in the public mirror or
built by a protected job running on an affiliated institution's infrastructure
in an earlier stage of the pipeline.
Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
the following way:
1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
a job tagged ``protected`` from a protected GitLab branch. (See
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
2. Based on its configuration, the runner creates a job Pod in the
pipeline namespace and mounts the spack-intermediate-ci-signing-key
Kubernetes secret into the build container
3. The Intermediate CI Key, affiliated institutions' public key and the
Reputational Public Key are imported into a keyring by the ``spack gpg …``
sub-command. This is initiated by the job's build script, which is created by
the generate job at the beginning of the pipeline.
4. Assuming the package has dependencies, those specs are verified using
the keyring.
5. The package is built and the spec.json is generated.
6. The spec.json is signed by the keyring and uploaded to the mirror's
build cache.
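In rough terms, steps 3 and 4 amount to something like the following inside the
build container (a sketch; the mount path and key file names are illustrative,
not the actual pipeline configuration)::

$ spack gpg trust /mnt/keys/intermediate-ci-signing-key.pub
$ spack gpg trust /mnt/keys/reputational-key.pub
$ spack gpg list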
**Reputational Key**
--------------------
Because of the increased impact to end users in the case of a private
key breach, the Reputational Key is managed separately from the
Intermediate CI Keys and has additional controls. First, the Reputational
Key was generated outside of Spack's infrastructure and has been signed
by the core development team. The Reputational Key (along with the
Signing Reputational Private Key) was then exported to an ASCII-armored
file. Unlike the Intermediate CI Key, this exported file is not stored as
a base64-encoded secret in Kubernetes. Instead, *the key file
itself* is encrypted and stored in Kubernetes as the
``spack-signing-key-encrypted`` secret in the pipeline namespace.
The encryption of the exported Reputational Key (including the Signing
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
keys
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
The private key material is decrypted and imported at the time of signing into a
memory mounted temporary directory holding the keychain. The signing job uses
the `AWS Encryption SDK
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
decrypt the key is granted to the job Pod through a Kubernetes service account
specifically used for this, and only this, function. Finally, for convenience's
sake, this same secret contains an ASCII-armored export of the *public*
components of the Intermediate CI Keys and the Reputational Key. This allows the
signing script to verify that packages were built by the pipeline (both on AWS
or at affiliated institutions), or signed previously as a part of a different
pipeline. This is done *before* decrypting and importing the
Signing Reputational Private Key material and officially signing the packages.
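The decrypt-and-import step described here (see also step 9 below) looks
roughly like the following; this is a sketch, and the input file, key ARN, and
keyring location are illustrative only::

$ aws-encryption-cli --decrypt -S \
      --input /secrets/signing-key.encrypted \
      --wrapping-keys key=$KMS_KEY_ARN \
      --output - | gpg --homedir /dev/shm/keyring --import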
Procedurally the ``spack-signing-key-encrypted`` secret is used in the
following way:
1. The ``spack-package-signing-gitlab-runner`` protected runner picks
up a job tagged ``notary`` from a protected GitLab branch (See
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
2. Based on its configuration, the runner creates a job pod in the
pipeline namespace. The job runs in a stripped-down, purpose-built
Docker image, ``ghcr.io/spack/notary:latest``. The runner is
configured to only allow running jobs with this image.
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
a path on disk. Note that this becomes several files on disk: the
public components of the Intermediate CI Keys, the public components
of the Reputational Key, and an AWS KMS encrypted file containing the
Signing Reputational Private Key.
4. In addition to the secret, the runner creates a tmpfs memory mounted
directory where the GnuPG keyring will be created to verify, and
then resign the package specs.
5. The job script syncs all spec.json.sig files from the build cache to
a working directory in the job's execution environment.
6. The job script then runs the ``sign.sh`` script built into the
notary Docker image.
7. The ``sign.sh`` script imports the public components of the
Reputational and Intermediate CI Keys and uses them to verify good
signatures on the spec.json.sig files. If any signed spec does not
verify, the job immediately fails.
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
the spec json data from the signed file in preparation for being
re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to
standard out using ``aws-encryption-cli`` directly into a ``gpg
--import …`` statement, which imports the key into the
keyring mounted in-memory.
10. The private key is then used to sign each of the json specs and the
keyring is removed from disk.
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
public signing of the packages for the develop or release pipeline
that created them is complete.
Non service-account access to the private components of the Reputational
Key is managed through access to the symmetric secret in KMS used
to encrypt the data key (which in turn is used to encrypt the GnuPG key;
see the `Encryption SDK
Documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
A small trusted subset of the core development team are the only
individuals with access to this symmetric key.
.. _protected_runners:
Protected Runners and Reserved Tags
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack has a large number of GitLab Runners operating in its build farm.
These include runners deployed in the AWS Kubernetes cluster as well as
runners deployed at affiliated institutions. The majority of runners are
shared runners that operate across projects in gitlab.spack.io. These
runners pick up jobs primarily from the spack/spack project and execute
them in PR pipelines.
A small number of runners operating on AWS and at affiliated institutions are
registered as specific *protected* runners on the spack/spack project. In
addition to protected runners, there are protected branches on the spack/spack
project. These are the ``develop`` branch, any release branch (i.e. managed with
the ``releases/v*`` wildcard), and any tag branch (managed with the ``v*``
wildcard). Finally, Spack's pipeline generation code reserves certain tags to make
sure jobs are routed to the correct runners; these tags are ``public``,
``protected``, and ``notary``. Understanding how all this works together to
protect secrets and provide integrity assurances can be a little confusing, so
let's break these down:
- **Protected Branches** - Protected branches in Spack prevent anyone
other than Maintainers in GitLab from pushing code. In the case of
Spack, the only Maintainer-level entity pushing code to protected
branches is Spack bot. Protecting branches also marks them in such a
way that Protected Runners will only run jobs from those branches.
- **Protected Runners** - Protected Runners only run jobs from protected
branches. Because protected runners have access to secrets, it's critical
that they not run Jobs from untrusted code (i.e. PR branches). If they did it
would be possible for a PR branch to tag a job in such a way that a protected
runner executed that job and mounted secrets into a code execution
environment that had not been reviewed by Spack maintainers. Note however
that in the absence of tagging used to route jobs, public runners *could* run
jobs from protected branches. No secrets would be at risk of being breached
because non-protected runners do not have access to those secrets; lack of
secrets would, however, cause the jobs to fail.
- **Reserved Tags** - To mitigate the issue of public runners picking up
protected jobs, Spack uses a small set of "reserved" job tags (note that these
are *job* tags, not git tags). These tags are ``public``, ``protected``, and
``notary``. The majority of jobs executed in Spack's GitLab instance are
executed via a ``generate`` job. The generate job code systematically ensures
that no user-defined configuration sets these tags. Instead, the ``generate``
job sets these tags based on rules related to the branch where this pipeline
originated. If the job is a part of a pipeline on a PR branch, it sets the
``public`` tag. If the job is part of a pipeline on a protected branch, it
sets the ``protected`` tag. Finally, if the job is the package signing job and
it is running on a pipeline that is part of a protected branch, then it sets
the ``notary`` tag.
Protected Runners are configured to only run jobs from protected branches. Only
jobs running in pipelines on protected branches are tagged with ``protected`` or
``notary`` tags. This tightly couples jobs on protected branches to protected
runners that provide access to the secrets required to sign the built packages.
The secrets can **only** be accessed via:
1. Runners under direct control of the core development team.
2. Runners under direct control of trusted maintainers at affiliated institutions.
3. By code running the automated pipeline that has been reviewed by the
Spack maintainers and judged to be appropriate.
Other attempts (either through malicious intent or incompetence) can at
worst grab jobs intended for protected runners, which will cause those
jobs to fail, alerting both Spack maintainers and the core development
team.
.. [#f1]
The Reputational Key has also cross signed core development team
keys.

lib/spack/env/cc

@@ -416,14 +416,30 @@ input_command="$*"
# The lists are all bell-separated to be as flexible as possible, as their
# contents may come from the command line, from ' '-separated lists,
# ':'-separated lists, etc.
include_dirs_list=""
lib_dirs_list=""
rpath_dirs_list=""
system_include_dirs_list=""
system_lib_dirs_list=""
system_rpath_dirs_list=""
isystem_system_include_dirs_list=""
isystem_include_dirs_list=""
libs_list=""
other_args_list=""
# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no
# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no
parse_Wl() {
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
append system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
append rpath_dirs_list "$1"
fi
wl_expect_rpath=no
else
@@ -433,9 +449,9 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
append system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
append rpath_dirs_list "$arg"
fi
;;
--rpath=*)
@@ -443,9 +459,9 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
append system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
@@ -459,7 +475,7 @@ parse_Wl() {
return 1
;;
*)
append return_other_args_list "-Wl,$1"
append other_args_list "-Wl,$1"
;;
esac
fi
@@ -467,210 +483,177 @@ parse_Wl() {
done
}
categorize_arguments() {
unset IFS
while [ $# -ne 0 ]; do
return_other_args_list=""
return_isystem_was_used=""
return_isystem_system_include_dirs_list=""
return_isystem_include_dirs_list=""
return_system_include_dirs_list=""
return_include_dirs_list=""
return_system_lib_dirs_list=""
return_lib_dirs_list=""
return_system_rpath_dirs_list=""
return_rpath_dirs_list=""
# an RPATH to be added after the case statement.
rp=""
# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no
# Multiple consecutive spaces in the command line can
# result in blank arguments
if [ -z "$1" ]; then
shift
continue
fi
# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no
while [ $# -ne 0 ]; do
# an RPATH to be added after the case statement.
rp=""
# Multiple consecutive spaces in the command line can
# result in blank arguments
if [ -z "$1" ]; then
shift
continue
fi
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append return_other_args_list \"\$1\"
shift
continue
;;
esac
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi
replaced="$after$stripped"
# it matched, remove it
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append other_args_list \"\$1\"
shift
if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi
# re-build argument list with replacement
set -- "$replaced" "$@"
done
fi
case "$1" in
-isystem*)
arg="${1#-isystem}"
return_isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_isystem_system_include_dirs_list "$arg"
else
append return_isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_system_include_dirs_list "$arg"
else
append return_include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_system_lib_dirs_list "$arg"
else
append return_lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
# and passed by ifx to the linker, which confuses it with a
# library. Filter it out.
# TODO: generalize filtering of args with an env var, so that
# TODO: we do not have to special case this here.
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
&& [ "$1" != "${1#-loopopt}" ]; then
shift
continue
fi
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
append return_other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
if ! parse_Wl ${1#-Wl,}; then
append return_other_args_list "$1"
fi
unset IFS
;;
-Xlinker)
shift
if [ $# -eq 0 ]; then
# -Xlinker without value: let the compiler error about it.
append return_other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append return_other_args_list -Xlinker
append return_other_args_list "$1"
;;
esac
fi
;;
"$dtags_to_strip")
;;
*)
append return_other_args_list "$1"
continue
;;
esac
shift
done
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append return_other_args_list -Xlinker
append return_other_args_list -rpath
replaced="$after$stripped"
# it matched, remove it
shift
if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi
# re-build argument list with replacement
set -- "$replaced" "$@"
done
fi
# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append return_other_args_list -Wl,-rpath
fi
}
case "$1" in
-isystem*)
arg="${1#-isystem}"
isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append isystem_system_include_dirs_list "$arg"
else
append isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append system_include_dirs_list "$arg"
else
append include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append system_lib_dirs_list "$arg"
else
append lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
# and passed by ifx to the linker, which confuses it with a
# library. Filter it out.
# TODO: generalize filtering of args with an env var, so that
# TODO: we do not have to special case this here.
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
&& [ "$1" != "${1#-loopopt}" ]; then
shift
continue
fi
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
append other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
if ! parse_Wl ${1#-Wl,}; then
append other_args_list "$1"
fi
unset IFS
;;
-Xlinker)
shift
if [ $# -eq 0 ]; then
# -Xlinker without value: let the compiler error about it.
append other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append system_rpath_dirs_list "$1"
else
append rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append other_args_list -Xlinker
append other_args_list "$1"
;;
esac
fi
;;
"$dtags_to_strip")
;;
*)
append other_args_list "$1"
;;
esac
shift
done
categorize_arguments "$@"
include_dirs_list="$return_include_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
isystem_was_used="$return_isystem_was_used"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
other_args_list="$return_other_args_list"
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append other_args_list -Xlinker
append other_args_list -rpath
fi
# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append other_args_list -Wl,-rpath
fi
#
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -690,14 +673,12 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
extend flags_list SPACK_DEBUG_FLAGS
fi
spack_flags_list=""
# Fortran flags come before CPPFLAGS
case "$mode" in
cc|ccld)
case $lang_flags in
F)
extend spack_flags_list SPACK_FFLAGS
extend flags_list SPACK_FFLAGS
;;
esac
;;
@@ -706,7 +687,7 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
cpp|as|cc|ccld)
extend spack_flags_list SPACK_CPPFLAGS
extend flags_list SPACK_CPPFLAGS
;;
esac
@@ -716,10 +697,10 @@ case "$mode" in
cc|ccld)
case $lang_flags in
C)
extend spack_flags_list SPACK_CFLAGS
extend flags_list SPACK_CFLAGS
;;
CXX)
extend spack_flags_list SPACK_CXXFLAGS
extend flags_list SPACK_CXXFLAGS
;;
esac
@@ -731,25 +712,10 @@ esac
# Linker flags
case "$mode" in
ld|ccld)
extend spack_flags_list SPACK_LDFLAGS
extend flags_list SPACK_LDFLAGS
;;
esac
IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS
spack_flags_include_dirs_list="$return_include_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
spack_flags_other_args_list="$return_other_args_list"
# On macOS insert headerpad_max_install_names linker flag
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
@@ -775,8 +741,6 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
extend lib_dirs_list SPACK_LINK_DIRS
fi
libs_list=""
# add RPATHs if we're in in any linking mode
case "$mode" in
ld|ccld)
@@ -805,16 +769,12 @@ args_list="$flags_list"
# Insert include directories just prior to any system include directories
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_flags_include_dirs_list "-I"
extend args_list include_dirs_list "-I"
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"
case "$mode" in
cpp|cc|as|ccld)
if [ "$spack_flags_isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
elif [ "$isystem_was_used" = "true" ]; then
if [ "$isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
else
extend args_list SPACK_INCLUDE_DIRS "-I"
@@ -822,15 +782,11 @@ case "$mode" in
;;
esac
extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
# Library search paths
extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"
# RPATHs arguments
@@ -839,25 +795,20 @@ case "$mode" in
if [ -n "$dtags_to_add" ] ; then
append args_list "$linker_arg$dtags_to_add"
fi
extend args_list spack_flags_rpath_dirs_list "$rpath"
extend args_list rpath_dirs_list "$rpath"
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
extend args_list system_rpath_dirs_list "$rpath"
;;
ld)
if [ -n "$dtags_to_add" ] ; then
append args_list "$dtags_to_add"
fi
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
extend args_list rpath_dirs_list "-rpath${lsep}"
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
extend args_list system_rpath_dirs_list "-rpath${lsep}"
;;
esac
# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list
# Inject SPACK_LDLIBS, if supplied
extend args_list libs_list "-l"
@@ -913,4 +864,3 @@ fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list

View File

@@ -760,12 +760,13 @@ def hashes_to_prefixes(spec):
}
def get_buildinfo_dict(spec):
def get_buildinfo_dict(spec, rel=False):
"""Create metadata for a tarball"""
manifest = get_buildfile_manifest(spec)
return {
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
"relative_rpaths": rel,
"buildpath": spack.store.layout.root,
"spackprefix": spack.paths.prefix,
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
@@ -1208,6 +1209,9 @@ class PushOptions(NamedTuple):
#: Overwrite existing tarball/metadata files in buildcache
force: bool = False
#: Whether to use relative RPATHs
relative: bool = False
#: Allow absolute paths to package prefixes when creating a tarball
allow_root: bool = False
@@ -1277,17 +1281,41 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
raise NoOverwriteException(url_util.format(remote_specfile_path))
pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
workdir = os.path.join(stage_dir, pkg_dir)
binaries_dir = spec.prefix
# TODO: We generally don't want to mutate any files, but when using relative
# mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
# For now, we only make a full copy of the spec prefix when in relative mode.
if options.relative:
# tarfile is used because it preserves hardlinks etc. best.
binaries_dir = workdir
temp_tarfile_name = tarball_name(spec, ".tar")
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
tar.add(name="%s" % spec.prefix, arcname=".")
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
tar.extractall(workdir)
os.remove(temp_tarfile_path)
else:
binaries_dir = spec.prefix
# create info for later relocation and create tar
buildinfo = get_buildinfo_dict(spec)
buildinfo = get_buildinfo_dict(spec, options.relative)
if not options.allow_root:
# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
if options.relative:
make_package_relative(workdir, spec, buildinfo, options.allow_root)
elif not options.allow_root:
ensure_package_relocatable(buildinfo, binaries_dir)
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
# remove copy of install directory
if options.relative:
shutil.rmtree(workdir)
# get the sha256 checksum of the tarball
checksum = checksum_tarball(tarfile_path)
@@ -1308,6 +1336,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
# This will be used to determine if the directory layout has changed.
buildinfo = {}
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
buildinfo["relative_rpaths"] = options.relative
spec_dict["buildinfo"] = buildinfo
with open(specfile_path, "w") as outfile:
@@ -1567,6 +1596,35 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
return None
def make_package_relative(workdir, spec, buildinfo, allow_root):
"""
Change paths in binaries to relative paths. Change absolute symlinks
to relative symlinks.
"""
prefix = spec.prefix
old_layout_root = buildinfo["buildpath"]
orig_path_names = list()
cur_path_names = list()
for filename in buildinfo["relocate_binaries"]:
orig_path_names.append(os.path.join(prefix, filename))
cur_path_names.append(os.path.join(workdir, filename))
platform = spack.platforms.by_name(spec.platform)
if "macho" in platform.binary_formats:
relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
if "elf" in platform.binary_formats:
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
orig_path_names = list()
cur_path_names = list()
for linkname in buildinfo.get("relocate_links", []):
orig_path_names.append(os.path.join(prefix, linkname))
cur_path_names.append(os.path.join(workdir, linkname))
relocate.make_link_relative(cur_path_names, orig_path_names)
def ensure_package_relocatable(buildinfo, binaries_dir):
"""Check if package binaries are relocatable."""
binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
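
make_package_relative above rewrites absolute symlinks in the staged copy so the tarball survives being installed under a different root. A self-contained sketch of that rewrite, with a hypothetical list of link paths standing in for the buildinfo manifest:

import os

def make_links_relative(links):
    # rewrite each absolute symlink as one relative to its own directory
    for link in links:
        target = os.readlink(link)
        if os.path.isabs(target):
            rel = os.path.relpath(target, os.path.dirname(link))
            os.unlink(link)
            os.symlink(rel, link)

# e.g. /prefix/bin/tool -> /prefix/libexec/tool becomes bin/tool -> ../libexec/tool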

View File

@@ -175,12 +175,12 @@ def black_root_spec() -> str:
def flake8_root_spec() -> str:
"""Return the root spec used to bootstrap flake8"""
return _root_spec("py-flake8@3.8.2:")
return _root_spec("py-flake8")
def pytest_root_spec() -> str:
"""Return the root spec used to bootstrap flake8"""
return _root_spec("py-pytest@6.2.4:")
return _root_spec("py-pytest")
def ensure_environment_dependencies() -> None:

View File

@@ -1373,7 +1373,7 @@ def long_message(self):
test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
if os.path.isfile(test_log):
out.write("\nSee test log for details:\n")
out.write(" {0}\n".format(test_log))
out.write(" {0}n".format(test_log))
return out.getvalue()

View File

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import os
from typing import Tuple
@@ -14,24 +13,21 @@
from .cmake import CMakeBuilder, CMakePackage
def cmake_cache_path(name, value, comment="", force=False):
def cmake_cache_path(name, value, comment=""):
"""Generate a string for a cmake cache variable"""
force_str = " FORCE" if force else ""
return 'set({0} "{1}" CACHE PATH "{2}"{3})\n'.format(name, value, comment, force_str)
return 'set({0} "{1}" CACHE PATH "{2}")\n'.format(name, value, comment)
def cmake_cache_string(name, value, comment="", force=False):
def cmake_cache_string(name, value, comment=""):
"""Generate a string for a cmake cache variable"""
force_str = " FORCE" if force else ""
return 'set({0} "{1}" CACHE STRING "{2}"{3})\n'.format(name, value, comment, force_str)
return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)
def cmake_cache_option(name, boolean_value, comment="", force=False):
def cmake_cache_option(name, boolean_value, comment=""):
"""Generate a string for a cmake configuration option"""
value = "ON" if boolean_value else "OFF"
force_str = " FORCE" if force else ""
return 'set({0} {1} CACHE BOOL "{2}"{3})\n'.format(name, value, comment, force_str)
return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)
class CachedCMakeBuilder(CMakeBuilder):
@@ -67,34 +63,6 @@ def cache_name(self):
def cache_path(self):
return os.path.join(self.pkg.stage.source_path, self.cache_name)
# Implement a version of the define_from_variant for Cached packages
def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
"""Return a Cached CMake field from the given variant's value.
See define_from_variant in lib/spack/spack/build_systems/cmake.py package
"""
if variant is None:
variant = cmake_var.lower()
if variant not in self.pkg.variants:
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
if variant not in self.pkg.spec.variants:
return ""
value = self.pkg.spec.variants[variant].value
field = None
if isinstance(value, bool):
field = cmake_cache_option(cmake_var, value, comment)
else:
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
value = ";".join(str(v) for v in value)
else:
value = str(value)
field = cmake_cache_string(cmake_var, value, comment)
return field
def initconfig_compiler_entries(self):
# This will tell cmake to use the Spack compiler wrappers when run
# through Spack, but use the underlying compiler when run outside of
@@ -162,17 +130,6 @@ def initconfig_compiler_entries(self):
libs_string = libs_format_string.format(lang)
entries.append(cmake_cache_string(libs_string, libs_flags))
# Set the generator in the cached config
if self.spec.satisfies("generator=make"):
entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
if self.spec.satisfies("generator=ninja"):
entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
entries.append(
cmake_cache_string(
"CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
)
)
return entries
def initconfig_mpi_entries(self):
@@ -238,57 +195,26 @@ def initconfig_hardware_entries(self):
"#------------------{0}\n".format("-" * 60),
]
# Provide standard CMake arguments for dependent CachedCMakePackages
if spec.satisfies("^cuda"):
entries.append("#------------------{0}".format("-" * 30))
entries.append("# Cuda")
entries.append("#------------------{0}\n".format("-" * 30))
cudatoolkitdir = spec["cuda"].prefix
entries.append(cmake_cache_path("CUDAToolkit_ROOT", cudatoolkitdir))
entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", "${CUDAToolkit_ROOT}/bin/nvcc"))
entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
# Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
archs = spec.variants["cuda_arch"].value
if archs[0] != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
)
if "+rocm" in spec:
entries.append("#------------------{0}".format("-" * 30))
entries.append("# ROCm")
entries.append("#------------------{0}\n".format("-" * 30))
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
entries.append(
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
)
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
)
entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))
return entries
def std_initconfig_entries(self):
cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
return [
"#------------------{0}".format("-" * 60),
"# !!!! This is a generated file, edit at own risk !!!!",
"#------------------{0}".format("-" * 60),
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
"#------------------{0}\n".format("-" * 60),
cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
]
def initconfig_package_entries(self):
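
For reference, the force parameter being removed above only appends the FORCE keyword to the generated cache entry; reusing the cmake_cache_string definition from this hunk:

def cmake_cache_string(name, value, comment="", force=False):
    force_str = " FORCE" if force else ""
    return 'set({0} "{1}" CACHE STRING "{2}"{3})\n'.format(name, value, comment, force_str)

print(cmake_cache_string("CMAKE_GENERATOR", "Ninja"), end="")
# set(CMAKE_GENERATOR "Ninja" CACHE STRING "")
print(cmake_cache_string("CMAKE_GENERATOR", "Ninja", force=True), end="")
# set(CMAKE_GENERATOR "Ninja" CACHE STRING "" FORCE)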

View File

@@ -5,7 +5,6 @@
import collections.abc
import inspect
import os
import pathlib
import platform
import re
import sys
@@ -16,6 +15,7 @@
import spack.build_environment
import spack.builder
import spack.package_base
import spack.util.path
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
@@ -271,7 +271,7 @@ def std_args(pkg, generator=None):
args = [
"-G",
generator,
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
define("CMAKE_INSTALL_PREFIX", pkg.prefix),
define("CMAKE_BUILD_TYPE", build_type),
define("BUILD_TESTING", pkg.run_tests),
]
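
The pathlib change above matters mainly on Windows, where a native prefix contains backslashes that CMake would mis-read as escape sequences. A quick illustration with PureWindowsPath so the snippet runs on any platform (the prefix path is hypothetical):

import pathlib

prefix = pathlib.PureWindowsPath(r"C:\spack\opt\zlib-1.2.13")
print(str(prefix))        # C:\spack\opt\zlib-1.2.13
print(prefix.as_posix())  # C:/spack/opt/zlib-1.2.13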

View File

@@ -121,7 +121,7 @@ def setup_run_environment(self, env):
$ source {prefix}/{component}/{version}/env/vars.sh
"""
# Only if environment modifications are desired (default is +envmods)
if "~envmods" not in self.spec:
if "+envmods" in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
join_path(self.component_prefix, "env", "vars.sh")
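
A note on the two conditions above: on a fully concrete spec they agree, but when the variant is absent altogether (for example on an abstract spec), "+envmods" in spec stays false while "~envmods" not in spec stays true. A sketch, assuming a Spack checkout on the Python path:

import spack.spec

abstract = spack.spec.Spec("intel-oneapi-mkl")  # envmods not mentioned at all
print("+envmods" in abstract)      # False: the variant is not constrained to true
print("~envmods" not in abstract)  # True: nothing forbids envmods either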

View File

@@ -180,51 +180,6 @@ def test(self):
work_dir="spack-test",
)
def update_external_dependencies(self, extendee_spec=None):
"""
Ensure all external python packages have a python dependency
If another package in the DAG depends on python, we use that
python for the dependency of the external. If not, we assume
that the external PythonPackage is installed into the same
directory as the python it depends on.
"""
# TODO: Include this in the solve, rather than instantiating post-concretization
if "python" not in self.spec:
if extendee_spec:
python = extendee_spec
elif "python" in self.spec.root:
python = self.spec.root["python"]
else:
python = self.get_external_python_for_prefix()
if not python.concrete:
repo = spack.repo.path.repo_for_pkg(python)
python.namespace = repo.namespace
# Ensure architecture information is present
if not python.architecture:
host_platform = spack.platforms.host()
host_os = host_platform.operating_system("default_os")
host_target = host_platform.target("default_target")
python.architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
else:
if not python.architecture.platform:
python.architecture.platform = spack.platforms.host()
if not python.architecture.os:
python.architecture.os = "default_os"
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name
# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler
python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
class PythonPackage(PythonExtension):
"""Specialized class for packages that are built using pip."""
@@ -270,6 +225,51 @@ def list_url(cls):
name = cls.pypi.split("/")[0]
return "https://pypi.org/simple/" + name + "/"
def update_external_dependencies(self, extendee_spec=None):
"""
Ensure all external python packages have a python dependency
If another package in the DAG depends on python, we use that
python for the dependency of the external. If not, we assume
that the external PythonPackage is installed into the same
directory as the python it depends on.
"""
# TODO: Include this in the solve, rather than instantiating post-concretization
if "python" not in self.spec:
if extendee_spec:
python = extendee_spec
elif "python" in self.spec.root:
python = self.spec.root["python"]
else:
python = self.get_external_python_for_prefix()
if not python.concrete:
repo = spack.repo.path.repo_for_pkg(python)
python.namespace = repo.namespace
# Ensure architecture information is present
if not python.architecture:
host_platform = spack.platforms.host()
host_os = host_platform.operating_system("default_os")
host_target = host_platform.target("default_target")
python.architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
else:
if not python.architecture.platform:
python.architecture.platform = spack.platforms.host()
if not python.architecture.os:
python.architecture.os = "default_os"
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name
# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler
python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
def get_external_python_for_prefix(self):
"""
For an external package that extends python, find the most likely spec for the python

View File

@@ -7,7 +7,7 @@
import llnl.util.lang as lang
from spack.directives import extends
from spack.directives import extends, maintainers
from .generic import GenericBuilder, Package
@@ -71,6 +71,8 @@ class RPackage(Package):
GenericBuilder = RBuilder
maintainers("glennpj")
#: This attribute is used in UI queries that need to know the build
#: system base class
build_system_class = "RPackage"

View File

@@ -10,7 +10,6 @@
from llnl.util.filesystem import find, join_path, working_dir
import spack.builder
import spack.install_test
import spack.package_base
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when
@@ -31,8 +30,8 @@ class SIPPackage(spack.package_base.PackageBase):
#: Name of private sip module to install alongside package
sip_module = "sip"
#: Callback names for install-time testing
install_time_test_callbacks = ["test_imports"]
#: Callback names for install-time test
install_time_test_callbacks = ["test"]
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "sip"
@@ -88,20 +87,18 @@ def python(self, *args, **kwargs):
"""The python ``Executable``."""
inspect.getmodule(self).python(*args, **kwargs)
def test_imports(self):
def test(self):
"""Attempts to import modules of the installed package."""
# Make sure we are importing the installed modules,
# not the ones in the source directory
python = inspect.getmodule(self).python
for module in self.import_modules:
with spack.install_test.test_part(
self,
"test_imports_{0}".format(module),
self.run_test(
inspect.getmodule(self).python.path,
["-c", "import {0}".format(module)],
purpose="checking import of {0}".format(module),
work_dir="spack-test",
):
python("-c", "import {0}".format(module))
)
@spack.builder.builder("sip")

View File

@@ -751,7 +751,7 @@ def generate_gitlab_ci_yaml(
env.concretize()
env.write()
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
yaml_root = ev.config_dict(env.manifest)
# Get the joined "ci" config with all of the current scopes resolved
ci_config = cfg.get("ci")
@@ -946,7 +946,7 @@ def generate_gitlab_ci_yaml(
# Add config scopes to environment
env_includes = env_yaml_root["spack"].get("include", [])
cli_scopes = [
os.path.relpath(s.path, concrete_env_dir)
os.path.abspath(s.path)
for s in cfg.scopes().values()
if type(s) == cfg.ImmutableConfigScope
and s.path not in env_includes
@@ -1095,7 +1095,7 @@ def generate_gitlab_ci_yaml(
raise AttributeError
def main_script_replacements(cmd):
return cmd.replace("{env_dir}", rel_concrete_env_dir)
return cmd.replace("{env_dir}", concrete_env_dir)
job_object["script"] = _unpack_script(
job_object["script"], op=main_script_replacements

View File

@@ -43,6 +43,13 @@ def setup_parser(subparser):
subparsers = subparser.add_subparsers(help="buildcache sub-commands")
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
# TODO: remove from Spack 0.21
push.add_argument(
"-r",
"--rel",
action="store_true",
help="make all rpaths relative before creating tarballs. (deprecated)",
)
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
push.add_argument(
"-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
@@ -56,7 +63,37 @@ def setup_parser(subparser):
push.add_argument(
"-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
)
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.")
output = push.add_mutually_exclusive_group(required=False)
# TODO: remove from Spack 0.21
output.add_argument(
"-d",
"--directory",
metavar="directory",
dest="mirror_flag",
type=arguments.mirror_directory,
help="local directory where buildcaches will be written. (deprecated)",
)
# TODO: remove from Spack 0.21
output.add_argument(
"-m",
"--mirror-name",
metavar="mirror-name",
dest="mirror_flag",
type=arguments.mirror_name,
help="name of the mirror where buildcaches will be written. (deprecated)",
)
# TODO: remove from Spack 0.21
output.add_argument(
"--mirror-url",
metavar="mirror-url",
dest="mirror_flag",
type=arguments.mirror_url,
help="URL of the mirror where buildcaches will be written. (deprecated)",
)
# Unfortunately we cannot add this to the mutually exclusive group above,
# because we have further positional arguments.
# TODO: require from Spack 0.21
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
push.add_argument(
"--update-index",
"--rebuild-index",
@@ -90,6 +127,13 @@ def setup_parser(subparser):
install.add_argument(
"-m", "--multiple", action="store_true", help="allow all matching packages "
)
# TODO: remove from Spack 0.21
install.add_argument(
"-a",
"--allow-root",
action="store_true",
help="allow install root string in binary files after RPATH substitution. (deprecated)",
)
install.add_argument(
"-u",
"--unsigned",
@@ -224,21 +268,75 @@ def setup_parser(subparser):
# Sync buildcache entries from one mirror to another
sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
sync.add_argument(
"--manifest-glob", help="A quoted glob pattern identifying copy manifest files"
"--manifest-glob",
default=None,
help="A quoted glob pattern identifying copy manifest files",
)
sync.add_argument(
source = sync.add_mutually_exclusive_group(required=False)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-directory",
metavar="DIRECTORY",
dest="src_mirror_flag",
type=arguments.mirror_directory,
help="Source mirror as a local file path (deprecated)",
)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-mirror-name",
metavar="MIRROR_NAME",
dest="src_mirror_flag",
type=arguments.mirror_name,
help="Name of the source mirror (deprecated)",
)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-mirror-url",
metavar="MIRROR_URL",
dest="src_mirror_flag",
type=arguments.mirror_url,
help="URL of the source mirror (deprecated)",
)
# TODO: only support this in 0.21
source.add_argument(
"src_mirror",
metavar="source mirror",
type=arguments.mirror_name_or_url,
nargs="?",
help="Source mirror name, path, or URL",
nargs="?",
)
sync.add_argument(
dest = sync.add_mutually_exclusive_group(required=False)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-directory",
metavar="DIRECTORY",
dest="dest_mirror_flag",
type=arguments.mirror_directory,
help="Destination mirror as a local file path (deprecated)",
)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-mirror-name",
metavar="MIRROR_NAME",
type=arguments.mirror_name,
dest="dest_mirror_flag",
help="Name of the destination mirror (deprecated)",
)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-mirror-url",
metavar="MIRROR_URL",
dest="dest_mirror_flag",
type=arguments.mirror_url,
help="URL of the destination mirror (deprecated)",
)
# TODO: only support this in 0.21
dest.add_argument(
"dest_mirror",
metavar="destination mirror",
type=arguments.mirror_name_or_url,
nargs="?",
help="Destination mirror name, path, or URL",
nargs="?",
)
sync.set_defaults(func=sync_fn)
@@ -246,8 +344,39 @@ def setup_parser(subparser):
update_index = subparsers.add_parser(
"update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
)
update_index.add_argument(
"mirror", type=arguments.mirror_name_or_url, help="Destination mirror name, path, or URL"
update_index_out = update_index.add_mutually_exclusive_group(required=True)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"-d",
"--directory",
metavar="directory",
dest="mirror_flag",
type=arguments.mirror_directory,
help="local directory where buildcaches will be written (deprecated)",
)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"-m",
"--mirror-name",
metavar="mirror-name",
dest="mirror_flag",
type=arguments.mirror_name,
help="name of the mirror where buildcaches will be written (deprecated)",
)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"--mirror-url",
metavar="mirror-url",
dest="mirror_flag",
type=arguments.mirror_url,
help="URL of the mirror where buildcaches will be written (deprecated)",
)
# TODO: require from Spack 0.21
update_index_out.add_argument(
"mirror",
type=arguments.mirror_name_or_url,
help="Destination mirror name, path, or URL",
nargs="?",
)
update_index.add_argument(
"-k",
@@ -307,12 +436,32 @@ def _concrete_spec_from_args(args):
def push_fn(args):
"""create a binary package and push it to a mirror"""
mirror = arguments.mirror_name_or_url(args.mirror)
if args.mirror_flag:
mirror = args.mirror_flag
elif not args.mirror:
raise ValueError("No mirror provided")
else:
mirror = arguments.mirror_name_or_url(args.mirror)
if args.mirror_flag:
tty.warn(
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)
if args.rel:
tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")
# TODO: remove this in 0.21. If we have mirror_flag, the first
# spec is in the positional mirror arg due to argparse limitations.
input_specs = args.specs
if args.mirror_flag and args.mirror:
input_specs.insert(0, args.mirror)
url = mirror.push_url
specs = bindist.specs_to_be_packaged(
_matching_specs(args.specs, args.spec_file),
_matching_specs(input_specs, args.spec_file),
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
@@ -337,6 +486,7 @@ def push_fn(args):
url,
bindist.PushOptions(
force=args.force,
relative=args.rel,
unsigned=args.unsigned,
allow_root=args.allow_root,
key=args.key,
@@ -374,6 +524,9 @@ def install_fn(args):
if not args.specs:
tty.die("a spec argument is required to install from a buildcache")
if args.allow_root:
tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")
query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
for match in matches:
@@ -557,11 +710,21 @@ def sync_fn(args):
manifest_copy(glob.glob(args.manifest_glob))
return 0
if args.src_mirror is None or args.dest_mirror is None:
tty.die("Provide mirrors to sync from and to.")
# If no manifest_glob, require a source and dest mirror.
# TODO: Simplify in Spack 0.21
if not (args.src_mirror_flag or args.src_mirror) or not (
args.dest_mirror_flag or args.dest_mirror
):
raise ValueError("Source and destination mirror are required.")
src_mirror = args.src_mirror
dest_mirror = args.dest_mirror
if args.src_mirror_flag or args.dest_mirror_flag:
tty.warn(
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)
src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror
src_mirror_url = src_mirror.fetch_url
dest_mirror_url = dest_mirror.push_url
@@ -640,7 +803,13 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
def update_index_fn(args):
"""Update a buildcache index."""
update_index(args.mirror, update_keys=args.keys)
if args.mirror_flag:
tty.warn(
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)
mirror = args.mirror_flag if args.mirror_flag else args.mirror
update_index(mirror, update_keys=args.keys)
def buildcache(parser, args):
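
The argparse pattern in this file is worth spelling out: the deprecated flags share one dest inside a mutually exclusive group, while the mirror positional stays outside it because further positionals (the specs) follow, so the combination is validated by hand. A standalone sketch of the quirk push_fn compensates for:

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument("-d", "--directory", dest="mirror_flag")
group.add_argument("-m", "--mirror-name", dest="mirror_flag")
parser.add_argument("mirror", nargs="?")  # kept outside the group
parser.add_argument("specs", nargs="*")

args = parser.parse_args(["-d", "/tmp/mirror", "zlib", "cmake"])
print(args.mirror, args.specs)  # zlib ['cmake']  <- first spec lands in `mirror`
input_specs = args.specs
if args.mirror_flag and args.mirror:
    input_specs.insert(0, args.mirror)  # the same fix-up done in push_fn above
print(input_specs)  # ['zlib', 'cmake']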

View File

@@ -228,7 +228,7 @@ def ci_reindex(args):
Use the active, gitlab-enabled environment to rebuild the buildcache
index for the associated mirror."""
env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
yaml_root = ev.config_dict(env.manifest)
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
tty.die("spack ci rebuild-index requires an env containing a mirror")

View File

@@ -349,7 +349,7 @@ def install_status():
"-I",
"--install-status",
action="store_true",
default=True,
default=False,
help="show install status of packages. packages can be: "
"installed [+], missing and needed by an installed package [-], "
"installed in and upstream instance [^], "
@@ -357,17 +357,6 @@ def install_status():
)
@arg
def no_install_status():
return Args(
"--no-install-status",
dest="install_status",
action="store_false",
default=True,
help="do not show install status annotations",
)
@arg
def no_checksum():
return Args(

View File

@@ -53,7 +53,7 @@ def setup_parser(subparser):
"--scope",
choices=scopes,
metavar=scopes_metavar,
default=None,
default=spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
@@ -106,21 +106,19 @@ def compiler_find(args):
def compiler_remove(args):
compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)
if not candidate_compilers:
tty.die("No compilers match spec %s" % compiler_spec)
if not args.all and len(candidate_compilers) > 1:
tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
cspec = spack.spec.CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
if not compilers:
tty.die("No compilers match spec %s" % cspec)
elif not args.all and len(compilers) > 1:
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
colify(reversed(sorted([c.spec.display_str for c in compilers])), indent=4)
tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
sys.exit(1)
for current_compiler in candidate_compilers:
spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
tty.msg(f"{current_compiler.spec.display_str} has been removed")
for compiler in compilers:
spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
tty.msg("Removed compiler %s" % compiler.spec.display_str)
def compiler_info(args):

View File

@@ -715,7 +715,7 @@ def __call__(self, stage, url):
output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
except ProcessError:
output = ""
lines = output.splitlines()
lines = output.split("\n")
# Determine the build system based on the files contained
# in the archive.
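
The splitlines()/split("\n") difference above is subtle: split("\n") leaves a trailing empty string for newline-terminated output (such as tar's) and does not handle "\r\n". A quick comparison:

output = "zlib-1.2.13/\nzlib-1.2.13/configure\n"
print(output.split("\n"))   # ['zlib-1.2.13/', 'zlib-1.2.13/configure', '']
print(output.splitlines())  # ['zlib-1.2.13/', 'zlib-1.2.13/configure']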

View File

@@ -79,12 +79,6 @@ def setup_parser(subparser):
read_cray_manifest.add_argument(
"--directory", default=None, help="specify a directory storing a group of manifest files"
)
read_cray_manifest.add_argument(
"--ignore-default-dir",
action="store_true",
default=False,
help="ignore the default directory of manifest files",
)
read_cray_manifest.add_argument(
"--dry-run",
action="store_true",
@@ -183,16 +177,11 @@ def external_read_cray_manifest(args):
manifest_directory=args.directory,
dry_run=args.dry_run,
fail_on_error=args.fail_on_error,
ignore_default_dir=args.ignore_default_dir,
)
def _collect_and_consume_cray_manifest_files(
manifest_file=None,
manifest_directory=None,
dry_run=False,
fail_on_error=False,
ignore_default_dir=False,
manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
):
manifest_files = []
if manifest_file:
@@ -202,7 +191,7 @@ def _collect_and_consume_cray_manifest_files(
if manifest_directory:
manifest_dirs.append(manifest_directory)
if not ignore_default_dir and os.path.isdir(cray_manifest.default_path):
if os.path.isdir(cray_manifest.default_path):
tty.debug(
"Cray manifest path {0} exists: collecting all files to read.".format(
cray_manifest.default_path

View File

@@ -44,11 +44,7 @@ def setup_parser(subparser):
)
# Below are arguments w.r.t. spec display (like spack spec)
arguments.add_common_arguments(subparser, ["long", "very_long"])
install_status_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
subparser.add_argument(
"-y",
"--yaml",

View File

@@ -31,11 +31,7 @@ def setup_parser(subparser):
for further documentation regarding the spec syntax, see:
spack help --spec
"""
arguments.add_common_arguments(subparser, ["long", "very_long"])
install_status_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
"-y",

View File

@@ -37,6 +37,7 @@
"implicit_rpaths",
"extra_rpaths",
]
_cache_config_file = []
# TODO: Caches at module level make it difficult to mock configurations in
# TODO: unit tests. It might be worth reworking their implementation.
@@ -111,26 +112,36 @@ def _to_dict(compiler):
def get_compiler_config(scope=None, init_config=True):
"""Return the compiler configuration for the specified architecture."""
config = spack.config.get("compilers", scope=scope) or []
if config or not init_config:
return config
def init_compiler_config():
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
spack.config.set("compilers", compilers_dict, scope=scope)
merged_config = spack.config.get("compilers")
if merged_config:
return config
_init_compiler_config(scope=scope)
config = spack.config.get("compilers", scope=scope)
return config
def _init_compiler_config(*, scope):
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
spack.config.set("compilers", compilers_dict, scope=scope)
# Update the configuration if there are currently no compilers
# configured. Avoid updating automatically if there ARE site
# compilers configured but no user ones.
if not config and init_config:
if scope is None:
# We know no compilers were configured in any scope.
init_compiler_config()
config = spack.config.get("compilers", scope=scope)
elif scope == "user":
# Check the site config and update the user config if
# nothing is configured at the site level.
site_config = spack.config.get("compilers", scope="site")
sys_config = spack.config.get("compilers", scope="system")
if not site_config and not sys_config:
init_compiler_config()
config = spack.config.get("compilers", scope=scope)
return config
elif config:
return config
else:
return [] # Return empty list which we will later append to.
def compiler_config_files():
@@ -154,65 +165,52 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers:
compiler_config.append(_to_dict(compiler))
global _cache_config_file
_cache_config_file = compiler_config
spack.config.set("compilers", compiler_config, scope=scope)
@_auto_compiler_spec
def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from configuration by spec.
If scope is None, all the scopes are searched for removal.
"""Remove compilers from the config, by spec.
Arguments:
compiler_spec: compiler to be removed
scope: configuration scope to modify
compiler_specs: a list of CompilerSpec objects.
scope: configuration scope to modify.
"""
candidate_scopes = [scope]
if scope is None:
candidate_scopes = spack.config.config.scopes.keys()
# Need a better way for this
global _cache_config_file
removal_happened = False
for current_scope in candidate_scopes:
removal_happened |= _remove_compiler_from_scope(compiler_spec, scope=current_scope)
return removal_happened
def _remove_compiler_from_scope(compiler_spec, scope):
"""Removes a compiler from a specific configuration scope.
Args:
compiler_spec: compiler to be removed
scope: configuration scope under consideration
Returns:
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(scope)
config_length = len(compiler_config)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
comp
for comp in compiler_config
if not spack.spec.parse_with_version_concrete(
compiler_entry["compiler"]["spec"], compiler=True
comp["compiler"]["spec"], compiler=True
).satisfies(compiler_spec)
]
if len(filtered_compiler_config) == len(compiler_config):
return False
# We need to preserve the YAML type for comments, hence we are copying the
# items in the list that has just been retrieved
compiler_config[:] = filtered_compiler_config
spack.config.set("compilers", compiler_config, scope=scope)
return True
# Update the cache for changes
_cache_config_file = filtered_compiler_config
if len(filtered_compiler_config) == config_length: # No items removed
CompilerSpecInsufficientlySpecificError(compiler_spec)
spack.config.set("compilers", filtered_compiler_config, scope=scope)
def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
return get_compiler_config(scope, init_config)
# Get compilers for this architecture.
# Create a cache of the config file so we don't load all the time.
global _cache_config_file
if not _cache_config_file:
_cache_config_file = get_compiler_config(scope, init_config)
return _cache_config_file
else:
return _cache_config_file
def all_compiler_specs(scope=None, init_config=True):
@@ -235,38 +233,25 @@ def find_compilers(path_hints=None):
path_hints = get_path("PATH")
default_paths = fs.search_paths_for_executables(*path_hints)
# To detect the version of the compilers, we dispatch a certain number
# of function calls to different workers. Here we construct the list
# of arguments for each call.
arguments = []
for o in all_os_classes():
search_paths = getattr(o, "compiler_search_paths", default_paths)
arguments.extend(arguments_to_detect_version_fn(o, search_paths))
pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in ["apple-clang"]]
detected_compilers = spack.detection.by_executable(pkg_cls_to_check, path_hints=default_paths)
# Here we map the function arguments to the corresponding calls
tp = multiprocessing.pool.ThreadPool()
try:
detected_versions = tp.map(detect_version, arguments)
finally:
tp.close()
platform = spack.platforms.host()
operating_system = platform.operating_system("default_os")
target = platform.target("default_target")
def valid_version(item):
value, error = item
if error is None:
return True
try:
# This will fail on Python 2.6 if a non ascii
# character is in the error
tty.debug(error)
except UnicodeEncodeError:
pass
return False
compilers = []
for name, detected in detected_compilers.items():
for entry in detected:
compiler_cls = spack.compilers.class_for_compiler_name(name)
spec = spack.spec.CompilerSpec(entry.spec.name, f"={entry.spec.versions}")
paths = [
entry.spec.extra_attributes["compilers"].get(x, None)
for x in ("c", "cxx", "f77", "fc")
]
compilers.append(compiler_cls(spec, str(operating_system), str(target), paths))
def remove_errors(item):
value, _ = item
return value
return make_compiler_list(map(remove_errors, filter(valid_version, detected_versions)))
return compilers
def find_new_compilers(path_hints=None, scope=None):
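
The compiler_config[:] = filtered_compiler_config slice assignment above is deliberate: it mutates the YAML-backed list in place rather than rebinding the name, so the comment-preserving list type and every other reference to it stay current. A minimal illustration:

config = ["gcc", "clang", "apple-clang"]  # stands in for the YAML-backed list
alias = config                            # e.g. another reference held elsewhere

config[:] = [c for c in config if c != "clang"]  # in place: alias sees the change
print(alias)  # ['gcc', 'apple-clang']

config = ["rebound"]  # plain rebinding would leave alias stale
print(alias)  # ['gcc', 'apple-clang']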

View File

@@ -151,11 +151,7 @@ def setup_custom_environment(self, pkg, env):
arch = arch.replace("-", "_")
# vcvars can target specific sdk versions, force it to pick up concretized sdk
# version, if needed by spec
sdk_ver = (
""
if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
else pkg.spec["win-sdk"].version.string + ".0"
)
sdk_ver = "" if "win-sdk" not in pkg.spec else pkg.spec["win-sdk"].version.string + ".0"
# provide vcvars with msvc version selected by concretization,
# not whatever it happens to pick up on the system (highest available version)
out = subprocess.check_output( # novermin

View File

@@ -81,7 +81,7 @@
# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
all_schemas = copy.deepcopy(section_schemas)
all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
all_schemas.update(dict((key, spack.schema.env.schema) for key in spack.schema.env.keys))
#: Path to the default configuration
configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
@@ -111,6 +111,14 @@
overrides_base_name = "overrides-"
def first_existing(dictionary, keys):
"""Get the value of the first key in keys that is in the dictionary."""
try:
return next(k for k in keys if k in dictionary)
except StopIteration:
raise KeyError("None of %s is in dict!" % str(keys))
class ConfigScope(object):
"""This class represents a configuration scope.
@@ -830,10 +838,12 @@ def _config():
def add_from_file(filename, scope=None):
"""Add updates to a config from a filename"""
# Extract internal attributes, if we are dealing with an environment
import spack.environment as ev
# Get file as config dict
data = read_config_file(filename)
if spack.schema.env.TOP_LEVEL_KEY in data:
data = data[spack.schema.env.TOP_LEVEL_KEY]
if any(k in data for k in spack.schema.env.keys):
data = ev.config_dict(data)
# update all sections from config dict
# We have to iterate on keys to keep overrides from the file
@@ -1343,11 +1353,17 @@ def use_configuration(*scopes_or_paths):
configuration = _config_from(scopes_or_paths)
config.clear_caches(), configuration.clear_caches()
# Save and clear the current compiler cache
saved_compiler_cache = spack.compilers._cache_config_file
spack.compilers._cache_config_file = []
saved_config, config = config, configuration
try:
yield configuration
finally:
# Restore previous config files
spack.compilers._cache_config_file = saved_compiler_cache
config = saved_config
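
One caveat on the first_existing helper introduced above: despite its docstring it returns the first matching key, not the value, and callers index the dictionary with the result. A usage sketch against a minimal spack.yaml-style mapping:

def first_existing(dictionary, keys):
    # as defined above: returns the first matching *key*
    try:
        return next(k for k in keys if k in dictionary)
    except StopIteration:
        raise KeyError("None of %s is in dict!" % str(keys))

data = {"spack": {"specs": ["zlib"]}}
key = first_existing(data, ("spack", "env"))
print(key)        # spack
print(data[key])  # {'specs': ['zlib']}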

View File

@@ -37,7 +37,7 @@ def validate(configuration_file):
config = syaml.load(f)
# Ensure we have a "container" attribute with sensible defaults set
env_dict = config[ev.TOP_LEVEL_KEY]
env_dict = ev.config_dict(config)
env_dict.setdefault(
"container", {"format": "docker", "images": {"os": "ubuntu:22.04", "spack": "develop"}}
)

View File

@@ -17,7 +17,7 @@
"template": "container/fedora_38.dockerfile",
"image": "docker.io/fedora:38"
},
"os_package_manager": "dnf",
"os_package_manager": "yum",
"build": "spack/fedora38",
"build_tags": {
"develop": "latest"
@@ -31,7 +31,7 @@
"template": "container/fedora_37.dockerfile",
"image": "docker.io/fedora:37"
},
"os_package_manager": "dnf",
"os_package_manager": "yum",
"build": "spack/fedora37",
"build_tags": {
"develop": "latest"
@@ -45,7 +45,7 @@
"template": "container/rockylinux_9.dockerfile",
"image": "docker.io/rockylinux:9"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/rockylinux9",
"build_tags": {
"develop": "latest"
@@ -59,7 +59,7 @@
"template": "container/rockylinux_8.dockerfile",
"image": "docker.io/rockylinux:8"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/rockylinux8",
"build_tags": {
"develop": "latest"
@@ -73,7 +73,7 @@
"template": "container/almalinux_9.dockerfile",
"image": "quay.io/almalinux/almalinux:9"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/almalinux9",
"build_tags": {
"develop": "latest"
@@ -87,7 +87,7 @@
"template": "container/almalinux_8.dockerfile",
"image": "quay.io/almalinux/almalinux:8"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/almalinux8",
"build_tags": {
"develop": "latest"
@@ -101,7 +101,7 @@
"template": "container/centos_stream.dockerfile",
"image": "quay.io/centos/centos:stream"
},
"os_package_manager": "dnf_epel",
"os_package_manager": "yum",
"build": "spack/centos-stream",
"final": {
"image": "quay.io/centos/centos:stream"
@@ -185,16 +185,6 @@
"install": "apt-get -yqq install",
"clean": "rm -rf /var/lib/apt/lists/*"
},
"dnf": {
"update": "dnf update -y",
"install": "dnf install -y",
"clean": "rm -rf /var/cache/dnf && dnf clean all"
},
"dnf_epel": {
"update": "dnf update -y && dnf install -y epel-release && dnf update -y",
"install": "dnf install -y",
"clean": "rm -rf /var/cache/dnf && dnf clean all"
},
"yum": {
"update": "yum update -y && yum install -y epel-release && yum update -y",
"install": "yum install -y",

View File

@@ -50,7 +50,7 @@ def create(configuration, last_phase=None):
configuration (dict): how to generate the current recipe
last_phase (str): last phase to be printed or None to print them all
"""
name = configuration[ev.TOP_LEVEL_KEY]["container"]["format"]
name = ev.config_dict(configuration)["container"]["format"]
return _writer_factory[name](configuration, last_phase)
@@ -138,7 +138,7 @@ class PathContext(tengine.Context):
template_name: Optional[str] = None
def __init__(self, config, last_phase):
self.config = config[ev.TOP_LEVEL_KEY]
self.config = ev.config_dict(config)
self.container_config = self.config["container"]
# Operating system tag as written in the configuration file

View File

@@ -48,8 +48,7 @@ def translated_compiler_name(manifest_compiler_name):
def compiler_from_entry(entry):
compiler_name = translated_compiler_name(entry["name"])
paths = entry["executables"]
# to instantiate a compiler class we may need a concrete version:
version = "={}".format(entry["version"])
version = entry["version"]
arch = entry["arch"]
operating_system = arch["os"]
target = arch["target"]
@@ -164,10 +163,7 @@ def entries_to_specs(entries):
continue
parent_spec = spec_dict[entry["hash"]]
dep_spec = spec_dict[dep_hash]
parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
for spec in spec_dict.values():
spack.spec.reconstruct_virtuals_on_edges(spec)
parent_spec._add_dependency(dep_spec, deptypes=deptypes)
return spec_dict

View File

@@ -60,7 +60,7 @@
# DB version. This is stuck in the DB file to track changes in format.
# Increment by one when the database format changes.
# Versions before 5 were not integers.
_db_version = vn.Version("7")
_db_version = vn.Version("6")
# For any version combinations here, skip reindex when upgrading.
# Reindexing can take considerable time and is not always necessary.
@@ -72,7 +72,6 @@
# version is saved to disk the first time the DB is written.
(vn.Version("0.9.3"), vn.Version("5")),
(vn.Version("5"), vn.Version("6")),
(vn.Version("6"), vn.Version("7")),
]
# Default timeout for spack database locks in seconds or None (no timeout).
@@ -106,11 +105,7 @@
def reader(version):
reader_cls = {
vn.Version("5"): spack.spec.SpecfileV1,
vn.Version("6"): spack.spec.SpecfileV3,
vn.Version("7"): spack.spec.SpecfileV4,
}
reader_cls = {vn.Version("5"): spack.spec.SpecfileV1, vn.Version("6"): spack.spec.SpecfileV3}
return reader_cls[version]
@@ -748,9 +743,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
spec_node_dict = spec_node_dict[spec.name]
if "dependencies" in spec_node_dict:
yaml_deps = spec_node_dict["dependencies"]
for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
yaml_deps
):
for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
# It is important that we always check upstream installations
# in the same order, and that we always check the local
# installation first: if a downstream Spack installs a package
@@ -773,7 +766,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
tty.warn(msg)
continue
spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
spec._add_dependency(child, deptypes=dtypes)
def _read_from_file(self, filename):
"""Fill database from file, do not maintain old data.
@@ -1179,7 +1172,7 @@ def _add(
for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
if not upstream:
record.ref_count += 1

View File

@@ -337,7 +337,6 @@
"""
from .environment import (
TOP_LEVEL_KEY,
Environment,
SpackEnvironmentError,
SpackEnvironmentViewError,
@@ -346,6 +345,7 @@
active_environment,
all_environment_names,
all_environments,
config_dict,
create,
create_in_dir,
deactivate,
@@ -369,7 +369,6 @@
)
__all__ = [
"TOP_LEVEL_KEY",
"Environment",
"SpackEnvironmentError",
"SpackEnvironmentViewError",
@@ -378,6 +377,7 @@
"active_environment",
"all_environment_names",
"all_environments",
"config_dict",
"create",
"create_in_dir",
"deactivate",

View File

@@ -53,7 +53,6 @@
import spack.version
from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import InvalidSpecConstraintError, SpecList
from spack.util.path import substitute_path_variables
@@ -125,7 +124,7 @@ def default_manifest_yaml():
valid_environment_name_re = r"^\w[\w-]*$"
#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 5
lockfile_format_version = 4
READER_CLS = {
@@ -133,7 +132,6 @@ def default_manifest_yaml():
2: spack.spec.SpecfileV1,
3: spack.spec.SpecfileV2,
4: spack.spec.SpecfileV3,
5: spack.spec.SpecfileV4,
}
@@ -363,6 +361,19 @@ def ensure_env_root_path_exists():
fs.mkdirp(env_root_path())
def config_dict(yaml_data):
"""Get the configuration scope section out of an spack.yaml"""
# TODO (env:): Remove env: as a possible top level keyword in v0.21
key = spack.config.first_existing(yaml_data, spack.schema.env.keys)
if key == "env":
msg = (
"using 'env:' as a top-level attribute of a Spack environment is deprecated and "
"will be removed in Spack v0.21. Please use 'spack:' instead."
)
warnings.warn(msg)
return yaml_data[key]
def all_environment_names():
"""List the names of environments that currently exist."""
# just return empty if the env path does not exist. A read-only
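A hedged usage sketch of the config_dict() helper added above; the yaml payloads are hypothetical examples, not Spack defaults:

import spack.environment as ev

new_style = {"spack": {"specs": ["zlib"]}}
old_style = {"env": {"specs": ["zlib"]}}  # deprecated spelling, emits a warning

assert ev.config_dict(new_style) == {"specs": ["zlib"]}
assert ev.config_dict(old_style) == {"specs": ["zlib"]}  # accepted until v0.21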
@@ -810,8 +821,8 @@ def write_transaction(self):
def _construct_state_from_manifest(self):
"""Read manifest file and set up user specs."""
self.spec_lists = collections.OrderedDict()
env_configuration = self.manifest[TOP_LEVEL_KEY]
for item in env_configuration.get("definitions", []):
for item in config_dict(self.manifest).get("definitions", []):
entry = copy.deepcopy(item)
when = _eval_conditional(entry.pop("when", "True"))
assert len(entry) == 1
@@ -823,13 +834,13 @@ def _construct_state_from_manifest(self):
else:
self.spec_lists[name] = user_specs
spec_list = env_configuration.get(user_speclist_name, [])
spec_list = config_dict(self.manifest).get(user_speclist_name, [])
user_specs = SpecList(
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
)
self.spec_lists[user_speclist_name] = user_specs
enable_view = env_configuration.get("view")
enable_view = config_dict(self.manifest).get("view")
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
@@ -844,11 +855,14 @@ def _construct_state_from_manifest(self):
else:
self.views = {}
# Retrieve the current concretization strategy
configuration = config_dict(self.manifest)
# Retrieve unification scheme for the concretizer
self.unify = spack.config.get("concretizer:unify", False)
# Retrieve dev-build packages:
self.dev_specs = copy.deepcopy(env_configuration.get("develop", {}))
self.dev_specs = copy.deepcopy(configuration.get("develop", {}))
for name, entry in self.dev_specs.items():
# spec must include a concrete version
assert Spec(entry["spec"]).versions.concrete_range_as_version
@@ -968,7 +982,7 @@ def included_config_scopes(self):
# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
includes = config_dict(self.manifest).get("include", [])
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
@@ -1061,7 +1075,10 @@ def env_file_config_scope(self):
"""Get the configuration scope for the environment's manifest file."""
config_name = self.env_file_config_scope_name()
return spack.config.SingleFileScope(
config_name, self.manifest_path, spack.schema.env.schema, [TOP_LEVEL_KEY]
config_name,
self.manifest_path,
spack.schema.env.schema,
[spack.config.first_existing(self.manifest, spack.schema.env.keys)],
)
def config_scopes(self):
@@ -1204,27 +1221,28 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
old_specs = set(self.user_specs)
new_specs = set()
for spec in matches:
if spec not in list_to_change:
continue
try:
list_to_change.remove(spec)
self.update_stale_references(list_name)
new_specs = set(self.user_specs)
except spack.spec_list.SpecListError:
# define new specs list
new_specs = set(self.user_specs)
msg = f"Spec '{spec}' is part of a spec matrix and "
msg += f"cannot be removed from list '{list_to_change}'."
if force:
msg += " It will be removed from the concrete specs."
# Mock new specs, so we can remove this spec from concrete spec lists
new_specs.remove(spec)
tty.warn(msg)
else:
if list_name == user_speclist_name:
self.manifest.remove_user_spec(str(spec))
if spec in list_to_change:
try:
list_to_change.remove(spec)
self.update_stale_references(list_name)
new_specs = set(self.user_specs)
except spack.spec_list.SpecListError:
# define new specs list
new_specs = set(self.user_specs)
msg = f"Spec '{spec}' is part of a spec matrix and "
msg += f"cannot be removed from list '{list_to_change}'."
if force:
msg += " It will be removed from the concrete specs."
# Mock new specs, so we can remove this spec from concrete spec lists
new_specs.remove(spec)
tty.warn(msg)
else:
self.manifest.remove_definition(str(spec), list_name=list_name)
if list_name == user_speclist_name:
for user_spec in matches:
self.manifest.remove_user_spec(str(user_spec))
else:
for user_spec in matches:
self.manifest.remove_definition(str(user_spec), list_name=list_name)
# If force, update stale concretized specs
for spec in old_specs - new_specs:
@@ -1334,10 +1352,6 @@ def concretize(self, force=False, tests=False):
self.concretized_order = []
self.specs_by_hash = {}
# Remove concrete specs that no longer correlate to a user spec
for spec in set(self.concretized_user_specs) - set(self.user_specs):
self.deconcretize(spec)
# Pick the right concretization strategy
if self.unify == "when_possible":
return self._concretize_together_where_possible(tests=tests)
@@ -1351,16 +1365,6 @@ def concretize(self, force=False, tests=False):
msg = "concretization strategy not implemented [{0}]"
raise SpackEnvironmentError(msg.format(self.unify))
def deconcretize(self, spec):
# spec has to be a root of the environment
index = self.concretized_user_specs.index(spec)
dag_hash = self.concretized_order.pop(index)
del self.concretized_user_specs[index]
# If this was the only user spec that concretized to this concrete spec, remove it
if dag_hash not in self.concretized_order:
del self.specs_by_hash[dag_hash]
def _get_specs_to_concretize(
self,
) -> Tuple[Set[spack.spec.Spec], Set[spack.spec.Spec], List[spack.spec.Spec]]:
@@ -1549,13 +1553,12 @@ def _concretize_separately(self, tests=False):
for h in self.specs_by_hash:
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
for node in computed_spec.traverse():
test_edges = node.edges_to_dependencies(deptype="test")
for current_edge in test_edges:
test_dependency = current_edge.spec
test_deps = node.dependencies(deptype="test")
for test_dependency in test_deps:
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
test_dependency.copy(), deptypes="test"
)
results = [
@@ -2186,9 +2189,9 @@ def _read_lockfile_dict(self, d):
# and add them to the spec
for lockfile_key, node_dict in json_specs_by_hash.items():
name, data = reader.name_and_data(node_dict)
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
specs_by_hash[lockfile_key]._add_dependency(
specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
specs_by_hash[dep_hash], deptypes=deptypes
)
# Traverse the root specs one at a time in the order they appear.
@@ -2668,8 +2671,8 @@ def add_user_spec(self, user_spec: str) -> None:
Args:
user_spec: user spec to be appended
"""
self.pristine_configuration.setdefault("specs", []).append(user_spec)
self.configuration.setdefault("specs", []).append(user_spec)
config_dict(self.pristine_yaml_content).setdefault("specs", []).append(user_spec)
config_dict(self.yaml_content).setdefault("specs", []).append(user_spec)
self.changed = True
def remove_user_spec(self, user_spec: str) -> None:
@@ -2682,8 +2685,8 @@ def remove_user_spec(self, user_spec: str) -> None:
SpackEnvironmentError: when the user spec is not in the list
"""
try:
self.pristine_configuration["specs"].remove(user_spec)
self.configuration["specs"].remove(user_spec)
config_dict(self.pristine_yaml_content)["specs"].remove(user_spec)
config_dict(self.yaml_content)["specs"].remove(user_spec)
except ValueError as e:
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
raise SpackEnvironmentError(msg) from e
@@ -2700,8 +2703,8 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
SpackEnvironmentError: when the user spec cannot be overridden
"""
try:
self.pristine_configuration["specs"][idx] = user_spec
self.configuration["specs"][idx] = user_spec
config_dict(self.pristine_yaml_content)["specs"][idx] = user_spec
config_dict(self.yaml_content)["specs"][idx] = user_spec
except ValueError as e:
msg = f"cannot override {user_spec} from {self}"
raise SpackEnvironmentError(msg) from e
@@ -2717,14 +2720,14 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
Raises:
SpackEnvironmentError: if no valid definition exists already
"""
defs = self.pristine_configuration.get("definitions", [])
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
msg = f"cannot add {user_spec} to the '{list_name}' definition, no valid list exists"
for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
item[list_name].append(user_spec)
break
self.configuration["definitions"][idx][list_name].append(user_spec)
config_dict(self.yaml_content)["definitions"][idx][list_name].append(user_spec)
self.changed = True
def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2738,7 +2741,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
SpackEnvironmentError: if the user spec cannot be removed from the list,
or the list does not exist
"""
defs = self.pristine_configuration.get("definitions", [])
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
msg = (
f"cannot remove {user_spec} from the '{list_name}' definition, "
f"no valid list exists"
@@ -2751,7 +2754,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
except ValueError:
pass
self.configuration["definitions"][idx][list_name].remove(user_spec)
config_dict(self.yaml_content)["definitions"][idx][list_name].remove(user_spec)
self.changed = True
def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2766,7 +2769,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
Raises:
SpackEnvironmentError: if the user spec cannot be overridden
"""
defs = self.pristine_configuration.get("definitions", [])
defs = config_dict(self.pristine_yaml_content).get("definitions", [])
msg = f"cannot override {user_spec} with {override} in the '{list_name}' definition"
for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
@@ -2777,7 +2780,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
except ValueError:
pass
self.configuration["definitions"][idx][list_name][sub_index] = override
config_dict(self.yaml_content)["definitions"][idx][list_name][sub_index] = override
self.changed = True
def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
@@ -2809,24 +2812,24 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
True the default view is used for the environment, if False there's no view.
"""
if isinstance(view, dict):
self.pristine_configuration["view"][default_view_name].update(view)
self.configuration["view"][default_view_name].update(view)
config_dict(self.pristine_yaml_content)["view"][default_view_name].update(view)
config_dict(self.yaml_content)["view"][default_view_name].update(view)
self.changed = True
return
if not isinstance(view, bool):
view = str(view)
self.pristine_configuration["view"] = view
self.configuration["view"] = view
config_dict(self.pristine_yaml_content)["view"] = view
config_dict(self.yaml_content)["view"] = view
self.changed = True
def remove_default_view(self) -> None:
"""Removes the default view from the manifest file"""
view_data = self.pristine_configuration.get("view")
view_data = config_dict(self.pristine_yaml_content).get("view")
if isinstance(view_data, collections.abc.Mapping):
self.pristine_configuration["view"].pop(default_view_name)
self.configuration["view"].pop(default_view_name)
config_dict(self.pristine_yaml_content)["view"].pop(default_view_name)
config_dict(self.yaml_content)["view"].pop(default_view_name)
self.changed = True
return
@@ -2843,10 +2846,12 @@ def add_develop_spec(self, pkg_name: str, entry: Dict[str, str]) -> None:
if entry["path"] == pkg_name:
entry.pop("path")
self.pristine_configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(
config_dict(self.pristine_yaml_content).setdefault("develop", {}).setdefault(
pkg_name, {}
).update(entry)
config_dict(self.yaml_content).setdefault("develop", {}).setdefault(pkg_name, {}).update(
entry
)
self.configuration.setdefault("develop", {}).setdefault(pkg_name, {}).update(entry)
self.changed = True
def remove_develop_spec(self, pkg_name: str) -> None:
@@ -2859,11 +2864,11 @@ def remove_develop_spec(self, pkg_name: str) -> None:
SpackEnvironmentError: if there is nothing to remove
"""
try:
del self.pristine_configuration["develop"][pkg_name]
del config_dict(self.pristine_yaml_content)["develop"][pkg_name]
except KeyError as e:
msg = f"cannot remove '{pkg_name}' from develop specs in {self}, entry does not exist"
raise SpackEnvironmentError(msg) from e
del self.configuration["develop"][pkg_name]
del config_dict(self.yaml_content)["develop"][pkg_name]
self.changed = True
def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
@@ -2874,11 +2879,11 @@ def absolutify_dev_paths(self, init_file_dir: Union[str, pathlib.Path]) -> None:
init_file_dir: directory with the "spack.yaml" used to initialize the environment.
"""
init_file_dir = pathlib.Path(init_file_dir).absolute()
for _, entry in self.pristine_configuration.get("develop", {}).items():
for _, entry in config_dict(self.pristine_yaml_content).get("develop", {}).items():
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
entry["path"] = str(expanded_path)
for _, entry in self.configuration.get("develop", {}).items():
for _, entry in config_dict(self.yaml_content).get("develop", {}).items():
expanded_path = os.path.normpath(str(init_file_dir / entry["path"]))
entry["path"] = str(expanded_path)
self.changed = True
@@ -2892,16 +2897,6 @@ def flush(self) -> None:
_write_yaml(self.pristine_yaml_content, f)
self.changed = False
@property
def pristine_configuration(self):
"""Return the dictionaries in the pristine YAML, without the top level attribute"""
return self.pristine_yaml_content[TOP_LEVEL_KEY]
@property
def configuration(self):
"""Return the dictionaries in the YAML, without the top level attribute"""
return self.yaml_content[TOP_LEVEL_KEY]
def __len__(self):
return len(self.yaml_content)

View File

@@ -544,7 +544,6 @@ def _static_edges(specs, deptype):
spack.spec.Spec(parent_name),
spack.spec.Spec(dependency_name),
deptypes=deptype,
virtuals=(),
)

View File

@@ -460,8 +460,7 @@ def write_tested_status(self):
elif self.counts[TestStatus.PASSED] > 0:
status = TestStatus.PASSED
with open(self.tested_file, "w") as f:
f.write(f"{status.value}\n")
_add_msg_to_file(self.tested_file, f"{status.value}")
@contextlib.contextmanager

View File

@@ -231,9 +231,7 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
dep.concretize()
# mark compiler as depended-on by the packages that use it
for pkg in pkgs:
dep._dependents.add(
spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
)
dep._dependents.add(spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",)))
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
packages.append((dep.package, True))

View File

@@ -40,7 +40,7 @@
import llnl.util.filesystem
import llnl.util.tty as tty
from llnl.util.lang import dedupe, memoized
from llnl.util.lang import dedupe
import spack.build_environment
import spack.config
@@ -170,9 +170,11 @@ def merge_config_rules(configuration, spec):
Returns:
dict: actions to be taken on the spec passed as an argument
"""
# Construct a dictionary with the actions we need to perform on the spec passed as a parameter
spec_configuration = {}
# The keyword 'all' is always evaluated first, all the others are
# evaluated in order of appearance in the module file
spec_configuration = copy.deepcopy(configuration.get("all", {}))
spec_configuration.update(copy.deepcopy(configuration.get("all", {})))
for constraint, action in configuration.items():
if spec.satisfies(constraint):
if hasattr(constraint, "override") and constraint.override:
@@ -393,7 +395,7 @@ class BaseConfiguration(object):
querying easier. It needs to be sub-classed for specific module types.
"""
default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
default_projections = {"all": "{name}-{version}-{compiler.name}-{compiler.version}"}
def __init__(self, spec, module_set_name, explicit=None):
# Module where type(self) is defined
@@ -672,7 +674,6 @@ def configure_options(self):
return None
@tengine.context_property
@memoized
def environment_modifications(self):
"""List of environment modifications to be processed."""
# Modifications guessed by inspecting the spec prefix
@@ -743,19 +744,6 @@ def environment_modifications(self):
return [(type(x).__name__, x) for x in env if x.name not in exclude]
@tengine.context_property
def has_manpath_modifications(self):
"""True if MANPATH environment variable is modified."""
for modification_type, cmd in self.environment_modifications:
if not isinstance(
cmd, (spack.util.environment.PrependPath, spack.util.environment.AppendPath)
):
continue
if cmd.name == "MANPATH":
return True
else:
return False
@tengine.context_property
def autoload(self):
"""List of modules that needs to be loaded automatically."""

View File

@@ -134,7 +134,6 @@ def filter_hierarchy_specs(self):
return configuration(self.name).get("filter_hierarchy_specs", {})
@property
@lang.memoized
def hierarchy_tokens(self):
"""Returns the list of tokens that are part of the modulefile
hierarchy. 'compiler' is always present.
@@ -159,7 +158,6 @@ def hierarchy_tokens(self):
return tokens
@property
@lang.memoized
def requires(self):
"""Returns a dictionary mapping all the requirements of this spec
to the actual provider. 'compiler' is always present among the
@@ -226,7 +224,6 @@ def available(self):
return available
@property
@lang.memoized
def missing(self):
"""Returns the list of tokens that are not available."""
return [x for x in self.hierarchy_tokens if x not in self.available]
@@ -320,7 +317,6 @@ def available_path_parts(self):
return parts
@property
@lang.memoized
def unlocked_paths(self):
"""Returns a dictionary mapping conditions to a list of unlocked
paths.
@@ -432,7 +428,6 @@ def missing(self):
return self.conf.missing
@tengine.context_property
@lang.memoized
def unlocked_paths(self):
"""Returns the list of paths that are unlocked unconditionally."""
layout = make_layout(self.spec, self.conf.name, self.conf.explicit)

View File

@@ -1231,7 +1231,6 @@ def dependencies_of_type(cls, *deptypes):
if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
)
# TODO: allow more than one active extendee.
@property
def extendee_spec(self):
"""
@@ -1247,6 +1246,7 @@ def extendee_spec(self):
if dep.name in self.extendees:
deps.append(dep)
# TODO: allow more than one active extendee.
if deps:
assert len(deps) == 1
return deps[0]
@@ -1256,6 +1256,7 @@ def extendee_spec(self):
if self.spec._concrete:
return None
else:
# TODO: do something sane here with more than one extendee
# If it's not concrete, then return the spec from the
# extends() directive since that is all we know so far.
spec_str, kwargs = next(iter(self.extendees.items()))

View File

@@ -291,7 +291,7 @@ def next_spec(
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
root_spec._add_dependency(dependency, deptypes=(), virtuals=())
root_spec._add_dependency(dependency, deptypes=())
else:
break

View File

@@ -37,9 +37,7 @@
def slingshot_network():
return os.path.exists("/opt/cray/pe") and (
os.path.exists("/lib64/libcxi.so") or os.path.exists("/usr/lib64/libcxi.so")
)
return os.path.exists("/opt/cray/pe") and os.path.exists("/lib64/libcxi.so")
def _target_name_from_craype_target_name(name):

View File

@@ -292,8 +292,8 @@ def from_json(stream, repository):
index.providers = _transform(
providers,
lambda vpkg, plist: (
spack.spec.SpecfileV4.from_node_dict(vpkg),
set(spack.spec.SpecfileV4.from_node_dict(p) for p in plist),
spack.spec.SpecfileV3.from_node_dict(vpkg),
set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
),
)
return index

View File

@@ -6,6 +6,7 @@
import itertools
import os
import re
import shutil
from collections import OrderedDict
import macholib.mach_o
@@ -355,7 +356,13 @@ def _set_elf_rpaths(target, rpaths):
# Join the paths using ':' as a separator
rpaths_str = ":".join(rpaths)
patchelf, output = executable.Executable(_patchelf()), None
# If we're relocating patchelf itself, make a copy and use it
bak_path = None
if target.endswith("/bin/patchelf"):
bak_path = target + ".bak"
shutil.copy(target, bak_path)
patchelf, output = executable.Executable(bak_path or _patchelf()), None
try:
# TODO: revisit the use of --force-rpath as it might be conditional
# TODO: if we want to support setting RUNPATH from binary packages
@@ -364,6 +371,9 @@ def _set_elf_rpaths(target, rpaths):
except executable.ProcessError as e:
msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
tty.warn(msg.format(target, e))
finally:
if bak_path and os.path.exists(bak_path):
os.remove(bak_path)
return output
@@ -676,7 +686,7 @@ def is_relocatable(spec):
Raises:
ValueError: if the spec is not installed
"""
if not spec.installed:
if not spec.install_status():
raise ValueError("spec is not installed [{0}]".format(str(spec)))
if spec.external or spec.virtual:

View File

@@ -1239,7 +1239,7 @@ def get_pkg_class(self, pkg_name):
try:
module = importlib.import_module(fullname)
except ImportError:
raise UnknownPackageError(fullname)
raise UnknownPackageError(pkg_name)
except Exception as e:
msg = f"cannot load package '{pkg_name}' from the '{self.namespace}' repository: {e}"
raise RepoError(msg) from e

View File

@@ -209,7 +209,7 @@ def update(data):
# Warn if deprecated section is still in the environment
ci_env = ev.active_environment()
if ci_env:
env_config = ci_env.manifest[ev.TOP_LEVEL_KEY]
env_config = ev.config_dict(ci_env.manifest)
if "gitlab-ci" in env_config:
tty.die("Error: `gitlab-ci` section detected with `ci`, these are not compatible")

View File

@@ -15,8 +15,8 @@
import spack.schema.packages
import spack.schema.projections
#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"
#: legal first keys in the schema
keys = ("spack", "env")
spec_list_schema = {
"type": "array",
@@ -47,8 +47,8 @@
"title": "Spack environment file schema",
"type": "object",
"additionalProperties": False,
"properties": {
"spack": {
"patternProperties": {
"^env|spack$": {
"type": "object",
"default": {},
"additionalProperties": False,

View File

@@ -2500,15 +2500,10 @@ def depends_on(self, pkg, dep, type):
assert len(dependencies) < 2, msg
if not dependencies:
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
else:
# TODO: This assumes that each solve unifies dependencies
dependencies[0].update_deptypes(deptypes=(type,))
def virtual_on_edge(self, pkg, provider, virtual):
dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
assert len(dependencies) == 1
dependencies[0].update_virtuals((virtual,))
dependencies[0].add_type(type)
def reorder_flags(self):
"""Order compiler flags on specs in predefined order.
@@ -2586,8 +2581,6 @@ def sort_fn(function_tuple):
return (-2, 0)
elif name == "external_spec_selected":
return (0, 0) # note out of order so this goes last
elif name == "virtual_on_edge":
return (1, 0)
else:
return (-1, 0)
@@ -2843,13 +2836,12 @@ class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
"""
def __init__(self, provided, conflicts):
msg = (
"Spack concretizer internal error. Please submit a bug report and include the "
"command, environment if applicable and the following error message."
f"\n {provided} is unsatisfiable, errors are:"
)
msg += "".join([f"\n {conflict}" for conflict in conflicts])
indented = [" %s\n" % conflict for conflict in conflicts]
error_msg = "".join(indented)
msg = "Spack concretizer internal error. Please submit a bug report"
msg += "\n Please include the command, environment if applicable,"
msg += "\n and the following error message."
msg = "\n %s is unsatisfiable, errors are:\n%s" % (provided, error_msg)
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)

View File

@@ -300,11 +300,6 @@ attr("depends_on", Package, Provider, Type)
provider(Provider, Virtual),
not external(Package).
attr("virtual_on_edge", Package, Provider, Virtual)
:- dependency_holds(Package, Virtual, Type),
provider(Provider, Virtual),
not external(Package).
% dependencies on virtuals also imply that the virtual is a virtual node
attr("virtual_node", Virtual)
:- dependency_holds(Package, Virtual, Type),

View File

@@ -50,7 +50,6 @@
"""
import collections
import collections.abc
import enum
import io
import itertools
import os
@@ -171,17 +170,7 @@
)
#: specfile format version. Must increase monotonically
SPECFILE_FORMAT_VERSION = 4
# InstallStatus is used to map install statuses to symbols for display
# Options are artificially disjoint for display purposes
class InstallStatus(enum.Enum):
installed = "@g{[+]} "
upstream = "@g{[^]} "
external = "@g{[e]} "
absent = "@K{ - } "
missing = "@r{[-]} "
SPECFILE_FORMAT_VERSION = 3
def colorize_spec(spec):
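For context, the removed InstallStatus symbols render through Spack's color markup roughly as in this sketch (package names illustrative):

import llnl.util.tty.color as clr

print(clr.colorize("@g{[+]} zlib"))   # installed: green [+]
print(clr.colorize("@r{[-]} mpich"))  # missing: red [-]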
@@ -725,81 +714,47 @@ class DependencySpec:
parent: starting node of the edge
spec: ending node of the edge.
deptypes: list of strings, representing dependency relationships.
virtuals: virtual packages provided from child to parent node.
"""
__slots__ = "parent", "spec", "parameters"
__slots__ = "parent", "spec", "deptypes"
def __init__(
self,
parent: "Spec",
spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
):
def __init__(self, parent: "Spec", spec: "Spec", *, deptypes: dp.DependencyArgument):
self.parent = parent
self.spec = spec
self.parameters = {
"deptypes": dp.canonical_deptype(deptypes),
"virtuals": tuple(sorted(set(virtuals))),
}
self.deptypes = dp.canonical_deptype(deptypes)
@property
def deptypes(self) -> Tuple[str, ...]:
return self.parameters["deptypes"]
def update_deptypes(self, deptypes: dp.DependencyArgument) -> bool:
deptypes = set(deptypes)
deptypes.update(self.deptypes)
deptypes = tuple(sorted(deptypes))
changed = self.deptypes != deptypes
@property
def virtuals(self) -> Tuple[str, ...]:
return self.parameters["virtuals"]
def _update_edge_multivalued_property(
self, property_name: str, value: Tuple[str, ...]
) -> bool:
current = self.parameters[property_name]
update = set(current) | set(value)
update = tuple(sorted(update))
changed = current != update
if not changed:
return False
self.parameters[property_name] = update
return True
def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
"""Update the current dependency types"""
return self._update_edge_multivalued_property("deptypes", deptypes)
def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
"""Update the list of provided virtuals"""
return self._update_edge_multivalued_property("virtuals", virtuals)
self.deptypes = deptypes
return changed
def copy(self) -> "DependencySpec":
"""Return a copy of this edge"""
return DependencySpec(
self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
)
return DependencySpec(self.parent, self.spec, deptypes=self.deptypes)
def add_type(self, type: dp.DependencyArgument):
self.deptypes = dp.canonical_deptype(self.deptypes + dp.canonical_deptype(type))
def _cmp_iter(self):
yield self.parent.name if self.parent else None
yield self.spec.name if self.spec else None
yield self.deptypes
yield self.virtuals
def __str__(self) -> str:
parent = self.parent.name if self.parent else None
child = self.spec.name if self.spec else None
return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}"
return "%s %s--> %s" % (
self.parent.name if self.parent else None,
self.deptypes,
self.spec.name if self.spec else None,
)
def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals
def canonical(self) -> Tuple[str, str, Tuple[str, ...]]:
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
def flip(self) -> "DependencySpec":
"""Flip the dependency, and drop virtual information"""
return DependencySpec(
parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=()
)
return DependencySpec(parent=self.spec, spec=self.parent, deptypes=self.deptypes)
class CompilerFlag(str):
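A minimal sketch of building an edge with the newer parameters-based layout shown above; the package names are illustrative:

import spack.spec as ss

parent, child = ss.Spec("mpileaks"), ss.Spec("mpich")
# virtuals travel on the edge alongside the dependency types
edge = ss.DependencySpec(parent, child, deptypes=("build", "link"), virtuals=("mpi",))
print(edge.deptypes)  # ('build', 'link')
print(edge.virtuals)  # ('mpi',)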
@@ -1620,12 +1575,10 @@ def _set_compiler(self, compiler):
)
self.compiler = compiler
def _add_dependency(
self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
):
def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
"""Called by the parser to add another spec as a dependency."""
if spec.name not in self._dependencies or not spec.name:
self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals)
self.add_dependency_edge(spec, deptypes=deptypes)
return
# Keep the intersection of constraints when a dependency is added
@@ -1643,58 +1596,34 @@ def _add_dependency(
"Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
)
def add_dependency_edge(
self,
dependency_spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
):
def add_dependency_edge(self, dependency_spec: "Spec", *, deptypes: dp.DependencyArgument):
"""Add a dependency edge to this spec.
Args:
dependency_spec: spec of the dependency
deptypes: dependency types for this edge
virtuals: virtuals provided by this edge
"""
deptypes = dp.canonical_deptype(deptypes)
# Check if we need to update edges that are already present
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
has_errors, details = False, []
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
if any(d in edge.deptypes for d in deptypes):
has_errors = True
details.append(
(
f"{edge.parent.name} has already an edge matching any"
f" of these types {str(deptypes)}"
)
msg = (
'cannot add a dependency on "{0.spec}" of {1} type '
'when the "{0.parent}" has the edge {0!s} already'
)
if any(v in edge.virtuals for v in virtuals):
has_errors = True
details.append(
(
f"{edge.parent.name} has already an edge matching any"
f" of these virtuals {str(virtuals)}"
)
)
if has_errors:
raise spack.error.SpecError(msg, "\n".join(details))
raise spack.error.SpecError(msg.format(edge, deptypes))
for edge in selected:
if id(dependency_spec) == id(edge.spec):
# If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll
# both see the deptype modification.
edge.update_deptypes(deptypes=deptypes)
edge.update_virtuals(virtuals=virtuals)
edge.add_type(deptypes)
return
edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals)
edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
self._dependencies.add(edge)
dependency_spec._dependents.add(edge)
@@ -1967,12 +1896,12 @@ def lookup_hash(self):
for node in self.traverse(root=False):
if node.abstract_hash:
new = node._lookup_hash()
spec._add_dependency(new, deptypes=(), virtuals=())
spec._add_dependency(new, deptypes=())
# reattach nodes that were not otherwise satisfied by new dependencies
for node in self.traverse(root=False):
if not any(n._satisfies(node) for n in spec.traverse()):
spec._add_dependency(node.copy(), deptypes=(), virtuals=())
spec._add_dependency(node.copy(), deptypes=())
return spec
@@ -2107,14 +2036,8 @@ def to_node_dict(self, hash=ht.dag_hash):
name_tuple = ("name", name)
for dspec in edges_for_name:
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
parameters_tuple = (
"parameters",
syaml.syaml_dict(
(key, dspec.parameters[key]) for key in sorted(dspec.parameters)
),
)
ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
deps_list.append(syaml.syaml_dict(ordered_entries))
type_tuple = ("type", sorted(str(s) for s in dspec.deptypes))
deps_list.append(syaml.syaml_dict([name_tuple, hash_tuple, type_tuple]))
d["dependencies"] = deps_list
# Name is included in case this is replacing a virtual.
@@ -2438,7 +2361,7 @@ def spec_and_dependency_types(s):
dag_node, dependency_types = spec_and_dependency_types(s)
dependency_spec = spec_builder({dag_node: s_dependencies})
spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=())
spec._add_dependency(dependency_spec, deptypes=dependency_types)
return spec
@@ -2456,10 +2379,8 @@ def from_dict(data):
spec = SpecfileV1.load(data)
elif int(data["spec"]["_meta"]["version"]) == 2:
spec = SpecfileV2.load(data)
elif int(data["spec"]["_meta"]["version"]) == 3:
spec = SpecfileV3.load(data)
else:
spec = SpecfileV4.load(data)
spec = SpecfileV3.load(data)
# Any git version should
for s in spec.traverse():
@@ -2608,7 +2529,6 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert self.virtual
virtuals = (self.name,)
for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
dependent = dep_spec.parent
deptypes = dep_spec.deptypes
@@ -2619,11 +2539,7 @@ def _replace_with(self, concrete):
# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent._dependencies:
dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals)
else:
dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
virtuals=virtuals
)
dependent._add_dependency(concrete, deptypes=deptypes)
def _expand_virtual_packages(self, concretizer):
"""Find virtual packages in this spec, replace them with providers,
@@ -3264,9 +3180,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
# If it's a virtual dependency, try to find an existing
# provider in the spec, and merge that.
virtuals = ()
if spack.repo.path.is_virtual_safe(dep.name):
virtuals = (dep.name,)
visited.add(dep.name)
provider = self._find_provider(dep, provider_index)
if provider:
@@ -3322,7 +3236,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
# Add merged spec to my deps and recurse
spec_dependency = spec_deps[dep.name]
if dep.name not in self._dependencies:
self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals)
self._add_dependency(spec_dependency, deptypes=dependency.type)
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed
@@ -3659,20 +3573,15 @@ def _constrain_dependencies(self, other):
changed |= edges_from_name[0].update_deptypes(
other._dependencies[name][0].deptypes
)
changed |= edges_from_name[0].update_virtuals(
other._dependencies[name][0].virtuals
)
# Update with additional constraints from other spec
# operate on direct dependencies only, because a concrete dep
# represented by hash may have structure that needs to be preserved
for name in other.direct_dep_difference(self):
dep_spec_copy = other._get_dependency(name)
self._add_dependency(
dep_spec_copy.spec.copy(),
deptypes=dep_spec_copy.deptypes,
virtuals=dep_spec_copy.virtuals,
)
dep_copy = dep_spec_copy.spec
deptypes = dep_spec_copy.deptypes
self._add_dependency(dep_copy.copy(), deptypes=deptypes)
changed = True
return changed
@@ -4056,7 +3965,7 @@ def spid(spec):
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
new_specs[spid(edge.parent)].add_dependency_edge(
new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals
new_specs[spid(edge.spec)], deptypes=edge.deptypes
)
def copy(self, deps=True, **kwargs):
@@ -4424,7 +4333,7 @@ def write_attribute(spec, attribute, color):
if callable(current):
raise SpecFormatStringError("Attempted to format callable object")
if current is None:
if not current:
# We're not printing anything
return
@@ -4492,20 +4401,12 @@ def __str__(self):
def install_status(self):
"""Helper for tree to print DB install status."""
if not self.concrete:
return InstallStatus.absent
if self.external:
return InstallStatus.external
upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
if not record:
return InstallStatus.absent
elif upstream and record.installed:
return InstallStatus.upstream
elif record.installed:
return InstallStatus.installed
else:
return InstallStatus.missing
return None
try:
record = spack.store.db.get_record(self)
return record.installed
except KeyError:
return None
def _installed_explicitly(self):
"""Helper for tree to print DB install status."""
@@ -4519,10 +4420,7 @@ def _installed_explicitly(self):
def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
with indentation.
Status function may either output a boolean or an InstallStatus
"""
with indentation."""
color = kwargs.pop("color", clr.get_color_when())
depth = kwargs.pop("depth", False)
hashes = kwargs.pop("hashes", False)
@@ -4554,12 +4452,14 @@ def tree(self, **kwargs):
if status_fn:
status = status_fn(node)
if status in list(InstallStatus):
out += clr.colorize(status.value, color=color)
if node.installed_upstream:
out += clr.colorize("@g{[^]} ", color=color)
elif status is None:
out += clr.colorize("@K{ - } ", color=color) # !installed
elif status:
out += clr.colorize("@g{[+]} ", color=color)
out += clr.colorize("@g{[+]} ", color=color) # installed
else:
out += clr.colorize("@r{[-]} ", color=color)
out += clr.colorize("@r{[-]} ", color=color) # missing
if hashes:
out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hlen)
@@ -4735,16 +4635,12 @@ def from_self(name, transitive):
if name in self_nodes:
for edge in self[name].edges_to_dependencies():
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
nodes[name].add_dependency_edge(
nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals
)
nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
if any(dep not in self_nodes for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec
else:
for edge in other[name].edges_to_dependencies():
nodes[name].add_dependency_edge(
nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals
)
nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
if any(dep not in other_nodes for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec
@@ -4834,40 +4730,11 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
# Update with additional constraints from other spec
for name in current_spec_constraint.direct_dep_difference(merged_spec):
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
merged_spec._add_dependency(
edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals
)
merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)
return merged_spec
def reconstruct_virtuals_on_edges(spec):
"""Reconstruct virtuals on edges. Used to read from old DB and reindex.
Args:
spec: spec on which we want to reconstruct virtuals
"""
# Collect all possible virtuals
possible_virtuals = set()
for node in spec.traverse():
try:
possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
except Exception as e:
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
continue
# Assume all incoming edges to provider are marked with virtuals=
for vspec in possible_virtuals:
try:
provider = spec[vspec]
except KeyError:
# Virtual not in the DAG
continue
for edge in provider.edges_from_dependents():
edge.update_virtuals([vspec])
class SpecfileReaderBase:
@classmethod
def from_node_dict(cls, node):
@@ -4951,7 +4818,7 @@ def _load(cls, data):
# Pass 0: Determine hash type
for node in nodes:
for _, _, _, dhash_type, _ in cls.dependencies_from_node_dict(node):
for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
any_deps = True
if dhash_type:
hash_type = dhash_type
@@ -4982,10 +4849,8 @@ def _load(cls, data):
# Pass 2: Finish construction of all DAG edges (including build specs)
for node_hash, node in hash_dict.items():
node_spec = node["node_spec"]
for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node):
node_spec._add_dependency(
hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals
)
for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
if "build_spec" in node.keys():
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
node_spec._build_spec = hash_dict[bhash]["node_spec"]
@@ -5019,10 +4884,9 @@ def load(cls, data):
for node in nodes:
# get dependency dict from the node.
name, data = cls.name_and_data(node)
for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals)
for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
deps[name]._add_dependency(deps[dname], deptypes=dtypes)
reconstruct_virtuals_on_edges(result)
return result
@classmethod
@@ -5051,20 +4915,18 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
if h.name in elt:
dep_hash, deptypes = elt[h.name], elt["type"]
hash_type = h.name
virtuals = []
break
else: # We never determined a hash type...
raise spack.error.SpecError("Couldn't parse dependency spec.")
else:
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
yield dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)
yield dep_name, dep_hash, list(deptypes), hash_type
class SpecfileV2(SpecfileReaderBase):
@classmethod
def load(cls, data):
result = cls._load(data)
reconstruct_virtuals_on_edges(result)
return result
@classmethod
@@ -5098,7 +4960,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
raise spack.error.SpecError("Couldn't parse dependency spec.")
else:
raise spack.error.SpecError("Couldn't parse dependency types in spec.")
result.append((dep_name, dep_hash, list(deptypes), hash_type, list(virtuals)))
result.append((dep_name, dep_hash, list(deptypes), hash_type))
return result
@classmethod
@@ -5118,20 +4980,6 @@ class SpecfileV3(SpecfileV2):
pass
class SpecfileV4(SpecfileV2):
@classmethod
def extract_info_from_dep(cls, elt, hash):
dep_hash = elt[hash.name]
deptypes = elt["parameters"]["deptypes"]
hash_type = hash.name
virtuals = elt["parameters"]["virtuals"]
return dep_hash, deptypes, hash_type, virtuals
@classmethod
def load(cls, data):
return cls._load(data)
class LazySpecCache(collections.defaultdict):
"""Cache for Specs that uses a spec_like as key, and computes lazily
the corresponding value ``Spec(spec_like)``.

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import textwrap
from typing import List, Optional, Tuple
from typing import List
import llnl.util.lang
@@ -66,17 +66,17 @@ def to_dict(self):
return dict(d)
@llnl.util.lang.memoized
def make_environment(dirs: Optional[Tuple[str, ...]] = None):
"""Returns a configured environment for template rendering."""
# Import at this scope to avoid slowing Spack startup down
import jinja2
def make_environment(dirs=None):
"""Returns an configured environment for template rendering."""
if dirs is None:
# Default directories where to search for templates
builtins = spack.config.get("config:template_dirs", ["$spack/share/spack/templates"])
extensions = spack.extensions.get_template_dirs()
dirs = tuple(canonicalize_path(d) for d in itertools.chain(builtins, extensions))
dirs = [canonicalize_path(d) for d in itertools.chain(builtins, extensions)]
# avoid importing this at the top level as it's used infrequently and
# slows down startup a bit.
import jinja2
# Loader for the templates
loader = jinja2.FileSystemLoader(dirs)
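A hedged usage sketch for the memoized template environment; the template name below is hypothetical:

from spack.tengine import make_environment

env = make_environment()  # dirs=None -> configured template_dirs, as above
template = env.get_template("hello.txt")  # hypothetical template file
print(template.render(greeting="hello"))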

View File

@@ -115,6 +115,9 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
spack.config.config, old_config = cfg, spack.config.config
spack.config.config.set("repos", [spack.paths.mock_packages_path])
# This is essential, otherwise the cache will create weird side effects
# that will compromise subsequent tests if compilers.yaml is modified
monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
njobs = spack.config.get("config:build_jobs")
if not njobs:
spack.config.set("config:build_jobs", 4, scope="user")
@@ -201,12 +204,12 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
install_cmd("--no-cache", sy_spec.name)
# Create a buildcache
buildcache_cmd("push", "-au", mirror_dir, cspec.name, sy_spec.name)
buildcache_cmd("push", "-au", "-d", mirror_dir, cspec.name, sy_spec.name)
# Test force overwrite create buildcache (-f option)
buildcache_cmd("push", "-auf", mirror_dir, cspec.name)
buildcache_cmd("push", "-auf", "-d", mirror_dir, cspec.name)
# Create mirror index
buildcache_cmd("update-index", mirror_dir)
buildcache_cmd("update-index", "-d", mirror_dir)
# List the buildcaches in the mirror
buildcache_cmd("list", "-alv")
@@ -214,13 +217,13 @@ def test_default_rpaths_create_install_default_layout(mirror_dir):
uninstall_cmd("-y", "--dependents", gspec.name)
# Test installing from build caches
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
# This gives warning that spec is already installed
buildcache_cmd("install", "-u", cspec.name)
buildcache_cmd("install", "-au", cspec.name)
# Test overwrite install
buildcache_cmd("install", "-fu", cspec.name)
buildcache_cmd("install", "-afu", cspec.name)
buildcache_cmd("keys", "-f")
buildcache_cmd("list")
@@ -246,10 +249,35 @@ def test_default_rpaths_install_nondefault_layout(mirror_dir):
# Install some packages with dependent packages
# test install in non-default install path scheme
buildcache_cmd("install", "-u", cspec.name, sy_spec.name)
buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
# Test force install in non-default install path scheme
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-auf", cspec.name)
@pytest.mark.requires_executables(*args)
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures("default_config", "cache_directory", "install_dir_default_layout")
def test_relative_rpaths_create_default_layout(mirror_dir):
"""
Test the creation and installation of buildcaches with relative
rpaths into the default directory layout scheme.
"""
gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
# Install 'corge' without using a cache
install_cmd("--no-cache", cspec.name)
# Create build cache with relative rpaths
buildcache_cmd("push", "-aur", "-d", mirror_dir, cspec.name)
# Create mirror index
buildcache_cmd("update-index", "-d", mirror_dir)
# Uninstall the package and deps
uninstall_cmd("-y", "--dependents", gspec.name)
@pytest.mark.requires_executables(*args)
@@ -266,19 +294,19 @@ def test_relative_rpaths_install_default_layout(mirror_dir):
gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
# Install buildcache created with relativized rpaths
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-auf", cspec.name)
# This gives warning that spec is already installed
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-auf", cspec.name)
# Uninstall the package and deps
uninstall_cmd("-y", "--dependents", gspec.name)
# Install build cache
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-auf", cspec.name)
# Test overwrite install
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-auf", cspec.name)
@pytest.mark.requires_executables(*args)
@@ -295,7 +323,7 @@ def test_relative_rpaths_install_nondefault(mirror_dir):
cspec = Spec("corge").concretized()
# Test install in non-default install path scheme and relative path
buildcache_cmd("install", "-uf", cspec.name)
buildcache_cmd("install", "-auf", cspec.name)
def test_push_and_fetch_keys(mock_gnupghome):
@@ -376,7 +404,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
install_cmd(s.name)
# Put installed package in the buildcache
buildcache_cmd("push", "-u", "-a", mirror_dir.strpath, s.name)
buildcache_cmd("push", "-u", "-a", "-d", mirror_dir.strpath, s.name)
rebuild = bindist.needs_rebuild(s, mirror_url)
@@ -405,8 +433,8 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
install_cmd("--no-cache", s.name)
# Create a buildcache and update index
buildcache_cmd("push", "-ua", mirror_dir.strpath, s.name)
buildcache_cmd("update-index", mirror_dir.strpath)
buildcache_cmd("push", "-uad", mirror_dir.strpath, s.name)
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
# Check package and dependency in buildcache
cache_list = buildcache_cmd("list", "--allarch")
@@ -418,7 +446,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
os.remove(*libelf_files)
# Update index
buildcache_cmd("update-index", mirror_dir.strpath)
buildcache_cmd("update-index", "-d", mirror_dir.strpath)
with spack.config.override("config:binary_index_ttl", 0):
# Check dependency not in buildcache
@@ -494,10 +522,10 @@ def test_update_sbang(tmpdir, test_mirror):
install_cmd("--no-cache", old_spec.name)
# Create a buildcache with the installed spec.
buildcache_cmd("push", "-u", "-a", mirror_dir, old_spec_hash_str)
buildcache_cmd("push", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)
# Need to force an update of the buildcache index
buildcache_cmd("update-index", mirror_dir)
buildcache_cmd("update-index", "-d", mirror_dir)
# Uninstall the original package.
uninstall_cmd("-y", old_spec_hash_str)
@@ -513,7 +541,7 @@ def test_update_sbang(tmpdir, test_mirror):
assert new_spec.dag_hash() == old_spec.dag_hash()
# Install package from buildcache
buildcache_cmd("install", "-u", "-f", new_spec.name)
buildcache_cmd("install", "-a", "-u", "-f", new_spec.name)
# Continue blowing away caches
bindist.clear_spec_cache()

View File

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
import pytest
import spack.cmd.create
@@ -10,6 +12,8 @@
import spack.util.executable
import spack.util.url as url_util
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture(
scope="function",

View File

@@ -173,7 +173,7 @@ def wrapper_environment(working_env):
SPACK_DTAGS_TO_ADD="--disable-new-dtags",
SPACK_DTAGS_TO_STRIP="--enable-new-dtags",
SPACK_COMPILER_FLAGS_KEEP="",
SPACK_COMPILER_FLAGS_REPLACE="-Werror*|",
SPACK_COMPILER_FLAGS_REPLACE="-Werror*",
):
yield
@@ -278,8 +278,8 @@ def test_ld_flags(wrapper_environment, wrapper_flags):
ld,
test_args,
["ld"]
+ spack_ldflags
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["--disable-new-dtags"]
+ test_rpaths
@@ -293,10 +293,10 @@ def test_cpp_flags(wrapper_environment, wrapper_flags):
cpp,
test_args,
["cpp"]
+ spack_cppflags
+ test_include_paths
+ test_library_paths
+ test_args_without_paths
+ spack_cppflags,
+ test_args_without_paths,
)
@@ -306,14 +306,10 @@ def test_cc_flags(wrapper_environment, wrapper_flags):
test_args,
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_cppflags
+ spack_cflags
+ spack_ldflags
+ common_compile_args
+ spack_ldlibs,
)
@@ -324,13 +320,10 @@ def test_cxx_flags(wrapper_environment, wrapper_flags):
test_args,
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_cppflags
+ spack_cxxflags
+ spack_ldflags
+ common_compile_args
+ spack_ldlibs,
)
@@ -341,14 +334,10 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
test_args,
[real_cc]
+ target_args
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_fflags
+ spack_cppflags
+ spack_ldflags
+ common_compile_args
+ spack_ldlibs,
)

View File

@@ -291,7 +291,7 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dep
def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
result = {
"stage": "stage-rebuild-index",
"script": "spack buildcache update-index s3://mirror",
"script": "spack buildcache update-index --mirror-url s3://mirror",
"tags": ["tag-0", "tag-1"],
"image": {"name": "spack/centos7", "entrypoint": [""]},
"after_script": ['rm -rf "./spack"'],

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pickle
import sys
import pytest
@@ -40,10 +39,7 @@ def test_dump(tmpdir):
with tmpdir.as_cwd():
build_env("--dump", _out_file, "zlib")
with open(_out_file) as f:
if sys.platform == "win32":
assert any(line.startswith('set "PATH=') for line in f.readlines())
else:
assert any(line.startswith("PATH=") for line in f.readlines())
assert any(line.startswith("PATH=") for line in f.readlines())
@pytest.mark.usefixtures("config", "mock_packages", "working_env")

View File

@@ -85,7 +85,7 @@ def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
pkg = "trivial-install-test-package"
install(pkg)
buildcache("push", "--unsigned", str(tmpdir), pkg)
buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
spec = Spec(pkg).concretized()
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -105,7 +105,7 @@ def tests_buildcache_create_env(
add(pkg)
install()
buildcache("push", "--unsigned", str(tmpdir))
buildcache("push", "-d", str(tmpdir), "--unsigned")
spec = Spec(pkg).concretized()
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -118,7 +118,7 @@ def test_buildcache_create_fails_on_noargs(tmpdir):
"""Ensure that buildcache create fails when given no args or
environment."""
with pytest.raises(spack.main.SpackCommandError):
buildcache("push", "--unsigned", str(tmpdir))
buildcache("push", "-d", str(tmpdir), "--unsigned")
def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monkeypatch, tmpdir):
@@ -127,7 +127,7 @@ def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monk
tmpdir.chmod(0)
with pytest.raises(OSError) as error:
buildcache("push", "--unsigned", str(tmpdir), "trivial-install-test-package")
buildcache("push", "-d", str(tmpdir), "--unsigned", "trivial-install-test-package")
assert error.value.errno == errno.EACCES
tmpdir.chmod(0o700)
@@ -159,11 +159,11 @@ def test_update_key_index(
# Put installed package in the buildcache, which, because we're signing
# it, should result in the public key getting pushed to the buildcache
# as well.
buildcache("push", "-a", mirror_dir.strpath, s.name)
buildcache("push", "-a", "-d", mirror_dir.strpath, s.name)
# Now make sure that when we pass the "--keys" argument to update-index
# it causes the index to get update.
buildcache("update-index", "--keys", mirror_dir.strpath)
buildcache("update-index", "--keys", "-d", mirror_dir.strpath)
key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))
@@ -213,25 +213,27 @@ def verify_mirror_contents():
# Install a package and put it in the buildcache
s = Spec(out_env_pkg).concretized()
install(s.name)
buildcache("push", "-u", "-f", "-a", src_mirror_url, s.name)
buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, s.name)
env("create", "test")
with ev.read("test"):
add(in_env_pkg)
install()
buildcache("push", "-u", "-f", "-a", src_mirror_url, in_env_pkg)
buildcache("push", "-u", "-f", "-a", "--mirror-url", src_mirror_url, in_env_pkg)
# Now run the spack buildcache sync command with all the various options
# for specifying mirrors
# Use urls to specify mirrors
buildcache("sync", src_mirror_url, dest_mirror_url)
buildcache(
"sync", "--src-mirror-url", src_mirror_url, "--dest-mirror-url", dest_mirror_url
)
verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)
# Use local directory paths to specify fs locations
buildcache("sync", src_mirror_dir, dest_mirror_dir)
buildcache("sync", "--src-directory", src_mirror_dir, "--dest-directory", dest_mirror_dir)
verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)
@@ -240,7 +242,7 @@ def verify_mirror_contents():
mirror("add", "src", src_mirror_url)
mirror("add", "dest", dest_mirror_url)
buildcache("sync", "src", "dest")
buildcache("sync", "--src-mirror-name", "src", "--dest-mirror-name", "dest")
verify_mirror_contents()
@@ -258,7 +260,7 @@ def test_buildcache_create_install(
pkg = "trivial-install-test-package"
install(pkg)
buildcache("push", "--unsigned", str(tmpdir), pkg)
buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
spec = Spec(pkg).concretized()
tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
@@ -322,12 +324,12 @@ def fake_push(node, push_url, options):
monkeypatch.setattr(spack.binary_distribution, "push_or_raise", fake_push)
buildcache_create_args = ["create", "--unsigned"]
buildcache_create_args = ["create", "-d", str(tmpdir), "--unsigned"]
if things_to_install != "":
buildcache_create_args.extend(["--only", things_to_install])
buildcache_create_args.extend([str(tmpdir), slash_hash])
buildcache_create_args.extend([slash_hash])
buildcache(*buildcache_create_args)
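
Every buildcache invocation in this file's hunks toggles between the same two CLI shapes: a positional mirror argument on one side, and an explicit flag (-d, --mirror-url, or the sync --src-*/--dest-* options) on the other. Condensed verbatim from the hunks above:

# Positional-mirror shape:
buildcache("push", "--unsigned", str(tmpdir), pkg)
buildcache("update-index", "--keys", mirror_dir.strpath)
buildcache("sync", src_mirror_url, dest_mirror_url)
# Flag-based shape:
buildcache("push", "-d", str(tmpdir), "--unsigned", pkg)
buildcache("update-index", "--keys", "-d", mirror_dir.strpath)
buildcache("sync", "--src-mirror-url", src_mirror_url, "--dest-mirror-url", dest_mirror_url)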

View File

@@ -1055,7 +1055,7 @@ def test_ci_nothing_to_rebuild(
)
install_cmd("archive-files")
buildcache_cmd("push", "-a", "-f", "-u", mirror_url, "archive-files")
buildcache_cmd("push", "-a", "-f", "-u", "--mirror-url", mirror_url, "archive-files")
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
@@ -1155,8 +1155,8 @@ def test_ci_generate_mirror_override(
second_ci_yaml = str(tmpdir.join(".gitlab-ci-2.yml"))
with ev.read("test"):
install_cmd()
buildcache_cmd("push", "-u", mirror_url, "patchelf")
buildcache_cmd("update-index", mirror_url, output=str)
buildcache_cmd("push", "-u", "--mirror-url", mirror_url, "patchelf")
buildcache_cmd("update-index", "--mirror-url", mirror_url, output=str)
# This generate should not trigger a rebuild of patchelf, since it's in
# the main mirror referenced in the environment.
@@ -1297,7 +1297,7 @@ def test_push_mirror_contents(
mirror_cmd("rm", "test-ci")
# Test generating buildcache index while we have bin mirror
buildcache_cmd("update-index", mirror_url)
buildcache_cmd("update-index", "--mirror-url", mirror_url)
index_path = os.path.join(buildcache_path, "index.json")
with open(index_path) as idx_fd:
index_object = json.load(idx_fd)
@@ -1613,7 +1613,7 @@ def test_ci_rebuild_index(
ypfd.write(spec_json)
install_cmd("--add", "--keep-stage", "-f", json_path)
buildcache_cmd("push", "-u", "-a", "-f", mirror_url, "callpath")
buildcache_cmd("push", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath")
ci_cmd("rebuild-index")
buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
@@ -1647,7 +1647,7 @@ def test_ci_generate_bootstrap_prune_dag(
install_cmd("gcc@=12.2.0%gcc@10.2.1")
# Put installed compiler in the buildcache
buildcache_cmd("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")
buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")
# Now uninstall the compiler
uninstall_cmd("-y", "gcc@12.2.0%gcc@10.2.1")
@@ -1662,7 +1662,7 @@ def test_ci_generate_bootstrap_prune_dag(
install_cmd("--no-check-signature", "b%gcc@=12.2.0")
# Put spec built with installed compiler in the buildcache
buildcache_cmd("push", "-u", "-a", "-f", mirror_dir.strpath, "b%gcc@12.2.0")
buildcache_cmd("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "b%gcc@12.2.0")
# Now uninstall the spec
uninstall_cmd("-y", "b%gcc@12.2.0")

View File

@@ -8,6 +8,8 @@
import pytest
import llnl.util.filesystem
import spack.compilers
import spack.main
import spack.version
@@ -16,8 +18,124 @@
@pytest.fixture
def compilers_dir(mock_executable):
"""Create a directory with some mock compiler scripts in it.
def mock_compiler_version():
return "4.5.3"
@pytest.fixture()
def mock_compiler_dir(tmpdir, mock_compiler_version):
"""Return a directory containing a fake, but detectable compiler."""
tmpdir.ensure("bin", dir=True)
bin_dir = tmpdir.join("bin")
gcc_path = bin_dir.join("gcc")
gxx_path = bin_dir.join("g++")
gfortran_path = bin_dir.join("gfortran")
gcc_path.write(
"""\
#!/bin/sh
for arg in "$@"; do
if [ "$arg" = -dumpversion ]; then
echo '%s'
fi
done
"""
% mock_compiler_version
)
# Create some mock compilers in the temporary directory
llnl.util.filesystem.set_executable(str(gcc_path))
gcc_path.copy(gxx_path, mode=True)
gcc_path.copy(gfortran_path, mode=True)
return str(tmpdir)
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
with open("gcc", "w") as f:
f.write(
"""\
#!/bin/sh
echo "0.0.0"
"""
)
os.chmod("gcc", 0o700)
os.environ["PATH"] = str(tmpdir)
output = compiler("find", "--scope=site")
assert "gcc" in output
@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
# make a script to emulate apple gcc's version args
with open("gcc", "w") as f:
f.write(
"""\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
"""
)
os.chmod("gcc", 0o700)
os.environ["PATH"] = str(tmpdir)
output = compiler("find", "--scope=site")
assert "gcc" not in output
def test_compiler_remove(mutable_config, mock_packages):
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
def test_compiler_add(mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version):
# Compilers available by default.
old_compilers = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
all=None, compiler_spec=None, add_paths=[mock_compiler_dir], scope=None
)
spack.cmd.compiler.compiler_find(args)
# Ensure new compiler is in there
new_compilers = set(spack.compilers.all_compiler_specs())
new_compiler = new_compilers - old_compilers
assert any(c.version == spack.version.Version(mock_compiler_version) for c in new_compiler)
@pytest.fixture
def clangdir(tmpdir):
"""Create a directory with some dummy compiler scripts in it.
Scripts are:
- clang
@@ -27,9 +145,11 @@ def compilers_dir(mock_executable):
- gfortran-8
"""
clang_path = mock_executable(
"clang",
output="""
with tmpdir.as_cwd():
with open("clang", "w") as f:
f.write(
"""\
#!/bin/sh
if [ "$1" = "--version" ]; then
echo "clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
@@ -39,11 +159,12 @@ def compilers_dir(mock_executable):
echo "clang: error: no input files"
exit 1
fi
""",
)
shutil.copy(clang_path, clang_path.parent / "clang++")
"""
)
shutil.copy("clang", "clang++")
gcc_script = """
gcc_script = """\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "8"
elif [ "$1" = "-dumpfullversion" ]; then
@@ -57,111 +178,30 @@ def compilers_dir(mock_executable):
exit 1
fi
"""
mock_executable("gcc-8", output=gcc_script.format("gcc", "gcc-8"))
mock_executable("g++-8", output=gcc_script.format("g++", "g++-8"))
mock_executable("gfortran-8", output=gcc_script.format("GNU Fortran", "gfortran-8"))
with open("gcc-8", "w") as f:
f.write(gcc_script.format("gcc", "gcc-8"))
with open("g++-8", "w") as f:
f.write(gcc_script.format("g++", "g++-8"))
with open("gfortran-8", "w") as f:
f.write(gcc_script.format("GNU Fortran", "gfortran-8"))
os.chmod("clang", 0o700)
os.chmod("clang++", 0o700)
os.chmod("gcc-8", 0o700)
os.chmod("g++-8", 0o700)
os.chmod("gfortran-8", 0o700)
return clang_path.parent
yield tmpdir
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
"""Tests that 'spack compiler find' looks into PATH by default, if no specific path
is given.
"""
gcc_path = mock_executable("gcc", output='echo "0.0.0"')
os.environ["PATH"] = str(gcc_path.parent)
output = compiler("find", "--scope=site")
assert "gcc" in output
@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
"""Tests that Spack won't mistake Apple's GCC as a "real" GCC, since it's really
Clang with a few tweaks.
"""
gcc_path = mock_executable(
"gcc",
output="""
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
""",
)
os.environ["PATH"] = str(gcc_path.parent)
output = compiler("find", "--scope=site")
assert "gcc" not in output
@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
"""Tests that we can remove a compiler from configuration."""
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.regression("37996")
def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
# Duplicate "site" scope into "user" scope
site_config = spack.config.get("compilers", scope="site")
spack.config.set("compilers", site_config, scope="user")
assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_add(mutable_config, mock_packages, mock_executable):
"""Tests that we can add a compiler to configuration."""
expected_version = "4.5.3"
gcc_path = mock_executable(
"gcc",
output=f"""\
for arg in "$@"; do
if [ "$arg" = -dumpversion ]; then
echo '{expected_version}'
fi
done
""",
)
bin_dir = gcc_path.parent
root_dir = bin_dir.parent
compilers_before_find = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None
)
spack.cmd.compiler.compiler_find(args)
compilers_after_find = set(spack.compilers.all_compiler_specs())
compilers_added_by_find = compilers_after_find - compilers_before_find
assert len(compilers_added_by_find) == 1
new_compiler = compilers_added_by_find.pop()
assert new_compiler.version == spack.version.Version(expected_version)
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.regression("17590")
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
"""Ensure that we'll mix compilers with different suffixes when necessary."""
os.environ["PATH"] = str(compilers_dir)
os.environ["PATH"] = str(clangdir)
output = compiler("find", "--scope=site")
assert "clang@11.0.0" in output
@@ -171,33 +211,39 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
gfortran_path = str(compilers_dir / "gfortran-8")
gfortran_path = str(clangdir.join("gfortran-8"))
assert clang["paths"] == {
"cc": str(compilers_dir / "clang"),
"cxx": str(compilers_dir / "clang++"),
"cc": str(clangdir.join("clang")),
"cxx": str(clangdir.join("clang++")),
# we only auto-detect mixed clang on macos
"f77": gfortran_path if sys.platform == "darwin" else None,
"fc": gfortran_path if sys.platform == "darwin" else None,
}
assert gcc["paths"] == {
"cc": str(compilers_dir / "gcc-8"),
"cxx": str(compilers_dir / "g++-8"),
"cc": str(clangdir.join("gcc-8")),
"cxx": str(clangdir.join("g++-8")),
"f77": gfortran_path,
"fc": gfortran_path,
}
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
@pytest.mark.regression("17590")
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir):
"""Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""
clang_path = compilers_dir / "clang"
shutil.copy(clang_path, clang_path.parent / "clang-gpu")
shutil.copy(clang_path, clang_path.parent / "clang++-gpu")
with clangdir.as_cwd():
shutil.copy("clang", "clang-gpu")
shutil.copy("clang++", "clang++-gpu")
os.chmod("clang-gpu", 0o700)
os.chmod("clang++-gpu", 0o700)
os.environ["PATH"] = str(compilers_dir)
os.environ["PATH"] = str(clangdir)
output = compiler("find", "--scope=site")
assert "clang@11.0.0" in output
@@ -206,38 +252,46 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
config = spack.compilers.get_compiler_config("site", False)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
assert clang["paths"]["cc"] == str(compilers_dir / "clang")
assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")
assert clang["paths"]["cc"] == str(clangdir.join("clang"))
assert clang["paths"]["cxx"] == str(clangdir.join("clang++"))
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
"""Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
new_dir = compilers_dir / "first_in_path"
new_dir.mkdir()
for name in ("gcc-8", "g++-8", "gfortran-8"):
shutil.copy(compilers_dir / name, new_dir / name)
# Set PATH to have the new folder searched first
os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))
@pytest.mark.skipif(
sys.platform == "win32",
reason="Cannot execute bash \
script on Windows",
)
def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
"""Ensure that we find compilers that come first in the PATH first"""
with clangdir.as_cwd():
os.mkdir("first_in_path")
shutil.copy("gcc-8", "first_in_path/gcc-8")
shutil.copy("g++-8", "first_in_path/g++-8")
shutil.copy("gfortran-8", "first_in_path/gfortran-8")
# the first_in_path folder should be searched first
os.environ["PATH"] = "{0}:{1}".format(str(clangdir.join("first_in_path")), str(clangdir))
compiler("find", "--scope=site")
config = spack.compilers.get_compiler_config("site", False)
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
assert gcc["paths"] == {
"cc": str(new_dir / "gcc-8"),
"cxx": str(new_dir / "g++-8"),
"f77": str(new_dir / "gfortran-8"),
"fc": str(new_dir / "gfortran-8"),
"cc": str(clangdir.join("first_in_path", "gcc-8")),
"cxx": str(clangdir.join("first_in_path", "g++-8")),
"f77": str(clangdir.join("first_in_path", "gfortran-8")),
"fc": str(clangdir.join("first_in_path", "gfortran-8")),
}
def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
"""Spack should not automatically search for compilers when listing them and none are
available. And when stdout is not a tty like in tests, there should be no output and
no error exit code.
"""
os.environ["PATH"] = str(compilers_dir)
def test_compiler_list_empty(no_compilers_yaml, working_env, clangdir):
# Spack should not automatically search for compilers when listing them and none
# are available. And when stdout is not a tty like in tests, there should be no
# output and no error exit code.
os.environ["PATH"] = str(clangdir)
out = compiler("list")
assert not out
assert compiler.returncode == 0
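
Whichever fixture spelling is in play (compilers_dir or clangdir), the detection tests in this file all follow one pattern. Condensed from the hunks above:

# Shared pattern: put mock compilers on PATH, run detection, inspect config.
os.environ["PATH"] = str(compilers_dir)  # clangdir on the other side of the diff
output = compiler("find", "--scope=site")
config = spack.compilers.get_compiler_config("site", False)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")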

View File

@@ -32,7 +32,7 @@ def check_develop(self, env, spec, path=None):
assert dev_specs_entry["spec"] == str(spec)
# check yaml representation
yaml = env.manifest[ev.TOP_LEVEL_KEY]
yaml = ev.config_dict(env.manifest)
assert spec.name in yaml["develop"]
yaml_entry = yaml["develop"][spec.name]
assert yaml_entry["spec"] == str(spec)

View File

@@ -390,19 +390,6 @@ def test_remove_after_concretize():
assert not any(s.name == "mpileaks" for s in env_specs)
def test_remove_before_concretize():
e = ev.create("test")
e.unify = True
e.add("mpileaks")
e.concretize()
e.remove("mpileaks")
e.concretize()
assert not list(e.concretized_specs())
def test_remove_command():
env("create", "test")
assert "test" in env("list")
@@ -2688,7 +2675,7 @@ def test_modules_relative_to_views(environment_from_manifest, install_mockery, m
spec = e.specs_by_hash[e.concretized_order[0]]
view_prefix = e.default_view.get_projection_for_spec(spec)
modules_glob = "%s/modules/**/*/*" % e.path
modules_glob = "%s/modules/**/*" % e.path
modules = glob.glob(modules_glob)
assert len(modules) == 1
module = modules[0]
@@ -2724,12 +2711,12 @@ def test_multiple_modules_post_env_hook(environment_from_manifest, install_mocke
spec = e.specs_by_hash[e.concretized_order[0]]
view_prefix = e.default_view.get_projection_for_spec(spec)
modules_glob = "%s/modules/**/*/*" % e.path
modules_glob = "%s/modules/**/*" % e.path
modules = glob.glob(modules_glob)
assert len(modules) == 1
module = modules[0]
full_modules_glob = "%s/full_modules/**/*/*" % e.path
full_modules_glob = "%s/full_modules/**/*" % e.path
full_modules = glob.glob(full_modules_glob)
assert len(full_modules) == 1
full_module = full_modules[0]

View File

@@ -44,8 +44,9 @@ def define_plat_exe(exe):
def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
executables_found({str(cmake_path): define_plat_exe("cmake")})
executables_found(
{mock_executable("cmake", output="echo cmake version 1.foo"): define_plat_exe("cmake")}
)
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
@@ -70,7 +71,7 @@ def test_find_external_two_instances_same_package(
"cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
)
cmake_exe = define_plat_exe("cmake")
executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})
executables_found({cmake_path1: cmake_exe, cmake_path2: cmake_exe})
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
@@ -106,7 +107,7 @@ def test_get_executables(working_env, mock_executable):
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
path_to_exe = spack.detection.executables_in_path([os.path.dirname(cmake_path1)])
cmake_exe = define_plat_exe("cmake")
assert path_to_exe[str(cmake_path1)] == cmake_exe
assert path_to_exe[cmake_path1] == cmake_exe
external = SpackCommand("external")
@@ -333,7 +334,7 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla
assert "extra_attributes" in external_gcc
extra_attributes = external_gcc["extra_attributes"]
assert "prefix" not in extra_attributes
assert extra_attributes["compilers"]["c"] == str(gcc_exe)
assert extra_attributes["compilers"]["c"] == gcc_exe
def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):

View File

@@ -3,12 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
import pytest
from spack.main import SpackCommand, SpackCommandError
graph = SpackCommand("graph")
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.mark.db
@pytest.mark.usefixtures("mock_packages", "database")

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import sys
import pytest
@@ -12,6 +13,8 @@
info = SpackCommand("info")
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Not yet implemented on Windows")
@pytest.fixture(scope="module")
def parser():

View File

@@ -966,7 +966,7 @@ def test_compiler_bootstrap_from_binary_mirror(
install("gcc@=10.2.0")
# Put installed compiler in the buildcache
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@10.2.0")
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.2.0")
# Now uninstall the compiler
uninstall("-y", "gcc@10.2.0")
@@ -1138,7 +1138,7 @@ def install_use_buildcache(opt):
# Populate the buildcache
install(package_name)
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, package_name, dependency_name)
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, package_name, dependency_name)
# Uninstall the all of the packages for clean slate
uninstall("-y", "-a")

View File

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
import pytest
import spack.store
@@ -13,6 +15,8 @@
install = SpackCommand("install")
uninstall = SpackCommand("uninstall")
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.mark.db
def test_mark_mode_required(mutable_database):

View File

@@ -235,7 +235,7 @@ def test_mirror_destroy(
# Put a binary package in a buildcache
install("--no-cache", spec_name)
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
contents = os.listdir(mirror_dir.strpath)
assert "build_cache" in contents
@@ -245,7 +245,7 @@ def test_mirror_destroy(
assert not os.path.exists(mirror_dir.strpath)
buildcache("push", "-u", "-a", "-f", mirror_dir.strpath, spec_name)
buildcache("push", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
contents = os.listdir(mirror_dir.strpath)
assert "build_cache" in contents

View File

@@ -157,7 +157,7 @@ def _parse_types(string):
def test_spec_deptypes_nodes():
output = spec("--types", "--cover", "nodes", "--no-install-status", "dt-diamond")
output = spec("--types", "--cover", "nodes", "dt-diamond")
types = _parse_types(output)
assert types["dt-diamond"] == [" "]
@@ -167,7 +167,7 @@ def test_spec_deptypes_nodes():
def test_spec_deptypes_edges():
output = spec("--types", "--cover", "edges", "--no-install-status", "dt-diamond")
output = spec("--types", "--cover", "edges", "dt-diamond")
types = _parse_types(output)
assert types["dt-diamond"] == [" "]

View File

@@ -337,6 +337,8 @@ def test_compiler_flags_differ_identical_compilers(self):
# Get the compiler that matches the spec (
compiler = spack.compilers.compiler_for_spec("clang@=12.2.0", spec.architecture)
# Clear cache for compiler config since it has its own cache mechanism outside of config
spack.compilers._cache_config_file = []
# Configure spack to have two identical compilers with different flags
default_dict = spack.compilers._to_dict(compiler)
@@ -2135,7 +2137,7 @@ def test_compiler_with_custom_non_numeric_version(self, mock_executable):
{
"compiler": {
"spec": "gcc@foo",
"paths": {"cc": str(gcc_path), "cxx": str(gcc_path), "f77": None, "fc": None},
"paths": {"cc": gcc_path, "cxx": gcc_path, "f77": None, "fc": None},
"operating_system": "debian6",
"modules": [],
}
@@ -2170,14 +2172,3 @@ def test_concretization_with_compilers_supporting_target_any(self):
with spack.config.override("compilers", compiler_configuration):
s = spack.spec.Spec("a").concretized()
assert s.satisfies("%gcc@12.1.0")
@pytest.mark.parametrize("spec_str", ["mpileaks", "mpileaks ^mpich"])
def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretization):
"""Tests that information on virtuals is annotated on DAG edges"""
spec = default_mock_concretization(spec_str)
mpi_provider = spec["mpi"].name
edges = spec.edges_to_dependencies(name=mpi_provider)
assert len(edges) == 1 and edges[0].virtuals == ("mpi",)
edges = spec.edges_to_dependencies(name="callpath")
assert len(edges) == 1 and edges[0].virtuals == ()

View File

@@ -1669,21 +1669,22 @@ def clear_directive_functions():
@pytest.fixture
def mock_executable(tmp_path):
def mock_executable(tmpdir):
"""Factory to create a mock executable in a temporary directory that
output a custom string when run.
"""
import jinja2
shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"
def _factory(name, output, subdir=("bin",)):
executable_dir = tmp_path.joinpath(*subdir)
executable_dir.mkdir(parents=True, exist_ok=True)
executable_path = executable_dir / name
f = tmpdir.ensure(*subdir, dir=True).join(name)
if sys.platform == "win32":
executable_path = executable_dir / (name + ".bat")
executable_path.write_text(f"{ shebang }{ output }\n")
executable_path.chmod(0o755)
return executable_path
f += ".bat"
t = jinja2.Template("{{ shebang }}{{ output }}\n")
f.write(t.render(shebang=shebang, output=output))
f.chmod(0o755)
return str(f)
return _factory
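
Either version of the fixture returns a handle to the script it writes (a pathlib path on one side, a plain string on the other). A usage sketch with hypothetical arguments, matching the cmake calls in the external-detection hunks earlier in this diff:

def test_uses_mock_executable(mock_executable):
    # Writes <tmp>/bin/cmake (cmake.bat on Windows), makes it executable,
    # and returns its path; running it prints the requested output.
    cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
    bin_dir = os.path.dirname(str(cmake_path))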

View File

@@ -167,37 +167,6 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
assert len([x for x in content if 'append_path("SPACE", "qux", " ")' in x]) == 1
assert len([x for x in content if 'remove_path("SPACE", "qux", " ")' in x]) == 1
@pytest.mark.regression("11355")
def test_manpath_setup(self, modulefile_content, module_configuration):
"""Tests specific setup of MANPATH environment variable."""
module_configuration("autoload_direct")
# no manpath set by module
content = modulefile_content("mpileaks")
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0
# manpath set by module with prepend_path
content = modulefile_content("module-manpath-prepend")
assert (
len([x for x in content if 'prepend_path("MANPATH", "/path/to/man", ":")' in x]) == 1
)
assert (
len([x for x in content if 'prepend_path("MANPATH", "/path/to/share/man", ":")' in x])
== 1
)
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1
# manpath set by module with append_path
content = modulefile_content("module-manpath-append")
assert len([x for x in content if 'append_path("MANPATH", "/path/to/man", ":")' in x]) == 1
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 1
# manpath set by module with setenv
content = modulefile_content("module-manpath-setenv")
assert len([x for x in content if 'setenv("MANPATH", "/path/to/man")' in x]) == 1
assert len([x for x in content if 'append_path("MANPATH", "", ":")' in x]) == 0
def test_help_message(self, modulefile_content, module_configuration):
"""Tests the generation of module help message."""

View File

@@ -37,11 +37,6 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
module_configuration("autoload_direct")
content = modulefile_content(mpileaks_spec_string)
assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2
# dtbuild1 has
@@ -51,11 +46,6 @@ def test_autoload_direct(self, modulefile_content, module_configuration):
# Just make sure the 'build' dependency is not there
content = modulefile_content("dtbuild1")
assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2
# The configuration file sets the verbose keyword to False
@@ -68,11 +58,6 @@ def test_autoload_all(self, modulefile_content, module_configuration):
module_configuration("autoload_all")
content = modulefile_content(mpileaks_spec_string)
assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 5
assert len([x for x in content if "module load " in x]) == 5
# dtbuild1 has
@@ -82,11 +67,6 @@ def test_autoload_all(self, modulefile_content, module_configuration):
# Just make sure the 'build' dependency is not there
content = modulefile_content("dtbuild1")
assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 1
)
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2
def test_prerequisites_direct(self, modulefile_content, module_configuration):
@@ -123,7 +103,6 @@ def test_alter_environment(self, modulefile_content, module_configuration):
assert len([x for x in content if x.startswith("prepend-path CMAKE_PREFIX_PATH")]) == 0
assert len([x for x in content if 'setenv FOO "foo"' in x]) == 0
assert len([x for x in content if "unsetenv BAR" in x]) == 0
assert len([x for x in content if "depends-on foo/bar" in x]) == 1
assert len([x for x in content if "module load foo/bar" in x]) == 1
assert len([x for x in content if "setenv LIBDWARF_ROOT" in x]) == 1
@@ -142,46 +121,6 @@ def test_prepend_path_separator(self, modulefile_content, module_configuration):
assert len([x for x in content if 'append-path --delim " " SPACE "qux"' in x]) == 1
assert len([x for x in content if 'remove-path --delim " " SPACE "qux"' in x]) == 1
@pytest.mark.regression("11355")
def test_manpath_setup(self, modulefile_content, module_configuration):
"""Tests specific setup of MANPATH environment variable."""
module_configuration("autoload_direct")
# no manpath set by module
content = modulefile_content("mpileaks")
assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 0
# manpath set by module with prepend-path
content = modulefile_content("module-manpath-prepend")
assert (
len([x for x in content if 'prepend-path --delim ":" MANPATH "/path/to/man"' in x])
== 1
)
assert (
len(
[
x
for x in content
if 'prepend-path --delim ":" MANPATH "/path/to/share/man"' in x
]
)
== 1
)
assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 1
# manpath set by module with append-path
content = modulefile_content("module-manpath-append")
assert (
len([x for x in content if 'append-path --delim ":" MANPATH "/path/to/man"' in x]) == 1
)
assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 1
# manpath set by module with setenv
content = modulefile_content("module-manpath-setenv")
assert len([x for x in content if 'setenv MANPATH "/path/to/man"' in x]) == 1
assert len([x for x in content if 'append-path --delim ":" MANPATH ""' in x]) == 0
def test_help_message(self, modulefile_content, module_configuration):
"""Tests the generation of module help message."""
@@ -455,16 +394,10 @@ def test_autoload_with_constraints(self, modulefile_content, module_configuratio
# Test the mpileaks that should have the autoloaded dependencies
content = modulefile_content("mpileaks ^mpich2")
assert len([x for x in content if "depends-on " in x]) == 2
assert len([x for x in content if "module load " in x]) == 2
# Test the mpileaks that should NOT have the autoloaded dependencies
content = modulefile_content("mpileaks ^mpich")
assert (
len([x for x in content if "if {![info exists ::env(LMOD_VERSION_MAJOR)]} {" in x])
== 0
)
assert len([x for x in content if "depends-on " in x]) == 0
assert len([x for x in content if "module load " in x]) == 0
def test_modules_no_arch(self, factory, module_configuration):

View File

@@ -100,7 +100,7 @@ def test_buildcache(mock_archive, tmpdir):
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
create_args = ["create", "-a", "-f", mirror_path, pkghash]
create_args = ["create", "-a", "-f", "-d", mirror_path, pkghash]
# Create a private key to sign package with if gpg2 available
spack.util.gpg.create(
name="test key 1", expires="0", email="spack@googlegroups.com", comment="Spack test key"
@@ -116,7 +116,7 @@ def test_buildcache(mock_archive, tmpdir):
# Uninstall the package
pkg.do_uninstall(force=True)
install_args = ["install", "-f", pkghash]
install_args = ["install", "-a", "-f", pkghash]
args = parser.parse_args(install_args)
# Test install
buildcache.buildcache(parser, args)
@@ -131,6 +131,30 @@ def test_buildcache(mock_archive, tmpdir):
assert buildinfo["relocate_textfiles"] == ["dummy.txt"]
assert buildinfo["relocate_links"] == ["link_to_dummy.txt"]
# create build cache with relative path
create_args.insert(create_args.index("-a"), "-f")
create_args.insert(create_args.index("-a"), "-r")
args = parser.parse_args(create_args)
buildcache.buildcache(parser, args)
# Uninstall the package
pkg.do_uninstall(force=True)
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
# test overwrite install
install_args.insert(install_args.index("-a"), "-f")
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
files = os.listdir(spec.prefix)
assert "link_to_dummy.txt" in files
assert "dummy.txt" in files
# assert os.path.realpath(
# os.path.join(spec.prefix, 'link_to_dummy.txt')
# ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))
args = parser.parse_args(["keys"])
buildcache.buildcache(parser, args)

View File

@@ -62,7 +62,7 @@ def source_file(tmpdir, is_relocatable):
src = tmpdir.join("relocatable.c")
shutil.copy(template_src, str(src))
else:
template_dirs = (os.path.join(spack.paths.test_path, "data", "templates"),)
template_dirs = [os.path.join(spack.paths.test_path, "data", "templates")]
env = spack.tengine.make_environment(template_dirs)
template = env.get_template("non_relocatable.c")
text = template.render({"prefix": spack.store.layout.root})
@@ -173,6 +173,14 @@ def test_ensure_binary_is_relocatable(source_file, is_relocatable):
assert relocatable == is_relocatable
@pytest.mark.requires_executables("patchelf", "strings", "file")
@skip_unless_linux
def test_patchelf_is_relocatable():
patchelf = os.path.realpath(spack.relocate._patchelf())
assert llnl.util.filesystem.is_exe(patchelf)
spack.relocate.ensure_binary_is_relocatable(patchelf)
@skip_unless_linux
def test_ensure_binary_is_relocatable_errors(tmpdir):
# The file passed in as argument must exist...
@@ -233,6 +241,30 @@ def test_normalize_relative_paths(start_path, relative_paths, expected):
assert normalized == expected
def test_set_elf_rpaths(mock_patchelf):
# Try to relocate a mock version of patchelf and check
# the call made to patchelf itself
patchelf = mock_patchelf("echo $@")
rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
output = spack.relocate._set_elf_rpaths(patchelf, rpaths)
# Assert that the arguments of the call to patchelf are as expected
assert "--force-rpath" in output
assert "--set-rpath " + ":".join(rpaths) in output
assert patchelf in output
@skip_unless_linux
def test_set_elf_rpaths_warning(mock_patchelf):
# Mock a failing patchelf command and ensure it warns users
patchelf = mock_patchelf("exit 1")
rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
# To avoid using capfd in order to check if the warning was triggered
# here we just check that output is not set
output = spack.relocate._set_elf_rpaths(patchelf, rpaths)
assert output is None
@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
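
The two _set_elf_rpaths tests above use a common mocking trick: the fake patchelf just echoes its argument vector, so the function's return value is the exact command line it constructed. Condensed from the hunk:

patchelf = mock_patchelf("echo $@")  # fake patchelf prints its own argv
rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
output = spack.relocate._set_elf_rpaths(patchelf, rpaths)
assert "--set-rpath " + ":".join(rpaths) in output  # the call line round-trips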

View File

@@ -125,7 +125,7 @@ def _mock_installed(self):
# use the installed C. It should *not* force A to use the installed D
# *if* we're doing a fresh installation.
a_spec = Spec(a)
a_spec._add_dependency(c_spec, deptypes=("build", "link"), virtuals=())
a_spec._add_dependency(c_spec, deptypes=("build", "link"))
a_spec.concretize()
assert spack.version.Version("2") == a_spec[c][d].version
assert spack.version.Version("2") == a_spec[e].version
@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))
a_spec = Spec("a")
a_spec._add_dependency(b_spec, deptypes=("build", "link"), virtuals=())
a_spec._add_dependency(b_spec, deptypes=("build", "link"))
a_spec.concretize()
assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -989,9 +989,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
link_run_spec = Spec("c@=1.0").concretized()
build_spec = Spec("c@=2.0").concretized()
root.add_dependency_edge(link_run_spec, deptypes="link", virtuals=())
root.add_dependency_edge(link_run_spec, deptypes="run", virtuals=())
root.add_dependency_edge(build_spec, deptypes="build", virtuals=())
root.add_dependency_edge(link_run_spec, deptypes="link")
root.add_dependency_edge(link_run_spec, deptypes="run")
root.add_dependency_edge(build_spec, deptypes="build")
# Check dependencies from the perspective of root
assert len(root.dependencies()) == 2
@@ -1017,7 +1017,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
root = Spec("b@=2.0").concretized()
bootstrap = Spec("b@=1.0").concretized()
root.add_dependency_edge(bootstrap, deptypes="build", virtuals=())
root.add_dependency_edge(bootstrap, deptypes="build")
assert len(root.dependencies()) == 1
assert root.dependencies()[0].name == "b"
@@ -1036,7 +1036,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
bootstrap = Spec("b@=1.0").concretized()
for current_deptype in ("build", "link", "run"):
root.add_dependency_edge(bootstrap, deptypes=current_deptype, virtuals=())
root.add_dependency_edge(bootstrap, deptypes=current_deptype)
# Check edges in dependencies
assert len(root.edges_to_dependencies()) == 1
@@ -1063,9 +1063,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
c1 = Spec("b@=1.0").concretized()
c2 = Spec("b@=2.0").concretized()
p.add_dependency_edge(c1, deptypes=c1_deptypes, virtuals=())
p.add_dependency_edge(c1, deptypes=c1_deptypes)
with pytest.raises(spack.error.SpackError):
p.add_dependency_edge(c2, deptypes=c2_deptypes, virtuals=())
p.add_dependency_edge(c2, deptypes=c2_deptypes)
@pytest.mark.regression("33499")
@@ -1084,16 +1084,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
z3_flavor_1 = Spec("z3 +through_a1")
z3_flavor_2 = Spec("z3 +through_z1")
root.add_dependency_edge(a1, deptypes=("build", "run", "test"), virtuals=())
root.add_dependency_edge(a1, deptypes=("build", "run", "test"))
# unique package as a dep of a build/run/test type dep.
a1.add_dependency_edge(a2, deptypes="all", virtuals=())
a1.add_dependency_edge(z3_flavor_1, deptypes="all", virtuals=())
a1.add_dependency_edge(a2, deptypes="all")
a1.add_dependency_edge(z3_flavor_1, deptypes="all")
# chain of link type deps root -> z1 -> z2 -> z3
root.add_dependency_edge(z1, deptypes="link", virtuals=())
z1.add_dependency_edge(z2, deptypes="link", virtuals=())
z2.add_dependency_edge(z3_flavor_2, deptypes="link", virtuals=())
root.add_dependency_edge(z1, deptypes="link")
z1.add_dependency_edge(z2, deptypes="link")
z2.add_dependency_edge(z3_flavor_2, deptypes="link")
# Indexing should prefer the link-type dep.
assert "through_z1" in root["z3"].variants

View File

@@ -660,7 +660,6 @@ def test_spec_formatting(self, default_mock_concretization):
("{architecture.os}", "", "os", lambda spec: spec.architecture),
("{architecture.target}", "", "target", lambda spec: spec.architecture),
("{prefix}", "", "prefix", lambda spec: spec),
("{external}", "", "external", lambda spec: spec), # test we print "False"
]
hash_segments = [
@@ -971,7 +970,7 @@ def test_error_message_unknown_variant(self):
def test_satisfies_dependencies_ordered(self):
d = Spec("zmpi ^fake")
s = Spec("mpileaks")
s._add_dependency(d, deptypes=(), virtuals=())
s._add_dependency(d, deptypes=())
assert s.satisfies("mpileaks ^zmpi ^fake")
@pytest.mark.parametrize("transitive", [True, False])
@@ -1018,7 +1017,6 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):
def test_malformed_spec_dict():
# FIXME: This test was really testing the specific implementation with an ad-hoc test
with pytest.raises(SpecError, match="malformed"):
Spec.from_dict(
{"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}
@@ -1026,7 +1024,6 @@ def test_malformed_spec_dict():
def test_spec_dict_hashless_dep():
# FIXME: This test was really testing the specific implementation with an ad-hoc test
with pytest.raises(SpecError, match="Couldn't parse"):
Spec.from_dict(
{
@@ -1120,7 +1117,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
# add it to an abstract spec as a dependency
top = Spec("dt-diamond")
top.add_dependency_edge(bottom, deptypes=(), virtuals=())
top.add_dependency_edge(bottom, deptypes=())
# concretize with the already-concrete dependency
top.concretize()

View File

@@ -43,6 +43,12 @@ def check_json_round_trip(spec):
assert spec.eq_dag(spec_from_json)
def test_simple_spec():
spec = Spec("mpileaks")
check_yaml_round_trip(spec)
check_json_round_trip(spec)
def test_read_spec_from_signed_json():
spec_dir = os.path.join(spack.paths.test_path, "data", "mirrors", "signed_json")
file_name = (
@@ -64,6 +70,13 @@ def check_spec(spec_to_check):
check_spec(s)
def test_normal_spec(mock_packages):
spec = Spec("mpileaks+debug~opt")
spec.normalize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
@pytest.mark.parametrize(
"invalid_yaml", ["playing_playlist: {{ action }} playlist {{ playlist_name }}"]
)
@@ -82,28 +95,37 @@ def test_invalid_json_spec(invalid_json, error_message):
assert error_message in exc_msg
@pytest.mark.parametrize(
"abstract_spec",
[
# Externals
"externaltool",
"externaltest",
# Ambiguous version spec
"mpileaks@1.0:5.0,6.1,7.3+debug~opt",
# Variants
"mpileaks+debug~opt",
'multivalue-variant foo="bar,baz"',
# Virtuals on edges
"callpath",
"mpileaks",
],
)
def test_roundtrip_concrete_specs(abstract_spec, default_mock_concretization):
check_yaml_round_trip(Spec(abstract_spec))
check_json_round_trip(Spec(abstract_spec))
concrete_spec = default_mock_concretization(abstract_spec)
check_yaml_round_trip(concrete_spec)
check_json_round_trip(concrete_spec)
def test_external_spec(config, mock_packages):
spec = Spec("externaltool")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
spec = Spec("externaltest")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
def test_ambiguous_version_spec(mock_packages):
spec = Spec("mpileaks@1.0:5.0,6.1,7.3+debug~opt")
spec.normalize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
def test_concrete_spec(config, mock_packages):
spec = Spec("mpileaks+debug~opt")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
def test_yaml_multivalue(config, mock_packages):
spec = Spec('multivalue-variant foo="bar,baz"')
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
def test_yaml_subdag(config, mock_packages):
@@ -484,8 +506,6 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
# Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
# Add properties on edges, see https://github.com/spack/spack/pull/34821
("specfiles/hdf5.v020.json.gz", "vlirlcgazhvsvtundz4kug75xkkqqgou", spack.spec.SpecfileV4),
],
)
def test_load_json_specfiles(specfile, expected_hash, reader_cls):
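
The check_yaml_round_trip/check_json_round_trip helpers used throughout this file serialize a spec and require the reparsed result to be DAG-equal; the closing eq_dag assertion is visible in the first hunk. A minimal sketch of the JSON variant, assuming Spec.to_json/Spec.from_json behave as in Spack's public spec API:

def check_json_round_trip(spec):
    # Serialize, reparse, and require an identical DAG (sketch, not the
    # file's verbatim helper).
    spec_from_json = Spec.from_json(spec.to_json())
    assert spec.eq_dag(spec_from_json)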

View File

@@ -71,7 +71,7 @@ def test_template_retrieval(self):
"""Tests the template retrieval mechanism hooked into config files"""
# Check the directories are correct
template_dirs = spack.config.get("config:template_dirs")
template_dirs = tuple([canonicalize_path(x) for x in template_dirs])
template_dirs = [canonicalize_path(x) for x in template_dirs]
assert len(template_dirs) == 3
env = tengine.make_environment(template_dirs)

View File

@@ -441,30 +441,6 @@ def test_write_tested_status(
assert TestStatus(status) == expected
@pytest.mark.regression("37840")
def test_write_tested_status_no_repeats(
tmpdir, install_mockery_mutable_config, mock_fetch, mock_test_stage
):
"""Emulate re-running the same stand-alone tests a second time."""
s = spack.spec.Spec("trivial-smoke-test").concretized()
pkg = s.package
statuses = [TestStatus.PASSED, TestStatus.PASSED]
for i, status in enumerate(statuses):
pkg.tester.test_parts[f"test_{i}"] = status
pkg.tester.counts[status] += 1
pkg.tester.tested_file = tmpdir.join("test-log.txt")
pkg.tester.write_tested_status()
pkg.tester.write_tested_status()
# The test should NOT result in a ValueError: invalid literal for int()
# with base 10: '2\n2' (i.e., the results being appended instead of
# written to the file).
with open(pkg.tester.tested_file, "r") as f:
status = int(f.read().strip("\n"))
assert TestStatus(status) == TestStatus.PASSED
def test_check_special_outputs(tmpdir):
"""This test covers two related helper methods"""
contents = """CREATE TABLE packages (

View File

@@ -19,7 +19,7 @@ def create_dag(nodes, edges):
"""
specs = {name: Spec(name) for name in nodes}
for parent, child, deptypes in edges:
specs[parent].add_dependency_edge(specs[child], deptypes=deptypes, virtuals=())
specs[parent].add_dependency_edge(specs[child], deptypes=deptypes)
return specs
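
A usage sketch for the create_dag helper above, with made-up node names; each edge is the (parent, child, deptypes) triple the loop unpacks:

specs = create_dag(
    nodes=["root", "mid", "leaf"],
    edges=[("root", "mid", ("build", "link")), ("mid", "leaf", ("run",))],
)
assert [d.name for d in specs["root"].dependencies()] == ["mid"]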

View File

@@ -119,10 +119,7 @@ def test_dump_environment(prepare_environment_for_tests, tmpdir):
dumpfile_path = str(tmpdir.join("envdump.txt"))
envutil.dump_environment(dumpfile_path)
with open(dumpfile_path, "r") as dumpfile:
if sys.platform == "win32":
assert 'set "TEST_ENV_VAR={}"\n'.format(test_paths) in list(dumpfile)
else:
assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)
assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)
def test_reverse_environment_modifications(working_env):

View File

@@ -337,15 +337,15 @@ def test_remove_complex_package_logic_filtered():
("grads", "rrlmwml3f2frdnqavmro3ias66h5b2ce"),
("llvm", "nufffum5dabmaf4l5tpfcblnbfjknvd3"),
# has @when("@4.1.0") and raw unicode literals
("mfem", "lbhr43gm5zdye2yhqznucxb4sg6vhryl"),
("mfem@4.0.0", "lbhr43gm5zdye2yhqznucxb4sg6vhryl"),
("mfem@4.1.0", "vjdjdgjt6nyo7ited2seki5epggw5gza"),
("mfem", "qtneutm6khd6epd2rhyuv2y6zavsxbed"),
("mfem@4.0.0", "qtneutm6khd6epd2rhyuv2y6zavsxbed"),
("mfem@4.1.0", "uit2ydzhra3b2mlvnq262qlrqqmuwq3d"),
# has @when("@1.5.0:")
("py-torch", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
("py-torch@1.0", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
("py-torch@1.6", "p4ine4hc6f2ik2f2wyuwieslqbozll5w"),
# has a print with multiple arguments
("legion", "efpfd2c4pzhsbyc3o7plqcmtwm6b57yh"),
("legion", "sffy6vz3dusxnxeetofoomlaieukygoj"),
# has nested `with when()` blocks and loops
("trilinos", "vqrgscjrla4hi7bllink7v6v6dwxgc2p"),
],

View File

@@ -935,7 +935,7 @@ def test_inclusion_upperbound():
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_git_version_repo_attached_after_serialization(
mock_git_version_info, mock_packages, config, monkeypatch
mock_git_version_info, mock_packages, monkeypatch
):
"""Test that a GitVersion instance can be serialized and deserialized
without losing its repository reference.
@@ -954,9 +954,7 @@ def test_git_version_repo_attached_after_serialization(
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_resolved_git_version_is_shown_in_str(
mock_git_version_info, mock_packages, config, monkeypatch
):
def test_resolved_git_version_is_shown_in_str(mock_git_version_info, mock_packages, monkeypatch):
"""Test that a GitVersion from a commit without a user supplied version is printed
as <hash>=<version>, and not just <hash>."""
repo_path, _, commits = mock_git_version_info
@@ -970,7 +968,7 @@ def test_resolved_git_version_is_shown_in_str(
assert str(spec.version) == f"{commit}=1.0-git.1"
def test_unresolvable_git_versions_error(config, mock_packages):
def test_unresolvable_git_versions_error(mock_packages):
"""Test that VersionLookupError is raised when a git prop is not set on a package."""
with pytest.raises(VersionLookupError):
# The package exists, but does not have a git property set. When dereferencing

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import os
import sys
import pytest
@@ -33,6 +34,7 @@ def _create_url(relative_url):
root_with_fragment = _create_url("index_with_fragment.html")
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.parametrize(
"depth,expected_found,expected_not_found,expected_text",
[
@@ -97,17 +99,20 @@ def test_spider_no_response(monkeypatch):
assert not pages and not links
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_0():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=0)
assert Version("0.0.0") in versions
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_1():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=1)
assert Version("0.0.0") in versions
assert Version("1.0.0") in versions
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_2():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
assert Version("0.0.0") in versions
@@ -115,12 +120,14 @@ def test_find_versions_of_archive_2():
assert Version("2.0.0") in versions
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_2():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
# up for grabs to make this better.
assert Version("2.0.0b2") in versions
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_3():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
assert Version("0.0.0") in versions
@@ -130,6 +137,7 @@ def test_find_versions_of_archive_3():
assert Version("4.5") in versions
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_3():
versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
assert Version("2.0.0b2") in versions
@@ -137,6 +145,7 @@ def test_find_exotic_versions_of_archive_3():
assert Version("4.5-rc5") in versions
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_with_fragment():
versions = spack.util.web.find_versions_of_archive(
root_tarball, root_with_fragment, list_depth=0
@@ -197,6 +206,7 @@ def test_etag_parser():
assert spack.util.web.parse_etag("abc def") is None
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_list_url(tmpdir):
testpath = str(tmpdir)
testpath_url = url_util.path_to_file_url(testpath)

Some files were not shown because too many files have changed in this diff.