Compare commits


1 Commit

Author: Todd Gamblin
Commit 5d961e8ed2: concretizer: move remove_node transform into spec_clauses
`remove_node` is used in almost all calls to `condition()`, but it's removing something
we could just not add in the first place. The `node` and `virtual_node` attrs are only
needed in certain places, so annotate calls to `spec_clauses` there instead of stripping
them after it's called.

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
Date: 2024-12-09 00:47:30 -08:00
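
A minimal sketch of the pattern the commit message describes, using hypothetical names (a toy Spec and an include_node_attrs flag) rather than the actual concretizer API:

from collections import namedtuple

Spec = namedtuple("Spec", ["name", "version"])

def spec_clauses(spec, *, include_node_attrs=False):
    """Emit clauses for a spec; add node/virtual_node atoms only when asked."""
    clauses = [f'attr("version", "{spec.name}", "{spec.version}")']
    if include_node_attrs:  # previously added unconditionally, then stripped
        clauses.append(f'attr("node", "{spec.name}")')
    return clauses

# Call sites that used to strip node atoms after the fact (remove_node) now
# simply omit the flag; the few places that need the atoms opt in explicitly:
print(spec_clauses(Spec("zlib", "1.3"), include_node_attrs=True))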
563 changed files with 6541 additions and 13760 deletions

View File

@@ -66,7 +66,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
with:
name: coverage-audits-${{ matrix.system.os }}

View File

@@ -161,7 +161,11 @@ jobs:
source share/spack/setup-env.sh
spack -d gpg list
tree $HOME/.spack/bootstrap/store/
- name: Bootstrap File
run: |
source share/spack/setup-env.sh
spack -d python share/spack/qa/bootstrap-file.py
tree $HOME/.spack/bootstrap/store/
windows:
runs-on: "windows-latest"
@@ -192,3 +196,9 @@ jobs:
spack -d gpg list
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/
- name: Bootstrap File
run: |
./share/spack/setup-env.ps1
spack -d python share/spack/qa/bootstrap-file.py
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/

View File

@@ -94,7 +94,7 @@ jobs:
fi
- name: Upload Dockerfile
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: dockerfiles_${{ matrix.dockerfile[0] }}
path: dockerfiles
@@ -103,7 +103,7 @@ jobs:
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349
- name: Log in to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
@@ -133,7 +133,7 @@ jobs:
needs: deploy-images
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@6f51ac03b9356f520e9adb1b1b7802705f340c2b
uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: dockerfiles
pattern: dockerfiles_*

View File

@@ -32,4 +32,4 @@ jobs:
uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
with:
verbose: true
fail_ci_if_error: false
fail_ci_if_error: true

View File

@@ -15,17 +15,17 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
include:
- python-version: '3.6'
os: ubuntu-20.04
on_develop: ${{ github.ref == 'refs/heads/develop' }}
- python-version: '3.7'
os: ubuntu-22.04
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.7'
os: ubuntu-latest
on_develop: false
- python-version: '3.8'
os: ubuntu-latest
on_develop: false
@@ -52,13 +52,7 @@ jobs:
# Needed for unit tests
sudo apt-get -y install \
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
cmake bison libbison-dev subversion
# On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
- name: Set up Homebrew
id: set-up-homebrew
uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
- name: Install kcov with brew
run: "brew install kcov"
cmake bison libbison-dev kcov
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
@@ -86,7 +80,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
@@ -105,13 +99,7 @@ jobs:
run: |
sudo apt-get -y update
# Needed for shell tests
sudo apt-get install -y coreutils csh zsh tcsh fish dash bash subversion
# On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
- name: Set up Homebrew
id: set-up-homebrew
uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
- name: Install kcov with brew
run: "brew install kcov"
sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
@@ -125,7 +113,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-shell
path: coverage
@@ -140,13 +128,13 @@ jobs:
- name: Install dependencies
run: |
dnf install -y \
bzip2 curl gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory '*'
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
useradd spack-test
@@ -187,7 +175,7 @@ jobs:
spack bootstrap status
spack solve zlib
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-clingo-cffi
path: coverage
@@ -225,7 +213,7 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
@@ -256,7 +244,7 @@ jobs:
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-windows
path: coverage

View File

@@ -13,7 +13,8 @@ concurrency:
jobs:
# Validate that the code can be run on all the Python versions supported by Spack
# Validate that the code can be run on all the Python versions
# supported by Spack
validate:
runs-on: ubuntu-latest
steps:
@@ -73,7 +74,7 @@ jobs:
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory '*'
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
useradd spack-test
@@ -86,7 +87,6 @@ jobs:
spack -d bootstrap now --dev
spack -d style -t black
spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
@@ -121,46 +121,28 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Problematic imports before
- name: Import cycles before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old SUFFIX=.old
- name: Problematic imports after
run: make SPACK_ROOT=../old && cp solution solution.old
- name: Import cycles after
working-directory: circular-import-fighter
run: make SPACK_ROOT=../new SUFFIX=.new
run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(head -n1 solution.old)"
edges_after="$(head -n1 solution.new)"
edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi
# Further style checks from pylint
pylint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: '3.13'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pylint
- name: Pylint (Spack Core)
run: |
pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

View File

@@ -102,6 +102,6 @@ PackageName: sbang
PackageHomePage: https://github.com/spack/sbang
PackageLicenseDeclared: Apache-2.0 OR MIT
PackageName: typing_extensions
PackageHomePage: https://pypi.org/project/typing-extensions/
PackageLicenseDeclared: Python-2.0
PackageName: six
PackageHomePage: https://pypi.python.org/pypi/six
PackageLicenseDeclared: MIT

View File

@@ -194,12 +194,6 @@ config:
# executables with many dependencies, in particular on slow filesystems.
bind: false
# Controls the handling of missing dynamic libraries after installation.
# Options are ignore (default), warn, or error. If set to error, the
# installation fails if installed binaries reference dynamic libraries that
# are not found in their specified rpaths.
missing_library_policy: ignore
# Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
# manipulation by unprivileged user (e.g. AFS)

View File

@@ -265,30 +265,25 @@ infrastructure, or to cache Spack built binaries in Github Actions and
GitLab CI.
To get started, configure an OCI mirror using ``oci://`` as the scheme,
and optionally specify variables that hold the username and password (or
personal access token) for the registry:
and optionally specify a username and password (or personal access token):
.. code-block:: console
$ spack mirror add --oci-username-variable REGISTRY_USER \
--oci-password-variable REGISTRY_TOKEN \
my_registry oci://example.com/my_image
$ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
Spack follows the naming conventions of Docker, with Dockerhub as the default
registry. To use Dockerhub, you can omit the registry domain:
.. code-block:: console
$ spack mirror add ... my_registry oci://username/my_image
$ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
From here, you can use the mirror as any other build cache:
.. code-block:: console
$ export REGISTRY_USER=...
$ export REGISTRY_TOKEN=...
$ spack buildcache push my_registry <specs...> # push to the registry
$ spack install <specs...> # or install from the registry
$ spack install <specs...> # install from the registry
A unique feature of buildcaches on top of OCI registries is that it's incredibly
easy to generate get a runnable container image with the binaries installed. This

View File

@@ -25,14 +25,6 @@ QMake does not appear to have a standardized way of specifying
the installation directory, so you may have to set environment
variables or edit ``*.pro`` files to get things working properly.
QMake packages will depend on the virtual ``qmake`` package which
is provided by multiple versions of Qt: ``qt`` provides Qt up to
Qt5, and ``qt-base`` provides Qt from version Qt6 onwards. This
split was motivated by the desire to split the single Qt package
into its components to allow for more fine-grained installation.
To depend on a specific version, refer to the documentation on
:ref:`virtual-dependencies`.
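A hedged sketch of how a recipe might express this dependency; the package name, URL, and checksum below are placeholders, and only the depends_on("qmake") pattern comes from the text above.

from spack.package import *

class MyQmakeApp(QMakePackage):
    """Placeholder QMake-based package illustrating the virtual qmake dependency."""

    homepage = "https://example.com/my-qmake-app"
    url = "https://example.com/my-qmake-app-1.0.tar.gz"

    version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

    # Any provider of the virtual "qmake" package satisfies this; depend on a
    # specific provider (for example qt-base for Qt 6) if one is required.
    depends_on("qmake", type="build")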
^^^^^^
Phases
^^^^^^

View File

@@ -38,11 +38,9 @@ just have to configure and OCI registry and run ``spack buildcache push``.
spack -e . install
# Configure the registry
spack -e . mirror add --oci-username-variable REGISTRY_USER \
--oci-password-variable REGISTRY_TOKEN \
container-registry oci://example.com/name/image
spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image
# Push the image (do set REGISTRY_USER and REGISTRY_TOKEN)
# Push the image
spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry
The resulting container image can then be run as follows:

View File

@@ -178,8 +178,8 @@ Spec-related modules
Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
of specs.
:mod:`spack.spec_parser`
Contains :class:`~spack.spec_parser.SpecParser` and functions related to parsing specs.
:mod:`spack.parser`
Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
:mod:`spack.version`
Implements a simple :class:`~spack.version.Version` class with simple

View File

@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
.. code-block:: console
apt update
apt install bzip2 ca-certificates g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
.. tab-item:: RHEL
@@ -148,22 +148,20 @@ The first time you concretize a spec, Spack will bootstrap automatically:
--------------------------------
zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake
The default bootstrap behavior is to use pre-built binaries. You can verify the
active bootstrap repositories with:
.. command-output:: spack bootstrap list
If for security concerns you cannot bootstrap ``clingo`` from pre-built
binaries, you have to disable fetching the binaries we generated with Github Actions.
.. code-block:: console
$ spack bootstrap disable github-actions-v0.6
==> "github-actions-v0.6" is now disabled and will not be used for bootstrapping
$ spack bootstrap disable github-actions-v0.5
==> "github-actions-v0.5" is now disabled and will not be used for bootstrapping
$ spack bootstrap disable github-actions-v0.4
==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping
$ spack bootstrap disable github-actions-v0.3
==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping
You can verify that the new settings are effective with:
.. command-output:: spack bootstrap list
You can verify that the new settings are effective with ``spack bootstrap list``.
.. note::

View File

@@ -5137,7 +5137,7 @@ other checks.
- Not applicable
* - :ref:`PythonPackage <pythonpackage>`
- Not applicable
- ``test_imports`` (module imports)
- ``test`` (module imports)
* - :ref:`QMakePackage <qmakepackage>`
- ``check`` (``make check``)
- Not applicable
@@ -5146,7 +5146,7 @@ other checks.
- Not applicable
* - :ref:`SIPPackage <sippackage>`
- Not applicable
- ``test_imports`` (module imports)
- ``test`` (module imports)
* - :ref:`WafPackage <wafpackage>`
- ``build_test`` (must be overridden)
- ``install_test`` (must be overridden)

View File

@@ -1,11 +1,11 @@
sphinx==8.1.3
sphinxcontrib-programoutput==0.18
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.18.0
urllib3==2.3.0
urllib3==2.2.3
pytest==8.3.4
isort==5.13.2
black==24.10.0

View File

@@ -8,6 +8,7 @@ unzip, , , Compress/Decompress archives
bzip2, , , Compress/Decompress archives
xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches
lsb-release, , , Linux: identify operating system version
gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories

View File

@@ -1,254 +0,0 @@
A. HISTORY OF THE SOFTWARE
==========================
Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.
In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.
In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations (now Zope
Corporation, see http://www.zope.com). In 2001, the Python Software
Foundation (PSF, see http://www.python.org/psf/) was formed, a
non-profit organization created specifically to own Python-related
Intellectual Property. Zope Corporation is a sponsoring member of
the PSF.
All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition). Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.
    Release         Derived     Year        Owner       GPL-compatible? (1)

    0.9.0 thru 1.2              1991-1995   CWI         yes
    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
    1.6             1.5.2       2000        CNRI        no
    2.0             1.6         2000        BeOpen.com  no
    1.6.1           1.6         2001        CNRI        yes (2)
    2.1             2.0+1.6.1   2001        PSF         no
    2.0.1           2.0+1.6.1   2001        PSF         yes
    2.1.1           2.1+2.0.1   2001        PSF         yes
    2.1.2           2.1.1       2002        PSF         yes
    2.1.3           2.1.2       2002        PSF         yes
    2.2 and above   2.1.1       2001-now    PSF         yes
Footnotes:
(1) GPL-compatible doesn't mean that we're distributing Python under
the GPL. All Python licenses, unlike the GPL, let you distribute
a modified version without making your changes open source. The
GPL-compatible licenses make it possible to combine Python with
other software that is released under the GPL; the others don't.
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
because its license has a choice of law clause. According to
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
is "not incompatible" with the GPL.
Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
retained in Python alone or in any derivative version prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.
7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------
1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.
8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.
ACCEPT
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

File diff suppressed because it is too large

View File

@@ -1 +0,0 @@
from typing_extensions import *

View File

@@ -8,4 +8,3 @@ six==1.16.0
macholib==1.16.2
altgraph==0.17.3
ruamel.yaml==0.17.21
typing_extensions==4.1.1

View File

@@ -66,7 +66,7 @@ def _is_url(path_or_url: str) -> bool:
return result
def _system_path_filter(_func=None, arg_slice: Optional[slice] = None):
def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
"""Filters function arguments to account for platform path separators.
Optional slicing range can be specified to select specific arguments
@@ -100,16 +100,6 @@ def path_filter_caller(*args, **kwargs):
return holder_func
def _noop_decorator(_func=None, arg_slice: Optional[slice] = None):
return _func if _func else lambda x: x
if sys.platform == "win32":
system_path_filter = _system_path_filter
else:
system_path_filter = _noop_decorator
def sanitize_win_longpath(path: str) -> str:
"""Strip Windows extended path prefix from strings
Returns sanitized string.

View File

@@ -301,32 +301,35 @@ def filter_file(
ignore_absent: bool = False,
start_at: Optional[str] = None,
stop_at: Optional[str] = None,
encoding: Optional[str] = "utf-8",
) -> None:
r"""Like sed, but uses python regular expressions.
Filters every line of each file through regex and replaces the file with a filtered version.
Preserves mode of filtered files.
Filters every line of each file through regex and replaces the file
with a filtered version. Preserves mode of filtered files.
As with re.sub, ``repl`` can be either a string or a callable. If it is a callable, it is
passed the match object and should return a suitable replacement string. If it is a string, it
can contain ``\1``, ``\2``, etc. to represent back-substitution as sed would allow.
As with re.sub, ``repl`` can be either a string or a callable.
If it is a callable, it is passed the match object and should
return a suitable replacement string. If it is a string, it
can contain ``\1``, ``\2``, etc. to represent back-substitution
as sed would allow.
Args:
regex: The regular expression to search for
repl: The string to replace matches with
*filenames: One or more files to search and replace string: Treat regex as a plain string.
Default it False backup: Make backup file(s) suffixed with ``~``. Default is False
ignore_absent: Ignore any files that don't exist. Default is False
start_at: Marker used to start applying the replacements. If a text line matches this
marker filtering is started at the next line. All contents before the marker and the
marker itself are copied verbatim. Default is to start filtering from the first line of
the file.
stop_at: Marker used to stop scanning the file further. If a text line matches this marker
filtering is stopped and the rest of the file is copied verbatim. Default is to filter
until the end of the file.
encoding: The encoding to use when reading and writing the files. Default is None, which
uses the system's default encoding.
regex (str): The regular expression to search for
repl (str): The string to replace matches with
*filenames: One or more files to search and replace
string (bool): Treat regex as a plain string. Default it False
backup (bool): Make backup file(s) suffixed with ``~``. Default is False
ignore_absent (bool): Ignore any files that don't exist.
Default is False
start_at (str): Marker used to start applying the replacements. If a
text line matches this marker filtering is started at the next line.
All contents before the marker and the marker itself are copied
verbatim. Default is to start filtering from the first line of the
file.
stop_at (str): Marker used to stop scanning the file further. If a text
line matches this marker filtering is stopped and the rest of the
file is copied verbatim. Default is to filter until the end of the
file.
"""
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
@@ -342,56 +345,72 @@ def groupid_to_group(x):
if string:
regex = re.escape(regex)
regex_compiled = re.compile(regex)
for path in path_to_os_path(*filenames):
if ignore_absent and not os.path.exists(path):
tty.debug(f'FILTER FILE: file "{path}" not found. Skipping to next file.')
for filename in path_to_os_path(*filenames):
msg = 'FILTER FILE: {0} [replacing "{1}"]'
tty.debug(msg.format(filename, regex))
backup_filename = filename + "~"
tmp_filename = filename + ".spack~"
if ignore_absent and not os.path.exists(filename):
msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
tty.debug(msg.format(filename))
continue
else:
tty.debug(f'FILTER FILE: {path} [replacing "{regex}"]')
fd, temp_path = tempfile.mkstemp(
prefix=f"{os.path.basename(path)}.", dir=os.path.dirname(path)
)
os.close(fd)
# Create backup file. Don't overwrite an existing backup
# file in case this file is being filtered multiple times.
if not os.path.exists(backup_filename):
shutil.copy(filename, backup_filename)
shutil.copy(path, temp_path)
errored = False
# Create a temporary file to read from. We cannot use backup_filename
# in case filter_file is invoked multiple times on the same file.
shutil.copy(filename, tmp_filename)
try:
# Open as a text file and filter until the end of the file is reached, or we found a
# marker in the line if it was specified. To avoid translating line endings (\n to
# \r\n and vice-versa) use newline="".
with open(
temp_path, mode="r", errors="surrogateescape", newline="", encoding=encoding
) as input_file, open(
path, mode="w", errors="surrogateescape", newline="", encoding=encoding
) as output_file:
if start_at is None and stop_at is None: # common case, avoids branching in loop
for line in input_file:
output_file.write(re.sub(regex_compiled, repl, line))
else:
# state is -1 before start_at; 0 between; 1 after stop_at
state = 0 if start_at is None else -1
for line in input_file:
if state == 0:
# Open as a text file and filter until the end of the file is
# reached, or we found a marker in the line if it was specified
#
# To avoid translating line endings (\n to \r\n and vice-versa)
# we force os.open to ignore translations and use the line endings
# the file comes with
with open(tmp_filename, mode="r", errors="surrogateescape", newline="") as input_file:
with open(filename, mode="w", errors="surrogateescape", newline="") as output_file:
do_filtering = start_at is None
# Using iter and readline is a workaround needed not to
# disable input_file.tell(), which will happen if we call
# input_file.next() implicitly via the for loop
for line in iter(input_file.readline, ""):
if stop_at is not None:
current_position = input_file.tell()
if stop_at == line.strip():
state = 1
else:
line = re.sub(regex_compiled, repl, line)
elif state == -1 and start_at == line.strip():
state = 0
output_file.write(line)
output_file.write(line)
break
if do_filtering:
filtered_line = re.sub(regex, repl, line)
output_file.write(filtered_line)
else:
do_filtering = start_at == line.strip()
output_file.write(line)
else:
current_position = None
# If we stopped filtering at some point, reopen the file in
# binary mode and copy verbatim the remaining part
if current_position and stop_at:
with open(tmp_filename, mode="rb") as input_binary_buffer:
input_binary_buffer.seek(current_position)
with open(filename, mode="ab") as output_binary_buffer:
output_binary_buffer.writelines(input_binary_buffer.readlines())
except BaseException:
# restore the original file
os.rename(temp_path, path)
errored = True
# clean up the original file on failure.
shutil.move(backup_filename, filename)
raise
finally:
if not errored and not backup:
os.unlink(temp_path)
os.remove(tmp_filename)
if not backup and os.path.exists(backup_filename):
os.remove(backup_filename)
class FileFilter:
@@ -1096,12 +1115,12 @@ def hash_directory(directory, ignore=[]):
@contextmanager
@system_path_filter
def write_tmp_and_move(filename: str, *, encoding: Optional[str] = None):
def write_tmp_and_move(filename):
"""Write to a temporary file, then move into place."""
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
tmp = os.path.join(dirname, ".%s.tmp" % basename)
with open(tmp, "w", encoding=encoding) as f:
with open(tmp, "w") as f:
yield f
shutil.move(tmp, filename)
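
A brief usage sketch of the filter_file API whose docstring appears in the diff above; the file names and patterns are illustrative.

from llnl.util.filesystem import filter_file

# Replace a hard-coded compiler assignment, keeping a "~" backup of the file.
filter_file(r"^CC\s*=.*", "CC = cc", "Makefile", backup=True)

# Rewrite only the region between two marker lines; everything outside the
# markers is copied verbatim.
filter_file("debug", "release", "build.conf", start_at="[flags]", stop_at="[end]")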

View File

@@ -863,10 +863,8 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
if sys.version_info >= (3, 9):
PatternStr = re.Pattern[str]
PatternBytes = re.Pattern[bytes]
else:
PatternStr = typing.Pattern[str]
PatternBytes = typing.Pattern[bytes]
def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:

View File

@@ -96,8 +96,8 @@ def get_fh(self, path: str) -> IO:
Arguments:
path: path to lock file we want a filehandle for
"""
# Open writable files as rb+ so we can upgrade to write later
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "rb+"
# Open writable files as 'r+' so we can upgrade to write later
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
pid = os.getpid()
open_file = None # OpenFile object, if there is one
@@ -124,7 +124,7 @@ def get_fh(self, path: str) -> IO:
# we know path exists but not if it's writable. If it's read-only,
# only open the file for reading (and fail if we're trying to get
# an exclusive (write) lock on it)
os_mode, fh_mode = os.O_RDONLY, "rb"
os_mode, fh_mode = os.O_RDONLY, "r"
fd = os.open(path, os_mode)
fh = os.fdopen(fd, fh_mode)
@@ -243,7 +243,7 @@ def __init__(
helpful for distinguishing between different Spack locks.
"""
self.path = path
self._file: Optional[IO[bytes]] = None
self._file: Optional[IO] = None
self._reads = 0
self._writes = 0
@@ -329,9 +329,9 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
self._ensure_parent_directory()
self._file = FILE_TRACKER.get_fh(self.path)
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "rb":
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
# Attempt to upgrade to write lock w/a read-only file.
# If the file were writable, we'd have opened it rb+
# If the file were writable, we'd have opened it 'r+'
raise LockROFileError(self.path)
self._log_debug(
@@ -426,7 +426,7 @@ def _read_log_debug_data(self) -> None:
line = self._file.read()
if line:
pid, host = line.decode("utf-8").strip().split(",")
pid, host = line.strip().split(",")
_, _, pid = pid.rpartition("=")
_, _, self.host = host.rpartition("=")
self.pid = int(pid)
@@ -442,7 +442,7 @@ def _write_log_debug_data(self) -> None:
# write pid, host to disk to sync over FS
self._file.seek(0)
self._file.write(f"pid={self.pid},host={self.host}".encode("utf-8"))
self._file.write("pid=%s,host=%s" % (self.pid, self.host))
self._file.truncate()
self._file.flush()
os.fsync(self._file.fileno())
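
A hedged usage sketch of llnl.util.lock.Lock consistent with the read-to-write upgrade behavior discussed above; the path is illustrative and the call pattern is an assumption rather than a verbatim Spack example.

from llnl.util.lock import Lock

lock = Lock("/tmp/example.lock")

# Take a shared (read) lock first; upgrading to an exclusive (write) lock
# raises LockROFileError if the lock file could only be opened read-only.
lock.acquire_read()
try:
    lock.acquire_write()
    # ... modify the shared resource ...
    lock.release_write()
finally:
    lock.release_read()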

View File

@@ -161,7 +161,7 @@ def _err_check(result, func, args):
)
# Use conout$ here to handle a redirectired stdout/get active console associated
# with spack
with open(r"\\.\CONOUT$", "w", encoding="utf-8") as conout:
with open(r"\\.\CONOUT$", "w") as conout:
# Link above would use kernel32.GetStdHandle(-11) however this would not handle
# a redirected stdout appropriately, so we always refer to the current CONSOLE out
# which is defined as conout$ on Windows.

View File

@@ -762,7 +762,7 @@ def __enter__(self):
self.reader = open(self.logfile, mode="rb+")
# Dup stdout so we can still write to it after redirection
self.echo_writer = open(os.dup(sys.stdout.fileno()), "w", encoding=sys.stdout.encoding)
self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
# Redirect stdout and stderr to write to logfile
self.stderr.redirect_stream(self.writer.fileno())
self.stdout.redirect_stream(self.writer.fileno())
@@ -879,13 +879,10 @@ def _writer_daemon(
write_fd.close()
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O. [needs citation]
# 2. Enforce a UTF-8 interpretation of build process output with errors replaced by '?'.
# The downside is that the log file will not contain the exact output of the build process.
# that prevents unbuffered text I/O.
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
# 3. closefd=False because Connection has "ownership"
read_file = os.fdopen(
read_fd.fileno(), "r", 1, encoding="utf-8", errors="replace", closefd=False
)
read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
if stdin_fd:
stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
@@ -931,7 +928,11 @@ def _writer_daemon(
try:
while line_count < 100:
# Handle output from the calling process.
line = _retry(read_file.readline)()
try:
line = _retry(read_file.readline)()
except UnicodeDecodeError:
# installs like --test=root gpgme produce non-UTF8 logs
line = "<line lost: output was not encoded as UTF-8>\n"
if not line:
return
@@ -945,13 +946,6 @@ def _writer_daemon(
output_line = clean_line
if filter_fn:
output_line = filter_fn(clean_line)
enc = sys.stdout.encoding
if enc != "utf-8":
# On Python 3.6 and 3.7-3.14 with non-{utf-8,C} locale stdout
# may not be able to handle utf-8 output. We do an inefficient
# dance of re-encoding with errors replaced, so stdout.write
# does not raise.
output_line = output_line.encode(enc, "replace").decode(enc)
sys.stdout.write(output_line)
# Stripped output to log file.

View File

@@ -656,7 +656,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
for pkg_name in pkgs:
details = []
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
with open(filename, "r", encoding="utf-8") as package_file:
with open(filename, "r") as package_file:
for i, line in enumerate(package_file):
pattern = next((r for r in fixme_regexes if r.search(line)), None)
if pattern:
@@ -809,7 +809,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
continue
file = spack.repo.PATH.filename_for_package_name(pkg_name)
tree = ast.parse(open(file, "rb").read())
tree = ast.parse(open(file).read())
visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
visitor.visit(tree)
if visitor.references_to_globals:
@@ -1009,6 +1009,20 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
for when, deps_by_name in pkg_cls.dependencies.items():
for dep_name, dep in deps_by_name.items():
# Check if there are nested dependencies declared. We don't want directives like:
#
# depends_on('foo+bar ^fee+baz')
#
# but we'd like to have two dependencies listed instead.
nested_dependencies = dep.spec.dependencies()
if nested_dependencies:
summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
ndir = len(nested_dependencies) + 1
details = [
f"split depends_on('{dep.spec}', when='{when}') into {ndir} directives",
f"in {filename}",
]
errors.append(error_cls(summary=summary, details=details))
def check_virtual_with_variants(spec, msg):
if not spec.virtual or not spec.variants:
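
A hedged illustration of the directive split this audit asks for; the package and variant names are made up.

from spack.package import *

class Example(Package):
    """Hypothetical recipe showing one depends_on per dependency."""

    # Instead of a nested declaration such as:
    #     depends_on("foo+bar ^fee+baz")
    # declare the two dependencies separately:
    depends_on("foo+bar")
    depends_on("fee+baz")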

View File

@@ -24,12 +24,13 @@
import urllib.request
import warnings
from contextlib import closing
from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
import llnl.util.filesystem as fsys
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
from llnl.util.symlink import readlink
import spack.caches
import spack.config as config
@@ -53,6 +54,7 @@
import spack.util.archive
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.filesystem as ssys
import spack.util.gpg
import spack.util.parallel
import spack.util.path
@@ -67,8 +69,10 @@
Digest,
ImageReference,
default_config,
default_index_tag,
default_manifest,
ensure_valid_tag,
default_tag,
tag_is_spec,
)
from spack.oci.oci import (
copy_missing_layers_with_retry,
@@ -79,6 +83,7 @@
)
from spack.package_prefs import get_package_dir_permissions, get_package_group
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
from spack.util.executable import which
@@ -581,15 +586,133 @@ def buildinfo_file_name(prefix):
def read_buildinfo_file(prefix):
"""Read buildinfo file"""
with open(buildinfo_file_name(prefix), "r", encoding="utf-8") as f:
with open(buildinfo_file_name(prefix), "r") as f:
return syaml.load(f)
def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
try:
return bool(regex.search(f.read()))
finally:
f.seek(0)
class BuildManifestVisitor(BaseDirectoryVisitor):
"""Visitor that collects a list of files and symlinks
that can be checked for need of relocation. It knows how
to dedupe hardlinks and deal with symlinks to files and
directories."""
def __init__(self):
# Save unique identifiers of hardlinks to avoid relocating them multiple times
self.visited = set()
# Lists of files we will check
self.files = []
self.symlinks = []
def seen_before(self, root, rel_path):
stat_result = os.lstat(os.path.join(root, rel_path))
if stat_result.st_nlink == 1:
return False
identifier = (stat_result.st_dev, stat_result.st_ino)
if identifier in self.visited:
return True
else:
self.visited.add(identifier)
return False
def visit_file(self, root, rel_path, depth):
if self.seen_before(root, rel_path):
return
self.files.append(rel_path)
def visit_symlinked_file(self, root, rel_path, depth):
# Note: symlinks *can* be hardlinked, but it is unclear if
# symlinks can be relinked in-place (preserving inode).
# Therefore, we do *not* de-dupe hardlinked symlinks.
self.symlinks.append(rel_path)
def before_visit_dir(self, root, rel_path, depth):
return os.path.basename(rel_path) not in (".spack", "man")
def before_visit_symlinked_dir(self, root, rel_path, depth):
# Treat symlinked directories simply as symlinks.
self.visit_symlinked_file(root, rel_path, depth)
# Never recurse into symlinked directories.
return False
def file_matches(path, regex):
with open(path, "rb") as f:
contents = f.read()
return bool(regex.search(contents))
def get_buildfile_manifest(spec):
"""
Return a data structure with information about a build, including
text_to_relocate, binary_to_relocate, binary_to_relocate_fullpath
link_to_relocate, and other, which means it doesn't fit any of previous
checks (and should not be relocated). We exclude docs (man) and
metadata (.spack). This can be used to find a particular kind of file
in spack, or to generate the build metadata.
"""
data = {
"text_to_relocate": [],
"binary_to_relocate": [],
"link_to_relocate": [],
"other": [],
"binary_to_relocate_fullpath": [],
"hardlinks_deduped": True,
}
# Guard against filesystem footguns of hardlinks and symlinks by using
# a visitor to retrieve a list of files and symlinks, so we don't have
# to worry about hardlinks of symlinked dirs and what not.
visitor = BuildManifestVisitor()
root = spec.prefix
visit_directory_tree(root, visitor)
# Collect a list of prefixes for this package and it's dependencies, Spack will
# look for them to decide if text file needs to be relocated or not
prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
prefixes.append(spack.hooks.sbang.sbang_install_path())
prefixes.append(str(spack.store.STORE.layout.root))
# Create a giant regex that matches all prefixes
regex = utf8_paths_to_single_binary_regex(prefixes)
# Symlinks.
# Obvious bugs:
# 1. relative links are not relocated.
# 2. paths are used as strings.
for rel_path in visitor.symlinks:
abs_path = os.path.join(root, rel_path)
link = readlink(abs_path)
if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
data["link_to_relocate"].append(rel_path)
# Non-symlinks.
for rel_path in visitor.files:
abs_path = os.path.join(root, rel_path)
m_type, m_subtype = ssys.mime_type(abs_path)
if relocate.needs_binary_relocation(m_type, m_subtype):
# Why is this branch not part of needs_binary_relocation? :(
if (
(
m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable")
and sys.platform != "darwin"
)
or (m_subtype in ("x-mach-binary") and sys.platform == "darwin")
or (not rel_path.endswith(".o"))
):
data["binary_to_relocate"].append(rel_path)
data["binary_to_relocate_fullpath"].append(abs_path)
continue
elif relocate.needs_text_relocation(m_type, m_subtype) and file_matches(abs_path, regex):
data["text_to_relocate"].append(rel_path)
continue
data["other"].append(abs_path)
return data
def deps_to_relocate(spec):
@@ -622,15 +745,17 @@ def deps_to_relocate(spec):
def get_buildinfo_dict(spec):
"""Create metadata for a tarball"""
manifest = get_buildfile_manifest(spec)
return {
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
"buildpath": spack.store.STORE.layout.root,
"spackprefix": spack.paths.prefix,
"relative_prefix": os.path.relpath(spec.prefix, spack.store.STORE.layout.root),
# "relocate_textfiles": [],
# "relocate_binaries": [],
# "relocate_links": [],
"hardlinks_deduped": True,
"relocate_textfiles": manifest["text_to_relocate"],
"relocate_binaries": manifest["binary_to_relocate"],
"relocate_links": manifest["link_to_relocate"],
"hardlinks_deduped": manifest["hardlinks_deduped"],
"hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
}
@@ -702,10 +827,10 @@ def _read_specs_and_push_index(
contents = read_method(file)
# Need full spec.json name or this gets confused with index.json.
if file.endswith(".json.sig"):
specfile_json = spack.spec.Spec.extract_json_from_clearsig(contents)
fetched_spec = spack.spec.Spec.from_dict(specfile_json)
specfile_json = Spec.extract_json_from_clearsig(contents)
fetched_spec = Spec.from_dict(specfile_json)
elif file.endswith(".json"):
fetched_spec = spack.spec.Spec.from_json(contents)
fetched_spec = Spec.from_json(contents)
else:
continue
@@ -715,17 +840,17 @@ def _read_specs_and_push_index(
# Now generate the index, compute its hash, and push the two files to
# the mirror.
index_json_path = os.path.join(temp_dir, "index.json")
with open(index_json_path, "w", encoding="utf-8") as f:
with open(index_json_path, "w") as f:
db._write_to_file(f)
# Read the index back in and compute its hash
with open(index_json_path, encoding="utf-8") as f:
with open(index_json_path) as f:
index_string = f.read()
index_hash = compute_hash(index_string)
# Write the hash out to a local file
index_hash_path = os.path.join(temp_dir, "index.json.hash")
with open(index_hash_path, "w", encoding="utf-8") as f:
with open(index_hash_path, "w") as f:
f.write(index_hash)
# Push the index itself
@@ -759,7 +884,7 @@ def _specs_from_cache_aws_cli(cache_prefix):
aws = which("aws")
def file_read_method(file_path):
with open(file_path, encoding="utf-8") as fd:
with open(file_path) as fd:
return fd.read()
tmpspecsdir = tempfile.mkdtemp()
@@ -904,7 +1029,7 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
target = os.path.join(tmpdir, "index.json")
index = {"keys": dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))}
with open(target, "w", encoding="utf-8") as f:
with open(target, "w") as f:
sjson.dump(index, f)
try:
@@ -920,55 +1045,7 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
) from e
class FileTypes:
BINARY = 0
TEXT = 1
UNKNOWN = 2
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
def file_type(f: IO[bytes]) -> int:
try:
# first check if this is an ELF or mach-o binary.
magic = f.read(8)
if len(magic) < 8:
return FileTypes.UNKNOWN
elif relocate.is_elf_magic(magic) or relocate.is_macho_magic(magic):
return FileTypes.BINARY
f.seek(0)
# Then try utf-8, which has a fast exponential decay in false positive rate with file size.
# Use chunked reads for fast early exit.
f_txt = io.TextIOWrapper(f, encoding="utf-8", errors="strict")
try:
while f_txt.read(1024):
pass
return FileTypes.TEXT
except UnicodeError:
f_txt.seek(0)
pass
finally:
f_txt.detach()
# Finally try iso-8859-1 heuristically. In Python, all possible 256 byte values are valid.
# We classify it as text if it does not contain any control characters / null bytes.
data = f.read(1024)
while data:
if NOT_ISO8859_1_TEXT.search(data):
break
data = f.read(1024)
else:
return FileTypes.TEXT
return FileTypes.UNKNOWN
finally:
f.seek(0)
def tarfile_of_spec_prefix(
tar: tarfile.TarFile, prefix: str, prefixes_to_relocate: List[str]
) -> dict:
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
"""Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.
Args:
@@ -984,33 +1061,6 @@ def tarfile_of_spec_prefix(
except OSError:
skip = lambda entry: False
binary_regex = utf8_paths_to_single_binary_regex(prefixes_to_relocate)
relocate_binaries = []
relocate_links = []
relocate_textfiles = []
# use callbacks to add files and symlinks, so we can register which files need relocation upon
# extraction.
def add_file(tar: tarfile.TarFile, info: tarfile.TarInfo, path: str):
with open(path, "rb") as f:
relpath = os.path.relpath(path, prefix)
# no need to relocate anything in the .spack directory
if relpath.split(os.sep, 1)[0] == ".spack":
tar.addfile(info, f)
return
f_type = file_type(f)
if f_type == FileTypes.BINARY:
relocate_binaries.append(os.path.relpath(path, prefix))
elif f_type == FileTypes.TEXT and file_matches(f, binary_regex):
relocate_textfiles.append(os.path.relpath(path, prefix))
tar.addfile(info, f)
def add_symlink(tar: tarfile.TarFile, info: tarfile.TarInfo, path: str):
if os.path.isabs(info.linkname) and binary_regex.match(info.linkname.encode("utf-8")):
relocate_links.append(os.path.relpath(path, prefix))
tar.addfile(info)
spack.util.archive.reproducible_tarfile_from_prefix(
tar,
prefix,
@@ -1018,51 +1068,29 @@ def add_symlink(tar: tarfile.TarFile, info: tarfile.TarInfo, path: str):
# used in runtimes like AWS lambda.
include_parent_directories=True,
skip=skip,
add_file=add_file,
add_symlink=add_symlink,
)
return {
"relocate_binaries": relocate_binaries,
"relocate_links": relocate_links,
"relocate_textfiles": relocate_textfiles,
}
def create_tarball(spec: spack.spec.Spec, tarfile_path: str) -> Tuple[str, str]:
"""Create a tarball of a spec and return the checksums of the compressed tarfile and the
uncompressed tarfile."""
return _do_create_tarball(
tarfile_path,
spec.prefix,
buildinfo=get_buildinfo_dict(spec),
prefixes_to_relocate=prefixes_to_relocate(spec),
)
def _do_create_tarball(
tarfile_path: str, prefix: str, buildinfo: dict, prefixes_to_relocate: List[str]
) -> Tuple[str, str]:
def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
with spack.util.archive.gzip_compressed_tarfile(tarfile_path) as (
tar,
tar_gz_checksum,
tar_checksum,
inner_checksum,
outer_checksum,
):
# Tarball the install prefix
files_to_relocate = tarfile_of_spec_prefix(tar, prefix, prefixes_to_relocate)
buildinfo.update(files_to_relocate)
tarfile_of_spec_prefix(tar, binaries_dir)
# Serialize buildinfo for the tarball
bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
tarinfo = tarfile.TarInfo(
name=spack.util.archive.default_path_to_name(buildinfo_file_name(prefix))
name=spack.util.archive.default_path_to_name(buildinfo_file_name(binaries_dir))
)
tarinfo.type = tarfile.REGTYPE
tarinfo.size = len(bstring)
tarinfo.mode = 0o644
tar.addfile(tarinfo, io.BytesIO(bstring))
return tar_gz_checksum.hexdigest(), tar_checksum.hexdigest()
return inner_checksum.hexdigest(), outer_checksum.hexdigest()
class ExistsInBuildcache(NamedTuple):
@@ -1072,7 +1100,7 @@ class ExistsInBuildcache(NamedTuple):
class BuildcacheFiles:
def __init__(self, spec: spack.spec.Spec, local: str, remote: str):
def __init__(self, spec: Spec, local: str, remote: str):
"""
Args:
spec: The spec whose tarball and specfile are being managed.
@@ -1102,7 +1130,7 @@ def local_tarball(self) -> str:
return os.path.join(self.local, f"{self.spec.dag_hash()}.tar.gz")
def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
"""returns a tuple of bools (signed, unsigned, tarball) indicating whether specfiles/tarballs
exist in the buildcache"""
files = BuildcacheFiles(spec, tmpdir, out_url)
@@ -1112,23 +1140,12 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E
return ExistsInBuildcache(signed, unsigned, tarball)
def prefixes_to_relocate(spec):
prefixes = [s.prefix for s in deps_to_relocate(spec)]
prefixes.append(spack.hooks.sbang.sbang_install_path())
prefixes.append(str(spack.store.STORE.layout.root))
return prefixes
def _url_upload_tarball_and_specfile(
spec: spack.spec.Spec,
tmpdir: str,
out_url: str,
exists: ExistsInBuildcache,
signing_key: Optional[str],
spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
):
files = BuildcacheFiles(spec, tmpdir, out_url)
tarball = files.local_tarball()
checksum, _ = create_tarball(spec, tarball)
checksum, _ = _do_create_tarball(tarball, spec.prefix, get_buildinfo_dict(spec))
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}
@@ -1142,7 +1159,7 @@ def _url_upload_tarball_and_specfile(
web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False)
specfile = files.local_specfile()
with open(specfile, "w", encoding="utf-8") as f:
with open(specfile, "w") as f:
# Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
# If lines are longer, they are truncated without error. Thanks GPG!
# So, here we still add newlines, but no indent, so save on file size and
@@ -1297,7 +1314,7 @@ def make_uploader(
)
def _format_spec(spec: spack.spec.Spec) -> str:
def _format_spec(spec: Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
@@ -1320,7 +1337,7 @@ def _progress(self):
return f"[{self.n:{digits}}/{self.total}] "
return ""
def start(self, spec: spack.spec.Spec, running: bool) -> None:
def start(self, spec: Spec, running: bool) -> None:
self.n += 1
self.running = running
self.pre = self._progress()
@@ -1339,18 +1356,18 @@ def fail(self) -> None:
def _url_push(
specs: List[spack.spec.Spec],
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
force: bool,
update_index: bool,
tmpdir: str,
executor: concurrent.futures.Executor,
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
"""Pushes to the provided build cache, and returns a list of skipped specs that were already
present (when force=False), and a list of errors. Does not raise on error."""
skipped: List[spack.spec.Spec] = []
errors: List[Tuple[spack.spec.Spec, BaseException]] = []
skipped: List[Spec] = []
errors: List[Tuple[Spec, BaseException]] = []
exists_futures = [
executor.submit(_exists_in_buildcache, spec, tmpdir, out_url) for spec in specs
@@ -1423,7 +1440,7 @@ def _url_push(
return skipped, errors
def _oci_upload_success_msg(spec: spack.spec.Spec, digest: Digest, size: int, elapsed: float):
def _oci_upload_success_msg(spec: Spec, digest: Digest, size: int, elapsed: float):
elapsed = max(elapsed, 0.001) # guard against division by zero
return (
f"Pushed {_format_spec(spec)}: {digest} ({elapsed:.2f}s, "
@@ -1452,11 +1469,13 @@ def _oci_push_pkg_blob(
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
# Create an oci.image.layer aka tarball of the package
tar_gz_checksum, tar_checksum = create_tarball(spec, filename)
compressed_tarfile_checksum, tarfile_checksum = _do_create_tarball(
filename, spec.prefix, get_buildinfo_dict(spec)
)
blob = spack.oci.oci.Blob(
Digest.from_sha256(tar_gz_checksum),
Digest.from_sha256(tar_checksum),
Digest.from_sha256(compressed_tarfile_checksum),
Digest.from_sha256(tarfile_checksum),
os.path.getsize(filename),
)
@@ -1507,7 +1526,7 @@ def _oci_put_manifest(
):
architecture = _oci_archspec_to_gooarch(specs[0])
expected_blobs: List[spack.spec.Spec] = [
expected_blobs: List[Spec] = [
s
for s in traverse.traverse_nodes(specs, order="topo", deptype=("link", "run"), root=True)
if not s.external
@@ -1551,7 +1570,7 @@ def _oci_put_manifest(
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
with open(config_file, "w", encoding="utf-8") as f:
with open(config_file, "w") as f:
json.dump(config, f, separators=(",", ":"))
config_file_checksum = Digest.from_sha256(
@@ -1621,33 +1640,19 @@ def _oci_update_base_images(
)
def _oci_default_tag(spec: spack.spec.Spec) -> str:
"""Return a valid, default image tag for a spec."""
return ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
#: Default OCI index tag
default_index_tag = "index.spack"
def tag_is_spec(tag: str) -> bool:
"""Check if a tag is likely a Spec"""
return tag.endswith(".spack") and tag != default_index_tag
def _oci_push(
*,
target_image: ImageReference,
base_image: Optional[ImageReference],
installed_specs_with_deps: List[spack.spec.Spec],
installed_specs_with_deps: List[Spec],
tmpdir: str,
executor: concurrent.futures.Executor,
force: bool = False,
) -> Tuple[
List[spack.spec.Spec],
List[Spec],
Dict[str, Tuple[dict, dict]],
Dict[str, spack.oci.oci.Blob],
List[Tuple[spack.spec.Spec, BaseException]],
List[Tuple[Spec, BaseException]],
]:
# Spec dag hash -> blob
checksums: Dict[str, spack.oci.oci.Blob] = {}
@@ -1656,15 +1661,13 @@ def _oci_push(
base_images: Dict[str, Tuple[dict, dict]] = {}
# Specs not uploaded because they already exist
skipped: List[spack.spec.Spec] = []
skipped: List[Spec] = []
if not force:
tty.info("Checking for existing specs in the buildcache")
blobs_to_upload = []
tags_to_check = (
target_image.with_tag(_oci_default_tag(s)) for s in installed_specs_with_deps
)
tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
available_blobs = executor.map(_oci_get_blob_info, tags_to_check)
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
@@ -1692,8 +1695,8 @@ def _oci_push(
executor.submit(_oci_push_pkg_blob, target_image, spec, tmpdir) for spec in blobs_to_upload
]
manifests_to_upload: List[spack.spec.Spec] = []
errors: List[Tuple[spack.spec.Spec, BaseException]] = []
manifests_to_upload: List[Spec] = []
errors: List[Tuple[Spec, BaseException]] = []
# And update the spec to blob mapping for successful uploads
for spec, blob_future in zip(blobs_to_upload, blob_futures):
@@ -1719,7 +1722,7 @@ def _oci_push(
base_image_cache=base_images,
)
def extra_config(spec: spack.spec.Spec):
def extra_config(spec: Spec):
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
spec_dict["binary_cache_checksum"] = {
@@ -1735,7 +1738,7 @@ def extra_config(spec: spack.spec.Spec):
_oci_put_manifest,
base_images,
checksums,
target_image.with_tag(_oci_default_tag(spec)),
target_image.with_tag(default_tag(spec)),
tmpdir,
extra_config(spec),
{"org.opencontainers.image.description": spec.format()},
@@ -1752,7 +1755,7 @@ def extra_config(spec: spack.spec.Spec):
manifest_progress.start(spec, manifest_future.running())
if error is None:
manifest_progress.ok(
f"Tagged {_format_spec(spec)} as {target_image.with_tag(_oci_default_tag(spec))}"
f"Tagged {_format_spec(spec)} as {target_image.with_tag(default_tag(spec))}"
)
else:
manifest_progress.fail()
@@ -1787,13 +1790,13 @@ def _oci_update_index(
db = BuildCacheDatabase(db_root_dir)
for spec_dict in spec_dicts:
spec = spack.spec.Spec.from_dict(spec_dict)
spec = Spec.from_dict(spec_dict)
db.add(spec)
db.mark(spec, "in_buildcache", True)
# Create the index.json file
index_json_path = os.path.join(tmpdir, "index.json")
with open(index_json_path, "w", encoding="utf-8") as f:
with open(index_json_path, "w") as f:
db._write_to_file(f)
# Create an empty config.json file
@@ -1902,7 +1905,7 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
try:
as_string = binary_content.decode("utf-8")
if path.endswith(".json.sig"):
spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string)
spec_dict = Spec.extract_json_from_clearsig(as_string)
else:
spec_dict = json.loads(as_string)
except Exception as e:
@@ -1998,7 +2001,7 @@ def fetch_url_to_mirror(url):
if fetch_url.startswith("oci://"):
ref = spack.oci.image.ImageReference.from_string(
fetch_url[len("oci://") :]
).with_tag(_oci_default_tag(spec))
).with_tag(spack.oci.image.default_tag(spec))
# Fetch the manifest
try:
@@ -2242,8 +2245,7 @@ def relocate_package(spec):
]
if analogs:
# Prefer same-name analogs and prefer higher versions
# This matches the preferences in spack.spec.Spec.splice, so we
# will find same node
# This matches the preferences in Spec.splice, so we will find same node
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
lookup_dag_hash = analog.dag_hash()
@@ -2415,14 +2417,6 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
yield m
def extract_buildcache_tarball(tarfile_path: str, destination: str) -> None:
with closing(tarfile.open(tarfile_path, "r")) as tar:
# Remove common prefix from tarball entries and directly extract them to the install dir.
tar.extractall(
path=destination, members=_tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
)
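Extraction strips the common leading directory from every member so the payload lands directly in the destination prefix. A rough standalone sketch of that strip-one-component idea with the stdlib `tarfile` module is shown below; the archive name, destination path, and single-common-root assumption are illustrative, and a full implementation would also rewrite hard-link names.

```python
import tarfile

def strip_component(tar: tarfile.TarFile, prefix: str):
    """Yield members with the leading `prefix/` removed."""
    for m in tar.getmembers():
        if m.name == prefix:
            continue  # drop the top-level directory entry itself
        if m.name.startswith(prefix + "/"):
            m.name = m.name[len(prefix) + 1 :]
            yield m

with tarfile.open("pkg.tar.gz", "r") as tar:
    common = tar.getmembers()[0].name.split("/", 1)[0]  # assume one common root
    tar.extractall(path="/opt/prefix", members=strip_component(tar, common))
```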
def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
"""
extract binary tarball for given package into install area
@@ -2492,7 +2486,12 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
tarfile_path, size, contents, "sha256", expected, local_checksum
)
try:
extract_buildcache_tarball(tarfile_path, destination=spec.prefix)
with closing(tarfile.open(tarfile_path, "r")) as tar:
# Remove install prefix from tarfile to extract directly into spec.prefix
tar.extractall(
path=spec.prefix,
members=_tar_strip_component(tar, prefix=_ensure_common_prefix(tar)),
)
except Exception:
shutil.rmtree(spec.prefix, ignore_errors=True)
_delete_staged_downloads(download_result)
@@ -2682,10 +2681,10 @@ def try_direct_fetch(spec, mirrors=None):
# are concrete (as they are built) so we need to mark this spec
# concrete on read-in.
if specfile_is_signed:
specfile_json = spack.spec.Spec.extract_json_from_clearsig(specfile_contents)
fetched_spec = spack.spec.Spec.from_dict(specfile_json)
specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
fetched_spec = Spec.from_dict(specfile_json)
else:
fetched_spec = spack.spec.Spec.from_json(specfile_contents)
fetched_spec = Spec.from_json(specfile_contents)
fetched_spec._mark_concrete()
found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
@@ -2890,7 +2889,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
}
if output_file:
with open(output_file, "w", encoding="utf-8") as outf:
with open(output_file, "w") as outf:
outf.write(json.dumps(rebuilds))
return 1 if rebuilds else 0
@@ -2984,7 +2983,7 @@ def __init__(self, all_architectures):
self.possible_specs = specs
def __call__(self, spec: spack.spec.Spec, **kwargs):
def __call__(self, spec: Spec, **kwargs):
"""
Args:
spec: The spec being searched for
@@ -3122,7 +3121,7 @@ def __init__(self, url: str, local_hash, urlopen=None) -> None:
def conditional_fetch(self) -> FetchIndexResult:
"""Download an index from an OCI registry type mirror."""
url_manifest = self.ref.with_tag(default_index_tag).manifest_url()
url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
try:
response = self.urlopen(
urllib.request.Request(

View File

@@ -9,6 +9,7 @@
all_core_root_specs,
ensure_clingo_importable_or_raise,
ensure_core_dependencies,
ensure_file_in_path_or_raise,
ensure_gpg_in_path_or_raise,
ensure_patchelf_in_path_or_raise,
)
@@ -19,6 +20,7 @@
"is_bootstrapping",
"ensure_bootstrap_configuration",
"ensure_core_dependencies",
"ensure_file_in_path_or_raise",
"ensure_gpg_in_path_or_raise",
"ensure_clingo_importable_or_raise",
"ensure_patchelf_in_path_or_raise",

View File

@@ -481,6 +481,19 @@ def ensure_gpg_in_path_or_raise() -> None:
)
def file_root_spec() -> str:
"""Return the root spec used to bootstrap file"""
root_spec_name = "win-file" if IS_WINDOWS else "file"
return _root_spec(root_spec_name)
def ensure_file_in_path_or_raise() -> None:
"""Ensure file is in the PATH or raise"""
return ensure_executables_in_path_or_raise(
executables=["file"], abstract_spec=file_root_spec()
)
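For reference, the guarantee this new bootstrap entry provides is simply that a `file` executable (or the `win-file` build on Windows) resolves on PATH afterwards. A trivial standalone check of that post-condition, using only the stdlib:

```python
import shutil

if shutil.which("file") is None:
    # this is the situation ensure_file_in_path_or_raise() is meant to repair,
    # by providing `file`/`win-file` via the bootstrap store
    raise RuntimeError("`file` not found on PATH")
```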
def patchelf_root_spec() -> str:
"""Return the root spec used to bootstrap patchelf"""
# 0.13.1 is the last version not to require C++17.
@@ -564,13 +577,15 @@ def ensure_core_dependencies() -> None:
"""Ensure the presence of all the core dependencies."""
if sys.platform.lower() == "linux":
ensure_patchelf_in_path_or_raise()
elif sys.platform == "win32":
ensure_file_in_path_or_raise()
ensure_gpg_in_path_or_raise()
ensure_clingo_importable_or_raise()
def all_core_root_specs() -> List[str]:
"""Return a list of all the core root specs that may be used to bootstrap Spack"""
return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]
def bootstrapping_sources(scope: Optional[str] = None):

View File

@@ -3,8 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Query the status of bootstrapping on this machine"""
import sys
from typing import Dict, List, Optional, Sequence, Tuple, Union
import platform
from typing import List, Optional, Sequence, Tuple, Union
import spack.util.executable
@@ -72,7 +72,7 @@ def _core_requirements() -> List[RequiredResponseType]:
"bzip2": _missing("bzip2", "required to compress/decompress code archives"),
"git": _missing("git", "required to fetch/manage git repositories"),
}
if sys.platform == "linux":
if platform.system().lower() == "linux":
_core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives")
# Executables that are not bootstrapped yet
@@ -87,16 +87,17 @@ def _core_requirements() -> List[RequiredResponseType]:
def _buildcache_requirements() -> List[RequiredResponseType]:
_buildcache_exes: Dict[ExecutablesType, str] = {
("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False)
_buildcache_exes = {
"file": _missing("file", "required to analyze files for buildcaches", system_only=False),
("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
}
if sys.platform == "darwin":
if platform.system().lower() == "darwin":
_buildcache_exes["otool"] = _missing("otool", "required to relocate binaries")
# Executables that are not bootstrapped yet
result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()]
if sys.platform == "linux":
if platform.system().lower() == "linux":
result.append(
_required_executable(
"patchelf",

View File

@@ -182,7 +182,10 @@ def patch_config_files(self) -> bool:
@property
def _removed_la_files_log(self) -> str:
"""File containing the list of removed libtool archives"""
return os.path.join(self.build_directory, "removed_la_files.txt")
build_dir = self.build_directory
if not os.path.isabs(self.build_directory):
build_dir = os.path.join(self.pkg.stage.path, build_dir)
return os.path.join(build_dir, "removed_la_files.txt")
@property
def archive_files(self) -> List[str]:
@@ -520,12 +523,7 @@ def configure_abs_path(self) -> str:
@property
def build_directory(self) -> str:
"""Override to provide another place to build the package"""
# Handle the case where the configure directory is set to a non-absolute path
# Non-absolute paths are always relative to the staging source path
build_dir = self.configure_directory
if not os.path.isabs(build_dir):
build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
return build_dir
return self.configure_directory
@spack.phase_callbacks.run_before("autoreconf")
def delete_configure_to_force_update(self) -> None:
@@ -838,7 +836,7 @@ def remove_libtool_archives(self) -> None:
libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
with fs.safe_remove(*libtool_files):
fs.mkdirp(os.path.dirname(self._removed_la_files_log))
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
with open(self._removed_la_files_log, mode="w") as f:
f.write("\n".join(libtool_files))
def setup_build_environment(self, env):

View File

@@ -324,7 +324,7 @@ def initconfig(self, pkg, spec, prefix):
+ self.initconfig_package_entries()
)
with open(self.cache_name, "w", encoding="utf-8") as f:
with open(self.cache_name, "w") as f:
for entry in cache_entries:
f.write("%s\n" % entry)
f.write("\n")

View File

@@ -1153,7 +1153,7 @@ def _determine_license_type(self):
# The file will have been created upon self.license_required AND
# self.license_files having been populated, so the "if" is usually
# true by the time the present function runs; ../hooks/licensing.py
with open(f, encoding="utf-8") as fh:
with open(f) as fh:
if re.search(r"^[ \t]*[^" + self.license_comment + "\n]", fh.read(), re.MULTILINE):
license_type = {
"ACTIVATION_TYPE": "license_file",
@@ -1185,7 +1185,7 @@ def configure(self):
# our configuration accordingly. We can do this because the tokens are
# quite long and specific.
validator_code = open("pset/check.awk", "r", encoding="utf-8").read()
validator_code = open("pset/check.awk", "r").read()
# Let's go a little further and distill the tokens (plus some noise).
tokenlike_words = set(re.findall(r"[A-Z_]{4,}", validator_code))
@@ -1222,7 +1222,7 @@ def configure(self):
config_draft.update(self._determine_license_type)
# Write sorted *by token* so the file looks less like a hash dump.
f = open("silent.cfg", "w", encoding="utf-8")
f = open("silent.cfg", "w")
for token, value in sorted(config_draft.items()):
if token in tokenlike_words:
f.write("%s=%s\n" % (token, value))
@@ -1273,7 +1273,7 @@ def configure_rpath(self):
raise InstallError("Cannot find compiler command to configure rpath:\n\t" + f)
compiler_cfg = os.path.abspath(f + ".cfg")
with open(compiler_cfg, "w", encoding="utf-8") as fh:
with open(compiler_cfg, "w") as fh:
fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))
@spack.phase_callbacks.run_after("install")
@@ -1297,7 +1297,7 @@ def configure_auto_dispatch(self):
ad.append(x)
compiler_cfg = os.path.abspath(f + ".cfg")
with open(compiler_cfg, "a", encoding="utf-8") as fh:
with open(compiler_cfg, "a") as fh:
fh.write("-ax{0}\n".format(",".join(ad)))
@spack.phase_callbacks.run_after("install")

View File

@@ -75,7 +75,7 @@ def generate_luarocks_config(self, pkg, spec, prefix):
table_entries.append(self._generate_tree_line(d.name, d.prefix))
path = self._luarocks_config_path()
with open(path, "w", encoding="utf-8") as config:
with open(path, "w") as config:
config.write(
"""
deps_mode="all"

View File

@@ -32,9 +32,6 @@ class IntelOneApiPackage(Package):
# organization (e.g. University/Company).
redistribute(source=False, binary=False)
# contains precompiled binaries without rpaths
unresolved_libraries = ["*"]
for c in [
"target=ppc64:",
"target=ppc64le:",

2270 lib/spack/spack/ci.py Normal file

File diff suppressed because it is too large

View File

@@ -1,41 +0,0 @@
# Spack CI generators
This document describes how the ci module can be extended to provide novel
ci generators. The module currently has only a single generator for gitlab.
The unit tests for the ci module define a small custom generator for testing
purposes as well.
The process of generating a pipeline involves creating a ci-enabled spack
environment, activating it, and running `spack ci generate`, possibly with
arguments describing things like where the output should be written.
Internally pipeline generation is broken into two components: general and
ci platform specific.
## General pipeline functionality
General pipeline functionality includes building a pipeline graph (really,
a forest), pruning it in a variety of ways, and gathering attributes for all
the generated spec build jobs from the spack configuration.
All of the above functionality is defined in the `__init__.py` of the top-level
ci module, and should be roughly the same for pipelines generated for any
platform.
## CI platform specific functionality
Functionality specific to CI platforms (e.g. gitlab, gha, etc.) should be
defined in a dedicated module. In order to define a generator for a new
platform, there are only a few requirements:
1. add a file under `ci` in which you define a generator method decorated with
`@generator` (see the sketch after this list).
1. import it from `lib/spack/spack/ci/__init__.py`, so that your new generator
is registered.
1. the generator method must take as arguments PipelineDag, SpackCIConfig,
and PipelineOptions objects, in that order.
1. the generator method must produce an output file containing the
generated pipeline.
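Putting the requirements above together, a new generator module might look roughly like the following sketch. The imported names mirror the modules shown elsewhere in this diff (`ci/common.py`, `ci/generator_registry.py`); the output format and file name are purely illustrative.

```python
# hypothetical lib/spack/spack/ci/myplatform.py
import json

from .common import PipelineDag, PipelineOptions, SpackCIConfig
from .generator_registry import generator


@generator("myplatform")
def generate_myplatform_pipeline(
    pipeline: PipelineDag, spack_ci: SpackCIConfig, options: PipelineOptions
):
    """Write one job entry per node of the (already pruned) pipeline graph."""
    ir_jobs = spack_ci.generate_ir()["jobs"]
    jobs = {}
    for _, node in pipeline.traverse_nodes(direction="parents"):
        dag_hash = node.spec.dag_hash()
        jobs[dag_hash] = {
            "name": node.spec.format("{name}{@version} {/hash}"),
            "needs": [dep.spec.dag_hash() for dep in pipeline.get_dependencies(node)],
            "attributes": ir_jobs.get(dag_hash, {}).get("attributes", {}),
        }
    with open(options.output_file or "pipeline.json", "w", encoding="utf-8") as f:
        json.dump(jobs, f, indent=2)
```

Importing the module from `ci/__init__.py` is what actually registers the generator, since registration happens as a side effect of the decorator running at import time.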

File diff suppressed because it is too large

View File

@@ -1,825 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import copy
import json
import os
import re
import ssl
import sys
import time
from collections import deque
from enum import Enum
from typing import Dict, Generator, List, Optional, Set, Tuple
from urllib.parse import quote, urlencode, urlparse
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import Singleton, memoized
import spack.binary_distribution as bindist
import spack.config as cfg
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.mirrors.mirror
import spack.schema
import spack.spec
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp
def _urlopen():
error_handler = web_util.SpackHTTPDefaultErrorHandler()
# One opener with HTTPS ssl enabled
with_ssl = build_opener(
HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
)
# One opener with HTTPS ssl disabled
without_ssl = build_opener(
HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
)
# And dynamically dispatch based on the config:verify_ssl.
def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
opener = with_ssl if verify_ssl else without_ssl
timeout = timeout or cfg.get("config:connect_timeout", 1)
return opener.open(fullurl, data, timeout)
return dispatch_open
IS_WINDOWS = sys.platform == "win32"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
_dyn_mapping_urlopener = Singleton(_urlopen)
def copy_files_to_artifacts(src, artifacts_dir):
"""
Copy file(s) to the given artifacts directory
Parameters:
src (str): the glob-friendly path expression for the file(s) to copy
artifacts_dir (str): the destination directory
"""
try:
fs.copy(src, artifacts_dir)
except Exception as err:
msg = (
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
f"exception: {str(err)}"
)
tty.warn(msg)
def win_quote(quote_str: str) -> str:
if IS_WINDOWS:
quote_str = f'"{quote_str}"'
return quote_str
def _spec_matches(spec, match_string):
return spec.intersects(match_string)
def _noop(x):
return x
def unpack_script(script_section, op=_noop):
script = []
for cmd in script_section:
if isinstance(cmd, list):
for subcmd in cmd:
script.append(op(subcmd))
else:
script.append(op(cmd))
return script
def ensure_expected_target_path(path: str) -> str:
"""Returns passed paths with all Windows path separators exchanged
for posix separators
TODO (johnwparent): Refactor config + cli read/write to deal only in posix style paths
"""
if path:
return path.replace("\\", "/")
return path
def update_env_scopes(
env: ev.Environment,
cli_scopes: List[str],
output_file: str,
transform_windows_paths: bool = False,
) -> None:
"""Add any config scopes from cli_scopes which aren't already included in the
environment, by reading the yaml, adding the missing includes, and writing the
updated yaml back to the same location.
"""
with open(env.manifest_path, "r", encoding="utf-8") as env_fd:
env_yaml_root = syaml.load(env_fd)
# Add config scopes to environment
env_includes = env_yaml_root["spack"].get("include", [])
include_scopes: List[str] = []
for scope in cli_scopes:
if scope not in include_scopes and scope not in env_includes:
include_scopes.insert(0, scope)
env_includes.extend(include_scopes)
env_yaml_root["spack"]["include"] = [
ensure_expected_target_path(i) if transform_windows_paths else i for i in env_includes
]
with open(output_file, "w", encoding="utf-8") as fd:
syaml.dump_config(env_yaml_root, fd, default_flow_style=False)
def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):
"""Write out the file describing specs that should be copied"""
buildcache_copies = {}
for release_spec in specs:
release_spec_dag_hash = release_spec.dag_hash()
# TODO: This assumes signed version of the spec
buildcache_copies[release_spec_dag_hash] = [
{
"src": url_util.join(
src_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_name(release_spec, ".spec.json.sig"),
),
"dest": url_util.join(
dest_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_name(release_spec, ".spec.json.sig"),
),
},
{
"src": url_util.join(
src_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_path_name(release_spec, ".spack"),
),
"dest": url_util.join(
dest_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_path_name(release_spec, ".spack"),
),
},
]
target_dir = os.path.dirname(output_file)
if not os.path.exists(target_dir):
os.makedirs(target_dir)
with open(output_file, "w", encoding="utf-8") as fd:
fd.write(json.dumps(buildcache_copies))
class CDashHandler:
"""
Class for managing CDash data and processing.
"""
def __init__(self, ci_cdash):
# start with the gitlab ci configuration
self.url = ci_cdash.get("url")
self.build_group = ci_cdash.get("build-group")
self.project = ci_cdash.get("project")
self.site = ci_cdash.get("site")
# grab the authorization token when available
self.auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
if self.auth_token:
tty.verbose("Using CDash auth token from environment")
# append runner description to the site if available
runner = os.environ.get("CI_RUNNER_DESCRIPTION")
if runner:
self.site += f" ({runner})"
def args(self):
return [
"--cdash-upload-url",
win_quote(self.upload_url),
"--cdash-build",
win_quote(self.build_name()),
"--cdash-site",
win_quote(self.site),
"--cdash-buildstamp",
win_quote(self.build_stamp),
]
def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:
"""Returns the CDash build name.
A name will be generated if the `spec` is provided,
otherwise, the value will be retrieved from the environment
through the `SPACK_CDASH_BUILD_NAME` variable.
Returns: (str) given spec's CDash build name."""
if spec:
build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
return build_name
env_build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
tty.debug(f"Using CDash build name ({env_build_name}) from the environment")
return env_build_name
@property # type: ignore
def build_stamp(self):
"""Returns the CDash build stamp.
The one defined by SPACK_CDASH_BUILD_STAMP environment variable
is preferred due to the representation of timestamps; otherwise,
one will be built.
Returns: (str) current CDash build stamp"""
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
if build_stamp:
tty.debug(f"Using build stamp ({build_stamp}) from the environment")
return build_stamp
build_stamp = cdash_build_stamp(self.build_group, time.time())
tty.debug(f"Generated new build stamp ({build_stamp})")
return build_stamp
@property # type: ignore
@memoized
def project_enc(self):
tty.debug(f"Encoding project ({type(self.project)}): {self.project})")
encode = urlencode({"project": self.project})
index = encode.find("=") + 1
return encode[index:]
@property
def upload_url(self):
url_format = f"{self.url}/submit.php?project={self.project_enc}"
return url_format
def copy_test_results(self, source, dest):
"""Copy test results to artifacts directory."""
reports = fs.join_path(source, "*_Test*.xml")
copy_files_to_artifacts(reports, dest)
def create_buildgroup(self, opener, headers, url, group_name, group_type):
data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers)
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code not in [200, 201]:
msg = f"Creating buildgroup failed (response code = {response_code})"
tty.warn(msg)
return None
response_text = response.read()
response_json = json.loads(response_text)
build_group_id = response_json["id"]
return build_group_id
def populate_buildgroup(self, job_names):
url = f"{self.url}/api/v1/buildgroup.php"
headers = {
"Authorization": f"Bearer {self.auth_token}",
"Content-Type": "application/json",
}
opener = build_opener(HTTPHandler)
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
group_id = self.create_buildgroup(
opener, headers, url, f"Latest {self.build_group}", "Latest"
)
if not parent_group_id or not group_id:
msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
tty.warn(msg)
return
data = {
"dynamiclist": [
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
for name in job_names
]
}
enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers)
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code != 200:
msg = f"Error response code ({response_code}) in populate_buildgroup"
tty.warn(msg)
def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
"""Explicitly report skipping testing of a spec (e.g., it's CI
configuration identifies it as known to have broken tests or
the CI installation failed).
Args:
spec: spec being tested
report_dir: directory where the report will be written
reason: reason the test is being skipped
"""
configuration = CDashConfiguration(
upload_url=self.upload_url,
packages=[spec.name],
build=self.build_name(),
site=self.site,
buildstamp=self.build_stamp,
track=None,
)
reporter = CDash(configuration=configuration)
reporter.test_skipped_report(report_dir, spec, reason)
class PipelineType(Enum):
COPY_ONLY = 1
spack_copy_only = 1
PROTECTED_BRANCH = 2
spack_protected_branch = 2
PULL_REQUEST = 3
spack_pull_request = 3
class PipelineOptions:
"""A container for all pipeline options that can be specified (whether
via cli, config/yaml, or environment variables)"""
def __init__(
self,
env: ev.Environment,
buildcache_destination: spack.mirrors.mirror.Mirror,
artifacts_root: str = "jobs_scratch_dir",
print_summary: bool = True,
output_file: Optional[str] = None,
check_index_only: bool = False,
broken_specs_url: Optional[str] = None,
rebuild_index: bool = True,
untouched_pruning_dependent_depth: Optional[int] = None,
prune_untouched: bool = False,
prune_up_to_date: bool = True,
prune_external: bool = True,
stack_name: Optional[str] = None,
pipeline_type: Optional[PipelineType] = None,
require_signing: bool = False,
cdash_handler: Optional["CDashHandler"] = None,
):
"""
Args:
env: Active spack environment
buildcache_destination: The mirror where built binaries should be pushed
artifacts_root: Path to location where artifacts should be stored
print_summary: Print a summary of the scheduled pipeline
output_file: Path where output file should be written
check_index_only: Only fetch the index or fetch all spec files
broken_specs_url: URL where broken specs (on develop) should be reported
rebuild_index: Generate a job to rebuild mirror index after rebuilds
untouched_pruning_dependent_depth: How many parents to traverse from changed pkg specs
prune_untouched: Prune jobs for specs that were unchanged in git history
prune_up_to_date: Prune specs from pipeline if binary exists on the mirror
prune_external: Prune specs from pipeline if they are external
stack_name: Name of spack stack
pipeline_type: Type of pipeline running (optional)
require_signing: Require buildcache to be signed (fail w/out signing key)
cdash_handler: Object for communicating build information with CDash
"""
self.env = env
self.buildcache_destination = buildcache_destination
self.artifacts_root = artifacts_root
self.print_summary = print_summary
self.output_file = output_file
self.check_index_only = check_index_only
self.broken_specs_url = broken_specs_url
self.rebuild_index = rebuild_index
self.untouched_pruning_dependent_depth = untouched_pruning_dependent_depth
self.prune_untouched = prune_untouched
self.prune_up_to_date = prune_up_to_date
self.prune_external = prune_external
self.stack_name = stack_name
self.pipeline_type = pipeline_type
self.require_signing = require_signing
self.cdash_handler = cdash_handler
class PipelineNode:
spec: spack.spec.Spec
parents: Set[str]
children: Set[str]
def __init__(self, spec: spack.spec.Spec):
self.spec = spec
self.parents = set()
self.children = set()
@property
def key(self):
"""Return key of the stored spec"""
return PipelineDag.key(self.spec)
class PipelineDag:
"""Turn a list of specs into a simple directed graph, that doesn't keep track
of edge types."""
@classmethod
def key(cls, spec: spack.spec.Spec) -> str:
return spec.dag_hash()
def __init__(self, specs: List[spack.spec.Spec]) -> None:
# Build dictionary of nodes
self.nodes: Dict[str, PipelineNode] = {
PipelineDag.key(s): PipelineNode(s)
for s in traverse.traverse_nodes(specs, deptype=dt.ALL_TYPES, root=True)
}
# Create edges
for edge in traverse.traverse_edges(
specs, deptype=dt.ALL_TYPES, root=False, cover="edges"
):
parent_key = PipelineDag.key(edge.parent)
child_key = PipelineDag.key(edge.spec)
self.nodes[parent_key].children.add(child_key)
self.nodes[child_key].parents.add(parent_key)
def prune(self, node_key: str):
"""Remove a node from the graph, and reconnect its parents and children"""
node = self.nodes[node_key]
for parent in node.parents:
self.nodes[parent].children.remove(node_key)
self.nodes[parent].children |= node.children
for child in node.children:
self.nodes[child].parents.remove(node_key)
self.nodes[child].parents |= node.parents
del self.nodes[node_key]
def traverse_nodes(
self, direction: str = "children"
) -> Generator[Tuple[int, PipelineNode], None, None]:
"""Yields (depth, node) from the pipeline graph. Traversal is topologically
ordered from the roots if ``direction`` is ``children``, or from the leaves
if ``direction`` is ``parents``. The yielded depth is the length of the
longest path from the starting point to the yielded node."""
if direction == "children":
get_in_edges = lambda node: node.parents
get_out_edges = lambda node: node.children
else:
get_in_edges = lambda node: node.children
get_out_edges = lambda node: node.parents
sort_key = lambda k: self.nodes[k].spec.name
out_edges = {k: sorted(get_out_edges(n), key=sort_key) for k, n in self.nodes.items()}
num_in_edges = {k: len(get_in_edges(n)) for k, n in self.nodes.items()}
# Populate a queue with all the nodes that have no incoming edges
nodes = deque(
sorted(
[(0, key) for key in self.nodes.keys() if num_in_edges[key] == 0],
key=lambda item: item[1],
)
)
while nodes:
# Remove the next node, n, from the queue and yield it
depth, n_key = nodes.pop()
yield (depth, self.nodes[n_key])
# Remove an in-edge from every node, m, pointed to by an
# out-edge from n. If any of those nodes are left with
# 0 remaining in-edges, add them to the queue.
for m in out_edges[n_key]:
num_in_edges[m] -= 1
if num_in_edges[m] == 0:
nodes.appendleft((depth + 1, m))
def get_dependencies(self, node: PipelineNode) -> List[PipelineNode]:
"""Returns a list of nodes corresponding to the direct dependencies
of the given node."""
return [self.nodes[k] for k in node.children]
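`PipelineDag.traverse_nodes` above is a Kahn-style topological sort that also tracks, per node, the length of the longest path from the starting side: a node is only enqueued once its last remaining in-edge is removed, so the depth it inherits is one more than its deepest predecessor. The same bookkeeping on a plain adjacency dict (toy data, not Spack objects):

```python
from collections import deque

# toy graph: node -> children, mirroring the "children" traversal direction
children = {"root": ["libA", "libB"], "libA": ["zlib"], "libB": ["zlib"], "zlib": []}

in_degree = {k: 0 for k in children}
for outs in children.values():
    for m in outs:
        in_degree[m] += 1

queue = deque((0, k) for k in children if in_degree[k] == 0)
while queue:
    depth, n = queue.popleft()
    print(depth, n)  # depth == longest path from a root, as in the docstring above
    for m in children[n]:
        in_degree[m] -= 1
        if in_degree[m] == 0:
            queue.append((depth + 1, m))
# prints: 0 root, 1 libA, 1 libB, 2 zlib
```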
class SpackCIConfig:
"""Spack CI object used to generate intermediate representation
used by the CI generator(s).
"""
def __init__(self, ci_config):
"""Given the information from the ci section of the config
and the staged jobs, set up meta data needed for generating Spack
CI IR.
"""
self.ci_config = ci_config
self.named_jobs = ["any", "build", "copy", "cleanup", "noop", "reindex", "signing"]
self.ir = {
"jobs": {},
"rebuild-index": self.ci_config.get("rebuild-index", True),
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
"target": self.ci_config.get("target", "gitlab"),
}
jobs = self.ir["jobs"]
for name in self.named_jobs:
# Skip the special named jobs
if name not in ["any", "build"]:
jobs[name] = self.__init_job("")
def __init_job(self, release_spec):
"""Initialize job object"""
job_object = {"spec": release_spec, "attributes": {}}
if release_spec:
job_vars = job_object["attributes"].setdefault("variables", {})
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
return job_object
def __is_named(self, section):
"""Check if a pipeline-gen configuration section is for a named job,
and if so return the name, otherwise return None.
"""
for _name in self.named_jobs:
keys = [f"{_name}-job", f"{_name}-job-remove"]
if any([key for key in keys if key in section]):
return _name
return None
@staticmethod
def __job_name(name, suffix=""):
"""Compute the name of a named job with appropriate suffix.
Valid suffixes are either '-remove' or empty string or None
"""
assert isinstance(name, str)
jname = name
if suffix:
jname = f"{name}-job{suffix}"
else:
jname = f"{name}-job"
return jname
def __apply_submapping(self, dest, spec, section):
"""Apply submapping setion to the IR dict"""
matched = False
only_first = section.get("match_behavior", "first") == "first"
for match_attrs in reversed(section["submapping"]):
attrs = cfg.InternalConfigScope._process_dict_keyname_overrides(match_attrs)
for match_string in match_attrs["match"]:
if _spec_matches(spec, match_string):
matched = True
if "build-job-remove" in match_attrs:
spack.config.remove_yaml(dest, attrs["build-job-remove"])
if "build-job" in match_attrs:
spack.schema.merge_yaml(dest, attrs["build-job"])
break
if matched and only_first:
break
return dest
# Create jobs for all the pipeline specs
def init_pipeline_jobs(self, pipeline: PipelineDag):
for _, node in pipeline.traverse_nodes():
dag_hash = node.spec.dag_hash()
self.ir["jobs"][dag_hash] = self.__init_job(node.spec)
# Generate IR from the configs
def generate_ir(self):
"""Generate the IR from the Spack CI configurations."""
jobs = self.ir["jobs"]
# Implicit job defaults
defaults = [
{
"build-job": {
"script": [
"cd {env_dir}",
"spack env activate --without-view .",
"spack ci rebuild",
]
}
},
{"noop-job": {"script": ['echo "All specs already up to date, nothing to rebuild."']}},
]
# Job overrides
overrides = [
# Reindex script
{
"reindex-job": {
"script:": ["spack buildcache update-index --keys {index_target_mirror}"]
}
},
# Cleanup script
{
"cleanup-job": {
"script:": ["spack -d mirror destroy {mirror_prefix}/$CI_PIPELINE_ID"]
}
},
# Add signing job tags
{"signing-job": {"tags": ["aws", "protected", "notary"]}},
# Remove reserved tags
{"any-job-remove": {"tags": SPACK_RESERVED_TAGS}},
]
pipeline_gen = overrides + self.ci_config.get("pipeline-gen", []) + defaults
for section in reversed(pipeline_gen):
name = self.__is_named(section)
has_submapping = "submapping" in section
has_dynmapping = "dynamic-mapping" in section
section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)
if name:
remove_job_name = self.__job_name(name, suffix="-remove")
merge_job_name = self.__job_name(name)
do_remove = remove_job_name in section
do_merge = merge_job_name in section
def _apply_section(dest, src):
if do_remove:
dest = spack.config.remove_yaml(dest, src[remove_job_name])
if do_merge:
dest = copy.copy(spack.schema.merge_yaml(dest, src[merge_job_name]))
if name == "build":
# Apply attributes to all build jobs
for _, job in jobs.items():
if job["spec"]:
_apply_section(job["attributes"], section)
elif name == "any":
# Apply section attributes to all jobs
for _, job in jobs.items():
_apply_section(job["attributes"], section)
else:
# Create a signing job if there is a script and the job hasn't
# been initialized yet
if name == "signing" and name not in jobs:
if "signing-job" in section:
if "script" not in section["signing-job"]:
continue
else:
jobs[name] = self.__init_job("")
# Apply attributes to named job
_apply_section(jobs[name]["attributes"], section)
elif has_submapping:
# Apply section jobs with specs to match
for _, job in jobs.items():
if job["spec"]:
job["attributes"] = self.__apply_submapping(
job["attributes"], job["spec"], section
)
elif has_dynmapping:
mapping = section["dynamic-mapping"]
dynmap_name = mapping.get("name")
# Check if this section should be skipped
dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
if dynmap_name and dynmap_skip:
if re.match(dynmap_skip, dynmap_name):
continue
# Get the endpoint
endpoint = mapping["endpoint"]
endpoint_url = urlparse(endpoint)
# Configure the request header
header = {"User-Agent": web_util.SPACK_USER_AGENT}
header.update(mapping.get("header", {}))
# Expand header environment variables
# i.e. if tokens are passed
for value in header.values():
value = os.path.expandvars(value)
verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
required = mapping.get("require", [])
allowed = mapping.get("allow", [])
ignored = mapping.get("ignore", [])
# required keys are implicitly allowed
allowed = sorted(set(allowed + required))
ignored = sorted(set(ignored))
required = sorted(set(required))
# Make sure required things are not also ignored
assert not any([ikey in required for ikey in ignored])
def job_query(job):
job_vars = job["attributes"]["variables"]
query = (
"{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
# The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
" {SPACK_JOB_SPEC_VARIANTS}"
" arch={SPACK_JOB_SPEC_ARCH}"
"%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
).format_map(job_vars)
return f"spec={quote(query)}"
for job in jobs.values():
if not job["spec"]:
continue
# Create request for this job
query = job_query(job)
request = Request(
endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
)
try:
response = _dyn_mapping_urlopener(
request, verify_ssl=verify_ssl, timeout=timeout
)
except Exception as e:
# For now just ignore any errors from dynamic mapping and continue
# This is still experimental, and failures should not stop CI
# from running normally
tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
tty.warn(f"{e}")
continue
config = json.load(codecs.getreader("utf-8")(response))
# Strip ignore keys
if ignored:
for key in ignored:
if key in config:
config.pop(key)
# Only keep allowed keys
clean_config = {}
if allowed:
for key in allowed:
if key in config:
clean_config[key] = config[key]
else:
clean_config = config
# Verify all of the required keys are present
if required:
missing_keys = []
for key in required:
if key not in clean_config.keys():
missing_keys.append(key)
if missing_keys:
tty.warn(f"Response missing required keys: {missing_keys}")
if clean_config:
job["attributes"] = spack.schema.merge_yaml(
job.get("attributes", {}), clean_config
)
for _, job in jobs.items():
if job["spec"]:
job["spec"] = job["spec"].name
return self.ir
class SpackCIError(spack.error.SpackError):
def __init__(self, msg):
super().__init__(msg)

View File

@@ -1,36 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# Holds all known formatters
"""Generators that support writing out pipelines for various CI platforms,
using a common pipeline graph definition.
"""
import spack.error
_generators = {}
def generator(name):
"""Decorator to register a pipeline generator method.
A generator method should take PipelineDag, SpackCIConfig, and
PipelineOptions arguments, and should produce a pipeline file.
"""
def _decorator(generate_method):
_generators[name] = generate_method
return generate_method
return _decorator
def get_generator(name):
try:
return _generators[name]
except KeyError:
raise UnknownGeneratorException(name)
class UnknownGeneratorException(spack.error.SpackError):
def __init__(self, generator_name):
super().__init__(f"No registered generator for {generator_name}")

View File

@@ -1,416 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import os
import shutil
from typing import List, Optional
import ruamel.yaml
import llnl.util.tty as tty
import spack
import spack.binary_distribution as bindist
import spack.config as cfg
import spack.mirrors.mirror
import spack.schema
import spack.spec
import spack.util.spack_yaml as syaml
from .common import (
SPACK_RESERVED_TAGS,
PipelineDag,
PipelineOptions,
PipelineType,
SpackCIConfig,
SpackCIError,
ensure_expected_target_path,
unpack_script,
update_env_scopes,
write_pipeline_manifest,
)
from .generator_registry import generator
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
# "always",
"unknown_failure",
"script_failure",
"api_failure",
"stuck_or_timeout_failure",
"runner_system_failure",
"runner_unsupported",
"stale_schedule",
# "job_execution_timeout",
"archived_failure",
"unmet_prerequisites",
"scheduler_failure",
"data_integrity_failure",
]
JOB_NAME_FORMAT = "{name}{@version} {/hash}"
def _remove_reserved_tags(tags):
"""Convenience function to strip reserved tags from jobs"""
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
def get_job_name(spec: spack.spec.Spec, build_group: Optional[str] = None) -> str:
"""Given a spec and possibly a build group, return the job name. If the
resulting name is longer than 255 characters, it will be truncated.
Arguments:
spec: Spec job will build
build_group: Name of build group this job belongs to (a CDash notion)
Returns: The job name
"""
job_name = spec.format(JOB_NAME_FORMAT)
if build_group:
job_name = f"{job_name} {build_group}"
return job_name[:255]
def maybe_generate_manifest(pipeline: PipelineDag, options: PipelineOptions, manifest_path):
# TODO: Consider including only hashes of rebuilt specs in the manifest,
# instead of full source and destination urls. Also, consider renaming
# the variable that controls whether or not to write the manifest from
# "SPACK_COPY_BUILDCACHE" to "SPACK_WRITE_PIPELINE_MANIFEST" or similar.
spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
if spack_buildcache_copy:
buildcache_copy_src_prefix = options.buildcache_destination.fetch_url
buildcache_copy_dest_prefix = spack_buildcache_copy
if options.pipeline_type == PipelineType.COPY_ONLY:
manifest_specs = [s for s in options.env.all_specs() if not s.external]
else:
manifest_specs = [n.spec for _, n in pipeline.traverse_nodes(direction="children")]
write_pipeline_manifest(
manifest_specs, buildcache_copy_src_prefix, buildcache_copy_dest_prefix, manifest_path
)
@generator("gitlab")
def generate_gitlab_yaml(pipeline: PipelineDag, spack_ci: SpackCIConfig, options: PipelineOptions):
"""Given a pipeline graph, job attributes, and pipeline options,
write a pipeline that can be consumed by GitLab to the given output file.
Arguments:
pipeline: An already pruned graph of jobs representing all the specs to build
spack_ci: An object containing the configured attributes of all jobs in the pipeline
options: An object containing all the pipeline options gathered from yaml, env, etc...
"""
ci_project_dir = os.environ.get("CI_PROJECT_DIR") or os.getcwd()
generate_job_name = os.environ.get("CI_JOB_NAME", "job-does-not-exist")
generate_pipeline_id = os.environ.get("CI_PIPELINE_ID", "pipeline-does-not-exist")
artifacts_root = options.artifacts_root
if artifacts_root.startswith(ci_project_dir):
artifacts_root = os.path.relpath(artifacts_root, ci_project_dir)
pipeline_artifacts_dir = os.path.join(ci_project_dir, artifacts_root)
output_file = options.output_file
if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
else:
output_file_path = os.path.abspath(output_file)
gen_ci_dir = os.path.dirname(output_file_path)
if not os.path.exists(gen_ci_dir):
os.makedirs(gen_ci_dir)
spack_ci_ir = spack_ci.generate_ir()
concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")
# Now that we've added the mirrors we know about, they should be properly
# reflected in the environment manifest file, so copy that into the
# concrete environment directory, along with the spack.lock file.
if not os.path.exists(concrete_env_dir):
os.makedirs(concrete_env_dir)
shutil.copyfile(options.env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
shutil.copyfile(options.env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))
update_env_scopes(
options.env,
[
os.path.relpath(s.path, concrete_env_dir)
for s in cfg.scopes().values()
if not s.writable
and isinstance(s, (cfg.DirectoryConfigScope))
and os.path.exists(s.path)
],
os.path.join(concrete_env_dir, "spack.yaml"),
# Here transforming windows paths is only required in the special case
# of copy_only_pipelines, a unique scenario where the generate job and
# child pipelines are run on different platforms. To make this compatible
# w/ Windows, we cannot write Windows style path separators that will be
# consumed by the Posix copy job runner.
#
# TODO (johnwparent): Refactor config + cli read/write to deal only in
# posix style paths
transform_windows_paths=(options.pipeline_type == PipelineType.COPY_ONLY),
)
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
# We communicate relative paths to the downstream jobs to avoid issues in
# situations where the CI_PROJECT_DIR varies between the pipeline
# generation job and the rebuild jobs. This can happen when gitlab
# checks out the project into a runner-specific directory, for example,
# and different runners are picked for generate and rebuild jobs.
rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
def main_script_replacements(cmd):
return cmd.replace("{env_dir}", rel_concrete_env_dir)
output_object = {}
job_id = 0
stage_id = 0
stages: List[List] = []
stage_names = []
max_length_needs = 0
max_needs_job = ""
if not options.pipeline_type == PipelineType.COPY_ONLY:
for level, node in pipeline.traverse_nodes(direction="parents"):
stage_id = level
if len(stages) == stage_id:
stages.append([])
stages[stage_id].append(node.spec)
stage_name = f"stage-{level}"
if stage_name not in stage_names:
stage_names.append(stage_name)
release_spec = node.spec
release_spec_dag_hash = release_spec.dag_hash()
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
if not job_object:
tty.warn(f"No match found for {release_spec}, skipping it")
continue
if options.pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
if options.pipeline_type == PipelineType.PROTECTED_BRANCH:
job_object["tags"].extend(["protected"])
elif options.pipeline_type == PipelineType.PULL_REQUEST:
job_object["tags"].extend(["public"])
if "script" not in job_object:
raise AttributeError
job_object["script"] = unpack_script(job_object["script"], op=main_script_replacements)
if "before_script" in job_object:
job_object["before_script"] = unpack_script(job_object["before_script"])
if "after_script" in job_object:
job_object["after_script"] = unpack_script(job_object["after_script"])
build_group = options.cdash_handler.build_group if options.cdash_handler else None
job_name = get_job_name(release_spec, build_group)
dep_nodes = pipeline.get_dependencies(node)
job_object["needs"] = [
{"job": get_job_name(dep_node.spec, build_group), "artifacts": False}
for dep_node in dep_nodes
]
job_object["needs"].append(
{"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}
)
job_vars = job_object["variables"]
# Let downstream jobs know whether the spec needed rebuilding, regardless of
# whether DAG pruning was enabled or not.
already_built = bindist.get_mirrors_for_spec(spec=release_spec, index_only=True)
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = "False" if already_built else "True"
if options.cdash_handler:
build_name = options.cdash_handler.build_name(release_spec)
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
build_stamp = options.cdash_handler.build_stamp
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
job_object["artifacts"] = spack.schema.merge_yaml(
job_object.get("artifacts", {}),
{
"when": "always",
"paths": [
rel_job_log_dir,
rel_job_repro_dir,
rel_job_test_dir,
rel_user_artifacts_dir,
],
},
)
job_object["stage"] = stage_name
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
job_object["interruptible"] = True
length_needs = len(job_object["needs"])
if length_needs > max_length_needs:
max_length_needs = length_needs
max_needs_job = job_name
output_object[job_name] = job_object
job_id += 1
tty.debug(f"{job_id} build jobs generated in {stage_id} stages")
if job_id > 0:
tty.debug(f"The max_needs_job is {max_needs_job}, with {max_length_needs} needs")
service_job_retries = {
"max": 2,
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
}
# In some cases, pipeline generation should write a manifest. Currently
# the only purpose is to specify a list of sources and destinations for
# everything that should be copied.
distinguish_stack = options.stack_name if options.stack_name else "rebuilt"
manifest_path = os.path.join(
pipeline_artifacts_dir, "specs_to_copy", f"copy_{distinguish_stack}_specs.json"
)
maybe_generate_manifest(pipeline, options, manifest_path)
if options.pipeline_type == PipelineType.COPY_ONLY:
stage_names.append("copy")
sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
sync_job["stage"] = "copy"
sync_job["needs"] = [{"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}]
if "variables" not in sync_job:
sync_job["variables"] = {}
sync_job["variables"][
"SPACK_COPY_ONLY_DESTINATION"
] = options.buildcache_destination.fetch_url
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
if "buildcache-source" not in pipeline_mirrors:
raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")
buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
sync_job["dependencies"] = []
output_object["copy"] = sync_job
job_id += 1
if job_id > 0:
if (
"script" in spack_ci_ir["jobs"]["signing"]["attributes"]
and options.pipeline_type == PipelineType.PROTECTED_BRANCH
):
# External signing: generate a job to check and sign binary pkgs
stage_names.append("stage-sign-pkgs")
signing_job = spack_ci_ir["jobs"]["signing"]["attributes"]
signing_job["script"] = unpack_script(signing_job["script"])
signing_job["stage"] = "stage-sign-pkgs"
signing_job["when"] = "always"
signing_job["retry"] = {"max": 2, "when": ["always"]}
signing_job["interruptible"] = True
if "variables" not in signing_job:
signing_job["variables"] = {}
signing_job["variables"][
"SPACK_BUILDCACHE_DESTINATION"
] = options.buildcache_destination.push_url
signing_job["dependencies"] = []
output_object["sign-pkgs"] = signing_job
if options.rebuild_index:
# Add a final job to regenerate the index
stage_names.append("stage-rebuild-index")
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
final_job["stage"] = "stage-rebuild-index"
target_mirror = options.buildcache_destination.push_url
final_job["script"] = unpack_script(
final_job["script"],
op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
)
final_job["when"] = "always"
final_job["retry"] = service_job_retries
final_job["interruptible"] = True
final_job["dependencies"] = []
output_object["rebuild-index"] = final_job
output_object["stages"] = stage_names
# Capture the version of Spack used to generate the pipeline, so it can be
# passed to `git checkout` for version consistency. If we aren't in a Git
# repository, presume we are a Spack release and use the Git tag instead.
spack_version = spack.get_version()
version_to_clone = spack.get_spack_commit() or f"v{spack.spack_version}"
rebuild_everything = not options.prune_up_to_date and not options.prune_untouched
output_object["variables"] = {
"SPACK_ARTIFACTS_ROOT": artifacts_root,
"SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
"SPACK_VERSION": spack_version,
"SPACK_CHECKOUT_VERSION": version_to_clone,
"SPACK_JOB_LOG_DIR": rel_job_log_dir,
"SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
"SPACK_JOB_TEST_DIR": rel_job_test_dir,
"SPACK_PIPELINE_TYPE": options.pipeline_type.name if options.pipeline_type else "None",
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(options.prune_up_to_date),
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
"SPACK_REQUIRE_SIGNING": str(options.require_signing),
}
if options.stack_name:
output_object["variables"]["SPACK_CI_STACK_NAME"] = options.stack_name
output_vars = output_object["variables"]
for item, val in output_vars.items():
output_vars[item] = ensure_expected_target_path(val)
else:
# No jobs were generated
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
# If this job fails, ignore the status and carry on
noop_job["retry"] = 0
noop_job["allow_failure"] = True
tty.debug("No specs to rebuild, generating no-op job")
output_object = {"no-specs-to-rebuild": noop_job}
# Ensure the child pipeline always runs
output_object["workflow"] = {"rules": [{"when": "always"}]}
sorted_output = {}
for output_key, output_value in sorted(output_object.items()):
sorted_output[output_key] = output_value
# Minimize yaml output size through use of anchors
syaml.anchorify(sorted_output)
with open(output_file, "w", encoding="utf-8") as f:
ruamel.yaml.YAML().dump(sorted_output, f)
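For illustration of the fallback branch above: when no build jobs are generated, the pipeline file reduces to a single no-op job plus an always-run workflow rule. A minimal sketch of the resulting dump, assuming `ruamel.yaml` is installed and using a made-up `noop_job` body (the real attributes come from `spack_ci_ir["jobs"]["noop"]`):

```python
import io

import ruamel.yaml

# Hypothetical no-op job body; in the code above it comes from spack_ci_ir["jobs"]["noop"].
noop_job = {"script": ['echo "no specs to rebuild"'], "retry": 0, "allow_failure": True}

output_object = {
    "no-specs-to-rebuild": noop_job,
    # Mirror of the workflow rule that makes the child pipeline always run.
    "workflow": {"rules": [{"when": "always"}]},
}

buf = io.StringIO()
ruamel.yaml.YAML().dump(dict(sorted(output_object.items())), buf)
print(buf.getvalue())
```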

View File

@@ -24,10 +24,10 @@
import spack.environment as ev
import spack.error
import spack.extensions
import spack.parser
import spack.paths
import spack.repo
import spack.spec
import spack.spec_parser
import spack.store
import spack.traverse as traverse
import spack.user_environment as uenv
@@ -163,12 +163,12 @@ def quote_kvp(string: str) -> str:
or ``name==``, and we assume the rest of the argument is the value. This covers the
common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
"""
match = spack.spec_parser.SPLIT_KVP.match(string)
match = spack.parser.SPLIT_KVP.match(string)
if not match:
return string
key, delim, value = match.groups()
return f"{key}{delim}{spack.spec_parser.quote_if_needed(value)}"
return f"{key}{delim}{spack.parser.quote_if_needed(value)}"
def parse_specs(
@@ -180,7 +180,7 @@ def parse_specs(
args = [args] if isinstance(args, str) else args
arg_string = " ".join([quote_kvp(arg) for arg in args])
specs = spack.spec_parser.parse(arg_string)
specs = spack.parser.parse(arg_string)
if not concretize:
return specs
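For context on the `quote_kvp` hunk above: the function re-quotes `key=value` arguments so values containing spaces survive being re-joined and re-parsed. Below is a hedged sketch using stand-ins, since Spack's actual `SPLIT_KVP` regex and `quote_if_needed` helper are not shown in this diff:

```python
import re
import shlex

# Stand-in for SPLIT_KVP: a key, '==' or '=', then the rest of the argument as the value.
SPLIT_KVP = re.compile(r"^(\w+)(==|=)(.*)$", re.DOTALL)

def quote_kvp_sketch(string: str) -> str:
    match = SPLIT_KVP.match(string)
    if not match:
        return string  # not a key=value argument; leave untouched
    key, delim, value = match.groups()
    return f"{key}{delim}{shlex.quote(value)}"  # quote the value only when needed

print(quote_kvp_sketch("cflags=-O2 -g"))  # -> cflags='-O2 -g'
print(quote_kvp_sketch("zlib@1.3"))       # -> zlib@1.3 (unchanged)
```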

View File

@@ -29,7 +29,7 @@
# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz"
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.4/bootstrap-buildcache.tar.gz"
#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = "bootstrap_cache"
@@ -51,9 +51,9 @@
},
}
CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/clingo.json"
GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/gnupg.json"
PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/patchelf.json"
CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/clingo.json"
GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/gnupg.json"
PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/patchelf.json"
# Metadata for a generated source mirror
SOURCE_METADATA = {
@@ -419,7 +419,7 @@ def write_metadata(subdir, metadata):
metadata_rel_dir = os.path.join("metadata", subdir)
metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir, "metadata.yaml")
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
with open(metadata_yaml, mode="w", encoding="utf-8") as f:
with open(metadata_yaml, mode="w") as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir

View File

@@ -731,7 +731,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
deduped_manifest = {}
for manifest_path in manifest_file_list:
with open(manifest_path, encoding="utf-8") as fd:
with open(manifest_path) as fd:
manifest = json.loads(fd.read())
for spec_hash, copy_list in manifest.items():
# Last duplicate hash wins

View File

@@ -253,7 +253,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool
if match:
new_versions.append((Version(match.group(1)), ver_line))
with open(filename, "r+", encoding="utf-8") as f:
with open(filename, "r+") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)

View File

@@ -6,6 +6,7 @@
import json
import os
import shutil
import warnings
from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs
@@ -16,7 +17,6 @@
import spack.ci as spack_ci
import spack.cmd
import spack.cmd.buildcache as buildcache
import spack.cmd.common.arguments
import spack.config as cfg
import spack.environment as ev
import spack.hash_types as ht
@@ -62,8 +62,22 @@ def setup_parser(subparser):
"path to the file where generated jobs file should be written. "
"default is .gitlab-ci.yml in the root of the repository",
)
prune_dag_group = generate.add_mutually_exclusive_group()
prune_dag_group.add_argument(
generate.add_argument(
"--optimize",
action="store_true",
default=False,
help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
"run the generated document through a series of optimization passes "
"designed to reduce the size of the generated file",
)
generate.add_argument(
"--dependencies",
action="store_true",
default=False,
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
)
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
"--prune-dag",
action="store_true",
dest="prune_dag",
@@ -71,7 +85,7 @@ def setup_parser(subparser):
help="skip up-to-date specs\n\n"
"do not generate jobs for specs that are up-to-date on the mirror",
)
prune_dag_group.add_argument(
prune_group.add_argument(
"--no-prune-dag",
action="store_false",
dest="prune_dag",
@@ -79,23 +93,6 @@ def setup_parser(subparser):
help="process up-to-date specs\n\n"
"generate jobs for specs even when they are up-to-date on the mirror",
)
prune_ext_group = generate.add_mutually_exclusive_group()
prune_ext_group.add_argument(
"--prune-externals",
action="store_true",
dest="prune_externals",
default=True,
help="skip external specs\n\n"
"do not generate jobs for specs that are marked as external",
)
prune_ext_group.add_argument(
"--no-prune-externals",
action="store_false",
dest="prune_externals",
default=True,
help="process external specs\n\n"
"generate jobs for specs even when they are marked as external",
)
generate.add_argument(
"--check-index-only",
action="store_true",
@@ -111,18 +108,14 @@ def setup_parser(subparser):
)
generate.add_argument(
"--artifacts-root",
default="jobs_scratch_dir",
default=None,
help="path to the root of the artifacts directory\n\n"
"The spack ci module assumes it will normally be run from within your project "
"directory, wherever that is checked out to run your ci. The artifacts root directory "
"should specifiy a name that can safely be used for artifacts within your project "
"directory.",
"if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
"this directory. their location will be passed to generated child jobs through the "
"SPACK_CONCRETE_ENVIRONMENT_PATH variable",
)
generate.set_defaults(func=ci_generate)
spack.cmd.common.arguments.add_concretizer_args(generate)
spack.cmd.common.arguments.add_common_arguments(generate, ["jobs"])
# Rebuild the buildcache index associated with the mirror in the
# active, gitlab-enabled environment.
index = subparsers.add_parser(
@@ -152,7 +145,6 @@ def setup_parser(subparser):
help="stop stand-alone tests after the first failure",
)
rebuild.set_defaults(func=ci_rebuild)
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])
# Facilitate reproduction of a failed CI build job
reproduce = subparsers.add_parser(
@@ -195,8 +187,42 @@ def ci_generate(args):
before invoking this command. the value must be the CDash authorization token needed to create
a build group and register all generated jobs under it
"""
if args.optimize:
warnings.warn(
"The --optimize option has been deprecated, and currently has no effect. "
"It will be removed in Spack v0.24."
)
if args.dependencies:
warnings.warn(
"The --dependencies option has been deprecated, and currently has no effect. "
"It will be removed in Spack v0.24."
)
env = spack.cmd.require_active_env(cmd_name="ci generate")
spack_ci.generate_pipeline(env, args)
output_file = args.output_file
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
else:
output_file_path = os.path.abspath(output_file)
gen_ci_dir = os.path.dirname(output_file_path)
if not os.path.exists(gen_ci_dir):
os.makedirs(gen_ci_dir)
# Generate the jobs
spack_ci.generate_gitlab_ci_yaml(
env,
True,
output_file,
prune_dag=prune_dag,
check_index_only=index_only,
artifacts_root=artifacts_root,
)
def ci_reindex(args):
@@ -361,7 +387,7 @@ def ci_rebuild(args):
# Write this job's spec json into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
with open(job_spec_json_path, "w", encoding="utf-8") as fd:
with open(job_spec_json_path, "w") as fd:
fd.write(job_spec.to_json(hash=ht.dag_hash))
# Write some other details to aid in reproduction into an artifact
@@ -371,7 +397,7 @@ def ci_rebuild(args):
"job_spec_json": job_spec_json_file,
"ci_project_dir": ci_project_dir,
}
with open(repro_file, "w", encoding="utf-8") as fd:
with open(repro_file, "w") as fd:
fd.write(json.dumps(repro_details))
# Write information about spack into an artifact in the repro dir
@@ -407,19 +433,14 @@ def ci_rebuild(args):
if not config["verify_ssl"]:
spack_cmd.append("-k")
install_args = [
f'--use-buildcache={spack_ci.common.win_quote("package:never,dependencies:only")}'
]
install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
can_verify = spack_ci.can_verify_binaries()
verify_binaries = can_verify and spack_is_pr_pipeline is False
if not verify_binaries:
install_args.append("--no-check-signature")
if args.jobs:
install_args.append(f"-j{args.jobs}")
slash_hash = spack_ci.common.win_quote("/" + job_spec.dag_hash())
slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
@@ -584,7 +605,7 @@ def ci_rebuild(args):
rebuild_timer.stop()
try:
with open("install_timers.json", "w", encoding="utf-8") as timelog:
with open("install_timers.json", "w") as timelog:
extra_attributes = {"name": ".ci-rebuild"}
rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:

View File

@@ -743,7 +743,7 @@ def rst(args: Namespace, out: IO) -> None:
# extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
documented_commands: Set[str] = set()
for filename in args.rst_files:
with open(filename, encoding="utf-8") as f:
with open(filename) as f:
for line in f:
match = re.match(r"\.\. _cmd-(spack-.*):", line)
if match:
@@ -815,7 +815,7 @@ def prepend_header(args: Namespace, out: IO) -> None:
if not args.header:
return
with open(args.header, encoding="utf-8") as header:
with open(args.header) as header:
out.write(header.read())
@@ -836,7 +836,7 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
if args.update:
tty.msg(f"Updating file: {args.update}")
with open(args.update, "w", encoding="utf-8") as f:
with open(args.update, "w") as f:
prepend_header(args, f)
formatter(args, f)

View File

@@ -169,7 +169,7 @@ def installed_specs(args):
else:
packages = []
for file in args.specfiles:
with open(file, "r", encoding="utf-8") as f:
with open(file, "r") as f:
s = spack.spec.Spec.from_yaml(f)
packages.append(s.format())
return packages
@@ -529,7 +529,6 @@ def __call__(self, parser, namespace, values, option_string):
# the const from the constructor or a value from the CLI.
# Note that this is only called if the argument is actually
# specified on the command line.
spack.config.CONFIG.ensure_scope_ordering()
spack.config.set(self.config_path, self.const, scope="command_line")

View File

@@ -14,7 +14,6 @@
import spack.config
import spack.environment as ev
import spack.error
import spack.schema
import spack.schema.env
import spack.spec
import spack.store
@@ -567,7 +566,7 @@ def config_prefer_upstream(args):
# Simply write the config to the specified file.
existing = spack.config.get("packages", scope=scope)
new = spack.schema.merge_yaml(existing, pkgs)
new = spack.config.merge_yaml(existing, pkgs)
spack.config.set("packages", new, scope)
config_file = spack.config.CONFIG.get_config_filename(scope, section)

View File

@@ -110,7 +110,7 @@ def write(self, pkg_path):
all_deps.append(self.dependencies)
# Write out a template for the file
with open(pkg_path, "w", encoding="utf-8") as pkg_file:
with open(pkg_path, "w") as pkg_file:
pkg_file.write(
package_template.format(
name=self.name,

View File

@@ -90,12 +90,16 @@ def compare_specs(a, b, to_string=False, color=None, ignore_packages=None):
# specs and to descend into dependency hashes so we include all facts.
a_facts = set(
shift(func)
for func in setup.spec_clauses(a, body=True, expand_hashes=True, concrete_build_deps=True)
for func in setup.spec_clauses(
a, body=True, expand_hashes=True, concrete_build_deps=True, node=True
)
if func.name == "attr"
)
b_facts = set(
shift(func)
for func in setup.spec_clauses(b, body=True, expand_hashes=True, concrete_build_deps=True)
for func in setup.spec_clauses(
b, body=True, expand_hashes=True, concrete_build_deps=True, node=True
)
if func.name == "attr"
)

View File

@@ -76,7 +76,7 @@ def locate_package(name: str, repo: spack.repo.Repo) -> str:
path = repo.filename_for_package_name(name)
try:
with open(path, "r", encoding="utf-8"):
with open(path, "r"):
return path
except OSError as e:
if e.errno == errno.ENOENT:
@@ -93,7 +93,7 @@ def locate_file(name: str, path: str) -> str:
# Try to open direct match.
try:
with open(file_path, "r", encoding="utf-8"):
with open(file_path, "r"):
return file_path
except OSError as e:
if e.errno != errno.ENOENT:

View File

@@ -865,7 +865,7 @@ def env_loads(args):
args.recurse_dependencies = False
loads_file = fs.join_path(env.path, "loads")
with open(loads_file, "w", encoding="utf-8") as f:
with open(loads_file, "w") as f:
specs = env._get_environment_specs(recurse_dependencies=recurse_dependencies)
spack.cmd.modules.loads(module_type, specs, args, f)
@@ -1053,7 +1053,7 @@ def env_depfile(args):
# Finally write to stdout/file.
if args.output:
with open(args.output, "w", encoding="utf-8") as f:
with open(args.output, "w") as f:
f.write(makefile)
else:
sys.stdout.write(makefile)

View File

@@ -291,7 +291,7 @@ def _dump_log_on_error(e: InstallError):
tty.error("'spack install' created no log.")
else:
sys.stderr.write("Full build log:\n")
with open(e.pkg.log_path, errors="replace", encoding="utf-8") as log:
with open(e.pkg.log_path, errors="replace") as log:
shutil.copyfileobj(log, sys.stderr)
@@ -445,7 +445,7 @@ def concrete_specs_from_file(args):
"""Return the list of concrete specs read from files."""
result = []
for file in args.specfiles:
with open(file, "r", encoding="utf-8") as f:
with open(file, "r") as f:
if file.endswith("yaml") or file.endswith("yml"):
s = spack.spec.Spec.from_yaml(f)
else:

View File

@@ -191,7 +191,7 @@ def verify(args):
for relpath in _licensed_files(args):
path = os.path.join(args.root, relpath)
with open(path, encoding="utf-8") as f:
with open(path) as f:
lines = [line for line in f][:license_lines]
error = _check_license(lines, path)

View File

@@ -340,7 +340,7 @@ def list(parser, args):
return
tty.msg("Updating file: %s" % args.update)
with open(args.update, "w", encoding="utf-8") as f:
with open(args.update, "w") as f:
formatter(sorted_packages, f)
elif args.count:

View File

@@ -31,7 +31,7 @@ def line_to_rtf(str):
return str.replace("\n", "\\par")
contents = ""
with open(file_path, "r+", encoding="utf-8") as f:
with open(file_path, "r+") as f:
for line in f.readlines():
contents += line_to_rtf(line)
return rtf_header.format(contents)
@@ -93,7 +93,7 @@ def make_installer(parser, args):
rtf_spack_license = txt_to_rtf(spack_license)
spack_license = posixpath.join(source_dir, "LICENSE.rtf")
with open(spack_license, "w", encoding="utf-8") as rtf_license:
with open(spack_license, "w") as rtf_license:
written = rtf_license.write(rtf_spack_license)
if written == 0:
raise RuntimeError("Failed to generate properly formatted license file")

View File

@@ -468,7 +468,7 @@ def specs_from_text_file(filename, concretize=False):
concretize (bool): if True concretize the specs before returning
the list.
"""
with open(filename, "r", encoding="utf-8") as f:
with open(filename, "r") as f:
specs_in_file = f.readlines()
specs_in_file = [s.strip() for s in specs_in_file]
return spack.cmd.parse_specs(" ".join(specs_in_file), concretize=concretize)

View File

@@ -150,7 +150,7 @@ def pkg_source(args):
content = ph.canonical_source(spec)
else:
message = "Source for %s:" % filename
with open(filename, encoding="utf-8") as f:
with open(filename) as f:
content = f.read()
if sys.stdout.isatty():

View File

@@ -94,7 +94,7 @@ def ipython_interpreter(args):
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file, encoding="utf-8") as startup:
with open(startup_file) as startup:
exec(startup.read())
# IPython can also support running a script OR command, not both
@@ -126,7 +126,7 @@ def python_interpreter(args):
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file, encoding="utf-8") as startup:
with open(startup_file) as startup:
console.runsource(startup.read(), startup_file, "exec")
if args.python_command:
propagate_exceptions_from(console)

View File

@@ -19,48 +19,11 @@
level = "long"
class StageFilter:
"""
Encapsulation of reasons to skip staging
"""
def __init__(self, exclusions, skip_installed):
"""
:param exclusions: A list of specs to skip if satisfied.
:param skip_installed: A boolean indicating whether to skip already installed specs.
"""
self.exclusions = exclusions
self.skip_installed = skip_installed
def __call__(self, spec):
"""filter action, true means spec should be filtered"""
if spec.external:
return True
if self.skip_installed and spec.installed:
return True
if any(spec.satisfies(exclude) for exclude in self.exclusions):
return True
return False
def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
subparser.add_argument(
"-p", "--path", dest="path", help="path to stage package, does not add to spack tree"
)
subparser.add_argument(
"-e",
"--exclude",
action="append",
default=[],
help="exclude packages that satisfy the specified specs",
)
subparser.add_argument(
"-s", "--skip-installed", action="store_true", help="dont restage already installed specs"
)
arguments.add_concretizer_args(subparser)
@@ -68,14 +31,11 @@ def stage(parser, args):
if args.no_checksum:
spack.config.set("config:checksum", False, scope="command_line")
exclusion_specs = spack.cmd.parse_specs(args.exclude, concretize=False)
filter = StageFilter(exclusion_specs, args.skip_installed)
if not args.specs:
env = ev.active_environment()
if not env:
tty.die("`spack stage` requires a spec or an active environment")
return _stage_env(env, filter)
return _stage_env(env)
specs = spack.cmd.parse_specs(args.specs, concretize=False)
@@ -89,11 +49,6 @@ def stage(parser, args):
specs = spack.cmd.matching_specs_from_env(specs)
for spec in specs:
spec = spack.cmd.matching_spec_from_env(spec)
if filter(spec):
continue
pkg = spec.package
if custom_path:
@@ -102,13 +57,9 @@ def stage(parser, args):
_stage(pkg)
def _stage_env(env: ev.Environment, filter):
def _stage_env(env: ev.Environment):
tty.msg(f"Staging specs from environment {env.name}")
for spec in spack.traverse.traverse_nodes(env.concrete_roots()):
if filter(spec):
continue
_stage(spec.package)

View File

@@ -415,8 +415,8 @@ def _run_import_check(
pretty_path = file if root_relative else cwd_relative(file, root, working_dir)
try:
with open(file, "r", encoding="utf-8") as f:
contents = f.read()
with open(file, "r") as f:
contents = open(file, "r").read()
parsed = ast.parse(contents)
except Exception:
exit_code = 1
@@ -448,7 +448,7 @@ def _run_import_check(
if not fix or not to_add and not to_remove:
continue
with open(file, "r", encoding="utf-8") as f:
with open(file, "r") as f:
lines = f.readlines()
if to_add:
@@ -468,7 +468,7 @@ def _run_import_check(
for statement in to_remove:
new_contents = new_contents.replace(f"{statement}\n", "")
with open(file, "w", encoding="utf-8") as f:
with open(file, "w") as f:
f.write(new_contents)
return exit_code

View File

@@ -346,7 +346,7 @@ def _report_suite_results(test_suite, args, constraints):
tty.msg("{0} for test suite '{1}'{2}:".format(results_desc, test_suite.name, matching))
results = {}
with open(test_suite.results_file, "r", encoding="utf-8") as f:
with open(test_suite.results_file, "r") as f:
for line in f:
pkg_id, status = line.split()
results[pkg_id] = status
@@ -371,7 +371,7 @@ def _report_suite_results(test_suite, args, constraints):
spec = test_specs[pkg_id]
log_file = test_suite.log_file_for_spec(spec)
if os.path.isfile(log_file):
with open(log_file, "r", encoding="utf-8") as f:
with open(log_file, "r") as f:
msg += "\n{0}".format("".join(f.readlines()))
tty.msg(msg)

View File

@@ -192,7 +192,7 @@ def view(parser, args):
if args.action in actions_link and args.projection_file:
# argparse confirms file exists
with open(args.projection_file, "r", encoding="utf-8") as f:
with open(args.projection_file, "r") as f:
projections_data = s_yaml.load(f)
validate(projections_data, spack.schema.projections.schema)
ordered_projections = projections_data["projections"]

View File

@@ -469,7 +469,7 @@ def _compile_dummy_c_source(self) -> Optional[str]:
fout = os.path.join(tmpdir, "output")
fin = os.path.join(tmpdir, f"main.{ext}")
with open(fin, "w", encoding="utf-8") as csource:
with open(fin, "w") as csource:
csource.write(
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
)

View File

@@ -179,7 +179,7 @@ def _write_section(self, section: str) -> None:
try:
filesystem.mkdirp(self.path)
with open(filename, "w", encoding="utf-8") as f:
with open(filename, "w") as f:
syaml.dump_config(data, stream=f, default_flow_style=False)
except (syaml.SpackYAMLError, OSError) as e:
raise ConfigFileError(f"cannot write to '{filename}'") from e
@@ -314,7 +314,7 @@ def _write_section(self, section: str) -> None:
filesystem.mkdirp(parent)
tmp = os.path.join(parent, f".{os.path.basename(self.path)}.tmp")
with open(tmp, "w", encoding="utf-8") as f:
with open(tmp, "w") as f:
syaml.dump_config(data_to_write, stream=f, default_flow_style=False)
filesystem.rename(tmp, self.path)
@@ -431,19 +431,6 @@ def ensure_unwrapped(self) -> "Configuration":
"""Ensure we unwrap this object from any dynamic wrapper (like Singleton)"""
return self
def highest(self) -> ConfigScope:
"""Scope with highest precedence"""
return next(reversed(self.scopes.values())) # type: ignore
@_config_mutator
def ensure_scope_ordering(self):
"""Ensure that scope order matches documented precedent"""
# FIXME: We also need to consider that custom configurations and other orderings
# may not be preserved correctly
if "command_line" in self.scopes:
# TODO (when dropping python 3.6): self.scopes.move_to_end
self.scopes["command_line"] = self.remove_scope("command_line")
@_config_mutator
def push_scope(self, scope: ConfigScope) -> None:
"""Add a higher precedence scope to the Configuration."""
@@ -632,7 +619,7 @@ def _get_config_memoized(self, section: str, scope: Optional[str]) -> YamlConfig
if changed:
self.format_updates[section].append(scope)
merged_section = spack.schema.merge_yaml(merged_section, data)
merged_section = merge_yaml(merged_section, data)
# no config files -- empty config.
if section not in merged_section:
@@ -693,7 +680,7 @@ def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
while len(parts) > 1:
key = parts.pop(0)
if spack.schema.override(key):
if _override(key):
new = type(data[key])()
del data[key]
else:
@@ -706,7 +693,7 @@ def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
data[key] = new
data = new
if spack.schema.override(parts[0]):
if _override(parts[0]):
data.pop(parts[0], None)
# update new value
@@ -803,6 +790,30 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
return config_paths
def _add_command_line_scopes(cfg: Configuration, command_line_scopes: List[str]) -> None:
"""Add additional scopes from the --config-scope argument, either envs or dirs."""
import spack.environment.environment as env # circular import
for i, path in enumerate(command_line_scopes):
name = f"cmd_scope_{i}"
if env.exists(path): # managed environment
manifest = env.EnvironmentManifestFile(env.root(path))
elif env.is_env_dir(path): # anonymous environment
manifest = env.EnvironmentManifestFile(path)
elif os.path.isdir(path): # directory with config files
cfg.push_scope(DirectoryConfigScope(name, path, writable=False))
_add_platform_scope(cfg, name, path, writable=False)
continue
else:
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
for scope in manifest.env_config_scopes:
scope.name = f"{name}:{scope.name}"
scope.writable = False
cfg.push_scope(scope)
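A hedged usage sketch for the scope handling above, assuming the `_add_command_line_scopes` defined in this hunk is importable from `spack.config` (as on this side of the diff); an empty temporary directory stands in for a real config directory:

```python
import tempfile

from spack.config import _add_command_line_scopes, create

with tempfile.TemporaryDirectory() as scope_dir:
    cfg = create()
    _add_command_line_scopes(cfg, [scope_dir])
    # Each --config-scope entry becomes a read-only scope named cmd_scope_<i>
    # (plus a platform-specific sub-scope when one applies).
    print([name for name in cfg.scopes if name.startswith("cmd_scope_")])
```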
def create() -> Configuration:
"""Singleton Configuration instance.
@@ -883,7 +894,7 @@ def add_from_file(filename: str, scope: Optional[str] = None) -> None:
value = data[section]
existing = get(section, scope=scope)
new = spack.schema.merge_yaml(existing, value)
new = merge_yaml(existing, value)
# We cannot call config.set directly (set is a type)
CONFIG.set(section, new, scope)
@@ -935,7 +946,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:
value: List[str] = [value] # type: ignore[no-redef]
# merge value into existing
new = spack.schema.merge_yaml(existing, value)
new = merge_yaml(existing, value)
CONFIG.set(path, new, scope)
@@ -1082,7 +1093,7 @@ def read_config_file(
# schema when it's not necessary) while allowing us to validate against a
# known schema when the top-level key could be incorrect.
try:
with open(path, encoding="utf-8") as f:
with open(path) as f:
tty.debug(f"Reading config from file {path}")
data = syaml.load_config(f)
@@ -1109,6 +1120,44 @@ def read_config_file(
raise ConfigFileError(str(e)) from e
def _override(string: str) -> bool:
"""Test if a spack YAML string is an override.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
and if they do, their values completely replace lower-precedence
configs instead of merging into them.
"""
return hasattr(string, "override") and string.override
def _append(string: str) -> bool:
"""Test if a spack YAML string is an override.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
and if they do, their values append lower-precedence
configs.
str, str : concatenate strings.
[obj], [obj] : append lists.
"""
return getattr(string, "append", False)
def _prepend(string: str) -> bool:
"""Test if a spack YAML string is an override.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
and if they do, their values prepend lower-precedence
configs.
str, str : concatenate strings.
[obj], [obj] : prepend lists. (default behavior)
"""
return getattr(string, "prepend", False)
def _mark_internal(data, name):
"""Add a simple name mark to raw YAML/JSON data.
@@ -1211,7 +1260,7 @@ def they_are(t):
unmerge = sk in dest
old_dest_value = dest.pop(sk, None)
if unmerge and not spack.schema.override(sk):
if unmerge and not _override(sk):
dest[sk] = remove_yaml(old_dest_value, sv)
return dest
@@ -1221,6 +1270,81 @@ def they_are(t):
return dest
def merge_yaml(dest, source, prepend=False, append=False):
"""Merges source into dest; entries in source take precedence over dest.
This routine may modify dest and should be assigned to dest, in
case dest was None to begin with, e.g.:
dest = merge_yaml(dest, source)
In the result, elements from lists from ``source`` will appear before
elements of lists from ``dest``. Likewise, when iterating over keys
or items in merged ``OrderedDict`` objects, keys from ``source`` will
appear before keys from ``dest``.
Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will override that of the
parent instead of merging.
`+:` will extend the default prepend merge strategy to include string concatenation
`-:` will change the merge strategy to append; it also includes string concatenation
"""
def they_are(t):
return isinstance(dest, t) and isinstance(source, t)
# If source is None, overwrite with source.
if source is None:
return None
# Source list is prepended (for precedence)
if they_are(list):
if append:
# Make sure to copy ruamel comments
dest[:] = [x for x in dest if x not in source] + source
else:
# Make sure to copy ruamel comments
dest[:] = source + [x for x in dest if x not in source]
return dest
# Source dict is merged into dest.
elif they_are(dict):
# save dest keys to reinsert later -- this ensures that source items
# come *before* dest in OrderedDicts
dest_keys = [dk for dk in dest.keys() if dk not in source]
for sk, sv in source.items():
# always remove the dest items. Python dicts do not overwrite
# keys on insert, so this ensures that source keys are copied
# into dest along with mark provenance (i.e., file/line info).
merge = sk in dest
old_dest_value = dest.pop(sk, None)
if merge and not _override(sk):
dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
else:
# if sk ended with ::, or if it's new, completely override
dest[sk] = copy.deepcopy(sv)
# reinsert dest keys so they are last in the result
for dk in dest_keys:
dest[dk] = dest.pop(dk)
return dest
elif they_are(str):
# Concatenate strings in prepend mode
if prepend:
return source + dest
elif append:
return dest + source
# If we reach here source and dest are either different types or are
# not both lists or dicts: replace with source.
return copy.copy(source)
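To make the merge semantics documented above concrete, here is a small, hedged example, assuming the `merge_yaml` defined in this hunk is importable from `spack.config` (as on this side of the diff):

```python
from spack.config import merge_yaml

dest = {"config": {"build_jobs": 4}, "repos": ["/old/repo"]}
source = {"config": {"build_jobs": 16}, "repos": ["/new/repo"]}

merged = merge_yaml(dest, source)
# Scalars from `source` win, and list items from `source` are prepended:
assert merged["config"]["build_jobs"] == 16
assert merged["repos"] == ["/new/repo", "/old/repo"]
```

The `::`, `+:`, and `-:` forms rely on override/append/prepend markers that `spack.util.spack_yaml` attaches to keys when parsing config files, so they are not exercised with the plain Python dicts used here.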
class ConfigPath:
quoted_string = "(?:\"[^\"]+\")|(?:'[^']+')"
unquoted_string = "[^:'\"]+"

View File

@@ -33,7 +33,7 @@ def validate(configuration_file):
"""
import jsonschema
with open(configuration_file, encoding="utf-8") as f:
with open(configuration_file) as f:
config = syaml.load(f)
# Ensure we have a "container" attribute with sensible defaults set

View File

@@ -27,7 +27,7 @@ def data():
if not _data:
json_dir = os.path.abspath(os.path.dirname(__file__))
json_file = os.path.join(json_dir, "images.json")
with open(json_file, encoding="utf-8") as f:
with open(json_file) as f:
_data = json.load(f)
return _data

View File

@@ -211,7 +211,7 @@ def entries_to_specs(entries):
def read(path, apply_updates):
decode_exception_type = json.decoder.JSONDecodeError
try:
with open(path, "r", encoding="utf-8") as json_file:
with open(path, "r") as json_file:
json_data = json.load(json_file)
jsonschema.validate(json_data, manifest_schema)

View File

@@ -760,7 +760,7 @@ def _read_from_file(self, filename):
Does not do any locking.
"""
try:
with open(filename, "r", encoding="utf-8") as f:
with open(filename, "r") as f:
# In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
fdata, _ = JSONDecoder().raw_decode(f.read())
except Exception as e:
@@ -1031,12 +1031,12 @@ def _write(self, type, value, traceback):
# Write a temporary database file them move it into place
try:
with open(temp_file, "w", encoding="utf-8") as f:
with open(temp_file, "w") as f:
self._write_to_file(f)
fs.rename(temp_file, self._index_path)
if _use_uuid:
with open(self._verifier_path, "w", encoding="utf-8") as f:
with open(self._verifier_path, "w") as f:
new_verifier = str(uuid.uuid4())
f.write(new_verifier)
self.last_seen_verifier = new_verifier
@@ -1053,7 +1053,7 @@ def _read(self):
current_verifier = ""
if _use_uuid:
try:
with open(self._verifier_path, "r", encoding="utf-8") as f:
with open(self._verifier_path, "r") as f:
current_verifier = f.read()
except BaseException:
pass

View File

@@ -6,8 +6,6 @@
from typing import Iterable, List, Tuple, Union
from typing_extensions import Literal
#: Type hint for the low-level dependency input (enum.Flag is too slow)
DepFlag = int
@@ -15,7 +13,7 @@
DepTypes = Union[str, List[str], Tuple[str, ...]]
#: Individual dependency types
DepType = Literal["build", "link", "run", "test"]
DepType = str # Python 3.8: Literal["build", "link", "run", "test"]
# Flag values. NOTE: these values are not arbitrary, since hash computation imposes
# the order (link, run, build, test) when depending on the same package multiple times,

View File

@@ -27,7 +27,6 @@
import spack.config
import spack.error
import spack.operating_systems.windows_os as winOs
import spack.schema
import spack.spec
import spack.util.environment
import spack.util.spack_yaml
@@ -227,7 +226,7 @@ def update_configuration(
pkg_to_cfg[package_name] = pkg_config
pkgs_cfg = spack.config.get("packages", scope=scope)
pkgs_cfg = spack.schema.merge_yaml(pkgs_cfg, pkg_to_cfg)
pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
spack.config.set("packages", pkgs_cfg, scope=scope)
return all_new_specs
@@ -247,7 +246,7 @@ def set_virtuals_nonbuildable(virtuals: Set[str], scope: Optional[str] = None) -
# Update the provided scope
spack.config.set(
"packages",
spack.schema.merge_yaml(spack.config.get("packages", scope=scope), new_config),
spack.config.merge_yaml(spack.config.get("packages", scope=scope), new_config),
scope=scope,
)

View File

@@ -198,6 +198,6 @@ def _detection_tests_yaml(
) -> Tuple[pathlib.Path, Dict[str, Any]]:
pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
detection_tests_yaml = pkg_dir / "detection_test.yaml"
with open(str(detection_tests_yaml), encoding="utf-8") as f:
with open(str(detection_tests_yaml)) as f:
content = spack_yaml.load(f)
return detection_tests_yaml, content

View File

@@ -297,13 +297,6 @@ def _depends_on(
deps_by_name = pkg.dependencies.setdefault(when_spec, {})
dependency = deps_by_name.get(spec.name)
if spec.dependencies():
raise DirectiveError(
f"the '^' sigil cannot be used in 'depends_on' directives. Please reformulate "
f"the directive below as multiple directives:\n\n"
f'\tdepends_on("{spec}", when="{when_spec}")\n'
)
if not dependency:
dependency = Dependency(pkg, spec, depflag=depflag)
deps_by_name[spec.name] = dependency

View File

@@ -141,7 +141,7 @@ def relative_path_for_spec(self, spec):
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, "w", encoding="utf-8") as f:
with open(path, "w") as f:
# The hash of the projection is the DAG hash which contains
# the full provenance, so it's available if we want it later
spec.to_json(f, hash=ht.dag_hash)
@@ -153,13 +153,13 @@ def write_host_environment(self, spec):
"""
env_file = self.env_metadata_path(spec)
environ = spack.spec.get_host_environment_metadata()
with open(env_file, "w", encoding="utf-8") as fd:
with open(env_file, "w") as fd:
sjson.dump(environ, fd)
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
try:
with open(path, encoding="utf-8") as f:
with open(path) as f:
extension = os.path.splitext(path)[-1].lower()
if extension == ".json":
spec = spack.spec.Spec.from_json(f)

View File

@@ -482,7 +482,6 @@
display_specs,
environment_dir_from_name,
environment_from_name_or_dir,
environment_path_scopes,
exists,
initialize_environment_dir,
installed_specs,
@@ -519,7 +518,6 @@
"display_specs",
"environment_dir_from_name",
"environment_from_name_or_dir",
"environment_path_scopes",
"exists",
"initialize_environment_dir",
"installed_specs",

View File

@@ -27,6 +27,7 @@
import spack.concretize
import spack.config
import spack.deptypes as dt
import spack.environment
import spack.error
import spack.filesystem_view as fsv
import spack.hash_types as ht
@@ -162,7 +163,7 @@ def installed_specs():
Returns the specs of packages installed in the active environment or None
if no packages are installed.
"""
env = active_environment()
env = spack.environment.active_environment()
hashes = env.all_hashes() if env else None
return spack.store.STORE.db.query(hashes=hashes)
@@ -971,7 +972,7 @@ def _read(self):
self._construct_state_from_manifest()
if os.path.exists(self.lock_path):
with open(self.lock_path, encoding="utf-8") as f:
with open(self.lock_path) as f:
read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
if read_lock_version == 1:
@@ -1053,7 +1054,7 @@ def _process_concrete_includes(self):
if self.included_concrete_envs:
if os.path.exists(self.lock_path):
with open(self.lock_path, encoding="utf-8") as f:
with open(self.lock_path) as f:
data = self._read_lockfile(f)
if included_concrete_name in data:
@@ -2332,7 +2333,7 @@ def write(self, regenerate: bool = True) -> None:
self.new_specs.clear()
def update_lockfile(self) -> None:
with fs.write_tmp_and_move(self.lock_path, encoding="utf-8") as f:
with fs.write_tmp_and_move(self.lock_path) as f:
sjson.dump(self._to_lockfile_dict(), stream=f)
def ensure_env_directory_exists(self, dot_env: bool = False) -> None:
@@ -2507,7 +2508,7 @@ def update_yaml(manifest, backup_file):
AssertionError: in case anything goes wrong during the update
"""
# Check if the environment needs update
with open(manifest, encoding="utf-8") as f:
with open(manifest) as f:
data = syaml.load(f)
top_level_key = _top_level_key(data)
@@ -2525,7 +2526,7 @@ def update_yaml(manifest, backup_file):
assert not os.path.exists(backup_file), msg.format(backup_file)
shutil.copy(manifest, backup_file)
with open(manifest, "w", encoding="utf-8") as f:
with open(manifest, "w") as f:
syaml.dump_config(data, f)
return True
@@ -2553,7 +2554,7 @@ def is_latest_format(manifest):
manifest (str): manifest file to be analyzed
"""
try:
with open(manifest, encoding="utf-8") as f:
with open(manifest) as f:
data = syaml.load(f)
except (OSError, IOError):
return True
@@ -2655,7 +2656,7 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
# TBD: Should this be the abspath?
manifest_dir = pathlib.Path(manifest_dir)
lockfile = manifest_dir / lockfile_name
with lockfile.open("r", encoding="utf-8") as f:
with lockfile.open("r") as f:
data = sjson.load(f)
user_specs = data["roots"]
@@ -2682,7 +2683,7 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
raise SpackEnvironmentError(msg)
with self.manifest_file.open(encoding="utf-8") as f:
with self.manifest_file.open() as f:
self.yaml_content = _read_yaml(f)
self.changed = False
@@ -3043,13 +3044,11 @@ def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope)
spack.config.CONFIG.ensure_scope_ordering()
def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
spack.config.CONFIG.ensure_scope_ordering()
@contextlib.contextmanager
def use_config(self):
@@ -3060,29 +3059,6 @@ def use_config(self):
self.deactivate_config_scope()
def environment_path_scopes(name: str, path: str) -> Optional[List[spack.config.ConfigScope]]:
"""Retrieve the suitably named environment path scopes
Arguments:
name: configuration scope name
path: path to configuration file(s)
Returns: list of environment scopes, if any, or None
"""
if exists(path): # managed environment
manifest = EnvironmentManifestFile(root(path))
elif is_env_dir(path): # anonymous environment
manifest = EnvironmentManifestFile(path)
else:
return None
for scope in manifest.env_config_scopes:
scope.name = f"{name}:{scope.name}"
scope.writable = False
return manifest.env_config_scopes
class SpackEnvironmentError(spack.error.SpackError):
"""Superclass for all errors to do with Spack environments."""

View File

@@ -12,8 +12,6 @@
import sys
from typing import Callable, Dict, Optional
from typing_extensions import Literal
from llnl.string import comma_or
from llnl.util import tty
from llnl.util.filesystem import (
@@ -111,9 +109,6 @@ def view_copy(
tty.debug(f"Can't change the permissions for {dst}")
#: Type alias for link types
LinkType = Literal["hardlink", "hard", "copy", "relocate", "add", "symlink", "soft"]
#: supported string values for `link_type` in an env, mapped to canonical values
_LINK_TYPES = {
"hardlink": "hardlink",
@@ -128,7 +123,7 @@ def view_copy(
_VALID_LINK_TYPES = sorted(set(_LINK_TYPES.values()))
def canonicalize_link_type(link_type: LinkType) -> str:
def canonicalize_link_type(link_type: str) -> str:
"""Return canonical"""
canonical = _LINK_TYPES.get(link_type)
if not canonical:
@@ -138,7 +133,7 @@ def canonicalize_link_type(link_type: LinkType) -> str:
return canonical
def function_for_link_type(link_type: LinkType) -> LinkCallbackType:
def function_for_link_type(link_type: str) -> LinkCallbackType:
link_type = canonicalize_link_type(link_type)
if link_type == "hardlink":
return view_hardlink
@@ -147,7 +142,7 @@ def function_for_link_type(link_type: LinkType) -> LinkCallbackType:
elif link_type == "copy":
return view_copy
assert False, "invalid link type"
assert False, "invalid link type" # need mypy Literal values
class FilesystemView:
@@ -171,7 +166,7 @@ def __init__(
projections: Optional[Dict] = None,
ignore_conflicts: bool = False,
verbose: bool = False,
link_type: LinkType = "symlink",
link_type: str = "symlink",
):
"""
Initialize a filesystem view under the given `root` directory with
@@ -297,7 +292,7 @@ def __init__(
projections: Optional[Dict] = None,
ignore_conflicts: bool = False,
verbose: bool = False,
link_type: LinkType = "symlink",
link_type: str = "symlink",
):
super().__init__(
root,
@@ -331,12 +326,12 @@ def __init__(
def write_projections(self):
if self.projections:
mkdirp(os.path.dirname(self.projections_path))
with open(self.projections_path, "w", encoding="utf-8") as f:
with open(self.projections_path, "w") as f:
f.write(s_yaml.dump_config({"projections": self.projections}))
def read_projections(self):
if os.path.exists(self.projections_path):
with open(self.projections_path, "r", encoding="utf-8") as f:
with open(self.projections_path, "r") as f:
projections_data = s_yaml.load(f)
spack.config.validate(projections_data, spack.schema.projections.schema)
return projections_data["projections"]
@@ -434,7 +429,7 @@ def needs_file(spec, file):
self.get_path_meta_folder(spec), spack.store.STORE.layout.manifest_file_name
)
try:
with open(manifest_file, "r", encoding="utf-8") as f:
with open(manifest_file, "r") as f:
manifest = s_json.load(f)
except (OSError, IOError):
# if we can't load it, assume it doesn't know about the file.
@@ -838,7 +833,7 @@ def get_projection_for_spec(self, spec):
#####################
def get_spec_from_file(filename):
try:
with open(filename, "r", encoding="utf-8") as f:
with open(filename, "r") as f:
return spack.spec.Spec.from_yaml(f)
except IOError:
return None

View File

@@ -35,7 +35,6 @@ class _HookRunner:
"spack.hooks.drop_redundant_rpaths",
"spack.hooks.absolutify_elf_sonames",
"spack.hooks.permissions_setters",
"spack.hooks.resolve_shared_libraries",
# after all mutations to the install prefix, write metadata
"spack.hooks.write_install_manifest",
# after all metadata is written

View File

@@ -142,7 +142,7 @@ def write_license_file(pkg, license_path):
os.makedirs(os.path.dirname(license_path))
# Output
with open(license_path, "w", encoding="utf-8") as f:
with open(license_path, "w") as f:
for line in txt.splitlines():
f.write("{0}{1}\n".format(pkg.license_comment, line))
f.close()

View File

@@ -1,240 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import fnmatch
import io
import os
import re
from typing import Dict, List, Union
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
from llnl.util.lang import stable_partition
import spack.config
import spack.error
import spack.util.elf as elf
#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
#: default search paths of the dynamic linker.
ALLOW_UNRESOLVED = [
# kernel
"linux-vdso.so.*",
"libselinux.so.*",
# musl libc
"ld-musl-*.so.*",
# glibc
"ld-linux*.so.*",
"ld64.so.*",
"libanl.so.*",
"libc.so.*",
"libdl.so.*",
"libm.so.*",
"libmemusage.so.*",
"libmvec.so.*",
"libnsl.so.*",
"libnss_compat.so.*",
"libnss_db.so.*",
"libnss_dns.so.*",
"libnss_files.so.*",
"libnss_hesiod.so.*",
"libpcprofile.so.*",
"libpthread.so.*",
"libresolv.so.*",
"librt.so.*",
"libSegFault.so.*",
"libthread_db.so.*",
"libutil.so.*",
# gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
# but the binaries we copy from the compiler don't have an $ORIGIN rpath.
"libasan.so.*",
"libatomic.so.*",
"libcc1.so.*",
"libgcc_s.so.*",
"libgfortran.so.*",
"libgomp.so.*",
"libitm.so.*",
"liblsan.so.*",
"libquadmath.so.*",
"libssp.so.*",
"libstdc++.so.*",
"libtsan.so.*",
"libubsan.so.*",
# systemd
"libudev.so.*",
# cuda driver
"libcuda.so.*",
]
def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
return (
child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
and parent.is_little_endian == child.is_little_endian
and parent.is_64_bit == child.is_64_bit
and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
)
def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
try:
with open(candidate_path, "rb") as g:
return is_compatible(current_elf, elf.parse_elf(g))
except (OSError, elf.ElfParsingError):
return False
class Problem:
def __init__(
self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
) -> None:
self.resolved = resolved
self.unresolved = unresolved
self.relative_rpaths = relative_rpaths
class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
def __init__(self, allow_unresolved_patterns: List[str]) -> None:
self.problems: Dict[str, Problem] = {}
self._allow_unresolved_regex = re.compile(
"|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
)
def allow_unresolved(self, needed: bytes) -> bool:
try:
name = needed.decode("utf-8")
except UnicodeDecodeError:
return False
return bool(self._allow_unresolved_regex.match(name))
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# We work with byte strings for paths.
path = os.path.join(root, rel_path).encode("utf-8")
# For $ORIGIN interpolation: should not have trailing dir separator.
origin = os.path.dirname(path)
# Retrieve the needed libs + rpaths.
try:
with open(path, "rb") as f:
parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
except (OSError, elf.ElfParsingError):
# Not dealing with an invalid ELF file.
return
# If there's no needed libs all is good
if not parsed_elf.has_needed:
return
# Get the needed libs and rpaths (notice: byte strings)
# Don't force an encoding cause paths are just a bag of bytes.
needed_libs = parsed_elf.dt_needed_strs
rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []
# We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really
# supported in general. Also remove empty paths.
rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]
# Do not allow relative rpaths (they are relative to the current working directory)
rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)
# If there's a / in the needed lib, it's opened directly, otherwise it needs
# a search.
direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)
# Do not allow relative paths in direct libs (they are relative to the current working
# directory)
direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)
resolved: Dict[bytes, bytes] = {}
for lib in search_libs:
if self.allow_unresolved(lib):
continue
for rpath in rpaths:
candidate = os.path.join(rpath, lib)
if candidate_matches(parsed_elf, candidate):
resolved[lib] = candidate
break
else:
unresolved.append(lib)
# Check if directly opened libs are compatible
for lib in direct_libs:
if candidate_matches(parsed_elf, lib):
resolved[lib] = lib
else:
unresolved.append(lib)
if unresolved or relative_rpaths:
self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
pass
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# There can be binaries in .spack/test which shouldn't be checked.
if rel_path == ".spack":
return False
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
return False
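The allow-list check performed by `allow_unresolved` above boils down to fnmatch patterns compiled into a single regex; a self-contained sketch (the library names below are examples only, taken from the ALLOW_UNRESOLVED list):

```python
import fnmatch
import re

patterns = ["libc.so.*", "ld-linux*.so.*", "libcuda.so.*"]
allow_unresolved = re.compile("|".join(fnmatch.translate(p) for p in patterns))

print(bool(allow_unresolved.match("libc.so.6")))       # True: assumed to live in default linker paths
print(bool(allow_unresolved.match("libpng16.so.16")))  # False: would be reported as unresolved
```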
class CannotLocateSharedLibraries(spack.error.SpackError):
pass
def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
try:
return byte_str.decode("utf-8")
except UnicodeDecodeError:
return byte_str
def post_install(spec, explicit):
"""Check whether shared libraries can be resolved in RPATHs."""
policy = spack.config.get("config:shared_linking:missing_library_policy", "ignore")
# Currently only supported for ELF files.
if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
return
visitor = ResolveSharedElfLibDepsVisitor(
[*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
)
visit_directory_tree(spec.prefix, visitor)
# All good?
if not visitor.problems:
return
# For now just list the issues (print it in ldd style, except we don't recurse)
output = io.StringIO()
output.write("not all executables and libraries can resolve their dependencies:\n")
for path, problem in visitor.problems.items():
output.write(path)
output.write("\n")
for needed, full_path in problem.resolved.items():
output.write(" ")
if needed == full_path:
output.write(maybe_decode(needed))
else:
output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
output.write("\n")
for not_found in problem.unresolved:
output.write(f" {maybe_decode(not_found)} => not found\n")
for relative_rpath in problem.relative_rpaths:
output.write(f" {maybe_decode(relative_rpath)} => relative rpath\n")
message = output.getvalue().strip()
if policy == "error":
raise CannotLocateSharedLibraries(message)
tty.warn(message)

View File

@@ -81,7 +81,7 @@ def get_escaped_text_output(filename: str) -> List[str]:
Returns:
escaped text lines read from the file
"""
with open(filename, encoding="utf-8") as f:
with open(filename) as f:
# Ensure special characters are escaped as needed
expected = f.read()
@@ -458,7 +458,7 @@ def write_tested_status(self):
elif self.counts[TestStatus.PASSED] > 0:
status = TestStatus.PASSED
with open(self.tested_file, "w", encoding="utf-8") as f:
with open(self.tested_file, "w") as f:
f.write(f"{status.value}\n")
@@ -502,7 +502,7 @@ def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbos
for i, entry in enumerate(stack):
filename, lineno, function, text = entry
if spack.repo.is_package_file(filename):
with open(filename, encoding="utf-8") as f:
with open(filename) as f:
lines = f.readlines()
new_lineno = lineno - 2
text = lines[new_lineno]
@@ -822,7 +822,7 @@ def get_test_suite(name: str) -> Optional["TestSuite"]:
def write_test_suite_file(suite):
"""Write the test suite to its (JSON) lock file."""
with open(suite.stage.join(test_suite_filename), "w", encoding="utf-8") as f:
with open(suite.stage.join(test_suite_filename), "w") as f:
sjson.dump(suite.to_dict(), stream=f)
@@ -977,7 +977,7 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestSt
status = TestStatus.NO_TESTS
return status
with open(tests_status_file, "r", encoding="utf-8") as f:
with open(tests_status_file, "r") as f:
value = (f.read()).strip("\n")
return TestStatus(int(value)) if value else TestStatus.NO_TESTS
@@ -1179,7 +1179,7 @@ def from_file(filename):
BaseException: sjson.SpackJSONError if problem parsing the file
"""
try:
with open(filename, encoding="utf-8") as f:
with open(filename) as f:
data = sjson.load(f)
test_suite = TestSuite.from_dict(data)
content_hash = os.path.basename(os.path.dirname(filename))
@@ -1196,7 +1196,7 @@ def _add_msg_to_file(filename, msg):
filename (str): path to the file
msg (str): message to be appended to the file
"""
with open(filename, "a+", encoding="utf-8") as f:
with open(filename, "a+") as f:
f.write(f"{msg}\n")

View File

@@ -105,7 +105,7 @@ def __str__(self):
def _write_timer_json(pkg, timer, cache):
extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
try:
with open(pkg.times_log_path, "w", encoding="utf-8") as timelog:
with open(pkg.times_log_path, "w") as timelog:
timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:
tty.debug(str(e))
@@ -692,7 +692,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
if errors.getvalue():
error_file = os.path.join(target_dir, "errors.txt")
fs.mkdirp(target_dir)
with open(error_file, "w", encoding="utf-8") as err:
with open(error_file, "w") as err:
err.write(errors.getvalue())
tty.warn(f"Errors occurred when archiving files.\n\tSee: {error_file}")
@@ -2405,7 +2405,7 @@ def _real_install(self) -> None:
# Save just the changes to the environment. This file can be
# safely installed, since it does not contain secret variables.
with open(pkg.env_mods_path, "w", encoding="utf-8") as env_mods_file:
with open(pkg.env_mods_path, "w") as env_mods_file:
mods = self.env_mods.shell_modifications(explicit=True, env=self.unmodified_env)
env_mods_file.write(mods)
@@ -2414,7 +2414,7 @@ def _real_install(self) -> None:
configure_args = getattr(pkg, attr)()
configure_args = " ".join(configure_args)
with open(pkg.configure_args_path, "w", encoding="utf-8") as args_file:
with open(pkg.configure_args_path, "w") as args_file:
args_file.write(configure_args)
break

View File

@@ -48,6 +48,7 @@
import spack.util.debug
import spack.util.environment
import spack.util.lock
from spack.error import SpackError
#: names of profile statistics
stat_names = pstats.Stats.sort_arg_dict_default
@@ -857,33 +858,6 @@ def resolve_alias(cmd_name: str, cmd: List[str]) -> Tuple[str, List[str]]:
return cmd_name, cmd
def add_command_line_scopes(
cfg: spack.config.Configuration, command_line_scopes: List[str]
) -> None:
"""Add additional scopes from the --config-scope argument, either envs or dirs.
Args:
cfg: configuration instance
command_line_scopes: list of configuration scope paths
Raises:
spack.error.ConfigError: if the path is an invalid configuration scope
"""
for i, path in enumerate(command_line_scopes):
name = f"cmd_scope_{i}"
scopes = ev.environment_path_scopes(name, path)
if scopes is None:
if os.path.isdir(path): # directory with config files
cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
spack.config._add_platform_scope(cfg, name, path, writable=False)
continue
else:
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
for scope in scopes:
cfg.push_scope(scope)
def _main(argv=None):
"""Logic for the main entry point for the Spack command.
@@ -952,7 +926,7 @@ def _main(argv=None):
# Push scopes from the command line last
if args.config_scopes:
add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
spack.config._add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
setup_main_options(args)
@@ -1038,7 +1012,7 @@ def main(argv=None):
try:
return _main(argv)
except spack.error.SpackError as e:
except SpackError as e:
tty.debug(e)
e.die() # gracefully die on any SpackErrors

View File

@@ -48,7 +48,6 @@
import spack.error
import spack.paths
import spack.projections as proj
import spack.schema
import spack.schema.environment
import spack.spec
import spack.store
@@ -217,7 +216,7 @@ def root_path(name, module_set_name):
roots = spack.config.get(f"modules:{module_set_name}:roots", {})
# Merge config values into the defaults so we prefer configured values
roots = spack.schema.merge_yaml(defaults, roots)
roots = spack.config.merge_yaml(defaults, roots)
path = roots.get(name, os.path.join(spack.paths.share_path, name))
return spack.util.path.canonicalize_path(path)
@@ -228,7 +227,7 @@ def generate_module_index(root, modules, overwrite=False):
if overwrite or not os.path.exists(index_path):
entries = syaml.syaml_dict()
else:
with open(index_path, encoding="utf-8") as index_file:
with open(index_path) as index_file:
yaml_content = syaml.load(index_file)
entries = yaml_content["module_index"]
@@ -237,7 +236,7 @@ def generate_module_index(root, modules, overwrite=False):
entries[m.spec.dag_hash()] = entry
index = {"module_index": entries}
llnl.util.filesystem.mkdirp(root)
with open(index_path, "w", encoding="utf-8") as index_file:
with open(index_path, "w") as index_file:
syaml.dump(index, default_flow_style=False, stream=index_file)
@@ -257,7 +256,7 @@ def read_module_index(root):
index_path = os.path.join(root, "module-index.yaml")
if not os.path.exists(index_path):
return {}
with open(index_path, encoding="utf-8") as index_file:
with open(index_path) as index_file:
return _read_module_index(index_file)
@@ -606,7 +605,7 @@ def configure_options(self):
return msg
if os.path.exists(pkg.install_configure_args_path):
with open(pkg.install_configure_args_path, encoding="utf-8") as args_file:
with open(pkg.install_configure_args_path) as args_file:
return spack.util.path.padding_filter(args_file.read())
# Returning a false-like value makes the default templates skip
@@ -625,10 +624,10 @@ def environment_modifications(self):
"""List of environment modifications to be processed."""
# Modifications guessed by inspecting the spec prefix
prefix_inspections = syaml.syaml_dict()
spack.schema.merge_yaml(
spack.config.merge_yaml(
prefix_inspections, spack.config.get("modules:prefix_inspections", {})
)
spack.schema.merge_yaml(
spack.config.merge_yaml(
prefix_inspections,
spack.config.get(f"modules:{self.conf.name}:prefix_inspections", {}),
)
@@ -901,7 +900,7 @@ def write(self, overwrite=False):
# Render the template
text = template.render(context)
# Write it to file
with open(self.layout.filename, "w", encoding="utf-8") as f:
with open(self.layout.filename, "w") as f:
f.write(text)
# Set the file permissions of the module to match that of the package
@@ -940,7 +939,7 @@ def update_module_hiddenness(self, remove=False):
if modulerc_exists:
# retrieve modulerc content
with open(modulerc_path, encoding="utf-8") as f:
with open(modulerc_path) as f:
content = f.readlines()
content = "".join(content).split("\n")
# remove last empty item if any
@@ -975,7 +974,7 @@ def update_module_hiddenness(self, remove=False):
elif content != self.modulerc_header:
# ensure file ends with a newline character
content.append("")
with open(modulerc_path, "w", encoding="utf-8") as f:
with open(modulerc_path, "w") as f:
f.write("\n".join(content))
def remove(self):

View File

@@ -7,6 +7,8 @@
import urllib.parse
from typing import Optional, Union
import spack.spec
# notice: Docker is more strict (no uppercase allowed). We parse image names *with* uppercase
# and normalize, so: example.com/Organization/Name -> example.com/organization/name. Tags are
# case sensitive though.
@@ -193,7 +195,7 @@ def __eq__(self, __value: object) -> bool:
)
def ensure_valid_tag(tag: str) -> str:
def _ensure_valid_tag(tag: str) -> str:
"""Ensure a tag is valid for an OCI registry."""
sanitized = re.sub(r"[^\w.-]", "_", tag)
if len(sanitized) > 128:
@@ -201,6 +203,20 @@ def ensure_valid_tag(tag: str) -> str:
return sanitized
def default_tag(spec: "spack.spec.Spec") -> str:
"""Return a valid, default image tag for a spec."""
return _ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
#: Default OCI index tag
default_index_tag = "index.spack"
def tag_is_spec(tag: str) -> bool:
"""Check if a tag is likely a Spec"""
return tag.endswith(".spack") and tag != default_index_tag
def default_config(architecture: str, os: str):
return {
"architecture": architecture,

View File

@@ -21,7 +21,7 @@
import spack.config
import spack.mirrors.mirror
import spack.tokenize
import spack.parser
import spack.util.web
from .image import ImageReference
@@ -57,7 +57,7 @@ def dispatch_open(fullurl, data=None, timeout=None):
quoted_string = rf'"(?:({qdtext}*)|{quoted_pair})*"'
class WwwAuthenticateTokens(spack.tokenize.TokenBase):
class TokenType(spack.parser.TokenBase):
AUTH_PARAM = rf"({token}){BWS}={BWS}({token}|{quoted_string})"
# TOKEN68 = r"([A-Za-z0-9\-._~+/]+=*)" # todo... support this?
TOKEN = rf"{tchar}+"
@@ -68,7 +68,9 @@ class WwwAuthenticateTokens(spack.tokenize.TokenBase):
ANY = r"."
WWW_AUTHENTICATE_TOKENIZER = spack.tokenize.Tokenizer(WwwAuthenticateTokens)
TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
class State(Enum):
@@ -79,6 +81,18 @@ class State(Enum):
AUTH_PARAM_OR_SCHEME = auto()
def tokenize(input: str):
scanner = ALL_TOKENS.scanner(input) # type: ignore[attr-defined]
for match in iter(scanner.match, None): # type: ignore[var-annotated]
yield spack.parser.Token(
TokenType.__members__[match.lastgroup], # type: ignore[attr-defined]
match.group(), # type: ignore[attr-defined]
match.start(), # type: ignore[attr-defined]
match.end(), # type: ignore[attr-defined]
)
class Challenge:
__slots__ = ["scheme", "params"]
@@ -114,7 +128,7 @@ def parse_www_authenticate(input: str):
unquote = lambda s: _unquote(r"\1", s[1:-1])
mode: State = State.CHALLENGE
tokens = WWW_AUTHENTICATE_TOKENIZER.tokenize(input)
tokens = tokenize(input)
current_challenge = Challenge()
@@ -127,36 +141,36 @@ def extract_auth_param(input: str) -> Tuple[str, str]:
return key, value
while True:
token: spack.tokenize.Token = next(tokens)
token: spack.parser.Token = next(tokens)
if mode == State.CHALLENGE:
if token.kind == WwwAuthenticateTokens.EOF:
if token.kind == TokenType.EOF:
raise ValueError(token)
elif token.kind == WwwAuthenticateTokens.TOKEN:
elif token.kind == TokenType.TOKEN:
current_challenge.scheme = token.value
mode = State.AUTH_PARAM_LIST_START
else:
raise ValueError(token)
elif mode == State.AUTH_PARAM_LIST_START:
if token.kind == WwwAuthenticateTokens.EOF:
if token.kind == TokenType.EOF:
challenges.append(current_challenge)
break
elif token.kind == WwwAuthenticateTokens.COMMA:
elif token.kind == TokenType.COMMA:
# Challenge without param list, followed by another challenge.
challenges.append(current_challenge)
current_challenge = Challenge()
mode = State.CHALLENGE
elif token.kind == WwwAuthenticateTokens.SPACE:
elif token.kind == TokenType.SPACE:
# A space means it must be followed by param list
mode = State.AUTH_PARAM
else:
raise ValueError(token)
elif mode == State.AUTH_PARAM:
if token.kind == WwwAuthenticateTokens.EOF:
if token.kind == TokenType.EOF:
raise ValueError(token)
elif token.kind == WwwAuthenticateTokens.AUTH_PARAM:
elif token.kind == TokenType.AUTH_PARAM:
key, value = extract_auth_param(token.value)
current_challenge.params.append((key, value))
mode = State.NEXT_IN_LIST
@@ -164,22 +178,22 @@ def extract_auth_param(input: str) -> Tuple[str, str]:
raise ValueError(token)
elif mode == State.NEXT_IN_LIST:
if token.kind == WwwAuthenticateTokens.EOF:
if token.kind == TokenType.EOF:
challenges.append(current_challenge)
break
elif token.kind == WwwAuthenticateTokens.COMMA:
elif token.kind == TokenType.COMMA:
mode = State.AUTH_PARAM_OR_SCHEME
else:
raise ValueError(token)
elif mode == State.AUTH_PARAM_OR_SCHEME:
if token.kind == WwwAuthenticateTokens.EOF:
if token.kind == TokenType.EOF:
raise ValueError(token)
elif token.kind == WwwAuthenticateTokens.TOKEN:
elif token.kind == TokenType.TOKEN:
challenges.append(current_challenge)
current_challenge = Challenge(token.value)
mode = State.AUTH_PARAM_LIST_START
elif token.kind == WwwAuthenticateTokens.AUTH_PARAM:
elif token.kind == TokenType.AUTH_PARAM:
key, value = extract_auth_param(token.value)
current_challenge.params.append((key, value))
mode = State.NEXT_IN_LIST
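For orientation, a tiny standalone example (not the parser above) of the kind of WWW-Authenticate header this state machine consumes and the scheme-plus-params structure it yields. The header value is made up, and the one-line regex only covers this simple quoted-parameter case, not token68 values or multiple challenges.

    import re

    header = 'Bearer realm="https://auth.example.com/token",service="registry.example.com"'
    scheme, _, params = header.partition(" ")
    challenge = (scheme, re.findall(r'(\w+)="([^"]*)"', params))
    print(challenge)
    # -> ('Bearer', [('realm', 'https://auth.example.com/token'),
    #                ('service', 'registry.example.com')])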

View File

@@ -103,7 +103,7 @@
from spack.phase_callbacks import run_after, run_before
from spack.spec import InvalidSpecDetected, Spec
from spack.util.executable import *
from spack.util.filesystem import fix_darwin_install_name
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
from spack.version import Version, ver

View File

@@ -24,10 +24,9 @@
import time
import traceback
import typing
import warnings
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
from typing_extensions import Literal
import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
@@ -60,7 +59,6 @@
from spack.solver.version_order import concretization_version_order
from spack.stage import DevelopStage, ResourceStage, Stage, StageComposite, compute_stage_name
from spack.util.package_hash import package_hash
from spack.util.typing import SupportsRichComparison
from spack.version import GitVersion, StandardVersion
FLAG_HANDLER_RETURN_TYPE = Tuple[
@@ -88,6 +86,32 @@
spack_times_log = "install_times.json"
def deprecated_version(pkg: "PackageBase", version: Union[str, StandardVersion]) -> bool:
"""Return True iff the version is deprecated.
Arguments:
pkg: The package whose version is to be checked.
version: The version being checked
"""
if not isinstance(version, StandardVersion):
version = StandardVersion.from_string(version)
details = pkg.versions.get(version)
return details is not None and details.get("deprecated", False)
def preferred_version(pkg: "PackageBase"):
"""
Returns a sorted list of the preferred versions of the package.
Arguments:
pkg: The package whose versions are to be assessed.
"""
version, _ = max(pkg.versions.items(), key=concretization_version_order)
return version
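As a rough sketch of the max-with-sort-key pattern used by preferred_version above (not Spack code): the toy key below merely stands in for concretization_version_order, the version data is invented, and plain strings replace real version objects.

    versions = {
        "2.1.0": {"deprecated": True},
        "2.0.5": {"preferred": True},
        "1.9.9": {},
    }

    def toy_order(item):
        version, details = item
        # prefer explicitly preferred versions, then non-deprecated ones,
        # then fall back to the (lexicographically) highest version string
        return (details.get("preferred", False), not details.get("deprecated", False), version)

    best, _ = max(versions.items(), key=toy_order)
    print(best)  # -> "2.0.5"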
class WindowsRPath:
"""Collection of functionality surrounding Windows RPATH specific features
@@ -392,77 +416,59 @@ def remove_files_from_view(self, view, merge_map):
Pb = TypeVar("Pb", bound="PackageBase")
# Some typedefs for dealing with when-indexed dictionaries
#
# Many of the dictionaries on PackageBase are of the form:
# { Spec: { K: V } }
#
# K might be a variant name, a version, etc. V is a definition of some Spack object.
# The methods below transform these types of dictionaries.
K = TypeVar("K", bound=SupportsRichComparison)
V = TypeVar("V")
WhenDict = Dict[spack.spec.Spec, Dict[str, Any]]
NameValuesDict = Dict[str, List[Any]]
NameWhenDict = Dict[str, Dict[spack.spec.Spec, List[Any]]]
def _by_subkey(
when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]], when: bool = False
) -> Dict[K, Union[List[V], Dict[spack.spec.Spec, List[V]]]]:
"""Convert a dict of dicts keyed by when/subkey into a dict of lists keyed by subkey.
def _by_name(
when_indexed_dictionary: WhenDict, when: bool = False
) -> Union[NameValuesDict, NameWhenDict]:
"""Convert a dict of dicts keyed by when/name into a dict of lists keyed by name.
Optional Arguments:
when: if ``True``, don't discard the ``when`` specs; return a 2-level dictionary
keyed by subkey and when spec.
keyed by name and when spec.
"""
# very hard to define this type to be conditional on `when`
all_by_subkey: Dict[K, Any] = {}
all_by_name: Dict[str, Any] = {}
for when_spec, by_key in when_indexed_dictionary.items():
for key, value in by_key.items():
for when_spec, by_name in when_indexed_dictionary.items():
for name, value in by_name.items():
if when:
when_dict = all_by_subkey.setdefault(key, {})
when_dict = all_by_name.setdefault(name, {})
when_dict.setdefault(when_spec, []).append(value)
else:
all_by_subkey.setdefault(key, []).append(value)
all_by_name.setdefault(name, []).append(value)
# this needs to preserve the insertion order of whens
return dict(sorted(all_by_subkey.items()))
return dict(sorted(all_by_name.items()))
def _subkeys(when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]]) -> List[K]:
def _names(when_indexed_dictionary: WhenDict) -> List[str]:
"""Get sorted names from dicts keyed by when/name."""
all_keys = set()
for when, by_key in when_indexed_dictionary.items():
for key in by_key:
all_keys.add(key)
all_names = set()
for when, by_name in when_indexed_dictionary.items():
for name in by_name:
all_names.add(name)
return sorted(all_keys)
return sorted(all_names)
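To make the flattening described above concrete, a toy example (plain strings stand in for Spec objects and dependency values; none of this data is real):

    when_indexed = {
        "@:1.0": {"mpi": "depA"},
        "+cuda": {"cuda": "depB", "mpi": "depC"},
    }

    by_name = {}
    for when, by_key in when_indexed.items():
        for name, value in by_key.items():
            by_name.setdefault(name, []).append(value)

    print(dict(sorted(by_name.items())))
    # -> {'cuda': ['depB'], 'mpi': ['depA', 'depC']}

With when=True the values would instead be grouped under their when condition, giving a two-level mapping keyed by name and when spec.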
def _has_subkey(when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]], key: K) -> bool:
return any(key in dictionary for dictionary in when_indexed_dictionary.values())
WhenVariantList = List[Tuple[spack.spec.Spec, spack.variant.Variant]]
def _num_definitions(when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]]) -> int:
return sum(len(dictionary) for dictionary in when_indexed_dictionary.values())
def _remove_overridden_vdefs(variant_defs: WhenVariantList) -> None:
"""Remove variant defs from the list if their when specs are satisfied by later ones.
Any such variant definitions are *always* overridden by their successor, as it will
match everything the predecessor matches, and the solver will prefer it because of
its higher precedence.
def _precedence(obj) -> int:
"""Get either a 'precedence' attribute or item from an object."""
precedence = getattr(obj, "precedence", None)
if precedence is None:
raise KeyError(f"Couldn't get precedence from {type(obj)}")
return precedence
def _remove_overridden_defs(defs: List[Tuple[spack.spec.Spec, Any]]) -> None:
"""Remove definitions from the list if their when specs are satisfied by later ones.
Any such definitions are *always* overridden by their successor, as they will
match everything the predecessor matches, and the solver will prefer them because of
their higher precedence.
We can just remove these defs and avoid putting them in the solver. This is also
useful for, e.g., `spack info`, where we don't want to show a variant from a
superclass if it is always overridden by a variant defined in a subclass.
We can just remove these defs from variant definitions and avoid putting them in the
solver. This is also useful for, e.g., `spack info`, where we don't want to show a
variant from a superclass if it is always overridden by a variant defined in a
subclass.
Example::
@@ -480,33 +486,14 @@ class Hipblas:
"""
i = 0
while i < len(defs):
when, _ = defs[i]
if any(when.satisfies(successor) for successor, _ in defs[i + 1 :]):
del defs[i]
while i < len(variant_defs):
when, vdef = variant_defs[i]
if any(when.satisfies(successor) for successor, _ in variant_defs[i + 1 :]):
del variant_defs[i]
else:
i += 1
def _definitions(
when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]], key: K
) -> List[Tuple[spack.spec.Spec, V]]:
"""Iterator over (when_spec, Value) for all values with a particular Key."""
# construct a list of defs sorted by precedence
defs: List[Tuple[spack.spec.Spec, V]] = []
for when, values_by_key in when_indexed_dictionary.items():
value_def = values_by_key.get(key)
if value_def:
defs.append((when, value_def))
# With multiple definitions, ensure precedence order and simplify overrides
if len(defs) > 1:
defs.sort(key=lambda v: _precedence(v[1]))
_remove_overridden_defs(defs)
return defs
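A minimal, self-contained sketch of the override rule described above (not Spack code): a definition is dropped when a later, higher-precedence definition's condition covers everything the earlier one matches. Strings stand in for Spec and Variant objects, and the toy satisfies() only handles equality and an empty "match anything" condition.

    from typing import Callable, List, Tuple

    Def = Tuple[str, str]  # (when_condition, value)

    def remove_overridden(defs: List[Def], satisfies: Callable[[str, str], bool]) -> List[Def]:
        result = list(defs)
        i = 0
        while i < len(result):
            when, _ = result[i]
            if any(satisfies(when, later) for later, _ in result[i + 1:]):
                del result[i]  # always shadowed by a later definition
            else:
                i += 1
        return result

    toy_satisfies = lambda a, b: a == b or b == ""
    print(remove_overridden([("+rocm", "old"), ("+rocm", "new")], toy_satisfies))
    # -> [('+rocm', 'new')]: the earlier definition is always overridden
    print(remove_overridden([("", "base"), ("+rocm", "special")], toy_satisfies))
    # -> both kept: the base definition still applies when ~rocm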
#: Store whether a given Spec source/binary should not be redistributed.
class DisableRedistribute:
def __init__(self, source, binary):
@@ -647,14 +634,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: stubs directory are not bound by path."""
non_bindable_shared_objects: List[str] = []
#: List of fnmatch patterns of library file names (specifically DT_NEEDED entries) that are not
#: expected to be locatable in RPATHs. Generally this is a problem, and Spack install with
#: config:shared_linking:strict will cause install failures if such libraries are found.
#: However, in certain cases it can be hard if not impossible to avoid accidental linking
#: against system libraries; until that is resolved, this attribute can be used to suppress
#: errors.
unresolved_libraries: List[str] = []
#: List of prefix-relative file paths (or a single path). If these do
#: not exist after install, or if they exist but are not files,
#: sanity checks fail.
@@ -770,32 +749,44 @@ def __init__(self, spec):
@classmethod
def dependency_names(cls):
return _subkeys(cls.dependencies)
return _names(cls.dependencies)
@classmethod
def dependencies_by_name(cls, when: bool = False):
return _by_subkey(cls.dependencies, when=when)
return _by_name(cls.dependencies, when=when)
# Accessors for variants
# External code working with Variants should go through the methods below
# External code workingw with Variants should go through the methods below
@classmethod
def variant_names(cls) -> List[str]:
return _subkeys(cls.variants)
return _names(cls.variants)
@classmethod
def has_variant(cls, name) -> bool:
return _has_subkey(cls.variants, name)
return any(name in dictionary for dictionary in cls.variants.values())
@classmethod
def num_variant_definitions(cls) -> int:
"""Total number of variant definitions in this class so far."""
return _num_definitions(cls.variants)
return sum(len(variants_by_name) for variants_by_name in cls.variants.values())
@classmethod
def variant_definitions(cls, name: str) -> List[Tuple[spack.spec.Spec, spack.variant.Variant]]:
def variant_definitions(cls, name: str) -> WhenVariantList:
"""Iterator over (when_spec, Variant) for all variant definitions for a particular name."""
return _definitions(cls.variants, name)
# construct a list of defs sorted by precedence
defs: WhenVariantList = []
for when, variants_by_name in cls.variants.items():
variant_def = variants_by_name.get(name)
if variant_def:
defs.append((when, variant_def))
# With multiple definitions, ensure precedence order and simplify overrides
if len(defs) > 1:
defs.sort(key=lambda v: v[1].precedence)
_remove_overridden_vdefs(defs)
return defs
@classmethod
def variant_items(cls) -> Iterable[Tuple[spack.spec.Spec, Dict[str, spack.variant.Variant]]]:
@@ -1011,8 +1002,10 @@ def redistribute_binary(self):
return False
return True
# NOTE: return type should be Optional[Literal['all', 'specific', 'none']] in
# Python 3.8+, but we still support 3.6.
@property
def keep_werror(self) -> Optional[Literal["all", "specific", "none"]]:
def keep_werror(self) -> Optional[str]:
"""Keep ``-Werror`` flags, matches ``config:flags:keep_werror`` to override config.
Valid return values are:
@@ -1367,6 +1360,24 @@ def tester(self):
self._tester = spack.install_test.PackageTest(self)
return self._tester
@property
def installed(self):
msg = (
'the "PackageBase.installed" property is deprecated and will be '
'removed in Spack v0.19, use "Spec.installed" instead'
)
warnings.warn(msg)
return self.spec.installed
@property
def installed_upstream(self):
msg = (
'the "PackageBase.installed_upstream" property is deprecated and will '
'be removed in Spack v0.19, use "Spec.installed_upstream" instead'
)
warnings.warn(msg)
return self.spec.installed_upstream
@property
def fetcher(self):
if not self.spec.versions.concrete:
@@ -1744,7 +1755,7 @@ def all_patches(cls):
return patches
def content_hash(self, content: Optional[bytes] = None) -> str:
def content_hash(self, content=None):
"""Create a hash based on the artifacts and patches used to build this package.
This includes:
@@ -2369,32 +2380,6 @@ def possible_dependencies(
return visited
def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
"""Return True iff the version is deprecated.
Arguments:
pkg: The package whose version is to be checked.
version: The version being checked
"""
if not isinstance(version, StandardVersion):
version = StandardVersion.from_string(version)
details = pkg.versions.get(version)
return details is not None and details.get("deprecated", False)
def preferred_version(pkg: PackageBase):
"""
Returns a sorted list of the preferred versions of the package.
Arguments:
pkg: The package whose versions are to be assessed.
"""
version, _ = max(pkg.versions.items(), key=concretization_version_order)
return version
class PackageStillNeededError(InstallError):
"""Raised when package is still needed by another on uninstall."""

View File

@@ -40,7 +40,7 @@ def compare_output(current_output, blessed_output):
def compare_output_file(current_output, blessed_output_file):
"""Same as above, but when the blessed output is given as a file."""
with open(blessed_output_file, "r", encoding="utf-8") as f:
with open(blessed_output_file, "r") as f:
blessed_output = f.read()
compare_output(current_output, blessed_output)

View File

@@ -57,11 +57,12 @@
specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
import enum
import json
import pathlib
import re
import sys
from typing import Iterator, List, Optional
from typing import Iterator, List, Match, Optional
from llnl.util.tty import color
@@ -69,8 +70,9 @@
import spack.error
import spack.spec
import spack.version
from spack.tokenize import Token, TokenBase, Tokenizer
from spack.error import SpecSyntaxError
IS_WINDOWS = sys.platform == "win32"
#: Valid name for specs and variants. Here we are not using
#: the previous "w[\w.-]*" since that would match most
#: characters that can be part of a word in any language
@@ -85,9 +87,22 @@
HASH = r"[a-zA-Z_0-9]+"
#: A filename starts either with a "." or a "/" or a "{name}/,
# or on Windows, a drive letter followed by a colon and "\"
# or "." or {name}\
WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)"
if not IS_WINDOWS:
FILENAME = UNIX_FILENAME
else:
FILENAME = WINDOWS_FILENAME
#: These are legal values that *can* be parsed bare, without quotes on the command line.
VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"
#: Variant/flag values that match this can be left unquoted in Spack output
NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")
#: Quoted values can be *anything* in between quotes, including escaped quotes.
QUOTED_VALUE = r"(?:'(?:[^']|(?<=\\)')*'|\"(?:[^\"]|(?<=\\)\")*\")"
@@ -98,21 +113,60 @@
#: Regex with groups to use for splitting (optionally propagated) key-value pairs
SPLIT_KVP = re.compile(rf"^({NAME})(==?)(.*)$")
#: A filename starts either with a "." or a "/" or a "{name}/, or on Windows, a drive letter
#: followed by a colon and "\" or "." or {name}\
WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)"
FILENAME = WINDOWS_FILENAME if sys.platform == "win32" else UNIX_FILENAME
#: Regex to strip quotes. Group 2 will be the unquoted string.
STRIP_QUOTES = re.compile(r"^(['\"])(.*)\1$")
#: Values that match this (e.g., variants, flags) can be left unquoted in Spack output
NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")
def strip_quotes_and_unescape(string: str) -> str:
"""Remove surrounding single or double quotes from string, if present."""
match = STRIP_QUOTES.match(string)
if not match:
return string
# replace any escaped quotes with bare quotes
quote, result = match.groups()
return result.replace(rf"\{quote}", quote)
class SpecTokens(TokenBase):
def quote_if_needed(value: str) -> str:
"""Add quotes around the value if it requires quotes.
This will add quotes around the value unless it matches ``NO_QUOTES_NEEDED``.
This adds:
* single quotes by default
* double quotes around any value that contains single quotes
If double quotes are used, we json-escpae the string. That is, we escape ``\\``,
``"``, and control codes.
"""
if NO_QUOTES_NEEDED.match(value):
return value
return json.dumps(value) if "'" in value else f"'{value}'"
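A quick standalone restatement of the quoting rules just described, so they can be tried outside Spack (the regex is copied from NO_QUOTES_NEEDED above; the example values are invented):

    import json
    import re

    NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")

    def quote_if_needed_sketch(value: str) -> str:
        if NO_QUOTES_NEEDED.match(value):
            return value  # only safe characters, print bare
        return json.dumps(value) if "'" in value else f"'{value}'"

    print(quote_if_needed_sketch("openmpi,mpich"))  # bare: only safe characters
    print(quote_if_needed_sketch("cflags=-O2 -g"))  # -> 'cflags=-O2 -g'
    print(quote_if_needed_sketch("it's"))           # -> "it's" (JSON double quotes)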
class TokenBase(enum.Enum):
"""Base class for an enum type with a regex value"""
def __new__(cls, *args, **kwargs):
# See
value = len(cls.__members__) + 1
obj = object.__new__(cls)
obj._value_ = value
return obj
def __init__(self, regex):
self.regex = regex
def __str__(self):
return f"{self._name_}"
class TokenType(TokenBase):
"""Enumeration of the different token kinds in the spec grammar.
Order of declaration is extremely important, since text containing specs is parsed with a
single regex obtained by ``"|".join(...)`` of all the regex in the order of declaration.
"""
@@ -142,24 +196,79 @@ class SpecTokens(TokenBase):
DAG_HASH = rf"(?:/(?:{HASH}))"
# White spaces
WS = r"(?:\s+)"
# Unexpected character(s)
class ErrorTokenType(TokenBase):
"""Enum with regexes for error analysis"""
# Unexpected character
UNEXPECTED = r"(?:.[\s]*)"
#: Tokenizer that includes all the regexes in the SpecTokens enum
SPEC_TOKENIZER = Tokenizer(SpecTokens)
class Token:
"""Represents tokens; generated from input by lexer and fed to parse()."""
__slots__ = "kind", "value", "start", "end"
def __init__(
self, kind: TokenBase, value: str, start: Optional[int] = None, end: Optional[int] = None
):
self.kind = kind
self.value = value
self.start = start
self.end = end
def __repr__(self):
return str(self)
def __str__(self):
return f"({self.kind}, {self.value})"
def __eq__(self, other):
return (self.kind == other.kind) and (self.value == other.value)
#: List of all the regexes used to match spec parts, in order of precedence
TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
#: List of all valid regexes followed by error analysis regexes
ERROR_HANDLING_REGEXES = TOKEN_REGEXES + [
rf"(?P<{token}>{token.regex})" for token in ErrorTokenType
]
#: Regex to scan a valid text
ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
#: Regex to analyze an invalid text
ANALYSIS_REGEX = re.compile("|".join(ERROR_HANDLING_REGEXES))
def tokenize(text: str) -> Iterator[Token]:
"""Return a token generator from the text passed as input.
Raises:
SpecTokenizationError: when unexpected characters are found in the text
SpecTokenizationError: if we can't tokenize anymore, but didn't reach the
end of the input text.
"""
for token in SPEC_TOKENIZER.tokenize(text):
if token.kind == SpecTokens.UNEXPECTED:
raise SpecTokenizationError(list(SPEC_TOKENIZER.tokenize(text)), text)
yield token
scanner = ALL_TOKENS.scanner(text) # type: ignore[attr-defined]
match: Optional[Match] = None
for match in iter(scanner.match, None):
# The following two assertions are to help mypy
msg = (
"unexpected value encountered during parsing. Please submit a bug report "
"at https://github.com/spack/spack/issues/new/choose"
)
assert match is not None, msg
assert match.lastgroup is not None, msg
yield Token(
TokenType.__members__[match.lastgroup], match.group(), match.start(), match.end()
)
if match is None and not text:
# We just got an empty string
return
if match is None or match.end() != len(text):
scanner = ANALYSIS_REGEX.scanner(text) # type: ignore[attr-defined]
matches = [m for m in iter(scanner.match, None)] # type: ignore[var-annotated]
raise SpecTokenizationError(matches, text)
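The pattern above, one regex per token kind joined into a single alternation of named groups and scanned left to right, can be shown with a minimal standalone tokenizer (not Spack's). This sketch detects bad input by looking for gaps between matches instead of re-scanning with separate error regexes, and its token kinds are invented.

    import enum
    import re
    from typing import Iterator, NamedTuple

    class Tok(enum.Enum):
        NUMBER = r"\d+"
        NAME = r"[A-Za-z_]\w*"
        WS = r"\s+"

    class Token(NamedTuple):
        kind: Tok
        value: str
        start: int
        end: int

    PATTERN = re.compile("|".join(f"(?P<{t.name}>{t.value})" for t in Tok))

    def tokenize_sketch(text: str) -> Iterator[Token]:
        pos = 0
        for m in PATTERN.finditer(text):
            if m.start() != pos:  # a gap means characters no token kind matched
                raise ValueError(f"unexpected text at {pos}: {text[pos:m.start()]!r}")
            yield Token(Tok[m.lastgroup], m.group(), m.start(), m.end())
            pos = m.end()
        if pos != len(text):
            raise ValueError(f"unexpected trailing text: {text[pos:]!r}")

    print([t for t in tokenize_sketch("zlib 1 31") if t.kind is not Tok.WS])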
class TokenContext:
@@ -177,7 +286,7 @@ def advance(self):
"""Advance one token"""
self.current_token, self.next_token = self.next_token, next(self.token_stream, None)
def accept(self, kind: SpecTokens):
def accept(self, kind: TokenType):
"""If the next token is of the specified kind, advance the stream and return True.
Otherwise return False.
"""
@@ -186,25 +295,10 @@ def accept(self, kind: SpecTokens):
return True
return False
def expect(self, *kinds: SpecTokens):
def expect(self, *kinds: TokenType):
return self.next_token and self.next_token.kind in kinds
class SpecTokenizationError(spack.error.SpecSyntaxError):
"""Syntax error in a spec string"""
def __init__(self, tokens: List[Token], text: str):
message = f"unexpected characters in the spec string\n{text}\n"
underline = ""
for token in tokens:
is_error = token.kind == SpecTokens.UNEXPECTED
underline += ("^" if is_error else " ") * (token.end - token.start)
message += color.colorize(f"@*r{{{underline}}}")
super().__init__(message)
class SpecParser:
"""Parse text into specs"""
@@ -212,13 +306,13 @@ class SpecParser:
def __init__(self, literal_str: str):
self.literal_str = literal_str
self.ctx = TokenContext(filter(lambda x: x.kind != SpecTokens.WS, tokenize(literal_str)))
self.ctx = TokenContext(filter(lambda x: x.kind != TokenType.WS, tokenize(literal_str)))
def tokens(self) -> List[Token]:
"""Return the entire list of token from the initial text. White spaces are
filtered out.
"""
return list(filter(lambda x: x.kind != SpecTokens.WS, tokenize(self.literal_str)))
return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))
def next_spec(
self, initial_spec: Optional["spack.spec.Spec"] = None
@@ -245,14 +339,14 @@ def add_dependency(dep, **edge_properties):
initial_spec = initial_spec or spack.spec.Spec()
root_spec = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
while True:
if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
if self.ctx.accept(TokenType.START_EDGE_PROPERTIES):
edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
edge_properties.setdefault("depflag", 0)
edge_properties.setdefault("virtuals", ())
dependency = self._parse_node(root_spec)
add_dependency(dependency, **edge_properties)
elif self.ctx.accept(SpecTokens.DEPENDENCY):
elif self.ctx.accept(TokenType.DEPENDENCY):
dependency = self._parse_node(root_spec)
add_dependency(dependency, depflag=0, virtuals=())
@@ -300,7 +394,7 @@ def parse(
Return
The object passed as argument
"""
if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
if not self.ctx.next_token or self.ctx.expect(TokenType.DEPENDENCY):
return initial_spec
if initial_spec is None:
@@ -308,17 +402,17 @@ def parse(
# If we start with a package name we have a named spec, we cannot
# accept another package name afterwards in a node
if self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME):
if self.ctx.accept(TokenType.UNQUALIFIED_PACKAGE_NAME):
initial_spec.name = self.ctx.current_token.value
elif self.ctx.accept(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME):
elif self.ctx.accept(TokenType.FULLY_QUALIFIED_PACKAGE_NAME):
parts = self.ctx.current_token.value.split(".")
name = parts[-1]
namespace = ".".join(parts[:-1])
initial_spec.name = name
initial_spec.namespace = namespace
elif self.ctx.accept(SpecTokens.FILENAME):
elif self.ctx.accept(TokenType.FILENAME):
return FileParser(self.ctx).parse(initial_spec)
def raise_parsing_error(string: str, cause: Optional[Exception] = None):
@@ -333,7 +427,7 @@ def add_flag(name: str, value: str, propagate: bool):
raise_parsing_error(str(e), e)
while True:
if self.ctx.accept(SpecTokens.COMPILER):
if self.ctx.accept(TokenType.COMPILER):
if self.has_compiler:
raise_parsing_error("Spec cannot have multiple compilers")
@@ -341,7 +435,7 @@ def add_flag(name: str, value: str, propagate: bool):
initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
self.has_compiler = True
elif self.ctx.accept(SpecTokens.COMPILER_AND_VERSION):
elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
if self.has_compiler:
raise_parsing_error("Spec cannot have multiple compilers")
@@ -352,9 +446,9 @@ def add_flag(name: str, value: str, propagate: bool):
self.has_compiler = True
elif (
self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
or self.ctx.accept(SpecTokens.GIT_VERSION)
or self.ctx.accept(SpecTokens.VERSION)
self.ctx.accept(TokenType.VERSION_HASH_PAIR)
or self.ctx.accept(TokenType.GIT_VERSION)
or self.ctx.accept(TokenType.VERSION)
):
if self.has_version:
raise_parsing_error("Spec cannot have multiple versions")
@@ -365,32 +459,32 @@ def add_flag(name: str, value: str, propagate: bool):
initial_spec.attach_git_version_lookup()
self.has_version = True
elif self.ctx.accept(SpecTokens.BOOL_VARIANT):
elif self.ctx.accept(TokenType.BOOL_VARIANT):
variant_value = self.ctx.current_token.value[0] == "+"
add_flag(self.ctx.current_token.value[1:].strip(), variant_value, propagate=False)
elif self.ctx.accept(SpecTokens.PROPAGATED_BOOL_VARIANT):
elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
variant_value = self.ctx.current_token.value[0:2] == "++"
add_flag(self.ctx.current_token.value[2:].strip(), variant_value, propagate=True)
elif self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
match = SPLIT_KVP.match(self.ctx.current_token.value)
assert match, "SPLIT_KVP and KEY_VALUE_PAIR do not agree."
name, _, value = match.groups()
add_flag(name, strip_quotes_and_unescape(value), propagate=False)
elif self.ctx.accept(SpecTokens.PROPAGATED_KEY_VALUE_PAIR):
elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
match = SPLIT_KVP.match(self.ctx.current_token.value)
assert match, "SPLIT_KVP and PROPAGATED_KEY_VALUE_PAIR do not agree."
name, _, value = match.groups()
add_flag(name, strip_quotes_and_unescape(value), propagate=True)
elif self.ctx.expect(SpecTokens.DAG_HASH):
elif self.ctx.expect(TokenType.DAG_HASH):
if initial_spec.abstract_hash:
break
self.ctx.accept(SpecTokens.DAG_HASH)
self.ctx.accept(TokenType.DAG_HASH)
initial_spec.abstract_hash = self.ctx.current_token.value[1:]
else:
@@ -440,7 +534,7 @@ def __init__(self, ctx, literal_str):
def parse(self):
attributes = {}
while True:
if self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
if self.ctx.accept(TokenType.KEY_VALUE_PAIR):
name, value = self.ctx.current_token.value.split("=", maxsplit=1)
name = name.strip("'\" ")
value = value.strip("'\" ").split(",")
@@ -452,7 +546,7 @@ def parse(self):
)
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
# TODO: Add code to accept bool variants here as soon as use variants are implemented
elif self.ctx.accept(SpecTokens.END_EDGE_PROPERTIES):
elif self.ctx.accept(TokenType.END_EDGE_PROPERTIES):
break
else:
msg = "unexpected token in edge attributes"
@@ -507,7 +601,25 @@ def parse_one_or_raise(
return result
class SpecParsingError(spack.error.SpecSyntaxError):
class SpecTokenizationError(SpecSyntaxError):
"""Syntax error in a spec string"""
def __init__(self, matches, text):
message = "unexpected tokens in the spec string\n"
message += f"{text}"
underline = "\n"
for match in matches:
if match.lastgroup == str(ErrorTokenType.UNEXPECTED):
underline += f"{'^' * (match.end() - match.start())}"
continue
underline += f"{' ' * (match.end() - match.start())}"
message += color.colorize(f"@*r{{{underline}}}")
super().__init__(message)
class SpecParsingError(SpecSyntaxError):
"""Error when parsing tokens"""
def __init__(self, message, token, text):
@@ -515,33 +627,3 @@ def __init__(self, message, token, text):
underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}"
message += color.colorize(f"@*r{{{underline}}}")
super().__init__(message)
def strip_quotes_and_unescape(string: str) -> str:
"""Remove surrounding single or double quotes from string, if present."""
match = STRIP_QUOTES.match(string)
if not match:
return string
# replace any escaped quotes with bare quotes
quote, result = match.groups()
return result.replace(rf"\{quote}", quote)
def quote_if_needed(value: str) -> str:
"""Add quotes around the value if it requires quotes.
This will add quotes around the value unless it matches ``NO_QUOTES_NEEDED``.
This adds:
* single quotes by default
* double quotes around any value that contains single quotes
If double quotes are used, we json-escape the string. That is, we escape ``\\``,
``"``, and control codes.
"""
if NO_QUOTES_NEEDED.match(value):
return value
return json.dumps(value) if "'" in value else f"'{value}'"

View File

@@ -23,6 +23,7 @@
import spack.store
import spack.util.elf as elf
import spack.util.executable as executable
import spack.util.filesystem as ssys
from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer
@@ -349,6 +350,32 @@ def _set_elf_rpaths_and_interpreter(
return None
def needs_binary_relocation(m_type, m_subtype):
"""Returns True if the file with MIME type/subtype passed as arguments
needs binary relocation, False otherwise.
Args:
m_type (str): MIME type of the file
m_subtype (str): MIME subtype of the file
"""
subtypes = ("x-executable", "x-sharedlib", "x-mach-binary", "x-pie-executable")
if m_type == "application":
if m_subtype in subtypes:
return True
return False
def needs_text_relocation(m_type, m_subtype):
"""Returns True if the file with MIME type/subtype passed as arguments
needs text relocation, False otherwise.
Args:
m_type (str): MIME type of the file
m_subtype (str): MIME subtype of the file
"""
return m_type == "text"
def relocate_macho_binaries(
path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
):
@@ -596,32 +623,24 @@ def relocate_text_bin(binaries, prefixes):
return BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(binaries)
def is_macho_magic(magic: bytes) -> bool:
return (
# In order of popularity: 64-bit mach-o le/be, 32-bit mach-o le/be.
magic.startswith(b"\xCF\xFA\xED\xFE")
or magic.startswith(b"\xFE\xED\xFA\xCF")
or magic.startswith(b"\xCE\xFA\xED\xFE")
or magic.startswith(b"\xFE\xED\xFA\xCE")
# universal binaries: 0xcafebabe be (most common?) or 0xbebafeca le (not sure if exists).
# Here we need to disambiguate mach-o and JVM class files. In mach-o the next 4 bytes are
# the number of binaries; in JVM class files it's the java version number. We assume there
# are less than 10 binaries in a universal binary.
or (magic.startswith(b"\xCA\xFE\xBA\xBE") and int.from_bytes(magic[4:8], "big") < 10)
or (magic.startswith(b"\xBE\xBA\xFE\xCA") and int.from_bytes(magic[4:8], "little") < 10)
)
def is_binary(filename):
"""Returns true if a file is binary, False otherwise
Args:
filename: file to be tested
def is_elf_magic(magic: bytes) -> bool:
return magic.startswith(b"\x7FELF")
Returns:
True or False
"""
m_type, _ = ssys.mime_type(filename)
msg = "[{0}] -> ".format(filename)
if m_type == "application":
tty.debug(msg + "BINARY FILE")
return True
def is_binary(filename: str) -> bool:
"""Returns true iff a file is likely binary"""
with open(filename, "rb") as f:
magic = f.read(8)
return is_macho_magic(magic) or is_elf_magic(magic)
tty.debug(msg + "TEXT FILE")
return False
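The magic-number checks above can be summarized in a short standalone classifier (not the diff's code): read the first eight bytes of a file and match them against the ELF and Mach-O signatures. The little-endian universal-binary variant and the exact return conventions are simplified here.

    def classify_binary_sketch(path: str) -> str:
        with open(path, "rb") as f:
            magic = f.read(8)
        if magic.startswith(b"\x7fELF"):
            return "elf"
        if magic[:4] in (b"\xcf\xfa\xed\xfe", b"\xfe\xed\xfa\xcf",
                         b"\xce\xfa\xed\xfe", b"\xfe\xed\xfa\xce"):
            return "mach-o"
        # 0xCAFEBABE is shared with JVM class files; as noted above, assume a
        # universal binary has fewer than 10 slices, while a class file stores a
        # (much larger) Java version number in the same field.
        if magic[:4] == b"\xca\xfe\xba\xbe" and int.from_bytes(magic[4:8], "big") < 10:
            return "mach-o universal"
        return "not a recognized binary"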
# Memoize this due to repeated calls to libraries in the same directory.
@@ -630,14 +649,6 @@ def _exists_dir(dirname):
return os.path.isdir(dirname)
def is_macho_binary(path):
try:
with open(path, "rb") as f:
return is_macho_magic(f.read(4))
except OSError:
return False
def fixup_macos_rpath(root, filename):
"""Apply rpath fixups to the given file.
@@ -649,8 +660,7 @@ def fixup_macos_rpath(root, filename):
True if fixups were applied, else False
"""
abspath = os.path.join(root, filename)
if not is_macho_binary(abspath):
if ssys.mime_type(abspath) != ("application", "x-mach-binary"):
return False
# Get Mach-O header commands

View File

@@ -1031,7 +1031,7 @@ def is_prefix(self, fullname: str) -> bool:
def _read_config(self) -> Dict[str, str]:
"""Check for a YAML config file in this db's root directory."""
try:
with open(self.config_file, encoding="utf-8") as reponame_file:
with open(self.config_file) as reponame_file:
yaml_data = syaml.load(reponame_file)
if (
@@ -1365,7 +1365,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
packages_path = os.path.join(root, subdir)
fs.mkdirp(packages_path)
with open(config_path, "w", encoding="utf-8") as config:
with open(config_path, "w") as config:
config.write("repo:\n")
config.write(f" namespace: '{namespace}'\n")
if subdir != packages_dir_name:
@@ -1492,7 +1492,7 @@ def add_package(self, name, dependencies=None):
text = template.render(context)
package_py = self.recipe_filename(name)
fs.mkdirp(os.path.dirname(package_py))
with open(package_py, "w", encoding="utf-8") as f:
with open(package_py, "w") as f:
f.write(text)
def remove(self, name):

View File

@@ -191,9 +191,9 @@ def on_success(self, pkg, kwargs, package_record):
def fetch_log(self, pkg):
try:
if os.path.exists(pkg.install_log_path):
stream = gzip.open(pkg.install_log_path, "rt", encoding="utf-8")
stream = gzip.open(pkg.install_log_path, "rt")
else:
stream = open(pkg.log_path, encoding="utf-8")
stream = open(pkg.log_path)
with stream as f:
return f.read()
except OSError:

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import collections
import hashlib
import os.path
@@ -10,7 +11,6 @@
import re
import socket
import time
import warnings
import xml.sax.saxutils
from typing import Dict, Optional
from urllib.parse import urlencode
@@ -124,15 +124,11 @@ def __init__(self, configuration: CDashConfiguration):
self.multiple_packages = False
def report_build_name(self, pkg_name):
buildname = (
return (
"{0} - {1}".format(self.base_buildname, pkg_name)
if self.multiple_packages
else self.base_buildname
)
if len(buildname) > 190:
warnings.warn("Build name exceeds CDash 190 character maximum and will be truncated.")
buildname = buildname[:190]
return buildname
def build_report_for_package(self, report_dir, package, duration):
if "stdout" not in package:
@@ -257,7 +253,7 @@ def clean_log_event(event):
report_file_name = report_name
phase_report = os.path.join(report_dir, report_file_name)
with open(phase_report, "w", encoding="utf-8") as f:
with codecs.open(phase_report, "w", "utf-8") as f:
env = spack.tengine.make_environment()
if phase != "update":
# Update.xml stores site information differently
@@ -321,7 +317,7 @@ def report_test_data(self, report_dir, package, phases, report_data):
report_file_name = "_".join([package["name"], package["id"], report_name])
phase_report = os.path.join(report_dir, report_file_name)
with open(phase_report, "w", encoding="utf-8") as f:
with codecs.open(phase_report, "w", "utf-8") as f:
env = spack.tengine.make_environment()
if phase not in ["update", "testing"]:
# Update.xml stores site information differently
@@ -403,7 +399,7 @@ def concretization_report(self, report_dir, msg):
update_template = posixpath.join(self.template_dir, "Update.xml")
t = env.get_template(update_template)
output_filename = os.path.join(report_dir, "Update.xml")
with open(output_filename, "w", encoding="utf-8") as f:
with open(output_filename, "w") as f:
f.write(t.render(report_data))
# We don't have a current package when reporting on concretization
# errors so refer to this report with the base buildname instead.

View File

@@ -24,7 +24,7 @@ def build_report(self, filename, specs):
filename = filename + ".xml"
report_data = {"specs": specs}
with open(filename, "w", encoding="utf-8") as f:
with open(filename, "w") as f:
env = spack.tengine.make_environment()
t = env.get_template(self._jinja_template)
f.write(t.render(report_data))

View File

@@ -4,8 +4,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
import shutil
import tempfile
from collections import OrderedDict
from llnl.util.symlink import readlink, symlink
import spack.binary_distribution as bindist
import spack.deptypes as dt
@@ -16,6 +20,19 @@
import spack.store
def _relocate_spliced_links(links, orig_prefix, new_prefix):
"""Re-linking function which differs from `relocate.relocate_links` by
reading the old link rather than the new link, since the latter wasn't moved
in our case. This still needs to be called after the copy to destination
because it expects the new directory structure to be in place."""
for link in links:
link_target = readlink(os.path.join(orig_prefix, link))
link_target = re.sub("^" + orig_prefix, new_prefix, link_target)
new_link_path = os.path.join(new_prefix, link)
os.unlink(new_link_path)
symlink(link_target, new_link_path)
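The relinking idea described in the docstring above, as a standalone sketch (not Spack code): read each link's target under the old prefix, swap in the new prefix, and recreate the link under the new prefix. The re.escape call and the lexists guard are additions for safety in this sketch.

    import os
    import re

    def relink_sketch(rel_link_path: str, old_prefix: str, new_prefix: str) -> None:
        target = os.readlink(os.path.join(old_prefix, rel_link_path))
        target = re.sub("^" + re.escape(old_prefix), new_prefix, target)
        new_link = os.path.join(new_prefix, rel_link_path)
        if os.path.lexists(new_link):
            os.unlink(new_link)  # replace whatever the copy step produced
        os.symlink(target, new_link)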
def rewire(spliced_spec):
"""Given a spliced spec, this function conducts all the rewiring on all
nodes in the DAG of that spec."""
@@ -37,17 +54,13 @@ def rewire_node(spec, explicit):
the splice. The resulting package is then 'installed.'"""
tempdir = tempfile.mkdtemp()
# Copy spec.build_spec.prefix to spec.prefix through a temporary tarball
tarball = os.path.join(tempdir, f"{spec.dag_hash()}.tar.gz")
bindist.create_tarball(spec.build_spec, tarball)
# copy anything installed to a temporary directory
shutil.copytree(spec.build_spec.prefix, os.path.join(tempdir, spec.dag_hash()))
spack.hooks.pre_install(spec)
bindist.extract_buildcache_tarball(tarball, destination=spec.prefix)
buildinfo = bindist.read_buildinfo_file(spec.prefix)
# compute prefix-to-prefix for every node from the build spec to the spliced
# spec
prefix_to_prefix = {spec.build_spec.prefix: spec.prefix}
prefix_to_prefix = OrderedDict({spec.build_spec.prefix: spec.prefix})
build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
for s in bindist.deps_to_relocate(spec):
analog = s
@@ -64,17 +77,19 @@ def rewire_node(spec, explicit):
prefix_to_prefix[analog.prefix] = s.prefix
manifest = bindist.get_buildfile_manifest(spec.build_spec)
platform = spack.platforms.by_name(spec.platform)
text_to_relocate = [
os.path.join(spec.prefix, rel_path) for rel_path in buildinfo["relocate_textfiles"]
os.path.join(tempdir, spec.dag_hash(), rel_path)
for rel_path in manifest.get("text_to_relocate", [])
]
if text_to_relocate:
relocate.relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)
links = [os.path.join(spec.prefix, f) for f in buildinfo["relocate_links"]]
relocate.relocate_links(links, prefix_to_prefix)
bins_to_relocate = [
os.path.join(spec.prefix, rel_path) for rel_path in buildinfo["relocate_binaries"]
os.path.join(tempdir, spec.dag_hash(), rel_path)
for rel_path in manifest.get("binary_to_relocate", [])
]
if bins_to_relocate:
if "macho" in platform.binary_formats:
@@ -98,18 +113,22 @@ def rewire_node(spec, explicit):
spec.prefix,
)
relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
shutil.rmtree(tempdir)
install_manifest = os.path.join(
# Copy package into place, except for spec.json (because spec.json
# describes the old spec and not the new spliced spec).
shutil.copytree(
os.path.join(tempdir, spec.dag_hash()),
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
ignore=shutil.ignore_patterns("spec.json", "install_manifest.json"),
)
try:
os.unlink(install_manifest)
except FileNotFoundError:
pass
# Write the spliced spec into spec.json. Without this, Database.add would fail because it
# checks the spec.json in the prefix against the spec being added to look for mismatches
if manifest.get("link_to_relocate"):
_relocate_spliced_links(
manifest.get("link_to_relocate"), spec.build_spec.prefix, spec.prefix
)
shutil.rmtree(tempdir)
# Above, we did not copy spec.json: instead, here we write the new
# (spliced) spec into spec.json, without this, Database.add would fail on
# the next line (because it checks the spec.json in the prefix against the
# spec being added to look for mismatches)
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
# add to database, not sure about explicit
spack.store.STORE.db.add(spec, explicit=explicit)

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains jsonschema files for all of Spack's YAML formats."""
import copy
import typing
import warnings
@@ -26,14 +25,14 @@ def _validate_spec(validator, is_spec, instance, schema):
"""Check if the attributes on instance are valid specs."""
import jsonschema
import spack.spec_parser
import spack.parser
if not validator.is_type(instance, "object"):
return
for spec_str in instance:
try:
spack.spec_parser.parse(spec_str)
spack.parser.parse(spec_str)
except SpecSyntaxError as e:
yield jsonschema.ValidationError(str(e))
@@ -74,116 +73,3 @@ def _deprecated_properties(validator, deprecated, instance, schema):
Validator = llnl.util.lang.Singleton(_make_validator)
def _append(string: str) -> bool:
"""Test if a spack YAML string is an append.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
and if they do, their values append lower-precedence
configs.
str, str : concatenate strings.
[obj], [obj] : append lists.
"""
return getattr(string, "append", False)
def _prepend(string: str) -> bool:
"""Test if a spack YAML string is an prepend.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
and if they do, their values prepend lower-precedence
configs.
str, str : concatenate strings.
[obj], [obj] : prepend lists. (default behavior)
"""
return getattr(string, "prepend", False)
def override(string: str) -> bool:
"""Test if a spack YAML string is an override.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
and if they do, their values completely replace lower-precedence
configs instead of merging into them.
"""
return hasattr(string, "override") and string.override
def merge_yaml(dest, source, prepend=False, append=False):
"""Merges source into dest; entries in source take precedence over dest.
This routine may modify dest and should be assigned to dest, in
case dest was None to begin with, e.g.:
dest = merge_yaml(dest, source)
In the result, elements from lists from ``source`` will appear before
elements of lists from ``dest``. Likewise, when iterating over keys
or items in merged ``OrderedDict`` objects, keys from ``source`` will
appear before keys from ``dest``.
Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will override that of the
parent instead of merging.
`+:` will extend the default prepend merge strategy to include string concatenation
`-:` will change the merge strategy to append, it also includes string concatentation
"""
def they_are(t):
return isinstance(dest, t) and isinstance(source, t)
# If source is None, overwrite with source.
if source is None:
return None
# Source list is prepended (for precedence)
if they_are(list):
if append:
# Make sure to copy ruamel comments
dest[:] = [x for x in dest if x not in source] + source
else:
# Make sure to copy ruamel comments
dest[:] = source + [x for x in dest if x not in source]
return dest
# Source dict is merged into dest.
elif they_are(dict):
# save dest keys to reinsert later -- this ensures that source items
# come *before* dest in OrderedDicts
dest_keys = [dk for dk in dest.keys() if dk not in source]
for sk, sv in source.items():
# always remove the dest items. Python dicts do not overwrite
# keys on insert, so this ensures that source keys are copied
# into dest along with mark provenance (i.e., file/line info).
merge = sk in dest
old_dest_value = dest.pop(sk, None)
if merge and not override(sk):
dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
else:
# if sk ended with ::, or if it's new, completely override
dest[sk] = copy.deepcopy(sv)
# reinsert dest keys so they are last in the result
for dk in dest_keys:
dest[dk] = dest.pop(dk)
return dest
elif they_are(str):
# Concatenate strings in prepend mode
if prepend:
return source + dest
elif append:
return dest + source
# If we reach here source and dest are either different types or are
# not both lists or dicts: replace with source.
return copy.copy(source)
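A small standalone sketch of the default merge behavior documented above (not the removed function itself): dicts merge recursively with source keys first, source list items are prepended, and any other type is simply replaced by the source value. The ::/+:/-: key markers and ruamel comment handling are left out.

    def merge_sketch(dest, source):
        if isinstance(dest, dict) and isinstance(source, dict):
            merged = {k: merge_sketch(dest[k], v) if k in dest else v for k, v in source.items()}
            merged.update({k: v for k, v in dest.items() if k not in source})
            return merged
        if isinstance(dest, list) and isinstance(source, list):
            return source + [x for x in dest if x not in source]
        return source

    print(merge_sketch({"compiler": ["gcc"], "target": "x86_64"}, {"compiler": ["clang"]}))
    # -> {'compiler': ['clang', 'gcc'], 'target': 'x86_64'}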

View File

@@ -11,7 +11,7 @@
from llnl.util.lang import union_dicts
import spack.schema
import spack.config
import spack.schema.projections
#: Properties for inclusion in other schemas
@@ -34,7 +34,6 @@
"properties": {
"type": {"type": "string", "enum": ["rpath", "runpath"]},
"bind": {"type": "boolean"},
"missing_library_policy": {"enum": ["error", "warn", "ignore"]},
},
},
]
@@ -158,7 +157,7 @@ def update(data):
# whether install_tree was updated or not
# we merge the yaml to ensure we don't invalidate other projections
update_data = data.get("install_tree", {})
update_data = spack.schema.merge_yaml(update_data, projections_data)
update_data = spack.config.merge_yaml(update_data, projections_data)
data["install_tree"] = update_data
changed = True

Some files were not shown because too many files have changed in this diff.