Compare commits


1 Commit

Author | SHA1 | Message | Date
Gregory Becker | 54e5439dd6 | Spec.format: conditional format strings | 2023-08-22 11:22:36 -07:00
798 changed files with 5671 additions and 17718 deletions

View File

@@ -10,8 +10,3 @@ updates:
directory: "/lib/spack/docs"
schedule:
interval: "daily"
# Requirements to run style checks
- package-ecosystem: "pip"
directory: "/.github/workflows/style"
schedule:
interval: "daily"

View File

@@ -22,7 +22,7 @@ jobs:
matrix:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: ${{inputs.python_version}}
@@ -34,7 +34,6 @@ jobs:
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) audit externals
coverage combine
coverage xml
- name: Package audits (without coverage)
@@ -42,7 +41,6 @@ jobs:
run: |
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:

View File

@@ -24,7 +24,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup repo
@@ -158,7 +158,7 @@ jobs:
run: |
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -179,11 +179,11 @@ jobs:
run: |
brew install tree
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap clingo
run: |
set -ex
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"
@@ -204,7 +204,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup repo
@@ -214,7 +214,7 @@ jobs:
- name: Bootstrap clingo
run: |
set -ex
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"
@@ -247,7 +247,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh

View File

@@ -56,7 +56,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Set Container Tag Normal (Nightly)
run: |
@@ -86,19 +86,19 @@ jobs:
fi
- name: Upload Dockerfile
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
with:
name: dockerfiles
path: dockerfiles
- name: Set up QEMU
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1
uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
- name: Log in to GitHub Container Registry
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}

View File

@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0

View File

@@ -14,7 +14,7 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1

View File

@@ -1,7 +0,0 @@
black==23.9.1
clingo==5.6.2
flake8==6.1.0
isort==5.12.0
mypy==1.5.1
types-six==1.16.21.9
vermin==1.5.2

View File

@@ -47,7 +47,7 @@ jobs:
on_develop: false
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -94,7 +94,7 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -133,7 +133,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -152,7 +152,7 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -187,7 +187,7 @@ jobs:
matrix:
python-version: ["3.10"]
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2

View File

@@ -18,15 +18,15 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python Packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/style/requirements.txt
pip install --upgrade pip
pip install --upgrade vermin
- name: vermin (Spack's Core)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
@@ -35,17 +35,16 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/style/requirements.txt
python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
@@ -69,7 +68,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Setup repo and non-root user
run: |
git --version

View File

@@ -15,7 +15,7 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -39,7 +39,7 @@ jobs:
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -63,7 +63,7 @@ jobs:
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -75,5 +75,6 @@ jobs:
- name: Build Test
run: |
spack compiler find
spack -d external find cmake ninja
spack external find cmake
spack external find ninja
spack -d install abseil-cpp

View File

@@ -2,26 +2,24 @@
## Supported Versions
We provide security updates for `develop` and for the last two
stable (`0.x`) release series of Spack. Security updates will be
made available as patch (`0.x.1`, `0.x.2`, etc.) releases.
We provide security updates for the following releases.
For more on Spack's release structure, see
[`README.md`](https://github.com/spack/spack#releases).
| Version | Supported |
| ------- | ------------------ |
| develop | :white_check_mark: |
| 0.19.x | :white_check_mark: |
| 0.18.x | :white_check_mark: |
## Reporting a Vulnerability
You can report a vulnerability using GitHub's private reporting
feature:
To report a vulnerability or other security
issue, email maintainers@spack.io.
1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
2. Click "Report a vulnerability" in the upper right corner of that page.
3. Fill out the form and submit your draft security advisory.
More details are available in
[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
You can expect to hear back about security issues within two days.
If your security issue is accepted, we will do our best to release
a fix within a week. If fixing the issue will take longer than
this, we will discuss timeline options with you.
You can expect to hear back within two days.
If your security issue is accepted, we will do
our best to release a fix within a week. If
fixing the issue will take longer than this,
we will discuss timeline options with you.

View File

@@ -14,7 +14,7 @@
::
@echo off
set spack="%SPACK_ROOT%"\bin\spack
set spack=%SPACK_ROOT%\bin\spack
::#######################################################################
:: This is a wrapper around the spack command that forwards calls to

View File

@@ -39,26 +39,12 @@ function Read-SpackArgs {
return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
}
function Set-SpackEnv {
# This method is responsible
# for processing the return from $(spack <command>)
# which are returned as System.Object[]'s containing
# a list of env commands
# Invoke-Expression can only handle one command at a time
# so we iterate over the list to invoke the env modification
# expressions one at a time
foreach($envop in $args[0]){
Invoke-Expression $envop
}
}
function Invoke-SpackCD {
if (Compare-CommonArgs $SpackSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" cd -h
python $Env:SPACK_ROOT/bin/spack cd -h
}
else {
$LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
$LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
if (($NULL -ne $LOC)){
if ( Test-Path -Path $LOC){
Set-Location $LOC
@@ -75,7 +61,7 @@ function Invoke-SpackCD {
function Invoke-SpackEnv {
if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
python "$Env:SPACK_ROOT/bin/spack" env -h
python $Env:SPACK_ROOT/bin/spack env -h
}
else {
$SubCommandSubCommand = $SpackSubCommandArgs[0]
@@ -83,46 +69,46 @@ function Invoke-SpackEnv {
switch ($SubCommandSubCommand) {
"activate" {
if (Compare-CommonArgs $SubCommandSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
}
elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
}
elseif (!$SubCommandSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
}
else {
$SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
Set-SpackEnv $SpackEnv
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
$ExecutionContext.InvokeCommand($SpackEnv)
}
}
"deactivate" {
if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
python"$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
}
elseif($SubCommandSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
python $Env:SPACK_ROOT/bin/spack env deactivate -h
}
else {
$SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
Set-SpackEnv $SpackEnv
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
$ExecutionContext.InvokeCommand($SpackEnv)
}
}
default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}
}
}
function Invoke-SpackLoad {
if (Compare-CommonArgs $SpackSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
}
elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
}
else {
$SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
Set-SpackEnv $SpackEnv
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
$ExecutionContext.InvokeCommand($SpackEnv)
}
}
@@ -130,7 +116,7 @@ function Invoke-SpackLoad {
$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args
if (Compare-CommonArgs $SpackCMD_params) {
python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
exit $LASTEXITCODE
}
@@ -142,5 +128,5 @@ switch($SpackSubCommand)
"env" {Invoke-SpackEnv}
"load" {Invoke-SpackLoad}
"unload" {Invoke-SpackLoad}
default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}

View File

@@ -9,32 +9,9 @@
Bundle
------
``BundlePackage`` represents a set of packages that are expected to work
well together, such as a collection of commonly used software libraries.
The associated software is specified as dependencies.
If it makes sense, variants, conflicts, and requirements can be added to
the package. :ref:`Variants <variants>` ensure that common build options
are consistent across the packages supporting them. :ref:`Conflicts
and requirements <packaging_conflicts>` prevent attempts to build with known
bugs or limitations.
For example, if ``MyBundlePackage`` is known to only build on ``linux``,
it could use the ``require`` directive as follows:
.. code-block:: python
require("platform=linux", msg="MyBundlePackage only builds on linux")
Spack has a number of built-in bundle packages, such as:
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
``Libc`` is a virtual bundle package for the C standard library.
``BundlePackage`` represents a set of packages that are expected to work well
together, such as a collection of commonly used software libraries. The
associated software is specified as bundle dependencies.
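
To make the revised paragraph concrete, here is a minimal sketch of such a
package; the class name ``MySdk`` and its dependencies are hypothetical, not
taken from Spack's repositories:

.. code-block:: python

   class MySdk(BundlePackage):
       """Hypothetical suite of libraries expected to work well together."""

       homepage = "https://example.com/mysdk"

       # Bundles have no source of their own, so versions carry no checksum.
       version("1.0")

       # The associated software is expressed purely as dependencies.
       depends_on("zlib")
       depends_on("hdf5")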
^^^^^^^^

View File

@@ -214,7 +214,6 @@ def setup(sphinx):
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"),
("py:class", "spack.spec.InstallStatus"),
("py:class", "spack.spec.SpecfileReaderBase"),
("py:class", "spack.install_test.Pb"),
]

View File

@@ -1,113 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
==========================
Using External GPU Support
==========================
Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.
-----------------------------------
Using an External ROCm Installation
-----------------------------------
Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:
.. code-block:: yaml
packages:
all:
compiler: [rocmcc@=5.3.0]
variants: amdgpu_target=gfx90a
hip:
buildable: false
externals:
- spec: hip@5.3.0
prefix: /opt/rocm-5.3.0/hip
hsa-rocr-dev:
buildable: false
externals:
- spec: hsa-rocr-dev@5.3.0
prefix: /opt/rocm-5.3.0/
llvm-amdgpu:
buildable: false
externals:
- spec: llvm-amdgpu@5.3.0
prefix: /opt/rocm-5.3.0/llvm/
comgr:
buildable: false
externals:
- spec: comgr@5.3.0
prefix: /opt/rocm-5.3.0/
hipsparse:
buildable: false
externals:
- spec: hipsparse@5.3.0
prefix: /opt/rocm-5.3.0/
hipblas:
buildable: false
externals:
- spec: hipblas@5.3.0
prefix: /opt/rocm-5.3.0/
rocblas:
buildable: false
externals:
- spec: rocblas@5.3.0
prefix: /opt/rocm-5.3.0/
rocprim:
buildable: false
externals:
- spec: rocprim@5.3.0
prefix: /opt/rocm-5.3.0/rocprim/
This is in combination with the following compiler definition:
.. code-block:: yaml
compilers:
- compiler:
spec: rocmcc@=5.3.0
paths:
cc: /opt/rocm-5.3.0/bin/amdclang
cxx: /opt/rocm-5.3.0/bin/amdclang++
f77: null
fc: /opt/rocm-5.3.0/bin/amdflang
operating_system: rhel8
target: x86_64
This includes the following considerations:
- Each of the listed externals specifies ``buildable: false`` to force Spack
to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
packages, but not all of them, and furthermore not in a manner that
guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
require listing one of the subdirectories as a prefix.
-----------------------------------
Using an External CUDA Installation
-----------------------------------
CUDA is split into fewer components and is simpler to specify:
.. code-block:: yaml
packages:
all:
variants:
- cuda_arch=70
cuda:
buildable: false
externals:
- spec: cuda@11.0.2
prefix: /opt/cuda/cuda-11.0.2/
where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.

View File

@@ -77,7 +77,6 @@ or refer to the full manual below.
extensions
pipelines
signing
gpu_configuration
.. toctree::
:maxdepth: 2

View File

@@ -363,42 +363,6 @@ one of these::
If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
``nano``, and ``notepad``, in that order.
^^^^^^^^^^^^^^^^^
Bundling software
^^^^^^^^^^^^^^^^^
If you have a collection of software expected to work well together with
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
Examples where bundle packages can be useful include defining suites of
applications (e.g, `EcpProxyApps
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
These versioned packages primarily consist of dependencies on the associated
software packages. They can include :ref:`variants <variants>` to ensure
common build options are consistently applied to dependencies. Known build
failures, such as not building on a platform or when certain compilers or
variants are used, can be flagged with :ref:`conflicts <packaging_conflicts>`.
Build requirements, such as only building with specific compilers, can similarly
be flagged with :ref:`requires <packaging_conflicts>`.
The ``spack create --template bundle`` command will create a skeleton
``BundlePackage`` ``package.py`` for you:
.. code-block:: console
$ spack create --template bundle --name coolsdk
Now you can fill in the basic package documentation, version(s), and software
package dependencies along with any other relevant customizations.
.. note::
Remember that bundle packages have no software of their own so there
is nothing to download.
^^^^^^^^^^^^^^^^^^^^^^^^^
Non-downloadable software
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -646,16 +610,7 @@ add a line like this in the package class:
version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")
.. note::
By convention, we list versions in descending order, from newest to oldest.
.. note::
:ref:`Bundle packages <bundlepackage>` do not have source code so
there is nothing to fetch. Consequently, their version directives
consist solely of the version name (e.g., ``version("202309")``).
Versions should be listed in descending order, from newest to oldest.
^^^^^^^^^^^^^
Date Versions
@@ -2723,7 +2678,7 @@ Conflicts and requirements
--------------------------
Sometimes packages have known bugs, or limitations, that would prevent them
from building e.g. against other dependencies or with certain compilers. Spack
to build e.g. against other dependencies or with certain compilers. Spack
makes it possible to express such constraints with the ``conflicts`` directive.
Adding the following to a package:
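
The snippet the manual shows next falls outside this hunk; a representative
(illustrative, not verbatim) use of the directive looks like:

.. code-block:: python

   # Illustrative only: forbid a combination known to fail to build.
   conflicts("%intel@:16", when="@2.0:", msg="requires Intel compiler newer than 16")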
@@ -4818,17 +4773,17 @@ For example, running:
results in spack checking that the installation created the following **file**:
* ``self.prefix.bin.reframe``
* ``self.prefix/bin/reframe``
and the following **directories**:
* ``self.prefix.bin``
* ``self.prefix.config``
* ``self.prefix.docs``
* ``self.prefix.reframe``
* ``self.prefix.tutorials``
* ``self.prefix.unittests``
* ``self.prefix.cscs-checks``
* ``self.prefix/bin``
* ``self.prefix/config``
* ``self.prefix/docs``
* ``self.prefix/reframe``
* ``self.prefix/tutorials``
* ``self.prefix/unittests``
* ``self.prefix/cscs-checks``
If **any** of these paths are missing, then Spack considers the installation
to have failed.
@@ -4972,7 +4927,7 @@ installed executable. The check is implemented as follows:
@on_package_attributes(run_tests=True)
def check_list(self):
with working_dir(self.stage.source_path):
reframe = Executable(self.prefix.bin.reframe)
reframe = Executable(join_path(self.prefix, "bin", "reframe"))
reframe("-l")
.. warning::
@@ -5192,8 +5147,8 @@ embedded test parts.
for example in ["ex1", "ex2"]:
with test_part(
self,
f"test_example_{example}",
purpose=f"run installed {example}",
"test_example_{0}".format(example),
purpose="run installed {0}".format(example),
):
exe = which(join_path(self.prefix.bin, example))
exe()
@@ -5271,10 +5226,11 @@ Below illustrates using this feature to compile an example.
...
cxx = which(os.environ["CXX"])
cxx(
f"-L{self.prefix.lib}",
f"-I{self.prefix.include}",
f"{exe}.cpp",
"-o", exe
"-L{0}".format(self.prefix.lib),
"-I{0}".format(self.prefix.include),
"{0}.cpp".format(exe),
"-o",
exe
)
cxx_example = which(exe)
cxx_example()
@@ -5291,14 +5247,14 @@ Saving build-time files
We highly recommend re-using build-time test sources and pared down
input files for testing installed software. These files are easier
to keep synchronized with software capabilities since they reside
within the software's repository.
within the software's repository.
If that is not possible, you can add test-related files to the package
repository (see :ref:`adding custom files <cache_custom_files>`). It
will be important to maintain them so they work across listed or supported
versions of the package.
You can use the ``cache_extra_test_sources`` helper to copy directories
You can use the ``cache_extra_test_sources`` method to copy directories
and or files from the source build stage directory to the package's
installation directory.
@@ -5306,15 +5262,10 @@ The signature for ``cache_extra_test_sources`` is:
.. code-block:: python
def cache_extra_test_sources(pkg, srcs):
where each argument has the following meaning:
* ``pkg`` is an instance of the package for the spec under test.
* ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and or files needed for stand-alone testing.
def cache_extra_test_sources(self, srcs):
where ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and or files needed for stand-alone testing.
The paths must be relative to the staged source directory. Contents of
subdirectories and files are copied to a special test cache subdirectory
of the installation prefix. They are automatically copied to the appropriate
@@ -5335,18 +5286,21 @@ and using ``foo.c`` in a test method is illustrated below.
srcs = ["tests",
join_path("examples", "foo.c"),
join_path("examples", "bar.c")]
cache_extra_test_sources(self, srcs)
self.cache_extra_test_sources(srcs)
def test_foo(self):
exe = "foo"
src_dir = self.test_suite.current_test_cache_dir.examples
src_dir = join_path(
self.test_suite.current_test_cache_dir, "examples"
)
with working_dir(src_dir):
cc = which(os.environ["CC"])
cc(
f"-L{self.prefix.lib}",
f"-I{self.prefix.include}",
f"{exe}.c",
"-o", exe
"-L{0}".format(self.prefix.lib),
"-I{0}".format(self.prefix.include),
"{0}.c".format(exe),
"-o",
exe
)
foo = which(exe)
foo()
@@ -5372,9 +5326,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
In our example above, test methods can use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:
* ``self.test_suite.current_test_cache_dir.tests``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
.. admonition:: Library packages should build stand-alone tests
@@ -5393,7 +5347,7 @@ the copy of each entry listed in ``srcs``, respectively:
If one or more of the copied files needs to be modified to reference
the installed software, it is recommended that those changes be made
to the cached files **once** in the ``copy_test_sources`` method and
***after** the call to ``cache_extra_test_sources()``. This will
***after** the call to ``self.cache_extra_test_sources()``. This will
reduce the amount of unnecessary work in the test method **and** avoid
problems testing in shared instances and facility deployments.
@@ -5440,7 +5394,7 @@ property as shown below.
"""build and run custom-example"""
data_dir = self.test_suite.current_test_data_dir
exe = "custom-example"
src = datadir.join(f"{exe}.cpp")
src = datadir.join("{0}.cpp".format(exe))
...
# TODO: Build custom-example using src and exe
...
@@ -5456,7 +5410,7 @@ Reading expected output from a file
The helper function ``get_escaped_text_output`` is available for packages
to retrieve and properly format the text from a file that contains the
expected output from running an executable that may contain special
expected output from running an executable that may contain special
characters.
The signature for ``get_escaped_text_output`` is:
@@ -5490,7 +5444,7 @@ added to the package's ``test`` subdirectory.
db_filename, ".dump", output=str.split, error=str.split
)
for exp in expected:
assert re.search(exp, out), f"Expected '{exp}' in output"
assert re.search(exp, out), "Expected '{0}' in output".format(exp)
If the file was instead copied from the ``tests`` subdirectory of the staged
source code, the path would be obtained as shown below.
@@ -5503,7 +5457,7 @@ source code, the path would be obtained as shown below.
db_filename = test_cache_dir.join("packages.db")
Alternatively, if the file was copied to the ``share/tests`` subdirectory
as part of the installation process, the test could access the path as
as part of the installation process, the test could access the path as
follows:
.. code-block:: python
@@ -5540,12 +5494,9 @@ Invoking the method is the equivalent of:
.. code-block:: python
errors = []
for check in expected:
if not re.search(check, actual):
errors.append(f"Expected '{check}' in output '{actual}'")
if errors:
raise RuntimeError("\n ".join(errors))
raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
.. _accessing-files:
@@ -5585,7 +5536,7 @@ repository, and installation.
- ``self.test_suite.test_dir_for_spec(self.spec)``
* - Current Spec's Build-time Files
- ``self.test_suite.current_test_cache_dir``
- ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
- ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
* - Current Spec's Custom Test Files
- ``self.test_suite.current_test_data_dir``
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
@@ -5600,7 +5551,7 @@ Inheriting stand-alone tests
Stand-alone tests defined in parent (.e.g., :ref:`build-systems`) and
virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
packages that inherit from or provide interface implementations for those
packages, respectively.
packages, respectively.
The table below summarizes the stand-alone tests that will be executed along
with those implemented in the package itself.
@@ -5670,7 +5621,7 @@ for ``openmpi``:
SKIPPED: test_version_oshcc: oshcc is not installed
...
==> [2023-03-10-16:04:02.215227] Completed testing
==> [2023-03-10-16:04:02.215597]
==> [2023-03-10-16:04:02.215597]
======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
Openmpi::test_bin_mpirun .. PASSED
Openmpi::test_bin_ompi_info .. PASSED
@@ -6120,7 +6071,7 @@ in the extra attributes can implement this method like this:
@classmethod
def validate_detected_spec(cls, spec, extra_attributes):
"""Check that "compilers" is in the extra attributes."""
msg = ("the extra attribute 'compilers' must be set for "
msg = ("the extra attribute "compilers" must be set for "
"the detected spec '{0}'".format(spec))
assert "compilers" in extra_attributes, msg
@@ -6196,100 +6147,7 @@ follows:
"foo-package@{0}".format(version_str)
)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Add detection tests to packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To ensure that software is detected correctly for multiple configurations
and on different systems users can write a ``detection_test.yaml`` file and
put it in the package directory alongside the ``package.py`` file.
This YAML file contains enough information for Spack to mock an environment
and try to check if the detection logic yields the results that are expected.
As a general rule, attributes at the top-level of ``detection_test.yaml``
represent search mechanisms and they each map to a list of tests that should confirm
the validity of the package's detection logic.
The detection tests can be run with the following command:
.. code-block:: console
$ spack audit externals
Errors that have been detected are reported to screen.
""""""""""""""""""""""""""
Tests for PATH inspections
""""""""""""""""""""""""""
Detection tests insisting on ``PATH`` inspections are listed under
the ``paths`` attribute:
.. code-block:: yaml
paths:
- layout:
- executables:
- "bin/clang-3.9"
- "bin/clang++-3.9"
script: |
echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
echo "Target: x86_64-pc-linux-gnu"
echo "Thread model: posix"
echo "InstalledDir: /usr/bin"
results:
- spec: 'llvm@3.9.1 +clang~lld~lldb'
Each test is performed by first creating a temporary directory structure as
specified in the corresponding ``layout`` and by then running
package detection and checking that the outcome matches the expected
``results``. The exact details on how to specify both the ``layout`` and the
``results`` are reported in the table below:
.. list-table:: Test based on PATH inspections
:header-rows: 1
* - Option Name
- Description
- Allowed Values
- Required Field
* - ``layout``
- Specifies the filesystem tree used for the test
- List of objects
- Yes
* - ``layout:[0]:executables``
- Relative paths for the mock executables to be created
- List of strings
- Yes
* - ``layout:[0]:script``
- Mock logic for the executable
- Any valid shell script
- Yes
* - ``results``
- List of expected results
- List of objects (empty if no result is expected)
- Yes
* - ``results:[0]:spec``
- A spec that is expected from detection
- Any valid spec
- Yes
"""""""""""""""""""""""""""""""
Reuse tests from other packages
"""""""""""""""""""""""""""""""
When using a custom repository, it is possible to customize a package that already exists in ``builtin``
and reuse its external tests. To do so, just write a ``detection_tests.yaml`` alongside the customized
``package.py`` with an ``includes`` attribute. For instance the ``detection_tests.yaml`` for
``myrepo.llvm`` might look like:
.. code-block:: yaml
includes:
- "builtin.llvm"
This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to
those locally defined in the file.
.. _package-lifecycle:
-----------------------------
Style guidelines for packages

View File

@@ -1,13 +1,13 @@
sphinx==7.2.6
sphinx==7.2.2
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.5
pytest==7.4.2
urllib3==2.0.4
pytest==7.4.0
isort==5.12.0
black==23.9.1
black==23.7.0
flake8==6.1.0
mypy==1.5.1

View File

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)
* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)
astunparse
----------------

View File

@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.1"
__version__ = "0.2.0"

View File

@@ -79,18 +79,14 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.features = features
self.compilers = compilers
self.generation = generation
# Cache the ancestor computation
self._ancestors = None
@property
def ancestors(self):
"""All the ancestors of this microarchitecture."""
if self._ancestors is None:
value = self.parents[:]
for parent in self.parents:
value.extend(a for a in parent.ancestors if a not in value)
self._ancestors = value
return self._ancestors
value = self.parents[:]
for parent in self.parents:
value.extend(a for a in parent.ancestors if a not in value)
return value
def _to_set(self):
"""Returns a set of the nodes in this microarchitecture DAG."""

View File

@@ -145,13 +145,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
}
],
"intel": [
{
"versions": "16.0:",
"name": "corei7",
"flags": "-march={name} -mtune=generic -mpopcnt"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -224,13 +217,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
}
],
"intel": [
{
"versions": "16.0:",
"name": "core-avx2",
"flags": "-march={name} -mtune={name} -fma -mf16c"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -314,13 +300,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
}
],
"intel": [
{
"versions": "16.0:",
"name": "skylake-avx512",
"flags": "-march={name} -mtune={name}"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -1433,92 +1412,6 @@
]
}
},
"sapphirerapids": {
"from": [
"icelake"
],
"vendor": "GenuineIntel",
"features": [
"mmx",
"sse",
"sse2",
"ssse3",
"sse4_1",
"sse4_2",
"popcnt",
"aes",
"pclmulqdq",
"avx",
"rdrand",
"f16c",
"movbe",
"fma",
"avx2",
"bmi1",
"bmi2",
"rdseed",
"adx",
"clflushopt",
"xsavec",
"xsaveopt",
"avx512f",
"avx512vl",
"avx512bw",
"avx512dq",
"avx512cd",
"avx512vbmi",
"avx512ifma",
"sha_ni",
"clwb",
"rdpid",
"gfni",
"avx512_vbmi2",
"avx512_vpopcntdq",
"avx512_bitalg",
"avx512_vnni",
"vpclmulqdq",
"vaes",
"avx512_bf16",
"cldemote",
"movdir64b",
"movdiri",
"pdcm",
"serialize",
"waitpkg"
],
"compilers": {
"gcc": [
{
"versions": "11.0:",
"flags": "-march={name} -mtune={name}"
}
],
"clang": [
{
"versions": "12.0:",
"flags": "-march={name} -mtune={name}"
}
],
"intel": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
],
"oneapi": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
],
"dpcpp": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
]
}
},
"k10": {
"from": ["x86_64"],
"vendor": "AuthenticAMD",
@@ -2172,6 +2065,8 @@
"pku",
"gfni",
"flush_l1d",
"erms",
"avic",
"avx512f",
"avx512dq",
"avx512ifma",
@@ -2188,12 +2083,12 @@
"compilers": {
"gcc": [
{
"versions": "10.3:12.2",
"versions": "10.3:13.0",
"name": "znver3",
"flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
},
{
"versions": "12.3:",
"versions": "13.1:",
"name": "znver4",
"flags": "-march={name} -mtune={name}"
}
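
These JSON entries are consumed through archspec's Python API; a short sketch,
assuming ``archspec`` is importable (``sapphirerapids`` is defined on the side
of the diff that keeps it):

.. code-block:: python

   import archspec.cpu

   # Look up a microarchitecture and ask for compiler-specific flags.
   target = archspec.cpu.TARGETS["sapphirerapids"]
   print(target.optimization_flags("gcc", "12.1.0"))
   # Per the gcc "11.0:" entry above, this yields
   # "-march=sapphirerapids -mtune=sapphirerapids"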

View File

@@ -1,105 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Path primitives that just require Python standard library."""
import functools
import sys
from typing import List, Optional
from urllib.parse import urlparse
class Path:
"""Enum to identify the path-style."""
unix: int = 0
windows: int = 1
platform_path: int = windows if sys.platform == "win32" else unix
def format_os_path(path: str, mode: int = Path.unix) -> str:
"""Formats the input path to use consistent, platform specific separators.
Absolute paths are converted between drive letters and a prepended '/' as per platform
requirement.
Parameters:
path: the path to be normalized, must be a string or expose the replace method.
mode: the path file separator style to normalize the passed path to.
Default is unix style, i.e. '/'
"""
if not path:
return path
if mode == Path.windows:
path = path.replace("/", "\\")
else:
path = path.replace("\\", "/")
return path
def convert_to_posix_path(path: str) -> str:
"""Converts the input path to POSIX style."""
return format_os_path(path, mode=Path.unix)
def convert_to_windows_path(path: str) -> str:
"""Converts the input path to Windows style."""
return format_os_path(path, mode=Path.windows)
def convert_to_platform_path(path: str) -> str:
"""Converts the input path to the current platform's native style."""
return format_os_path(path, mode=Path.platform_path)
def path_to_os_path(*parameters: str) -> List[str]:
"""Takes an arbitrary number of positional parameters, converts each argument of type
string to use a normalized filepath separator, and returns a list of all values.
"""
def _is_url(path_or_url: str) -> bool:
if "\\" in path_or_url:
return False
url_tuple = urlparse(path_or_url)
return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1
result = []
for item in parameters:
if isinstance(item, str) and not _is_url(item):
item = convert_to_platform_path(item)
result.append(item)
return result
def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
"""Filters function arguments to account for platform path separators.
Optional slicing range can be specified to select specific arguments
This decorator takes all (or a slice) of a method's positional arguments
and normalizes usage of filepath separators on a per platform basis.
Note: `**kwargs`, urls, and any type that is not a string are ignored
so in such cases where path normalization is required, that should be
handled by calling path_to_os_path directly as needed.
Parameters:
arg_slice: a slice object specifying the slice of arguments
in the decorated method over which filepath separators are
normalized
"""
def holder_func(func):
@functools.wraps(func)
def path_filter_caller(*args, **kwargs):
args = list(args)
if arg_slice:
args[arg_slice] = path_to_os_path(*args[arg_slice])
else:
args = path_to_os_path(*args)
return func(*args, **kwargs)
return path_filter_caller
if _func:
return holder_func(_func)
return holder_func
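
A short usage sketch for the helpers above; the expected values follow
directly from the code as shown:

.. code-block:: python

   # Separator conversion is purely textual: only the slashes are swapped.
   assert convert_to_posix_path(r"C:\spack\bin") == "C:/spack/bin"
   assert convert_to_windows_path("lib/spack") == "lib\\spack"

   # path_to_os_path leaves URLs untouched (note the backslash check in
   # _is_url); on a POSIX host this prints ['a/b', 'https://example.com/x.tar.gz'].
   print(path_to_os_path("a\\b", "https://example.com/x.tar.gz"))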

View File

@@ -1,67 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""String manipulation functions that do not have other dependencies than Python
standard library
"""
from typing import List, Optional
def comma_list(sequence: List[str], article: str = "") -> str:
if type(sequence) is not list:
sequence = list(sequence)
if not sequence:
return ""
if len(sequence) == 1:
return sequence[0]
out = ", ".join(str(s) for s in sequence[:-1])
if len(sequence) != 2:
out += "," # oxford comma
out += " "
if article:
out += article + " "
out += str(sequence[-1])
return out
def comma_or(sequence: List[str]) -> str:
"""Return a string with all the elements of the input joined by comma, but the last
one (which is joined by 'or').
"""
return comma_list(sequence, "or")
def comma_and(sequence: List[str]) -> str:
"""Return a string with all the elements of the input joined by comma, but the last
one (which is joined by 'and').
"""
return comma_list(sequence, "and")
def quote(sequence: List[str], q: str = "'") -> List[str]:
"""Quotes each item in the input list with the quote character passed as second argument."""
return [f"{q}{e}{q}" for e in sequence]
def plural(n: int, singular: str, plural: Optional[str] = None, show_n: bool = True) -> str:
"""Pluralize <singular> word by adding an s if n != 1.
Arguments:
n: number of things there are
singular: singular form of word
plural: optional plural form, for when it's not just singular + 's'
show_n: whether to include n in the result string (default True)
Returns:
"1 thing" if n == 1 or "n things" if n != 1
"""
number = f"{n} " if show_n else ""
if n == 1:
return f"{number}{singular}"
elif plural is not None:
return f"{number}{plural}"
else:
return f"{number}{singular}s"

View File

@@ -1,459 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""URL primitives that just require Python standard library."""
import itertools
import os.path
import re
from typing import Optional, Set, Tuple
from urllib.parse import urlsplit, urlunsplit
# Archive extensions allowed in Spack
PREFIX_EXTENSIONS = ("tar", "TAR")
EXTENSIONS = ("gz", "bz2", "xz", "Z")
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
ALLOWED_ARCHIVE_TYPES = (
tuple(".".join(ext) for ext in itertools.product(PREFIX_EXTENSIONS, EXTENSIONS))
+ PREFIX_EXTENSIONS
+ EXTENSIONS
+ NO_TAR_EXTENSIONS
)
CONTRACTION_MAP = {"tgz": "tar.gz", "txz": "tar.xz", "tbz": "tar.bz2", "tbz2": "tar.bz2"}
def find_list_urls(url: str) -> Set[str]:
r"""Find good list URLs for the supplied URL.
By default, returns the dirname of the archive path.
Provides special treatment for the following websites, which have a
unique list URL different from the dirname of the download URL:
========= =======================================================
GitHub https://github.com/<repo>/<name>/releases
GitLab https://gitlab.\*/<repo>/<name>/tags
BitBucket https://bitbucket.org/<repo>/<name>/downloads/?tab=tags
CRAN https://\*.r-project.org/src/contrib/Archive/<name>
PyPI https://pypi.org/simple/<name>/
LuaRocks https://luarocks.org/modules/<repo>/<name>
========= =======================================================
Note: this function is called by `spack versions`, `spack checksum`,
and `spack create`, but not by `spack fetch` or `spack install`.
Parameters:
url (str): The download URL for the package
Returns:
set: One or more list URLs for the package
"""
url_types = [
# GitHub
# e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
(r"(.*github\.com/[^/]+/[^/]+)", lambda m: m.group(1) + "/releases"),
# GitLab API endpoint
# e.g. https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2
(
r"(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)",
lambda m: m.group(1) + "/" + m.group(2) + "/" + m.group(3) + "/tags",
),
# GitLab non-API endpoint
# e.g. https://gitlab.dkrz.de/k202009/libaec/uploads/631e85bcf877c2dcaca9b2e6d6526339/libaec-1.0.0.tar.gz
(r"(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)", lambda m: m.group(1) + "/tags"),
# BitBucket
# e.g. https://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2
(r"(.*bitbucket.org/[^/]+/[^/]+)", lambda m: m.group(1) + "/downloads/?tab=tags"),
# CRAN
# e.g. https://cran.r-project.org/src/contrib/Rcpp_0.12.9.tar.gz
# e.g. https://cloud.r-project.org/src/contrib/rgl_0.98.1.tar.gz
(
r"(.*\.r-project\.org/src/contrib)/([^_]+)",
lambda m: m.group(1) + "/Archive/" + m.group(2),
),
# PyPI
# e.g. https://pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://www.pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://pypi.org/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://pypi.python.org/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://files.pythonhosted.org/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://pypi.io/packages/py2.py3/o/opencensus-context/opencensus_context-0.1.1-py2.py3-none-any.whl
(
r"(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)",
lambda m: "https://pypi.org/simple/" + m.group(1) + "/",
),
# LuaRocks
# e.g. https://luarocks.org/manifests/gvvaughan/lpeg-1.0.2-1.src.rock
# e.g. https://luarocks.org/manifests/openresty/lua-cjson-2.1.0-1.src.rock
(
r"luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/"
+ r"(?P<name>.+?)-[0-9.-]*\.src\.rock",
lambda m: "https://luarocks.org/modules/"
+ m.group("org")
+ "/"
+ m.group("name")
+ "/",
),
]
list_urls = {os.path.dirname(url)}
for pattern, fun in url_types:
match = re.search(pattern, url)
if match:
list_urls.add(fun(match))
return list_urls
def strip_query_and_fragment(url: str) -> Tuple[str, str]:
"""Strips query and fragment from a url, then returns the base url and the suffix.
Args:
url: URL to be stripped
Raises:
ValueError: when there is any error parsing the URL
"""
components = urlsplit(url)
stripped = components[:3] + (None, None)
query, frag = components[3:5]
suffix = ""
if query:
suffix += "?" + query
if frag:
suffix += "#" + frag
return urlunsplit(stripped), suffix
SOURCEFORGE_RE = re.compile(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$")
def split_url_on_sourceforge_suffix(url: str) -> Tuple[str, ...]:
"""If the input is a sourceforge URL, returns base URL and "/download" suffix. Otherwise,
returns the input URL and an empty string.
"""
match = SOURCEFORGE_RE.search(url)
if match is not None:
return match.groups()
return url, ""
def has_extension(path_or_url: str, ext: str) -> bool:
"""Returns true if the extension in input is present in path, false otherwise."""
prefix, _ = split_url_on_sourceforge_suffix(path_or_url)
if not ext.startswith(r"\."):
ext = rf"\.{ext}$"
if re.search(ext, prefix):
return True
return False
def extension_from_path(path_or_url: Optional[str]) -> Optional[str]:
"""Tries to match an allowed archive extension to the input. Returns the first match,
or None if no match was found.
Raises:
ValueError: if the input is None
"""
if path_or_url is None:
raise ValueError("Can't call extension() on None")
for t in ALLOWED_ARCHIVE_TYPES:
if has_extension(path_or_url, t):
return t
return None
def remove_extension(path_or_url: str, *, extension: str) -> str:
"""Returns the input with the extension removed"""
suffix = rf"\.{extension}$"
return re.sub(suffix, "", path_or_url)
def check_and_remove_ext(path: str, *, extension: str) -> str:
"""Returns the input path with the extension removed, if the extension is present in path.
Otherwise, returns the input unchanged.
"""
if not has_extension(path, extension):
return path
path, _ = split_url_on_sourceforge_suffix(path)
return remove_extension(path, extension=extension)
def strip_extension(path_or_url: str, *, extension: Optional[str] = None) -> str:
"""If a path contains the extension in input, returns the path stripped of the extension.
Otherwise, returns the input path.
If extension is None, attempts to strip any allowed extension from path.
"""
if extension is None:
for t in ALLOWED_ARCHIVE_TYPES:
if has_extension(path_or_url, ext=t):
extension = t
break
else:
return path_or_url
return check_and_remove_ext(path_or_url, extension=extension)
def split_url_extension(url: str) -> Tuple[str, ...]:
"""Some URLs have a query string, e.g.:
1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
In (1), the query string needs to be stripped to get at the
extension, but in (2) & (3), the filename is IN a single final query
argument.
This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
The suffix contains anything that was stripped off the URL to
get at the file extension. In (1), it will be ``'?raw=true'``, but
in (2), it will be empty. In (3) the suffix is a parameter that follows
after the file extension, e.g.:
1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
"""
# Strip off sourceforge download suffix.
# e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
prefix, suffix = split_url_on_sourceforge_suffix(url)
ext = extension_from_path(prefix)
if ext is not None:
prefix = strip_extension(prefix)
return prefix, ext, suffix
try:
prefix, suf = strip_query_and_fragment(prefix)
except ValueError:
# FIXME: tty.debug("Got error parsing path %s" % path)
# Ignore URL parse errors here
return url, ""
ext = extension_from_path(prefix)
prefix = strip_extension(prefix)
suffix = suf + suffix
if ext is None:
ext = ""
return prefix, ext, suffix
def strip_version_suffixes(path_or_url: str) -> str:
"""Some tarballs contain extraneous information after the version:
* ``bowtie2-2.2.5-source``
* ``libevent-2.0.21-stable``
* ``cuda_8.0.44_linux.run``
These strings are not part of the version number and should be ignored.
This function strips those suffixes off and returns the remaining string.
The goal is that the version is always the last thing in ``path``:
* ``bowtie2-2.2.5``
* ``libevent-2.0.21``
* ``cuda_8.0.44``
Args:
path_or_url: The filename or URL for the package
Returns:
The ``path`` with any extraneous suffixes removed
"""
# NOTE: This could be done with complicated regexes in parse_version_offset
# NOTE: The problem is that we would have to add these regexes to the end
# NOTE: of every single version regex. Easier to just strip them off
# NOTE: permanently
suffix_regexes = [
# Download type
r"[Ii]nstall",
r"all",
r"code",
r"[Ss]ources?",
r"file",
r"full",
r"single",
r"with[a-zA-Z_-]+",
r"rock",
r"src(_0)?",
r"public",
r"bin",
r"binary",
r"run",
r"[Uu]niversal",
r"jar",
r"complete",
r"dynamic",
r"oss",
r"gem",
r"tar",
r"sh",
# Download version
r"release",
r"bin",
r"stable",
r"[Ff]inal",
r"rel",
r"orig",
r"dist",
r"\+",
# License
r"gpl",
# Arch
# Needs to come before and after OS, appears in both orders
r"ia32",
r"intel",
r"amd64",
r"linux64",
r"x64",
r"64bit",
r"x86[_-]64",
r"i586_64",
r"x86",
r"i[36]86",
r"ppc64(le)?",
r"armv?(7l|6l|64)",
# Other
r"cpp",
r"gtk",
r"incubating",
# OS
r"[Ll]inux(_64)?",
r"LINUX",
r"[Uu]ni?x",
r"[Ss]un[Oo][Ss]",
r"[Mm]ac[Oo][Ss][Xx]?",
r"[Oo][Ss][Xx]",
r"[Dd]arwin(64)?",
r"[Aa]pple",
r"[Ww]indows",
r"[Ww]in(64|32)?",
r"[Cc]ygwin(64|32)?",
r"[Mm]ingw",
r"centos",
# Arch
# Needs to come before and after OS, appears in both orders
r"ia32",
r"intel",
r"amd64",
r"linux64",
r"x64",
r"64bit",
r"x86[_-]64",
r"i586_64",
r"x86",
r"i[36]86",
r"ppc64(le)?",
r"armv?(7l|6l|64)?",
# PyPI
r"[._-]py[23].*\.whl",
r"[._-]cp[23].*\.whl",
r"[._-]win.*\.exe",
]
for regex in suffix_regexes:
# Remove the suffix from the end of the path
# This may be done multiple times
path_or_url = re.sub(r"[._-]?" + regex + "$", "", path_or_url)
return path_or_url
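A minimal sketch exercising the stripping loop with the docstring's own examples (illustrative; it assumes the module-level regexes shown above):
# Each pass strips one optional [._-] separator plus a known suffix from the end.
assert strip_version_suffixes("bowtie2-2.2.5-source") == "bowtie2-2.2.5"
assert strip_version_suffixes("libevent-2.0.21-stable") == "libevent-2.0.21"
assert strip_version_suffixes("cuda_8.0.44_linux.run") == "cuda_8.0.44"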
def expand_contracted_extension(extension: str) -> str:
"""Returns the expanded version of a known contracted extension.
This function maps extensions like ".tgz" to ".tar.gz". On unknown extensions,
return the input unmodified.
"""
extension = extension.strip(".")
return CONTRACTION_MAP.get(extension, extension)
def expand_contracted_extension_in_path(
path_or_url: str, *, extension: Optional[str] = None
) -> str:
"""Returns the input path or URL with any contraction extension expanded.
Args:
path_or_url: path or URL to be expanded
extension: if specified, only attempt to expand that extension
"""
extension = extension or extension_from_path(path_or_url)
if extension is None:
return path_or_url
expanded = expand_contracted_extension(extension)
if expanded != extension:
return re.sub(rf"{extension}", rf"{expanded}", path_or_url)
return path_or_url
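A short sketch of the two expansion helpers (this assumes CONTRACTION_MAP contains the common "tgz" -> "tar.gz" entry; the mapping itself is not shown in this hunk):
# Leading dots are stripped before the lookup; unknown extensions pass through.
assert expand_contracted_extension(".tgz") == "tar.gz"  # assumed CONTRACTION_MAP entry
assert expand_contracted_extension("zip") == "zip"      # unknown: returned unmodified
# In a path, the contracted extension is rewritten in place:
assert expand_contracted_extension_in_path("foo-1.0.tgz") == "foo-1.0.tar.gz"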
def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
"""Returns compression extension for a compressed archive"""
extension = expand_contracted_extension(extension)
for ext in EXTENSIONS:
if ext in extension:
return ext
return None
def strip_compression_extension(path_or_url: str, ext: Optional[str] = None) -> str:
"""Strips the compression extension from the input, and returns it. For instance,
"foo.tgz" becomes "foo.tar".
If no extension is given, try a default list of extensions.
Args:
path_or_url: input to be stripped
ext: if given, extension to be stripped
"""
if not extension_from_path(path_or_url):
return path_or_url
expanded_path = expand_contracted_extension_in_path(path_or_url)
candidates = [ext] if ext is not None else EXTENSIONS
for current_extension in candidates:
modified_path = check_and_remove_ext(expanded_path, extension=current_extension)
if modified_path != expanded_path:
return modified_path
return expanded_path
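The docstring's example as a runnable sketch (assuming "gz" is among the known EXTENSIONS):
# "foo.tgz" is first expanded to "foo.tar.gz", then the compression
# extension is removed, leaving the archive extension behind.
assert strip_compression_extension("foo.tgz") == "foo.tar"
assert strip_compression_extension("foo.tar.gz", ext="gz") == "foo.tar"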
def allowed_archive(path_or_url: str) -> bool:
"""Returns true if the input is a valid archive, False otherwise."""
return (
False if not path_or_url else any(path_or_url.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
)
def determine_url_file_extension(path: str) -> str:
"""This returns the type of archive a URL refers to. This is
sometimes confusing because of URLs like:
(1) https://github.com/petdance/ack/tarball/1.93_02
Where the URL doesn't actually contain the filename. We need
to know what type it is so that we can appropriately name files
in mirrors.
"""
match = re.search(r"github.com/.+/(zip|tar)ball/", path)
if match:
if match.group(1) == "zip":
return "zip"
elif match.group(1) == "tar":
return "tar.gz"
prefix, ext, suffix = split_url_extension(path)
return ext
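A brief sketch of both branches (the tarball URL is the docstring's example (1); the second call simply falls through to split_url_extension):
# GitHub tarball/zipball URLs carry no filename, so the type is inferred.
assert determine_url_file_extension("https://github.com/petdance/ack/tarball/1.93_02") == "tar.gz"
# Ordinary URLs are split as usual and only the extension is returned.
ext = determine_url_file_extension("http://example.com/foo-1.0.tar.bz2")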

View File

@@ -11,7 +11,6 @@
import itertools
import numbers
import os
import pathlib
import posixpath
import re
import shutil
@@ -19,17 +18,14 @@
import sys
import tempfile
from contextlib import contextmanager
from itertools import accumulate
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
import llnl.util.symlink
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
from llnl.util.symlink import islink, symlink
from spack.util.executable import Executable, which
from ..path import path_to_os_path, system_path_filter
from spack.util.path import path_to_os_path, system_path_filter
if sys.platform != "win32":
import grp
@@ -105,7 +101,7 @@ def _nop(args, ns=None, follow_symlinks=None):
pass
# follow symlinks (aka don't not follow symlinks)
follow = follow_symlinks or not (islink(src) and islink(dst))
follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
if follow:
# use the real function if it exists
def lookup(name):
@@ -173,7 +169,7 @@ def rename(src, dst):
if sys.platform == "win32":
# Windows path existence checks will sometimes fail on junctions/links/symlinks
# so check for that case
if os.path.exists(dst) or islink(dst):
if os.path.exists(dst) or os.path.islink(dst):
os.remove(dst)
os.rename(src, dst)
@@ -337,7 +333,8 @@ def groupid_to_group(x):
if string:
regex = re.escape(regex)
for filename in path_to_os_path(*filenames):
filenames = path_to_os_path(*filenames)
for filename in filenames:
msg = 'FILTER FILE: {0} [replacing "{1}"]'
tty.debug(msg.format(filename, regex))
@@ -569,7 +566,7 @@ def set_install_permissions(path):
# If this points to a file maintained in a Spack prefix, it is assumed that
# this function will be invoked on the target. If the file is outside a
# Spack-maintained prefix, the permissions should not be modified.
if islink(path):
if os.path.islink(path):
return
if os.path.isdir(path):
os.chmod(path, 0o755)
@@ -638,7 +635,7 @@ def chmod_x(entry, perms):
@system_path_filter
def copy_mode(src, dest):
"""Set the mode of dest to that of src unless it is a link."""
if islink(dest):
if os.path.islink(dest):
return
src_mode = os.stat(src).st_mode
dest_mode = os.stat(dest).st_mode
@@ -724,12 +721,26 @@ def install(src, dest):
copy(src, dest, _permissions=True)
@system_path_filter
def resolve_link_target_relative_to_the_link(link):
"""
os.path.isdir uses os.path.exists, which for links will check
the existence of the link target. If the link target is relative to
the link, we need to construct a pathname that is valid from
our cwd (which may not be the same as the link's directory)
"""
target = os.readlink(link)
if os.path.isabs(target):
return target
link_dir = os.path.dirname(os.path.abspath(link))
return os.path.join(link_dir, target)
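A standalone sketch of why this helper exists (paths hypothetical): a relative link target is only meaningful relative to the link's own directory, not to the process cwd:
import os

# Suppose /opt/pkg/bin/tool is a symlink whose target reads "../lib/tool".
# Testing os.path.isdir("../lib/tool") would resolve against our cwd, so we
# rebuild a path that is valid from anywhere, exactly as the helper does:
link = "/opt/pkg/bin/tool"  # hypothetical link path
target = "../lib/tool"      # what os.readlink(link) might return
if not os.path.isabs(target):
    target = os.path.join(os.path.dirname(os.path.abspath(link)), target)
# target is now "/opt/pkg/bin/../lib/tool", safe to stat from any cwd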
@system_path_filter
def copy_tree(
src: str,
dest: str,
symlinks: bool = True,
allow_broken_symlinks: bool = sys.platform != "win32",
ignore: Optional[Callable[[str], bool]] = None,
_permissions: bool = False,
):
@@ -752,8 +763,6 @@ def copy_tree(
src (str): the directory to copy
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
On Windows, setting this to True will raise an exception. Defaults to true on unix.
ignore (typing.Callable): function indicating which files to ignore
_permissions (bool): for internal use only
@@ -761,8 +770,6 @@ def copy_tree(
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
if allow_broken_symlinks and sys.platform == "win32":
raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
if _permissions:
tty.debug("Installing {0} to {1}".format(src, dest))
else:
@@ -776,11 +783,6 @@ def copy_tree(
if not files:
raise IOError("No such file or directory: '{0}'".format(src))
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
# all symlinks to this list while traversing the tree, then when finished, make all
# symlinks at the end.
links = []
for src in files:
abs_src = os.path.abspath(src)
if not abs_src.endswith(os.path.sep):
@@ -803,7 +805,7 @@ def copy_tree(
ignore=ignore,
follow_nonexisting=True,
):
if islink(s):
if os.path.islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
target = os.readlink(s)
@@ -817,9 +819,7 @@ def escaped_path(path):
tty.debug("Redirecting link {0} to {1}".format(target, new_target))
target = new_target
links.append((target, d, s))
continue
symlink(target, d)
elif os.path.isdir(link_target):
mkdirp(d)
else:
@@ -834,17 +834,9 @@ def escaped_path(path):
set_install_permissions(d)
copy_mode(s, d)
for target, d, s in links:
symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
if _permissions:
set_install_permissions(d)
copy_mode(s, d)
@system_path_filter
def install_tree(
src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
):
def install_tree(src, dest, symlinks=True, ignore=None):
"""Recursively install an entire directory tree rooted at *src*.
Same as :py:func:`copy_tree` with the addition of setting proper
@@ -855,21 +847,12 @@ def install_tree(
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (typing.Callable): function indicating which files to ignore
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
On Windows, setting this to True will raise an exception.
Raises:
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
copy_tree(
src,
dest,
symlinks=symlinks,
allow_broken_symlinks=allow_broken_symlinks,
ignore=ignore,
_permissions=True,
)
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
@system_path_filter
@@ -1273,12 +1256,7 @@ def traverse_tree(
Keyword Arguments:
order (str): Whether to do pre- or post-order traversal. Accepted
values are 'pre' and 'post'
ignore (typing.Callable): function indicating which files to ignore. This will also
ignore symlinks if they point to an ignored file (regardless of whether the symlink
is explicitly ignored); note this only supports one layer of indirection (i.e. if
you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
but not x). To avoid this, make sure the ignore function also ignores the symlink
paths too.
ignore (typing.Callable): function indicating which files to ignore
follow_nonexisting (bool): Whether to descend into directories in
``src`` that do not exist in ``dest``. Default is True
follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1305,24 +1283,11 @@ def traverse_tree(
dest_child = os.path.join(dest_path, f)
rel_child = os.path.join(rel_path, f)
# If the source path is a link and the link's source is ignored, then ignore the link too,
# but only do this if the ignore is defined.
if ignore is not None:
if islink(source_child) and not follow_links:
target = readlink(source_child)
all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
if any(map(ignore, all_parents)):
tty.warn(
f"Skipping {source_path} because the source or a part of the source's "
f"path is included in the ignores."
)
continue
# Treat as a directory
# TODO: for symlinks, os.path.isdir looks for the link target. If the
# target is relative to the link, then that may not resolve properly
# relative to our cwd - see resolve_link_target_relative_to_the_link
if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
@@ -1348,11 +1313,7 @@ def traverse_tree(
def lexists_islink_isdir(path):
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
if sys.platform == "win32":
if not os.path.lexists(path):
return False, False, False
return os.path.lexists(path), islink(path), os.path.isdir(path)
number of stat calls."""
# First try to lstat, so we know if it's a link or not.
try:
lst = os.lstat(path)
@@ -1567,7 +1528,7 @@ def remove_if_dead_link(path):
Parameters:
path (str): The potential dead link
"""
if islink(path) and not os.path.exists(path):
if os.path.islink(path) and not os.path.exists(path):
os.unlink(path)
@@ -1626,7 +1587,7 @@ def remove_linked_tree(path):
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
if os.path.exists(path):
if islink(path):
if os.path.islink(path):
shutil.rmtree(os.path.realpath(path), **kwargs)
os.unlink(path)
else:
@@ -2427,7 +2388,7 @@ def library_dependents(self):
"""
Set of directories where package binaries/libraries are located.
"""
return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
return set([self.pkg.prefix.bin]) | self._additional_library_dependents
def add_library_dependent(self, *dest):
"""
@@ -2440,9 +2401,9 @@ def add_library_dependent(self, *dest):
"""
for pth in dest:
if os.path.isfile(pth):
self._additional_library_dependents.add(pathlib.Path(pth).parent)
self._additional_library_dependents.add(os.path.dirname)
else:
self._additional_library_dependents.add(pathlib.Path(pth))
self._additional_library_dependents.add(pth)
@property
def rpaths(self):
@@ -2455,7 +2416,7 @@ def rpaths(self):
dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
for extra_path in self._addl_rpaths:
dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
return set([pathlib.Path(x) for x in dependent_libs])
return set(dependent_libs)
def add_rpath(self, *paths):
"""
@@ -2471,7 +2432,7 @@ def add_rpath(self, *paths):
"""
self._addl_rpaths = self._addl_rpaths | set(paths)
def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
def _link(self, path, dest_dir):
"""Perform link step of simulated rpathing, installing
symlinks of files in path to the dest_dir
location. This method deliberately prevents
@@ -2479,35 +2440,27 @@ def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
This is because it is both meaningless from an rpath
perspective, and will cause an error when Developer
mode is not enabled"""
def report_already_linked():
# We have either already symlinked or we are encountering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(str(dest_file))
tty.debug(
"Linking library %s to %s failed, " % (str(path), str(dest_file))
+ "already linked."
if already_linked
else "library with name %s already exists at location %s."
% (str(file_name), str(dest_dir))
)
file_name = path.name
dest_file = dest_dir / file_name
if not dest_file.exists() and dest_dir.exists() and not dest_file == path:
file_name = os.path.basename(path)
dest_file = os.path.join(dest_dir, file_name)
if os.path.exists(dest_dir) and not dest_file == path:
try:
symlink(str(path), str(dest_file))
symlink(path, dest_file)
# For py2 compatibility, we have to catch the specific Windows error code
# associated with trying to create a file that already exists (winerror 183)
# Catch OSErrors missed by the SymlinkError checks
except OSError as e:
if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
report_already_linked()
# We have either already symlinked or we are encountering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(dest_file)
tty.debug(
"Linking library %s to %s failed, " % (path, dest_file) + "already linked."
if already_linked
else "library with name %s already exists at location %s."
% (file_name, dest_dir)
)
pass
else:
raise e
# catch errors we raise ourselves from Spack
except llnl.util.symlink.AlreadyExistsError:
report_already_linked()
def establish_link(self):
"""
@@ -2740,7 +2693,7 @@ def remove_directory_contents(dir):
"""Remove all contents of a directory."""
if os.path.exists(dir):
for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
if os.path.isfile(entry) or islink(entry):
if os.path.isfile(entry) or os.path.islink(entry):
os.unlink(entry)
else:
shutil.rmtree(entry)

View File

@@ -14,7 +14,7 @@
from llnl.util import lang, tty
from ..string import plural
import spack.util.string
if sys.platform != "win32":
import fcntl
@@ -169,7 +169,7 @@ def _attempts_str(wait_time, nattempts):
if nattempts <= 1:
return ""
attempts = plural(nattempts, "attempt")
attempts = spack.util.string.plural(nattempts, "attempt")
return " after {} and {}".format(lang.pretty_seconds(wait_time), attempts)

View File

@@ -2,189 +2,77 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import os
import re
import shutil
import subprocess
import sys
import tempfile
from os.path import exists, join
from llnl.util import lang, tty
from ..path import system_path_filter
from llnl.util import lang
if sys.platform == "win32":
from win32file import CreateHardLink
is_windows = sys.platform == "win32"
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
def symlink(real_path, link_path):
"""
Create a link.
Create a symbolic link.
On non-Windows and Windows with System Administrator
privileges this will be a normal symbolic link via
os.symlink.
On Windows without privileges the link will be a
junction for a directory and a hardlink for a file.
On Windows the various link types are:
Symbolic Link: A link to a file or directory on the
same or different volume (drive letter) or even to
a remote file or directory (using UNC in its path).
Need System Administrator privileges to make these.
Hard Link: A link to a file on the same volume (drive
letter) only. Every file (file's data) has at least 1
hard link (file's name). But when this method creates
a new hard link there will be 2. Deleting all hard
links effectively deletes the file. Don't need System
Administrator privileges.
Junction: A link to a directory on the same or different
volume (drive letter) but not to a remote directory. Don't
need System Administrator privileges.
Parameters:
source_path (str): The real file or directory that the link points to.
Must be absolute OR relative to the link.
link_path (str): The path where the link will exist.
allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
doesn't exist. This will still raise an exception on Windows.
On Windows, use junctions if os.symlink fails.
"""
source_path = os.path.normpath(source_path)
win_source_path = source_path
link_path = os.path.normpath(link_path)
# Never allow broken links on Windows.
if sys.platform == "win32" and allow_broken_symlinks:
raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
if not allow_broken_symlinks:
# Perform basic checks to make sure symlinking will succeed
if os.path.lexists(link_path):
raise AlreadyExistsError(
f"Link path ({link_path}) already exists. Cannot create link."
)
if not os.path.exists(source_path):
if os.path.isabs(source_path) and not allow_broken_symlinks:
# An absolute source path that does not exist will result in a broken link.
raise SymlinkError(
f"Source path ({source_path}) is absolute but does not exist. Resulting "
f"link would be broken so not making link."
)
else:
# os.symlink can create a link when the given source path is relative to
# the link path. Emulate this behavior and check to see if the source exists
# relative to the link path ahead of link creation to prevent broken
# links from being made.
link_parent_dir = os.path.dirname(link_path)
relative_path = os.path.join(link_parent_dir, source_path)
if os.path.exists(relative_path):
# In order to work on windows, the source path needs to be modified to be
# relative because hardlink/junction don't resolve relative paths the same
# way as os.symlink. This is ignored on other operating systems.
win_source_path = relative_path
elif not allow_broken_symlinks:
raise SymlinkError(
f"The source path ({source_path}) is not relative to the link path "
f"({link_path}). Resulting link would be broken so not making link."
)
# Create the symlink
if sys.platform == "win32" and not _windows_can_symlink():
_windows_create_link(win_source_path, link_path)
if sys.platform != "win32":
os.symlink(real_path, link_path)
elif _win32_can_symlink():
# Windows requires target_is_directory=True when the target is a dir.
os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
else:
os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
try:
# Try to use junctions
_win32_junction(real_path, link_path)
except OSError as e:
if e.errno == errno.EEXIST:
# EEXIST error indicates that file we're trying to "link"
# is already present, don't bother trying to copy which will also fail
# just raise
raise
else:
# If all else fails, fall back to copying files
shutil.copyfile(real_path, link_path)
def islink(path: str) -> bool:
"""Override os.islink to give correct answer for spack logic.
For non-Windows: a link can be determined with the os.path.islink method.
Windows-only methods will return False for other operating systems.
For Windows: spack considers symlinks, hard links, and junctions to
all be links, so if any of those are True, return True.
Args:
path (str): path to check if it is a link.
Returns:
bool - whether the path is any kind of link or not.
"""
return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])
def islink(path):
return os.path.islink(path) or _win32_is_junction(path)
def _windows_is_hardlink(path: str) -> bool:
"""Determines if a path is a windows hard link. This is accomplished
by looking at the number of links using os.stat. A non-hard-linked file
will have a st_nlink value of 1, whereas a hard link will have a value
larger than 1. Note that both the original and hard-linked file will
return True because they share the same inode.
# '_win32' functions based on
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
def _win32_junction(path, link):
# junctions require absolute paths
if not os.path.isabs(link):
link = os.path.abspath(link)
Args:
path (str): Windows path to check for a hard link
# os.symlink will fail if link exists, emulate the behavior here
if exists(link):
raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
Returns:
bool - Whether the path is a hard link or not.
"""
if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
return False
if not os.path.isabs(path):
parent = os.path.join(link, os.pardir)
path = os.path.join(parent, path)
path = os.path.abspath(path)
return os.stat(path).st_nlink > 1
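A cross-platform sketch of the st_nlink rule this check relies on (illustrative; os.link also works on NTFS):
import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    a = os.path.join(d, "a.txt")
    b = os.path.join(d, "b.txt")
    open(a, "w").close()
    os.link(a, b)                   # second hard link to the same inode
    assert os.stat(a).st_nlink > 1  # both names now report nlink == 2
    assert os.stat(b).st_nlink > 1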
def _windows_is_junction(path: str) -> bool:
"""Determines if a path is a windows junction. A junction can be
determined using a bitwise AND operation between the file's
attribute bitmask and the known junction bitmask (0x400).
Args:
path (str): A non-file path
Returns:
bool - whether the path is a junction or not.
"""
if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
return False
import ctypes.wintypes
get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW # type: ignore[attr-defined]
get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
get_file_attributes.restype = ctypes.wintypes.DWORD
invalid_file_attributes = 0xFFFFFFFF
reparse_point = 0x400
file_attr = get_file_attributes(str(path))
if file_attr == invalid_file_attributes:
return False
return file_attr & reparse_point > 0
CreateHardLink(link, path)
@lang.memoized
def _windows_can_symlink() -> bool:
"""
Determines if windows is able to make a symlink depending on
the system configuration and the level of the user's permissions.
"""
if sys.platform != "win32":
tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
return False
def _win32_can_symlink():
tempdir = tempfile.mkdtemp()
dpath = os.path.join(tempdir, "dpath")
fpath = os.path.join(tempdir, "fpath.txt")
dpath = join(tempdir, "dpath")
fpath = join(tempdir, "fpath.txt")
dlink = os.path.join(tempdir, "dlink")
flink = os.path.join(tempdir, "flink.txt")
dlink = join(tempdir, "dlink")
flink = join(tempdir, "flink.txt")
import llnl.util.filesystem as fs
@@ -208,140 +96,24 @@ def _windows_can_symlink() -> bool:
return can_symlink_directories and can_symlink_files
def _windows_create_link(source: str, link: str):
def _win32_is_junction(path):
"""
Attempts to create a Hard Link or Junction as an alternative
to a symbolic link. This is called when symbolic links cannot
be created.
Determines if a path is a win32 junction
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
elif os.path.isdir(source):
_windows_create_junction(source=source, link=link)
elif os.path.isfile(source):
_windows_create_hard_link(path=source, link=link)
else:
raise SymlinkError(
f"Cannot create link from {source}. It is neither a file nor a directory."
)
if os.path.islink(path):
return False
if sys.platform == "win32":
import ctypes.wintypes
def _windows_create_junction(source: str, link: str):
"""Duly verify that the path and link are eligible to create a junction,
then create the junction.
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
elif not os.path.exists(source):
raise SymlinkError("Source path does not exist, cannot create a junction.")
elif os.path.lexists(link):
raise AlreadyExistsError("Link path already exists, cannot create a junction.")
elif not os.path.isdir(source):
raise SymlinkError("Source path is not a directory, cannot create a junction.")
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
GetFileAttributes.restype = ctypes.wintypes.DWORD
import subprocess
INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
cmd = ["cmd", "/C", "mklink", "/J", link, source]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
tty.debug(out.decode())
if proc.returncode != 0:
err = err.decode()
tty.error(err)
raise SymlinkError("Make junction command returned a non-zero return code.", err)
res = GetFileAttributes(path)
return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
def _windows_create_hard_link(path: str, link: str):
"""Duly verify that the path and link are eligible to create a hard
link, then create the hard link.
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
elif not os.path.exists(path):
raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
elif os.path.lexists(link):
raise AlreadyExistsError(f"Link path ({link}) already exists. Cannot create hard link.")
elif not os.path.isfile(path):
raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
else:
tty.debug(f"Creating hard link {link} pointing to {path}")
CreateHardLink(link, path)
def readlink(path: str):
"""Spack utility to override of os.readlink method to work cross platform"""
if _windows_is_hardlink(path):
return _windows_read_hard_link(path)
elif _windows_is_junction(path):
return _windows_read_junction(path)
else:
return os.readlink(path)
def _windows_read_hard_link(link: str) -> str:
"""Find all of the files that point to the same inode as the link"""
if sys.platform != "win32":
raise SymlinkError("Can't read hard link on non-Windows OS.")
link = os.path.abspath(link)
fsutil_cmd = ["fsutil", "hardlink", "list", link]
proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
out, err = proc.communicate()
if proc.returncode != 0:
raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")
# fsutil response does not include the drive name, so append it back to each linked file.
drive, link_tail = os.path.splitdrive(os.path.abspath(link))
links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
links.remove(link)
if len(links) == 1:
return links.pop()
elif len(links) > 1:
# TODO: How best to handle the case where 3 or more paths point to a single inode?
raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
else:
raise SymlinkError("Cannot determine hard link source path.")
def _windows_read_junction(link: str):
"""Find the path that a junction points to."""
if sys.platform != "win32":
raise SymlinkError("Can't read junction on non-Windows OS.")
link = os.path.abspath(link)
link_basename = os.path.basename(link)
link_parent = os.path.dirname(link)
fsutil_cmd = ["dir", "/a:l", link_parent]
proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
out, err = proc.communicate()
if proc.returncode != 0:
raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
if matches:
return matches.group(1)
else:
raise SymlinkError("Could not find junction path.")
@system_path_filter
def resolve_link_target_relative_to_the_link(link):
"""
os.path.isdir uses os.path.exists, which for links will check
the existence of the link target. If the link target is relative to
the link, we need to construct a pathname that is valid from
our cwd (which may not be the same as the link's directory)
"""
target = readlink(link)
if os.path.isabs(target):
return target
link_dir = os.path.dirname(os.path.abspath(link))
return os.path.join(link_dir, target)
class SymlinkError(RuntimeError):
"""Exception class for errors raised while creating symlinks,
junctions and hard links
"""
class AlreadyExistsError(SymlinkError):
"""Link path already exists."""
return False

View File

@@ -38,13 +38,10 @@ def _search_duplicate_compilers(error_cls):
import ast
import collections
import collections.abc
import glob
import inspect
import itertools
import pathlib
import pickle
import re
import warnings
from urllib.request import urlopen
import llnl.util.lang
@@ -801,76 +798,3 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
errors.append(err)
return errors
#: Sanity checks on package directives
external_detection = AuditClass(
group="externals",
tag="PKG-EXTERNALS",
description="Sanity checks for external software detection",
kwargs=("pkgs",),
)
def packages_with_detection_tests():
"""Return the list of packages with a corresponding detection_test.yaml file."""
import spack.config
import spack.util.path
to_be_tested = []
for current_repo in spack.repo.PATH.repos:
namespace = current_repo.namespace
packages_dir = pathlib.PurePath(current_repo.packages_path)
pattern = packages_dir / "**" / "detection_test.yaml"
pkgs_with_tests = [
f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern))
]
to_be_tested.extend(pkgs_with_tests)
return to_be_tested
@external_detection
def _test_detection_by_executable(pkgs, error_cls):
"""Test drive external detection for packages"""
import spack.detection
errors = []
# Filter the packages and retain only the ones with detection tests
pkgs_with_tests = packages_with_detection_tests()
selected_pkgs = []
for current_package in pkgs_with_tests:
_, unqualified_name = spack.repo.partition_package_name(current_package)
# Check for both unqualified name and qualified name
if unqualified_name in pkgs or current_package in pkgs:
selected_pkgs.append(current_package)
selected_pkgs.sort()
if not selected_pkgs:
summary = "No detection test to run"
details = [f' "{p}" has no detection test' for p in pkgs]
warnings.warn("\n".join([summary] + details))
return errors
for pkg_name in selected_pkgs:
for idx, test_runner in enumerate(
spack.detection.detection_tests(pkg_name, spack.repo.PATH)
):
specs = test_runner.execute()
expected_specs = test_runner.expected_specs
not_detected = set(expected_specs) - set(specs)
if not_detected:
summary = pkg_name + ": cannot detect some specs"
details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)]
errors.append(error_cls(summary=summary, details=details))
not_expected = set(specs) - set(expected_specs)
if not_expected:
summary = pkg_name + ": detected unexpected specs"
msg = '"{0}" was detected, but was not expected [test_id={1}]'
details = [msg.format(s, idx) for s in sorted(not_expected)]
errors.append(error_cls(summary=summary, details=details))
return errors

View File

@@ -9,6 +9,7 @@
import io
import itertools
import json
import multiprocessing.pool
import os
import re
import shutil
@@ -34,7 +35,6 @@
import spack.cmd
import spack.config as config
import spack.database as spack_db
import spack.error
import spack.hooks
import spack.hooks.sbang
import spack.mirror
@@ -49,7 +49,6 @@
import spack.util.gpg
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
@@ -647,7 +646,8 @@ class BuildManifestVisitor(BaseDirectoryVisitor):
directories."""
def __init__(self):
# Save unique identifiers of hardlinks to avoid relocating them multiple times
# Save unique identifiers of files to avoid
# relocating hardlink files for each path.
self.visited = set()
# Lists of files we will check
@@ -656,8 +656,6 @@ def __init__(self):
def seen_before(self, root, rel_path):
stat_result = os.lstat(os.path.join(root, rel_path))
if stat_result.st_nlink == 1:
return False
identifier = (stat_result.st_dev, stat_result.st_ino)
if identifier in self.visited:
return True
@@ -878,18 +876,32 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
db: A spack database used for adding specs and then writing the index.
temp_dir (str): Location to write index.json and hash for pushing
concurrency (int): Number of parallel processes to use when fetching
"""
for file in file_list:
contents = read_method(file)
# Need full spec.json name or this gets confused with index.json.
if file.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(contents)
fetched_spec = Spec.from_dict(specfile_json)
elif file.endswith(".json"):
fetched_spec = Spec.from_json(contents)
else:
continue
Return:
None
"""
def _fetch_spec_from_mirror(spec_url):
spec_file_contents = read_method(spec_url)
if spec_file_contents:
# Need full spec.json name or this gets confused with index.json.
if spec_url.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
return Spec.from_dict(specfile_json)
if spec_url.endswith(".json"):
return Spec.from_json(spec_file_contents)
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
fetched_specs = tp.map(
llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
)
finally:
tp.terminate()
tp.join()
for fetched_spec in fetched_specs:
db.add(fetched_spec, None)
db.mark(fetched_spec, "in_buildcache", True)
@@ -1419,7 +1431,7 @@ def try_fetch(url_to_fetch):
try:
stage.fetch()
except spack.error.FetchError:
except web_util.FetchError:
stage.destroy()
return None
@@ -1582,10 +1594,9 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
for rel_path in buildinfo[key]:
stat_result = os.lstat(os.path.join(root, rel_path))
identifier = (stat_result.st_dev, stat_result.st_ino)
if stat_result.st_nlink > 1:
if identifier in visited:
continue
visited.add(identifier)
if identifier in visited:
continue
visited.add(identifier)
new_list.append(rel_path)
buildinfo[key] = new_list
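A minimal standalone restatement of the (st_dev, st_ino) dedup pattern used in this hunk (names hypothetical):
import os

def dedupe_hardlinks(root, rel_paths):
    """Keep one representative per inode so hard-linked files are
    processed (e.g. relocated) only once."""
    visited, unique = set(), []
    for rel_path in rel_paths:
        stat_result = os.lstat(os.path.join(root, rel_path))
        if stat_result.st_nlink > 1:
            identifier = (stat_result.st_dev, stat_result.st_ino)
            if identifier in visited:
                continue
            visited.add(identifier)
        unique.append(rel_path)
    return unique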
@@ -1802,11 +1813,10 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
m.linkname = m.linkname[result.end() :]
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
def extract_tarball(spec, download_result, unsigned=False, force=False):
"""
extract binary tarball for given package into install area
"""
timer.start("extract")
if os.path.exists(spec.prefix):
if force:
shutil.rmtree(spec.prefix)
@@ -1886,9 +1896,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
os.remove(tarfile_path)
os.remove(specfile_path)
timer.stop("extract")
timer.start("relocate")
try:
relocate_package(spec)
except Exception as e:
@@ -1909,7 +1917,6 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
if os.path.exists(filename):
os.remove(filename)
_delete_staged_downloads(download_result)
timer.stop("relocate")
def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
@@ -2147,7 +2154,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
if not os.path.exists(stage.save_filename):
try:
stage.fetch()
except spack.error.FetchError:
except web_util.FetchError:
continue
tty.debug("Found key {0}".format(fingerprint))
@@ -2299,7 +2306,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
try:
stage.fetch()
break
except spack.error.FetchError as e:
except web_util.FetchError as e:
tty.debug(e)
else:
if fail_if_missing:

View File

@@ -476,16 +476,16 @@ def ensure_executables_in_path_or_raise(
def _add_externals_if_missing() -> None:
search_list = [
# clingo
"cmake",
"bison",
spack.repo.PATH.get_pkg_class("cmake"),
spack.repo.PATH.get_pkg_class("bison"),
# GnuPG
"gawk",
spack.repo.PATH.get_pkg_class("gawk"),
# develop deps
"git",
spack.repo.PATH.get_pkg_class("git"),
]
if IS_WINDOWS:
search_list.append("winbison")
externals = spack.detection.by_path(search_list)
search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
externals = spack.detection.by_executable(search_list)
# System git is typically deprecated, so mark as non-buildable to force it as external
non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)

View File

@@ -15,9 +15,9 @@
from llnl.util import tty
import spack.build_environment
import spack.environment
import spack.tengine
import spack.util.cpus
import spack.util.executable
from spack.environment import depfile
@@ -137,7 +137,7 @@ def _install_with_depfile(self) -> None:
"-C",
str(self.environment_root()),
"-j",
str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
str(spack.build_environment.determine_number_of_jobs(parallel=True)),
**kwargs,
)

View File

@@ -43,7 +43,6 @@
from typing import List, Tuple
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import join_path
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink
@@ -69,7 +68,7 @@
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
from spack.util.cpus import determine_number_of_jobs
from spack.util.cpus import cpus_available
from spack.util.environment import (
SYSTEM_DIRS,
EnvironmentModifications,
@@ -83,6 +82,7 @@
from spack.util.executable import Executable
from spack.util.log_parse import make_log_context, parse_log_events
from spack.util.module_cmd import load_module, module, path_from_modules
from spack.util.string import plural
#
# This can be set by the user to globally disable parallel builds.
@@ -537,6 +537,39 @@ def update_compiler_args_for_dep(dep):
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
def determine_number_of_jobs(
parallel=False, command_line=None, config_default=None, max_cpus=None
):
"""
Packages that require sequential builds need 1 job. Otherwise we use the
number of jobs set on the command line. If not set, then we use the config
defaults (which are usually set through the builtin config scope), but we
cap to the number of CPUs available to avoid oversubscription.
Parameters:
parallel (bool or None): true when package supports parallel builds
command_line (int or None): command line override
config_default (int or None): config default number of jobs
max_cpus (int or None): maximum number of CPUs available. When None, this
value is automatically determined.
"""
if not parallel:
return 1
if command_line is None and "command_line" in spack.config.scopes():
command_line = spack.config.get("config:build_jobs", scope="command_line")
if command_line is not None:
return command_line
max_cpus = max_cpus or cpus_available()
# in some rare cases _builtin config may not be set, so default to max 16
config_default = config_default or spack.config.get("config:build_jobs", 16)
return min(max_cpus, config_default)
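A standalone restatement of the precedence above, with hypothetical values (the real function also consults spack.config for a command-line scope):
def _jobs(parallel, command_line=None, config_default=16, max_cpus=8):
    # 1 job for sequential builds; an explicit command-line value wins;
    # otherwise the config default, capped by the available CPUs.
    if not parallel:
        return 1
    if command_line is not None:
        return command_line
    return min(max_cpus, config_default)

assert _jobs(False) == 1
assert _jobs(True, command_line=4) == 4
assert _jobs(True, config_default=64, max_cpus=8) == 8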
def set_module_variables_for_package(pkg):
"""Populate the Python module of a package with some useful global names.
This makes things easier for package writers.

View File

@@ -274,6 +274,7 @@ def std_args(pkg, generator=None):
generator,
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
define("CMAKE_BUILD_TYPE", build_type),
define("BUILD_TESTING", pkg.run_tests),
]
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
@@ -450,6 +451,7 @@ def cmake_args(self):
* CMAKE_INSTALL_PREFIX
* CMAKE_BUILD_TYPE
* BUILD_TESTING
which will be set automatically.
"""

View File

@@ -154,7 +154,7 @@ def cuda_flags(arch_list):
conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")

View File

@@ -95,7 +95,7 @@ def makefile_root(self):
return self.stage.source_path
@property
def makefile_name(self):
def nmakefile_name(self):
"""Name of the current makefile. This is currently an empty value.
If a project defines this value, it will be used with the /f argument
to provide nmake an explicit makefile. This is useful in scenarios where
@@ -126,8 +126,8 @@ def build(self, pkg, spec, prefix):
"""Run "nmake" on the build targets specified by the builder."""
opts = self.std_nmake_args
opts += self.nmake_args()
if self.makefile_name:
opts.append("/F{}".format(self.makefile_name))
if self.nmakefile_name:
opts.append("/f {}".format(self.nmakefile_name))
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake(
*opts, *self.build_targets, ignore_quotes=self.ignore_quotes
@@ -139,8 +139,8 @@ def install(self, pkg, spec, prefix):
opts = self.std_nmake_args
opts += self.nmake_args()
opts += self.nmake_install_args()
if self.makefile_name:
opts.append("/F{}".format(self.makefile_name))
if self.nmakefile_name:
opts.append("/f {}".format(self.nmakefile_name))
opts.append(self.define("PREFIX", prefix))
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake(

View File

@@ -61,11 +61,6 @@ def component_prefix(self):
"""Path to component <prefix>/<component>/<version>."""
return self.prefix.join(join_path(self.component_dir, self.spec.version))
@property
def env_script_args(self):
"""Additional arguments to pass to vars.sh script."""
return ()
def install(self, spec, prefix):
self.install_component(basename(self.url_for_version(spec.version)))
@@ -129,7 +124,7 @@ def setup_run_environment(self, env):
if "~envmods" not in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
join_path(self.component_prefix, "env", "vars.sh"), *self.env_script_args
join_path(self.component_prefix, "env", "vars.sh")
)
)

View File

@@ -16,7 +16,6 @@
import spack.builder
import spack.config
import spack.deptypes as dt
import spack.detection
import spack.multimethod
import spack.package_base
@@ -227,48 +226,7 @@ def update_external_dependencies(self, extendee_spec=None):
python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
def get_external_python_for_prefix(self):
"""
For an external package that extends python, find the most likely spec for the python
it depends on.
First search: an "installed" external that shares a prefix with this package
Second search: a configured external that shares a prefix with this package
Third search: search this prefix for a python package
Returns:
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
"""
python_externals_installed = [
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
]
if python_externals_installed:
return python_externals_installed[0]
python_external_config = spack.config.get("packages:python:externals", [])
python_externals_configured = [
spack.spec.parse_with_version_concrete(item["spec"])
for item in python_external_config
if item["prefix"] == self.spec.external_path
]
if python_externals_configured:
return python_externals_configured[0]
python_externals_detection = spack.detection.by_path(
["python"], path_hints=[self.spec.external_path]
)
python_externals_detected = [
d.spec
for d in python_externals_detection.get("python", [])
if d.prefix == self.spec.external_path
]
if python_externals_detected:
return python_externals_detected[0]
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
class PythonPackage(PythonExtension):
@@ -315,16 +273,54 @@ def list_url(cls):
name = cls.pypi.split("/")[0]
return "https://pypi.org/simple/" + name + "/"
def get_external_python_for_prefix(self):
"""
For an external package that extends python, find the most likely spec for the python
it depends on.
First search: an "installed" external that shares a prefix with this package
Second search: a configured external that shares a prefix with this package
Third search: search this prefix for a python package
Returns:
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
"""
python_externals_installed = [
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
]
if python_externals_installed:
return python_externals_installed[0]
python_external_config = spack.config.get("packages:python:externals", [])
python_externals_configured = [
spack.spec.parse_with_version_concrete(item["spec"])
for item in python_external_config
if item["prefix"] == self.spec.external_path
]
if python_externals_configured:
return python_externals_configured[0]
python_externals_detection = spack.detection.by_executable(
[spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
)
python_externals_detected = [
d.spec
for d in python_externals_detection.get("python", [])
if d.prefix == self.spec.external_path
]
if python_externals_detected:
return python_externals_detected[0]
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
@property
def headers(self):
"""Discover header files in platlib."""
# Remove py- prefix in package name
name = self.spec.name[3:]
# Headers may be in either location
include = self.prefix.join(self.spec["python"].package.include).join(name)
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
include = self.prefix.join(self.spec["python"].package.include)
platlib = self.prefix.join(self.spec["python"].package.platlib)
headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)
if headers:
@@ -338,14 +334,13 @@ def libs(self):
"""Discover libraries in platlib."""
# Remove py- prefix in package name
name = self.spec.name[3:]
library = "lib" + self.spec.name[3:].replace("-", "?")
root = self.prefix.join(self.spec["python"].package.platlib)
root = self.prefix.join(self.spec["python"].package.platlib).join(name)
libs = fs.find_all_libraries(root, recursive=True)
if libs:
return libs
for shared in [True, False]:
libs = fs.find_libraries(library, root, shared=shared, recursive=True)
if libs:
return libs
msg = "Unable to recursively locate {} libraries in {}"
raise NoLibrariesError(msg.format(self.spec.name, root))

View File

@@ -10,10 +10,9 @@
import llnl.util.tty as tty
import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import build_system, extends, maintainers
from spack.package_base import PackageBase
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError
@@ -93,7 +92,7 @@ def install(self, pkg, spec, prefix):
"--copy",
"-i",
"-j",
str(determine_number_of_jobs(parallel=parallel)),
str(determine_number_of_jobs(parallel)),
"--",
os.getcwd(),
]

View File

@@ -308,7 +308,7 @@ def append_dep(s, d):
dependencies.append({"spec": s, "depends": d})
for spec in spec_list:
for s in spec.traverse(deptype="all"):
for s in spec.traverse(deptype=all):
if s.external:
tty.msg("Will not stage external pkg: {0}".format(s))
continue
@@ -316,7 +316,7 @@ def append_dep(s, d):
skey = _spec_deps_key(s)
spec_labels[skey] = s
for d in s.dependencies(deptype="all"):
for d in s.dependencies(deptype=all):
dkey = _spec_deps_key(d)
if d.external:
tty.msg("Will not stage external dep: {0}".format(d))
@@ -1029,18 +1029,13 @@ def main_script_replacements(cmd):
job_vars = job_object.setdefault("variables", {})
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
job_object["needs"] = []
if spec_label in dependencies:
if enable_artifacts_buildcache:
# Get dependencies transitively, so they're all
# available in the artifacts buildcache.
dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
else:
# In this case, "needs" is only used for scheduling
# purposes, so we only get the direct dependencies.

View File

@@ -11,7 +11,6 @@
from textwrap import dedent
from typing import List, Match, Tuple
import llnl.string
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
from llnl.util.lang import attr_setdefault, index_by
@@ -30,6 +29,7 @@
import spack.user_environment as uenv
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.string
# cmd has a submodule called "list" so preserve the python list module
python_list = list
@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
print()
header = "%s{%s} / %s{%s}" % (
spack.spec.ARCHITECTURE_COLOR,
spack.spec.architecture_color,
architecture if architecture else "no arch",
spack.spec.COMPILER_COLOR,
spack.spec.compiler_color,
f"{compiler.display_str}" if compiler else "no compiler",
)
@@ -516,7 +516,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
category, e.g. if pkg_type is "installed" then the message
would be "3 installed packages"
"""
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
tty.msg("%s" % spack.util.string.plural(len(specs), pkg_type + " package"))
def spack_is_git_repo():

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
import llnl.util.tty.colify
import llnl.util.tty.color as cl
import spack.audit
@@ -21,15 +20,6 @@ def setup_parser(subparser):
# Audit configuration files
sp.add_parser("configs", help="audit configuration files")
# Audit package recipes
external_parser = sp.add_parser("externals", help="check external detection in packages")
external_parser.add_argument(
"--list",
action="store_true",
dest="list_externals",
help="if passed, list which packages have detection tests",
)
# Https and other linting
https_parser = sp.add_parser("packages-https", help="check https in packages")
https_parser.add_argument(
@@ -39,7 +29,7 @@ def setup_parser(subparser):
# Audit package recipes
pkg_parser = sp.add_parser("packages", help="audit package recipes")
for group in [pkg_parser, https_parser, external_parser]:
for group in [pkg_parser, https_parser]:
group.add_argument(
"name",
metavar="PKG",
@@ -72,18 +62,6 @@ def packages_https(parser, args):
_process_reports(reports)
def externals(parser, args):
if args.list_externals:
msg = "@*{The following packages have detection tests:}"
tty.msg(cl.colorize(msg))
llnl.util.tty.colify.colify(spack.audit.packages_with_detection_tests(), indent=2)
return
pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
_process_reports(reports)
def list(parser, args):
for subcommand, check_tags in spack.audit.GROUPS.items():
print(cl.colorize("@*b{" + subcommand + "}:"))
@@ -100,7 +78,6 @@ def list(parser, args):
def audit(parser, args):
subcommands = {
"configs": configs,
"externals": externals,
"packages": packages,
"packages-https": packages_https,
"list": list,

View File

@@ -13,7 +13,6 @@
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.string import plural
from llnl.util.lang import elide_list
import spack.binary_distribution as bindist
@@ -21,7 +20,6 @@
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.error
import spack.mirror
import spack.relocate
import spack.repo
@@ -33,6 +31,7 @@
from spack.cmd import display_specs
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage
from spack.util.string import plural
description = "create, download and install binary packages"
section = "packaging"
@@ -79,11 +78,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
"Alternatively, one can decide to build a cache for only the package or only the "
"dependencies",
)
push.add_argument(
"--fail-fast",
action="store_true",
help="stop pushing on first failure (default is best effort)",
)
arguments.add_common_arguments(push, ["specs"])
push.set_defaults(func=push_fn)
@@ -302,7 +296,6 @@ def push_fn(args):
tty.info(f"Selected {len(specs)} specs to push to {url}")
skipped = []
failed = []
# tty printing
color = clr.get_color_when()
@@ -333,17 +326,11 @@ def push_fn(args):
except bindist.NoOverwriteException:
skipped.append(format_spec(spec))
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
raise
failed.append((format_spec(spec), e))
if skipped:
if len(specs) == 1:
tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
elif len(skipped) == len(specs):
tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
tty.info("All specs are already in the buildcache. Use --force to overwite them.")
else:
tty.info(
"The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -353,17 +340,6 @@ def push_fn(args):
)
)
if failed:
if len(failed) == 1:
raise failed[0][1]
raise spack.error.SpackError(
f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
"\n".join(
elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
),
)
def install_fn(args):
"""install from a binary package"""
@@ -527,7 +503,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
temp_stage.create()
temp_stage.fetch()
web_util.push_to_url(local_path, dest_url, keep_original=True)
except spack.error.FetchError as e:
except web_util.FetchError as e:
# Expected, since we have to try all the possible extensions
tty.debug("no such file: {0}".format(src_url))
tty.debug(e)

View File

@@ -66,7 +66,7 @@ def setup_parser(subparser):
modes_parser.add_argument(
"--verify", action="store_true", default=False, help="verify known package checksums"
)
arguments.add_common_arguments(subparser, ["package", "jobs"])
arguments.add_common_arguments(subparser, ["package"])
subparser.add_argument(
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
)
@@ -96,7 +96,7 @@ def checksum(parser, args):
# Add latest version if requested
if args.latest:
remote_versions = pkg.fetch_remote_versions(args.jobs)
remote_versions = pkg.fetch_remote_versions()
if len(remote_versions) > 0:
latest_version = sorted(remote_versions.keys(), reverse=True)[0]
versions.append(latest_version)
@@ -119,13 +119,13 @@ def checksum(parser, args):
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions(args.jobs)
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]
if len(versions) <= 0:
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions(args.jobs)
remote_versions = pkg.fetch_remote_versions()
url_dict = remote_versions
if not url_dict:
@@ -239,7 +239,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
parsed_version = Version(contents_version.group(1))
if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
split_contents[i:i] = [new_versions.pop(0)[1], " # FIX ME", "\n"]
num_versions_added += 1
elif parsed_version == new_versions[0][0]:

View File

@@ -19,7 +19,6 @@
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
@@ -254,8 +253,6 @@ def ci_rebuild(args):
check a single spec against the remote mirror, and rebuild it from source if the mirror does
not contain the hash
"""
rebuild_timer = timer.Timer()
env = spack.cmd.require_active_env(cmd_name="ci rebuild")
# Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -739,14 +736,6 @@ def ci_rebuild(args):
print(reproduce_msg)
rebuild_timer.stop()
try:
with open("install_timers.json", "w") as timelog:
extra_attributes = {"name": ".ci-rebuild"}
rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:
tty.debug(str(e))
# Tie job success/failure to the success/failure of building the spec
return install_exit_code
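The timing lines in this hunk write the rebuild timer out as JSON without letting a logging failure break the job. A self-contained sketch of that shape, with field names guessed rather than taken from spack.util.timer:

    import json
    import time

    class Timer:
        """Wall-clock timer that can dump its measurement as JSON."""

        def __init__(self):
            self._start = time.time()
            self._stop = None

        def stop(self):
            self._stop = time.time()

        def write_json(self, stream, extra_attributes=None):
            record = {"seconds": (self._stop or time.time()) - self._start}
            record.update(extra_attributes or {})
            json.dump(record, stream)

    rebuild_timer = Timer()
    rebuild_timer.stop()
    try:
        with open("install_timers.json", "w") as timelog:
            rebuild_timer.write_json(timelog, extra_attributes={"name": ".ci-rebuild"})
    except Exception as e:
        print(e)  # never let timing output fail the build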

View File

@@ -812,9 +812,6 @@ def bash(args: Namespace, out: IO) -> None:
parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)
aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
out.write(f'SPACK_ALIASES="{aliases}"\n\n')
writer = BashCompletionWriter(parser.prog, out, args.aliases)
writer.write(parser)

View File

@@ -12,7 +12,7 @@
import spack.cmd
import spack.config
import spack.deptypes as dt
import spack.dependency as dep
import spack.environment as ev
import spack.mirror
import spack.modules
@@ -114,13 +114,16 @@ def __call__(self, parser, namespace, jobs, option_string):
class DeptypeAction(argparse.Action):
"""Creates a flag of valid dependency types from a deptype argument."""
"""Creates a tuple of valid dependency types from a deptype argument."""
def __call__(self, parser, namespace, values, option_string=None):
if not values or values == "all":
deptype = dt.ALL
else:
deptype = dt.canonicalize(values.split(","))
deptype = dep.all_deptypes
if values:
deptype = tuple(x.strip() for x in values.split(","))
if deptype == ("all",):
deptype = "all"
deptype = dep.canonical_deptype(deptype)
setattr(namespace, self.dest, deptype)
@@ -282,8 +285,9 @@ def deptype():
return Args(
"--deptype",
action=DeptypeAction,
default=dt.ALL,
help="comma-separated list of deptypes to traverse (default=%s)" % ",".join(dt.ALL_TYPES),
default=dep.all_deptypes,
help="comma-separated list of deptypes to traverse\n\ndefault=%s"
% ",".join(dep.all_deptypes),
)
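Both sides of this hunk rely on a custom argparse.Action to canonicalize a comma-separated --deptype value at parse time. A minimal runnable version of that idea:

    import argparse

    class CommaTupleAction(argparse.Action):
        """Split a comma-separated option value into a canonical tuple."""

        def __call__(self, parser, namespace, values, option_string=None):
            items = tuple(x.strip() for x in values.split(",")) if values else ()
            setattr(namespace, self.dest, items)

    parser = argparse.ArgumentParser()
    parser.add_argument("--deptype", action=CommaTupleAction, default=("build", "link"))
    assert parser.parse_args(["--deptype", "build, run"]).deptype == ("build", "run")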

View File

@@ -10,7 +10,6 @@
import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.error
import spack.paths
import spack.spec
@@ -47,9 +46,9 @@ def __init__(self, context="build"):
raise ValueError("context can only be build or test")
if context == "build":
self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
self.direct_deps = ("build", "link", "run")
else:
self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
self.direct_deps = ("build", "test", "link", "run")
self.has_uninstalled_deps = False
@@ -72,8 +71,8 @@ def accept(self, item):
def neighbors(self, item):
# Direct deps: follow build & test edges.
# Transitive deps: follow link / run.
depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
return item.edge.spec.edges_to_dependencies(depflag=depflag)
deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
return item.edge.spec.edges_to_dependencies(deptype=deptypes)
def emulate_env_utility(cmd_name, context, args):

View File

@@ -185,7 +185,7 @@ def compiler_list(args):
os_str = os
if target:
os_str += "-%s" % target
cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
tty.hline(colorize(cname), char="-")
colify(reversed(sorted(c.spec.display_str for c in compilers)))

View File

@@ -822,7 +822,7 @@ def get_versions(args, name):
if args.url is not None and args.template != "bundle" and valid_url:
# Find available versions
try:
url_dict = spack.url.find_versions_of_archive(args.url)
url_dict = spack.util.web.find_versions_of_archive(args.url)
except UndetectableVersionError:
# Use fake versions
tty.warn("Couldn't detect version in: {0}".format(args.url))

View File

@@ -74,7 +74,7 @@ def dependencies(parser, args):
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,
depflag=args.deptype,
deptype=args.deptype,
)
if spec.name in dependencies:

View File

@@ -9,7 +9,6 @@
import sys
import tempfile
import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -29,6 +28,7 @@
import spack.schema.env
import spack.spec
import spack.tengine
import spack.util.string as string
from spack.util.environment import EnvironmentModifications
description = "manage virtual environments"
@@ -239,13 +239,6 @@ def env_deactivate_setup_parser(subparser):
const="bat",
help="print bat commands to activate the environment",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to activate the environment",
)
def env_deactivate(args):

View File

@@ -5,9 +5,7 @@
import argparse
import errno
import os
import re
import sys
from typing import List, Optional
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@@ -56,7 +54,7 @@ def setup_parser(subparser):
find_parser.add_argument(
"--all", action="store_true", help="search for all packages that Spack knows about"
)
spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags"])
find_parser.add_argument("packages", nargs=argparse.REMAINDER)
find_parser.epilog = (
'The search is by default on packages tagged with the "build-tools" or '
@@ -122,23 +120,46 @@ def external_find(args):
else:
tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)
# Outside the Cray manifest, the search is done by tag for performance reasons,
# since tags are cached.
# If the user specified both --all and --tag, then --all has precedence
if args.all or args.packages:
# Each detectable package has at least the detectable tag
args.tags = ["detectable"]
elif not args.tags:
# If the user didn't specify anything, search for build tools by default
# If the user didn't specify anything, search for build tools by default
if not args.tags and not args.all and not args.packages:
args.tags = ["core-packages", "build-tools"]
candidate_packages = packages_to_search_for(
names=args.packages, tags=args.tags, exclude=args.exclude
)
detected_packages = spack.detection.by_path(
candidate_packages, path_hints=args.path, max_workers=args.jobs
)
# If the user specified both --all and --tag, then --all has precedence
if args.all and args.tags:
args.tags = []
# Construct the list of possible packages to be detected
pkg_cls_to_check = []
# Add the packages that have been required explicitly
if args.packages:
pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
if args.tags:
allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
if args.tags and not pkg_cls_to_check:
# If we arrived here we didn't have any explicit package passed
# as argument, which means to search all packages.
# Since tags are cached it's much faster to construct what we need
# to search directly, rather than filtering after the fact
pkg_cls_to_check = [
spack.repo.PATH.get_pkg_class(pkg_name)
for tag in args.tags
for pkg_name in spack.repo.PATH.packages_with_tags(tag)
]
pkg_cls_to_check = list(set(pkg_cls_to_check))
# If the list of packages is empty, search for every possible package
if not args.tags and not pkg_cls_to_check:
pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())
# If the user specified any packages to exclude from external find, add them here
if args.exclude:
pkg_cls_to_check = [pkg for pkg in pkg_cls_to_check if pkg.name not in args.exclude]
detected_packages = spack.detection.by_executable(pkg_cls_to_check, path_hints=args.path)
detected_packages.update(spack.detection.by_library(pkg_cls_to_check, path_hints=args.path))
new_entries = spack.detection.update_configuration(
detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -152,28 +173,6 @@ def external_find(args):
tty.msg("No new external packages detected")
def packages_to_search_for(
*, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
):
result = []
for current_tag in tags:
result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True))
if names:
# Match both fully qualified and unqualified
parts = [rf"(^{x}$|[.]{x}$)" for x in names]
select_re = re.compile("|".join(parts))
result = [x for x in result if select_re.search(x)]
if exclude:
# Match both fully qualified and unqualified
parts = [rf"(^{x}$|[.]{x}$)" for x in exclude]
select_re = re.compile("|".join(parts))
result = [x for x in result if not select_re.search(x)]
return result
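packages_to_search_for matches names both unqualified ("cmake") and fully qualified ("builtin.cmake") through an alternation regex. A small standalone check of that filter:

    import re

    def filter_names(candidates, names):
        """Keep candidates matching either the bare name or a '.name' suffix."""
        parts = [rf"(^{re.escape(x)}$|[.]{re.escape(x)}$)" for x in names]
        select = re.compile("|".join(parts))
        return [c for c in candidates if select.search(c)]

    assert filter_names(["builtin.cmake", "cmake", "openmpi"], ["cmake"]) == [
        "builtin.cmake",
        "cmake",
    ]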
def external_read_cray_manifest(args):
_collect_and_consume_cray_manifest_files(
manifest_file=args.file,

View File

@@ -74,19 +74,19 @@ def graph(parser, args):
if args.static:
args.dot = True
static_graph_dot(specs, depflag=args.deptype)
static_graph_dot(specs, deptype=args.deptype)
return
if args.dot:
builder = SimpleDAG()
if args.color:
builder = DAGWithDependencyTypes()
graph_dot(specs, builder=builder, depflag=args.deptype)
graph_dot(specs, builder=builder, deptype=args.deptype)
return
# ascii is default: user doesn't need to provide it explicitly
debug = spack.config.get("config:debug")
graph_ascii(specs[0], debug=debug, depflag=args.deptype)
graph_ascii(specs[0], debug=debug, deptype=args.deptype)
for spec in specs[1:]:
print() # extra line bt/w independent graphs
graph_ascii(spec, debug=debug)

View File

@@ -11,7 +11,6 @@
from llnl.util.tty.colify import colify
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test
import spack.repo
@@ -65,11 +64,11 @@ def section_title(s):
def version(s):
return spack.spec.VERSION_COLOR + s + plain_format
return spack.spec.version_color + s + plain_format
def variant(s):
return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format
return spack.spec.enabled_variant_color + s + plain_format
class VariantFormatter:
@@ -161,7 +160,7 @@ def print_dependencies(pkg):
for deptype in ("build", "link", "run"):
color.cprint("")
color.cprint(section_title("%s Dependencies:" % deptype.capitalize()))
deps = sorted(pkg.dependencies_of_type(dt.flag_from_string(deptype)))
deps = sorted(pkg.dependencies_of_type(deptype))
if deps:
colify(deps, indent=4)
else:

View File

@@ -16,7 +16,7 @@
from llnl.util.tty.colify import colify
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.dependency
import spack.repo
from spack.version import VersionList
@@ -149,8 +149,8 @@ def rows_for_ncols(elts, ncols):
def get_dependencies(pkg):
all_deps = {}
for deptype in dt.ALL_TYPES:
deps = pkg.dependencies_of_type(dt.flag_from_string(deptype))
for deptype in spack.dependency.all_deptypes:
deps = pkg.dependencies_of_type(deptype)
all_deps[deptype] = [d for d in deps]
return all_deps
@@ -275,8 +275,8 @@ def head(n, span_id, title, anchor=None):
out.write("\n")
out.write("</dd>\n")
for deptype in dt.ALL_TYPES:
deps = pkg_cls.dependencies_of_type(dt.flag_from_string(deptype))
for deptype in spack.dependency.all_deptypes:
deps = pkg_cls.dependencies_of_type(deptype)
if deps:
out.write("<dt>%s Dependencies:</dt>\n" % deptype.capitalize())
out.write("<dd>\n")

View File

@@ -52,13 +52,6 @@ def setup_parser(subparser):
const="bat",
help="print bat commands to load the package",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to load the package",
)
subparser.add_argument(
"--first",

View File

@@ -6,11 +6,10 @@
import posixpath
import sys
from llnl.path import convert_to_posix_path
import spack.paths
import spack.util.executable
from spack.spec import Spec
from spack.util.path import convert_to_posix_path
description = "generate Windows installer"
section = "admin"

View File

@@ -443,7 +443,7 @@ def mirror_create(args):
)
# When no directory is provided, the source dir is used
path = args.directory or spack.caches.fetch_cache_location()
path = args.directory or spack.caches.FETCH_CACHE_location()
if args.all and not ev.active_environment():
create_mirror_for_all_specs(

View File

@@ -137,7 +137,7 @@ def solve(parser, args):
# these are the same options as `spack spec`
install_status_fn = spack.spec.Spec.install_status
fmt = spack.spec.DISPLAY_FORMAT
fmt = spack.spec.display_format
if args.namespaces:
fmt = "{namespace}." + fmt
@@ -176,29 +176,17 @@ def solve(parser, args):
output = sys.stdout if "asp" in show else None
setup_only = set(show) == {"asp"}
unify = spack.config.get("concretizer:unify")
allow_deprecated = spack.config.get("config:deprecated", False)
if unify != "when_possible":
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
allow_deprecated=allow_deprecated,
specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for idx, result in enumerate(
solver.solve_in_rounds(
specs,
out=output,
timers=args.timers,
stats=args.stats,
allow_deprecated=allow_deprecated,
)
solver.solve_in_rounds(specs, out=output, timers=args.timers, stats=args.stats)
):
if "solutions" in show:
tty.msg("ROUND {0}".format(idx))

View File

@@ -77,7 +77,7 @@ def setup_parser(subparser):
def spec(parser, args):
install_status_fn = spack.spec.Spec.install_status
fmt = spack.spec.DISPLAY_FORMAT
fmt = spack.spec.display_format
if args.namespaces:
fmt = "{namespace}." + fmt

View File

@@ -5,7 +5,6 @@
import io
import sys
import llnl.string
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@@ -25,7 +24,7 @@ def report_tags(category, tags):
if isatty:
num = len(tags)
fmt = "{0} package tag".format(category)
buffer.write("{0}:\n".format(llnl.string.plural(num, fmt)))
buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))
if tags:
colify.colify(tags, output=buffer, tty=isatty, indent=4)

View File

@@ -51,13 +51,6 @@ def setup_parser(subparser):
const="bat",
help="print bat commands to load the package",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to load the package",
)
subparser.add_argument(
"-a", "--all", action="store_true", help="unload all loaded Spack packages"

View File

@@ -12,7 +12,6 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
import spack.url
import spack.util.crypto as crypto
from spack.url import (
UndetectableNameError,
@@ -27,6 +26,7 @@
substitution_offsets,
)
from spack.util.naming import simplify_name
from spack.util.web import find_versions_of_archive
description = "debugging tool for url parsing"
section = "developer"
@@ -139,7 +139,7 @@ def url_parse(args):
if args.spider:
print()
tty.msg("Spidering for versions:")
versions = spack.url.find_versions_of_archive(url)
versions = find_versions_of_archive(url)
if not versions:
print(" Found no versions for {0}".format(name))

View File

@@ -37,7 +37,10 @@ def setup_parser(subparser):
action="store_true",
help="only list remote versions newer than the latest checksummed version",
)
arguments.add_common_arguments(subparser, ["package", "jobs"])
subparser.add_argument(
"-c", "--concurrency", default=32, type=int, help="number of concurrent requests"
)
arguments.add_common_arguments(subparser, ["package"])
def versions(parser, args):
@@ -65,7 +68,7 @@ def versions(parser, args):
if args.safe:
return
fetched_versions = pkg.fetch_remote_versions(args.jobs)
fetched_versions = pkg.fetch_remote_versions(args.concurrency)
if args.new:
if sys.stdout.isatty():
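Whichever flag spells it (--jobs on one side, -c/--concurrency on the other), fetch_remote_versions fans requests out over a bounded worker pool. The general shape, sketched with a thread pool and a stand-in probe function:

    import concurrent.futures

    def probe_versions(urls, probe, jobs=32):
        """Probe remote URLs with at most `jobs` concurrent requests."""
        with concurrent.futures.ThreadPoolExecutor(max_workers=jobs) as pool:
            return dict(zip(urls, pool.map(probe, urls)))

    assert probe_versions(["u1", "u2"], lambda u: f"ok:{u}", jobs=2) == {
        "u1": "ok:u1",
        "u2": "ok:u2",
    }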

View File

@@ -13,7 +13,6 @@
import tempfile
from typing import List, Optional, Sequence
import llnl.path
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
@@ -25,6 +24,7 @@
import spack.util.module_cmd
import spack.version
from spack.util.environment import filter_system_paths
from spack.util.path import system_path_filter
__all__ = ["Compiler"]
@@ -39,17 +39,10 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
version_arg (str): the argument used to extract version information
"""
compiler = spack.util.executable.Executable(compiler_path)
compiler_invocation_args = {
"output": str,
"error": str,
"ignore_errors": ignore_errors,
"timeout": 120,
"fail_on_error": True,
}
if version_arg:
output = compiler(version_arg, **compiler_invocation_args)
output = compiler(version_arg, output=str, error=str, ignore_errors=ignore_errors)
else:
output = compiler(**compiler_invocation_args)
output = compiler(output=str, error=str, ignore_errors=ignore_errors)
return output
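One side of this hunk adds a timeout and fail_on_error to the compiler invocation. With plain subprocess, the equivalent is roughly the following sketch (not Spack's Executable API):

    import subprocess

    def version_output(compiler_path, version_arg=None, timeout=120):
        """Run a compiler, optionally with a version flag, capturing both
        streams and bounding the call with a timeout."""
        cmd = [compiler_path] + ([version_arg] if version_arg else [])
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout)
        return proc.stdout + proc.stderr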
@@ -160,7 +153,7 @@ def _parse_link_paths(string):
return implicit_link_dirs
@llnl.path.system_path_filter
@system_path_filter
def _parse_non_system_link_dirs(string: str) -> List[str]:
"""Parses link paths out of compiler debug output.
@@ -236,9 +229,6 @@ class Compiler:
# by any compiler
_all_compiler_rpath_libraries = ["libc", "libc++", "libstdc++"]
#: Platform matcher for Platform objects supported by compiler
is_supported_on_platform = lambda x: True
# Default flags used by a compiler to set an rpath
@property
def cc_rpath_arg(self):
@@ -604,6 +594,8 @@ def search_regexps(cls, language):
compiler_names = getattr(cls, "{0}_names".format(language))
prefixes = [""] + cls.prefixes
suffixes = [""]
# Windows compilers generally have an extension of some sort
# as do most files on Windows, handle that case here
if sys.platform == "win32":
ext = r"\.(?:exe|bat)"
cls_suf = [suf + ext for suf in cls.suffixes]

View File

@@ -10,7 +10,7 @@
import itertools
import multiprocessing.pool
import os
from typing import Dict, List
from typing import Dict
import archspec.cpu
@@ -298,7 +298,7 @@ def select_new_compilers(compilers, scope=None):
return compilers_not_in_config
def supported_compilers() -> List[str]:
def supported_compilers():
"""Return a set of names of compilers supported by Spack.
See available_compilers() to get a list of all the available
@@ -306,41 +306,10 @@ def supported_compilers() -> List[str]:
"""
# Hack to be able to call the compiler `apple-clang` while still
# using a valid python name for the module
return sorted(all_compiler_names())
def supported_compilers_for_host_platform() -> List[str]:
"""Return a set of compiler class objects supported by Spack
that are also supported by the current host platform
"""
host_plat = spack.platforms.real_host()
return supported_compilers_for_platform(host_plat)
def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
"""Return a set of compiler class objects supported by Spack
that are also supported by the provided platform
Args:
platform (str): string representation of platform
for which compiler compatability should be determined
"""
return [
name
for name in supported_compilers()
if class_for_compiler_name(name).is_supported_on_platform(platform)
]
def all_compiler_names() -> List[str]:
def replace_apple_clang(name):
return name if name != "apple_clang" else "apple-clang"
return [replace_apple_clang(name) for name in all_compiler_module_names()]
def all_compiler_module_names() -> List[str]:
return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]
return sorted(
name if name != "apple_clang" else "apple-clang"
for name in llnl.util.lang.list_modules(spack.paths.compilers_path)
)
@_auto_compiler_spec
@@ -659,7 +628,7 @@ def arguments_to_detect_version_fn(operating_system, paths):
def _default(search_paths):
command_arguments = []
files_to_be_tested = fs.files_in(*search_paths)
for compiler_name in spack.compilers.supported_compilers_for_host_platform():
for compiler_name in spack.compilers.supported_compilers():
compiler_cls = class_for_compiler_name(compiler_name)
for language in ("cc", "cxx", "f77", "fc"):
@@ -718,11 +687,9 @@ def _default(fn_args):
value = fn_args._replace(id=compiler_id._replace(version=version))
return value, None
error = f"Couldn't get version for compiler {path}".format(path)
error = "Couldn't get version for compiler {0}".format(path)
except spack.util.executable.ProcessError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except spack.util.executable.ProcessTimeoutError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
error = "Couldn't get version for compiler {0}\n".format(path) + str(e)
except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
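The supported_compilers_for_platform helpers in this file filter compiler classes through a per-class is_supported_on_platform predicate. A toy model of that mechanism (staticmethod avoids the lambda binding as an instance method):

    class Compiler:
        # Accept every platform by default; subclasses narrow this down.
        is_supported_on_platform = staticmethod(lambda platform: True)

    class Msvc(Compiler):
        is_supported_on_platform = staticmethod(lambda platform: platform == "windows")

    def supported_for_platform(platform, compiler_classes):
        """Keep only classes whose predicate accepts the given platform."""
        return [c.__name__ for c in compiler_classes if c.is_supported_on_platform(platform)]

    assert supported_for_platform("linux", [Compiler, Msvc]) == ["Compiler"]
    assert supported_for_platform("windows", [Compiler, Msvc]) == ["Compiler", "Msvc"]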

View File

@@ -112,7 +112,6 @@ def extract_version_from_output(cls, output):
match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
if match:
return ".".join(match.groups())
return "unknown"
@classmethod
def fc_version(cls, fortran_compiler):

View File

@@ -99,28 +99,6 @@ def cxx17_flag(self):
else:
return "-std=c++17"
@property
def cxx20_flag(self):
if self.real_version < Version("8.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++20 standard", "cxx20_flag", "< 8.0"
)
elif self.real_version < Version("11.0"):
return "-std=c++2a"
else:
return "-std=c++20"
@property
def cxx23_flag(self):
if self.real_version < Version("11.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++23 standard", "cxx23_flag", "< 11.0"
)
elif self.real_version < Version("14.0"):
return "-std=c++2b"
else:
return "-std=c++23"
@property
def c99_flag(self):
if self.real_version < Version("4.5"):
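The cxx20_flag/cxx23_flag properties in this hunk follow one pattern: raise for compilers too old for the standard, return the provisional flag for mid-range versions, and the final flag otherwise. Reduced to a function over plain version tuples:

    class UnsupportedCompilerFlag(Exception):
        pass

    def cxx20_flag(real_version):
        """Version-gated C++20 flag, in the style of the property above."""
        if real_version < (8, 0):
            raise UnsupportedCompilerFlag("the C++20 standard requires >= 8.0")
        if real_version < (11, 0):
            return "-std=c++2a"
        return "-std=c++20"

    assert cxx20_flag((10, 2)) == "-std=c++2a"
    assert cxx20_flag((13, 1)) == "-std=c++20"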

View File

@@ -7,6 +7,7 @@
import re
import subprocess
import sys
from distutils.version import StrictVersion
from typing import Dict, List, Set
import spack.compiler
@@ -28,97 +29,13 @@
}
class CmdCall:
"""Compose a call to `cmd` for an ordered series of cmd commands/scripts"""
def __init__(self, *cmds):
if not cmds:
raise RuntimeError(
"""Attempting to run commands from CMD without specifying commands.
Please add commands to be run."""
)
self._cmds = cmds
def __call__(self):
out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT) # novermin
return out.decode("utf-16le", errors="replace") # novermin
@property
def cmd_line(self):
base_call = "cmd /u /c "
commands = " && ".join([x.command_str() for x in self._cmds])
# If multiple commands are being invoked by a single subshell
# they must be encapsulated by a double quote. Always double
# quote to be sure of proper handling
# cmd will properly resolve nested double quotes as needed
#
# `set`` writes out the active env to the subshell stdout,
# and in this context we are always trying to obtain env
# state so it should always be appended
return base_call + f'"{commands} && set"'
class VarsInvocation:
def __init__(self, script):
self._script = script
def command_str(self):
return f'"{self._script}"'
@property
def script(self):
return self._script
class VCVarsInvocation(VarsInvocation):
def __init__(self, script, arch, msvc_version):
super(VCVarsInvocation, self).__init__(script)
self._arch = arch
self._msvc_version = msvc_version
@property
def sdk_ver(self):
"""Accessor for Windows SDK version property
Note: This property may not be set by
the calling context and as such this property will
return an empty string
This property will ONLY be set if the SDK package
is a dependency somewhere in the Spack DAG of the package
for which we are constructing an MSVC compiler env.
Otherwise this property should be unset to allow the VCVARS
script to use its internal heuristics to determine appropriate
SDK version
"""
if getattr(self, "_sdk_ver", None):
return self._sdk_ver + ".0"
return ""
@sdk_ver.setter
def sdk_ver(self, val):
self._sdk_ver = val
@property
def arch(self):
return self._arch
@property
def vcvars_ver(self):
return f"-vcvars_ver={self._msvc_version}"
def command_str(self):
script = super(VCVarsInvocation, self).command_str()
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
def get_valid_fortran_pth(comp_ver):
cl_ver = str(comp_ver)
sort_fn = lambda fc_ver: Version(fc_ver)
sort_fn = lambda fc_ver: StrictVersion(fc_ver)
sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
for ver in sort_fc_ver:
if ver in fortran_mapping:
if Version(cl_ver) <= Version(fortran_mapping[ver]):
if StrictVersion(cl_ver) <= StrictVersion(fortran_mapping[ver]):
return fc_path[ver]
return None
@@ -153,58 +70,27 @@ class Msvc(Compiler):
#: Regex used to extract version from compiler's output
version_regex = r"([1-9][0-9]*\.[0-9]*\.[0-9]*)"
# The MSVC compiler class overrides this to prevent instances
# of erroneous matching on executable names that cannot be msvc
# compilers
suffixes = []
is_supported_on_platform = lambda x: isinstance(x, spack.platforms.Windows)
# Initialize, deferring to base class but then adding the vcvarsallfile
# file based on compiler executable path.
def __init__(self, *args, **kwargs):
# This positional argument "paths" is later parsed and process by the base class
# via the call to `super` later in this method
paths = args[3]
# This positional argument "cspec" is also parsed and handled by the base class
# constructor
cspec = args[0]
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
paths[:] = new_pth
# Initialize, deferring to base class but then adding the vcvarsallfile
# file based on compiler executable path.
new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
args[3][:] = new_pth
super().__init__(*args, **kwargs)
# To use the MSVC compilers, VCVARS must be invoked
# VCVARS is located at a fixed location, referencable
# idiomatically by the following relative path from the
# compiler.
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
env_cmds = []
compiler_root = os.path.join(self.cc, "../../../../../../..")
vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
env_cmds.append(self.vcvars_call)
# Below is a check for a valid fortran path
# paths has c, cxx, fc, and f77 paths in that order
# paths[2] refers to the fc path and is a generic check
# for a fortran compiler
if paths[2]:
if os.getenv("ONEAPI_ROOT"):
# If this is found, it sets all the vars
oneapi_root = os.getenv("ONEAPI_ROOT")
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
oneapi_version_setvars = os.path.join(
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
)
# order matters here, the specific version env must be invoked first,
# otherwise it will be ignored if the root setvars sets up the oneapi
# env first
env_cmds.extend(
[VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
)
self.msvc_compiler_environment = CmdCall(*env_cmds)
self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
else:
# To use the MSVC compilers, VCVARS must be invoked
# VCVARS is located at a fixed location, referencable
# idiomatically by the following relative path from the
# compiler.
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
@property
def msvc_version(self):
@@ -233,29 +119,15 @@ def platform_toolset_ver(self):
"""
return self.msvc_version[:2].joined.string[:3]
def _compiler_version(self, compiler):
"""Returns version object for given compiler"""
# ignore_errors below is true here due to ifx's
# non zero return code if it is not provided
# and input file
return Version(
re.search(
Msvc.version_regex,
spack.compiler.get_compiler_version_output(
compiler, version_arg=None, ignore_errors=True
),
).group(1)
)
@property
def cl_version(self):
"""Cl toolset version"""
return self._compiler_version(self.cc)
@property
def ifx_version(self):
"""Ifx compiler version associated with this version of MSVC"""
return self._compiler_version(self.fc)
return Version(
re.search(
Msvc.version_regex,
spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
).group(1)
)
@property
def vs_root(self):
@@ -274,12 +146,27 @@ def setup_custom_environment(self, pkg, env):
# output, sort into dictionary, use that to make the build
# environment.
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
# vcvars can target specific sdk versions, force it to pick up concretized sdk
# version, if needed by spec
if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string
sdk_ver = (
""
if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
else pkg.spec["win-sdk"].version.string + ".0"
)
# provide vcvars with msvc version selected by concretization,
# not whatever it happens to pick up on the system (highest available version)
out = subprocess.check_output( # novermin
'cmd /u /c "{}" {} {} {} && set'.format(
self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
),
stderr=subprocess.STDOUT,
)
if sys.version_info[0] >= 3:
out = out.decode("utf-16le", errors="replace") # novermin
out = self.msvc_compiler_environment()
int_env = dict(
(key, value)
for key, _, value in (line.partition("=") for line in out.splitlines())
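Both versions of this code share one trick: run the vars script in a cmd subshell, append `set` to dump the resulting environment, and parse the key=value lines. A compact sketch (Windows-only by construction; the nested quoting mirrors the CmdCall comment above):

    import subprocess

    def env_from_script(script_path):
        """Invoke a batch script, dump the environment with `set`, and parse
        the UTF-16 output into a dict."""
        cmd_line = f'cmd /u /c ""{script_path}" && set"'
        out = subprocess.check_output(cmd_line, stderr=subprocess.STDOUT)
        text = out.decode("utf-16le", errors="replace")
        return {
            key: value
            for key, _, value in (line.partition("=") for line in text.splitlines())
            if key and value
        }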

View File

@@ -744,11 +744,8 @@ def concretize_specs_together(*abstract_specs, **kwargs):
def _concretize_specs_together_new(*abstract_specs, **kwargs):
import spack.solver.asp
allow_deprecated = spack.config.get("config:deprecated", False)
solver = spack.solver.asp.Solver()
result = solver.solve(
abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
)
result = solver.solve(abstract_specs, tests=kwargs.get("tests", False))
result.raise_if_unsat()
return [s.copy() for s in result.specs]

View File

@@ -857,12 +857,12 @@ def add_from_file(filename, scope=None):
def add(fullpath, scope=None):
"""Add the given configuration to the specified config scope.
Add accepts a path. If you want to add from a filename, use add_from_file"""
components = process_config_path(fullpath)
has_existing_value = True
path = ""
override = False
value = syaml.load_config(components[-1])
for idx, name in enumerate(components[:-1]):
# First handle double colons in constructing path
colon = "::" if override else ":" if path else ""
@@ -883,14 +883,14 @@ def add(fullpath, scope=None):
existing = get_valid_type(path)
# construct value from this point down
value = syaml.load_config(components[-1])
for component in reversed(components[idx + 1 : -1]):
value = {component: value}
break
if override:
path += "::"
if has_existing_value:
path, _, value = fullpath.rpartition(":")
value = syaml.load_config(value)
existing = get(path, scope=scope)
# append values to lists
@@ -1231,17 +1231,11 @@ def they_are(t):
return copy.copy(source)
#
# Process a path argument to config.set() that may contain overrides ('::' or
# trailing ':')
#
def process_config_path(path):
"""Process a path argument to config.set() that may contain overrides ('::' or
trailing ':')
Note: quoted value path components will be processed as a single value (escaping colons)
quoted path components outside of the value will be considered ill formed and will
raise.
e.g. `this:is:a:path:'value:with:colon'` will yield:
[this, is, a, path, value:with:colon]
"""
result = []
if path.startswith(":"):
raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
@@ -1269,17 +1263,6 @@ def process_config_path(path):
front.append = True
result.append(front)
quote = "['\"]"
not_quote = "[^'\"]"
if re.match(f"^{quote}", path):
m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
if not m:
raise ValueError("Quotes indicate value, but there are additional path entries")
result.append(m.group(1))
break
return result
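The longer docstring in this hunk spells out the quoting rule: a quoted final path component is one value, even if it contains colons. A standalone approximation of that behavior (not the full override handling):

    import re

    def split_config_path(path):
        """Split a colon-separated config path, keeping a quoted final
        component (which may itself contain colons) as a single value."""
        m = re.match(r"^(.*?):'([^']*)'$", path)
        if m:
            return m.group(1).split(":") + [m.group(2)]
        return path.split(":")

    assert split_config_path("this:is:a:path:'value:with:colon'") == [
        "this", "is", "a", "path", "value:with:colon",
    ]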

View File

@@ -4,9 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import json
import os
import traceback
import warnings
import jsonschema
import jsonschema.exceptions
@@ -14,7 +11,6 @@
import llnl.util.tty as tty
import spack.cmd
import spack.deptypes as dt
import spack.error
import spack.hash_types as hash_types
import spack.platforms
@@ -49,29 +45,9 @@ def translated_compiler_name(manifest_compiler_name):
)
def compiler_from_entry(entry: dict, manifest_path: str):
# Note that manifest_path is only passed here to compose a
# useful warning message when paths appear to be missing.
def compiler_from_entry(entry):
compiler_name = translated_compiler_name(entry["name"])
if "prefix" in entry:
prefix = entry["prefix"]
paths = dict(
(lang, os.path.join(prefix, relpath))
for (lang, relpath) in entry["executables"].items()
)
else:
paths = entry["executables"]
# Do a check for missing paths. Note that this isn't possible for
# all compiler entries, since their "paths" might actually be
# exe names like "cc" that depend on modules being loaded. Cray
# manifest entries are always paths though.
missing_paths = []
for path in paths.values():
if not os.path.exists(path):
missing_paths.append(path)
paths = entry["executables"]
# to instantiate a compiler class we may need a concrete version:
version = "={}".format(entry["version"])
arch = entry["arch"]
@@ -80,18 +56,8 @@ def compiler_from_entry(entry: dict, manifest_path: str):
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
if missing_paths:
warnings.warn(
"Manifest entry refers to nonexistent paths:\n\t"
+ "\n\t".join(missing_paths)
+ f"\nfor {str(spec)}"
+ f"\nin {manifest_path}"
+ "\nPlease report this issue"
)
return compiler_cls(spec, operating_system, target, path_list)
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
return compiler_cls(spec, operating_system, target, paths)
def spec_from_entry(entry):
@@ -192,13 +158,13 @@ def entries_to_specs(entries):
dependencies = entry["dependencies"]
for name, properties in dependencies.items():
dep_hash = properties["hash"]
depflag = dt.canonicalize(properties["type"])
deptypes = properties["type"]
if dep_hash in spec_dict:
if entry["hash"] not in spec_dict:
continue
parent_spec = spec_dict[entry["hash"]]
dep_spec = spec_dict[dep_hash]
parent_spec._add_dependency(dep_spec, depflag=depflag, virtuals=())
parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
for spec in spec_dict.values():
spack.spec.reconstruct_virtuals_on_edges(spec)
@@ -220,21 +186,12 @@ def read(path, apply_updates):
tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
compilers = list()
if "compilers" in json_data:
compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
compilers.extend(compiler_from_entry(x) for x in json_data["compilers"])
tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
# Filter out the compilers that already appear in the configuration
compilers = spack.compilers.select_new_compilers(compilers)
if apply_updates and compilers:
for compiler in compilers:
try:
spack.compilers.add_compilers_to_config([compiler], init_config=False)
except Exception:
warnings.warn(
f"Could not add compiler {str(compiler.spec)}: "
f"\n\tfrom manifest: {path}"
"\nPlease reexecute with 'spack -d' and include the stack trace"
)
tty.debug(f"Include this\n{traceback.format_exc()}")
spack.compilers.add_compilers_to_config(compilers, init_config=False)
if apply_updates:
for spec in specs.values():
spack.store.STORE.db.add(spec, directory_layout=None)
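entries_to_specs, earlier in this file, links flat hash-keyed manifest entries into a DAG by resolving each dependency hash against the table built so far. In miniature, with plain dicts standing in for Spec objects:

    def link_dependencies(entries):
        """Rebuild parent -> child edges from hash-keyed entries."""
        by_hash = {e["hash"]: {"name": e["name"], "deps": []} for e in entries}
        for e in entries:
            for dep_hash in e.get("dependencies", []):
                if dep_hash in by_hash:
                    by_hash[e["hash"]]["deps"].append(by_hash[dep_hash])
        return by_hash

    specs = link_dependencies(
        [
            {"hash": "aaa", "name": "mpileaks", "dependencies": ["bbb"]},
            {"hash": "bbb", "name": "mpich"},
        ]
    )
    assert specs["aaa"]["deps"][0]["name"] == "mpich"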

View File

@@ -27,8 +27,6 @@
import time
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union
import spack.deptypes as dt
try:
import uuid
@@ -91,7 +89,7 @@
#: Types of dependencies tracked by the database
#: We store by DAG hash, so we track the dependencies that the DAG hash includes.
_TRACKED_DEPENDENCIES = ht.dag_hash.depflag
_TRACKED_DEPENDENCIES = ht.dag_hash.deptype
#: Default list of fields written for each install record
DEFAULT_INSTALL_RECORD_FIELDS = (
@@ -797,7 +795,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
tty.warn(msg)
continue
spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)
spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
def _read_from_file(self, filename):
"""Fill database from file, do not maintain old data.
@@ -1148,7 +1146,7 @@ def _add(
# Retrieve optional arguments
installation_time = installation_time or _now()
for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
for edge in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
if edge.spec.dag_hash() in self._data:
continue
# allow missing build-only deps. This prevents excessive
@@ -1156,7 +1154,7 @@ def _add(
# is missing a build dep; there's no need to install the
# build dep's build dep first, and there's no need to warn
# about it missing.
dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
dep_allow_missing = allow_missing or edge.deptypes == ("build",)
self._add(
edge.spec,
directory_layout,
@@ -1200,10 +1198,10 @@ def _add(
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
# Connect dependencies from the DB to the new copy.
for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
for dep in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
if not upstream:
record.ref_count += 1
@@ -1373,13 +1371,7 @@ def deprecate(self, spec, deprecator):
return self._deprecate(spec, deprecator)
@_autospec
def installed_relatives(
self,
spec,
direction="children",
transitive=True,
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
):
def installed_relatives(self, spec, direction="children", transitive=True, deptype="all"):
"""Return installed specs related to this one."""
if direction not in ("parents", "children"):
raise ValueError("Invalid direction: %s" % direction)

View File

@@ -3,11 +3,64 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's dependency relationships."""
from typing import Dict, List
from typing import Dict, List, Optional, Set, Tuple, Union
import spack.deptypes as dt
import spack.spec
#: The types of dependency relationships that Spack understands.
all_deptypes = ("build", "link", "run", "test")
#: Default dependency type if none is specified
default_deptype = ("build", "link")
#: Type hint for the arguments accepting a dependency type
DependencyArgument = Union[str, List[str], Tuple[str, ...]]
def deptype_chars(*type_tuples: str) -> str:
"""Create a string representing deptypes for many dependencies.
The string will be some subset of 'blrt', like 'bl ', 'b t', or
' lr ' where each letter in 'blrt' stands for 'build', 'link',
'run', and 'test' (the dependency types).
For a single dependency, this just indicates that the dependency has
the indicated deptypes. For a list of dependencies, this shows
whether ANY dependency in the list has the deptypes (so the deptypes
are merged).
"""
types: Set[str] = set()
for t in type_tuples:
if t:
types.update(t)
return "".join(t[0] if t in types else " " for t in all_deptypes)
def canonical_deptype(deptype: DependencyArgument) -> Tuple[str, ...]:
"""Convert deptype to a canonical sorted tuple, or raise ValueError.
Args:
deptype: string representing dependency type, or a list/tuple of such strings.
Can also be the builtin function ``all`` or the string 'all', which result in
a tuple of all dependency types known to Spack.
"""
if deptype in ("all", all):
return all_deptypes
elif isinstance(deptype, str):
if deptype not in all_deptypes:
raise ValueError("Invalid dependency type: %s" % deptype)
return (deptype,)
elif isinstance(deptype, (tuple, list, set)):
bad = [d for d in deptype if d not in all_deptypes]
if bad:
raise ValueError("Invalid dependency types: %s" % ",".join(str(t) for t in bad))
return tuple(sorted(set(deptype)))
raise ValueError("Invalid dependency type: %s" % repr(deptype))
class Dependency:
"""Class representing metadata for a dependency on a package.
@@ -40,7 +93,7 @@ def __init__(
self,
pkg: "spack.package_base.PackageBase",
spec: "spack.spec.Spec",
depflag: dt.DepFlag = dt.DEFAULT,
type: Optional[Tuple[str, ...]] = default_deptype,
):
"""Create a new Dependency.
@@ -57,7 +110,11 @@ def __init__(
# This dict maps condition specs to lists of Patch objects, just
# as the patches dict on packages does.
self.patches: Dict[spack.spec.Spec, "List[spack.patch.Patch]"] = {}
self.depflag = depflag
if type is None:
self.type = set(default_deptype)
else:
self.type = set(type)
@property
def name(self) -> str:
@@ -67,7 +124,7 @@ def name(self) -> str:
def merge(self, other: "Dependency"):
"""Merge constraints, deptypes, and patches of other into self."""
self.spec.constrain(other.spec)
self.depflag |= other.depflag
self.type |= other.type
# concatenate patch lists, or just copy them in
for cond, p in other.patches.items():
@@ -78,5 +135,5 @@ def merge(self, other: "Dependency"):
self.patches[cond] = other.patches[cond]
def __repr__(self) -> str:
types = dt.flag_to_chars(self.depflag)
types = deptype_chars(*self.type)
return f"<Dependency: {self.pkg.name} -> {self.spec} [{types}]>"

View File

@@ -1,123 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's edge types."""
from typing import Iterable, List, Tuple, Union
#: Type hint for the low-level dependency input (enum.Flag is too slow)
DepFlag = int
#: Type hint for the high-level dependency input
DepTypes = Union[str, List[str], Tuple[str, ...]]
#: Individual dependency types
DepType = str # Python 3.8: Literal["build", "link", "run", "test"]
# Flag values. NOTE: these values are not arbitrary, since hash computation imposes
# the order (link, run, build, test) when depending on the same package multiple times,
# and we rely on default integer comparison to sort dependency types.
# New dependency types should be appended.
LINK = 0b0001
RUN = 0b0010
BUILD = 0b0100
TEST = 0b1000
#: The types of dependency relationships that Spack understands.
ALL_TYPES: Tuple[DepType, ...] = ("build", "link", "run", "test")
#: Default dependency type if none is specified
DEFAULT_TYPES: Tuple[DepType, ...] = ("build", "link")
#: A flag with all dependency types set
ALL: DepFlag = BUILD | LINK | RUN | TEST
#: Default dependency type if none is specified
DEFAULT: DepFlag = BUILD | LINK
#: An iterator of all flag components
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)
def flag_from_string(s: str) -> DepFlag:
if s == "build":
return BUILD
elif s == "link":
return LINK
elif s == "run":
return RUN
elif s == "test":
return TEST
else:
raise ValueError(f"Invalid dependency type: {s}")
def flag_from_strings(deptype: Iterable[str]) -> DepFlag:
"""Transform an iterable of deptype strings into a flag."""
flag = 0
for deptype_str in deptype:
flag |= flag_from_string(deptype_str)
return flag
def canonicalize(deptype: DepTypes) -> DepFlag:
"""Convert deptype user input to a DepFlag, or raise ValueError.
Args:
deptype: string representing dependency type, or a list/tuple of such strings.
Can also be the builtin function ``all`` or the string 'all', which result in
a tuple of all dependency types known to Spack.
"""
if deptype in ("all", all):
return ALL
if isinstance(deptype, str):
return flag_from_string(deptype)
if isinstance(deptype, (tuple, list, set)):
return flag_from_strings(deptype)
raise ValueError(f"Invalid dependency type: {deptype!r}")
def flag_to_tuple(x: DepFlag) -> Tuple[DepType, ...]:
deptype: List[DepType] = []
if x & BUILD:
deptype.append("build")
if x & LINK:
deptype.append("link")
if x & RUN:
deptype.append("run")
if x & TEST:
deptype.append("test")
return tuple(deptype)
def flag_to_string(x: DepFlag) -> DepType:
if x == BUILD:
return "build"
elif x == LINK:
return "link"
elif x == RUN:
return "run"
elif x == TEST:
return "test"
else:
raise ValueError(f"Invalid dependency type flag: {x}")
def flag_to_chars(depflag: DepFlag) -> str:
"""Create a string representing deptypes for many dependencies.
The string will be some subset of 'blrt', like 'bl ', 'b t', or
' lr ' where each letter in 'blrt' stands for 'build', 'link',
'run', and 'test' (the dependency types).
For a single dependency, this just indicates that the dependency has
the indicated deptypes. For a list of dependencies, this shows
whether ANY dependency in the list has the deptypes (so the deptypes
are merged)."""
return "".join(
t_str[0] if t_flag & depflag else " " for t_str, t_flag in zip(ALL_TYPES, ALL_FLAGS)
)
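The point of this deleted module is that a bitmask makes deptype union and membership single integer operations, where tuples need set algebra. The core of it in a few lines:

    LINK, RUN, BUILD, TEST = 0b0001, 0b0010, 0b0100, 0b1000
    NAMES = {"link": LINK, "run": RUN, "build": BUILD, "test": TEST}

    def canonicalize(deptypes):
        """Fold an iterable of deptype names into one integer flag."""
        flag = 0
        for name in deptypes:
            flag |= NAMES[name]
        return flag

    depflag = canonicalize(["build", "run"])
    assert depflag & BUILD and not depflag & TEST  # membership is one AND
    assert depflag | LINK == canonicalize(["build", "link", "run"])  # union is one OR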

View File

@@ -3,14 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import DetectedPackage, executable_prefix, update_configuration
from .path import by_path, executables_in_path
from .test import detection_tests
from .path import by_executable, by_library, executables_in_path
__all__ = [
"DetectedPackage",
"by_path",
"by_library",
"by_executable",
"executables_in_path",
"executable_prefix",
"update_configuration",
"detection_tests",
]

View File

@@ -13,13 +13,13 @@
The module also contains other functions that might be useful across different
detection mechanisms.
"""
import collections
import glob
import itertools
import os
import os.path
import re
import sys
from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union
import llnl.util.tty
@@ -29,28 +29,12 @@
import spack.util.spack_yaml
import spack.util.windows_registry
class DetectedPackage(NamedTuple):
"""Information on a package that has been detected."""
#: Spec that was detected
spec: spack.spec.Spec
#: Prefix of the spec
prefix: str
def __reduce__(self):
return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)
@staticmethod
def restore(
spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
) -> "DetectedPackage":
spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
return DetectedPackage(spec=spec, prefix=prefix)
#: Information on a package that has been detected
DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])
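The NamedTuple variant above defines __reduce__ so instances pickle as a (constructor, args) pair, presumably so they round-trip cleanly through a multiprocessing pool. The mechanism in isolation, with a dict standing in for a parsed Spec:

    import pickle

    class Detected:
        """Pickles via __reduce__: store only the string form and rebuild the
        parsed object on load (a toy analogue of DetectedPackage.restore)."""

        def __init__(self, spec_str):
            self.spec = {"name": spec_str}  # stand-in for a parsed Spec

        def __reduce__(self):
            return Detected, (self.spec["name"],)

    d = pickle.loads(pickle.dumps(Detected("cmake@3.27")))
    assert d.spec == {"name": "cmake@3.27"}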
def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
"""Returns all the specs mentioned as externals in packages.yaml"""
def _externals_in_packages_yaml():
"""Return all the specs mentioned as externals in packages.yaml"""
packages_yaml = spack.config.get("packages")
already_defined_specs = set()
for pkg_name, package_configuration in packages_yaml.items():
@@ -59,12 +43,7 @@ def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
return already_defined_specs
ExternalEntryType = Union[str, Dict[str, str]]
def _pkg_config_dict(
external_pkg_entries: List[DetectedPackage],
) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
def _pkg_config_dict(external_pkg_entries):
"""Generate a package specific config dict according to the packages.yaml schema.
This does not generate the entire packages.yaml. For example, given some
@@ -86,10 +65,7 @@ def _pkg_config_dict(
if not _spec_is_valid(e.spec):
continue
external_items: List[Tuple[str, ExternalEntryType]] = [
("spec", str(e.spec)),
("prefix", e.prefix),
]
external_items = [("spec", str(e.spec)), ("prefix", e.prefix)]
if e.spec.external_modules:
external_items.append(("modules", e.spec.external_modules))
@@ -107,14 +83,15 @@ def _pkg_config_dict(
return pkg_dict
def _spec_is_valid(spec: spack.spec.Spec) -> bool:
def _spec_is_valid(spec):
try:
str(spec)
except spack.error.SpackError:
# It is assumed here that we can at least extract the package name from the spec so we
# can look up the implementation of determine_spec_details
msg = f"Constructed spec for {spec.name} does not have a string representation"
llnl.util.tty.warn(msg)
# It is assumed here that we can at least extract the package name from
# the spec so we can look up the implementation of
# determine_spec_details
msg = "Constructed spec for {0} does not have a string representation"
llnl.util.tty.warn(msg.format(spec.name))
return False
try:
@@ -129,7 +106,7 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
return True
def path_to_dict(search_paths: List[str]):
def path_to_dict(search_paths):
"""Return dictionary[fullpath]: basename from list of paths"""
path_to_lib = {}
# Reverse order of search directories so that a lib in the first
@@ -147,7 +124,7 @@ def path_to_dict(search_paths: List[str]):
return path_to_lib
def is_executable(file_path: str) -> bool:
def is_executable(file_path):
"""Return True if the path passed as argument is that of an executable"""
return os.path.isfile(file_path) and os.access(file_path, os.X_OK)
@@ -169,7 +146,7 @@ def _convert_to_iterable(single_val_or_multiple):
return [x]
def executable_prefix(executable_dir: str) -> str:
def executable_prefix(executable_dir):
"""Given a directory where an executable is found, guess the prefix
(i.e. the "root" directory of that installation) and return it.
@@ -190,12 +167,12 @@ def executable_prefix(executable_dir: str) -> str:
return os.sep.join(components[:idx])
def library_prefix(library_dir: str) -> str:
"""Given a directory where a library is found, guess the prefix
def library_prefix(library_dir):
"""Given a directory where an library is found, guess the prefix
(i.e. the "root" directory of that installation) and return it.
Args:
library_dir: directory where a library is found
library_dir: directory where an library is found
"""
# Given a prefix where a library is found, assuming that prefix
# contains /lib/ or /lib64/, strip off the 'lib' or 'lib64' directory
@@ -218,17 +195,13 @@ def library_prefix(library_dir: str) -> str:
return library_dir
def update_configuration(
detected_packages: Dict[str, List[DetectedPackage]],
scope: Optional[str] = None,
buildable: bool = True,
) -> List[spack.spec.Spec]:
def update_configuration(detected_packages, scope=None, buildable=True):
"""Add the packages passed as arguments to packages.yaml
Args:
detected_packages: list of DetectedPackage objects to be added
scope: configuration scope where to add the detected packages
buildable: whether the detected packages are buildable or not
detected_packages (list): list of DetectedPackage objects to be added
scope (str): configuration scope where to add the detected packages
buildable (bool): whether the detected packages are buildable or not
"""
predefined_external_specs = _externals_in_packages_yaml()
pkg_to_cfg, all_new_specs = {}, []
@@ -236,10 +209,7 @@ def update_configuration(
new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]
pkg_config = _pkg_config_dict(new_entries)
external_entries = pkg_config.get("externals", [])
assert not isinstance(external_entries, bool), "unexpected value for external entry"
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in pkg_config.get("externals", [])])
if buildable is False:
pkg_config["buildable"] = False
pkg_to_cfg[package_name] = pkg_config
@@ -252,19 +222,16 @@ def update_configuration(
return all_new_specs
def _windows_drive() -> str:
"""Return Windows drive string extracted from the PROGRAMFILES environment variable,
which is guaranteed to be defined for all logins.
"""
match = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"])
if match is None:
raise RuntimeError("cannot read the PROGRAMFILES environment variable")
return match.group(1)
def _windows_drive():
"""Return Windows drive string extracted from PROGRAMFILES
env var, which is guaranteed to be defined for all logins"""
drive = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"]).group(1)
return drive
class WindowsCompilerExternalPaths:
@staticmethod
def find_windows_compiler_root_paths() -> List[str]:
def find_windows_compiler_root_paths():
"""Helper for Windows compiler installation root discovery
At the moment simply returns location of VS install paths from VSWhere
@@ -272,7 +239,7 @@ def find_windows_compiler_root_paths() -> List[str]:
return list(winOs.WindowsOs.vs_install_paths)
@staticmethod
def find_windows_compiler_cmake_paths() -> List[str]:
def find_windows_compiler_cmake_paths():
"""Semi hard-coded search path for cmake bundled with MSVC"""
return [
os.path.join(
@@ -282,7 +249,7 @@ def find_windows_compiler_cmake_paths() -> List[str]:
]
@staticmethod
def find_windows_compiler_ninja_paths() -> List[str]:
def find_windows_compiler_ninja_paths():
"""Semi hard-coded search heuristic for locating ninja bundled with MSVC"""
return [
os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
@@ -290,7 +257,7 @@ def find_windows_compiler_ninja_paths() -> List[str]:
]
@staticmethod
def find_windows_compiler_bundled_packages() -> List[str]:
def find_windows_compiler_bundled_packages():
"""Return all MSVC compiler bundled packages"""
return (
WindowsCompilerExternalPaths.find_windows_compiler_cmake_paths()
@@ -299,39 +266,36 @@ def find_windows_compiler_bundled_packages() -> List[str]:
class WindowsKitExternalPaths:
if sys.platform == "win32":
plat_major_ver = str(winOs.windows_version()[0])
@staticmethod
def find_windows_kit_roots() -> List[str]:
def find_windows_kit_roots():
"""Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
if sys.platform != "win32":
return []
program_files = os.environ["PROGRAMFILES(x86)"]
kit_base = os.path.join(program_files, "Windows Kits", "**")
return glob.glob(kit_base)
kit_base = os.path.join(
program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
)
return kit_base
@staticmethod
def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
def find_windows_kit_bin_paths(kit_base=None):
"""Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
assert kit_base, "Unexpectedly empty value for Windows kit base path"
kit_paths = []
for kit in kit_base:
kit_bin = os.path.join(kit, "bin")
kit_paths.extend(glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\")))
return kit_paths
kit_bin = os.path.join(kit_base, "bin")
return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
@staticmethod
def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
def find_windows_kit_lib_paths(kit_base=None):
"""Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
assert kit_base, "Unexpectedly empty value for Windows kit base path"
kit_paths = []
for kit in kit_base:
kit_lib = os.path.join(kit, "Lib")
kit_paths.extend(glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\")))
return kit_paths
kit_lib = os.path.join(kit_base, "Lib")
return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
@staticmethod
def find_windows_driver_development_kit_paths() -> List[str]:
def find_windows_driver_development_kit_paths():
"""Provides a list of all installation paths
for the WDK by version and architecture
"""
@@ -339,7 +303,7 @@ def find_windows_driver_development_kit_paths() -> List[str]:
return WindowsKitExternalPaths.find_windows_kit_lib_paths(wdk_content_root)
@staticmethod
def find_windows_kit_reg_installed_roots_paths() -> List[str]:
def find_windows_kit_reg_installed_roots_paths():
reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots",
root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
@@ -347,33 +311,26 @@ def find_windows_kit_reg_installed_roots_paths() -> List[str]:
if not reg:
# couldn't find key, return empty list
return []
kit_root_reg = re.compile(r"KitsRoot[0-9]+")
root_paths = []
for kit_root in filter(kit_root_reg.match, reg.get_values().keys()):
root_paths.extend(
WindowsKitExternalPaths.find_windows_kit_lib_paths(reg.get_value(kit_root).value)
)
return root_paths
return WindowsKitExternalPaths.find_windows_kit_lib_paths(
reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value
)
@staticmethod
def find_windows_kit_reg_sdk_paths() -> List[str]:
sdk_paths = []
sdk_regex = re.compile(r"v[0-9]+.[0-9]+")
windows_reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows",
def find_windows_kit_reg_sdk_paths():
reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
% WindowsKitExternalPaths.plat_major_ver,
root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
)
for key in filter(sdk_regex.match, [x.name for x in windows_reg.get_subkeys()]):
reg = windows_reg.get_subkey(key)
sdk_paths.extend(
WindowsKitExternalPaths.find_windows_kit_lib_paths(
reg.get_value("InstallationFolder").value
)
)
return sdk_paths
if not reg:
# couldn't find key, return empty list
return []
return WindowsKitExternalPaths.find_windows_kit_lib_paths(
reg.get_value("InstallationFolder").value
)
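One variant hard-codes a single registry key per platform version, while the other filters every v<major>.<minor> subkey. A minimal sketch of that filtering step, with a plain list standing in for the registry view (the real code goes through spack.util.windows_registry):

import re

# Hypothetical subkey names as they might appear under ...\Microsoft SDKs\Windows
subkey_names = ["v7.1A", "v8.0", "v8.1", "v10.0", "CurrentVersion"]

# The diff's pattern leaves the dot unescaped; it is escaped here for strictness.
sdk_regex = re.compile(r"v[0-9]+\.[0-9]+")
print([name for name in subkey_names if sdk_regex.match(name)])
# ['v7.1A', 'v8.0', 'v8.1', 'v10.0']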
def find_win32_additional_install_paths() -> List[str]:
def find_win32_additional_install_paths():
"""Not all programs on Windows live on the PATH
Return a list of other potential install locations.
"""
@@ -400,12 +357,13 @@ def find_win32_additional_install_paths() -> List[str]:
return windows_search_ext
def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
"""Given a package, attempts to compute its Windows program files location,
and returns the list of best guesses.
def compute_windows_program_path_for_package(pkg):
"""Given a package, attempt to compute its Windows
program files location, return list of best guesses
Args:
pkg: package for which Program Files location is to be computed
pkg (spack.package_base.PackageBase): package for which
Program Files location is to be computed
"""
if sys.platform != "win32":
return []
@@ -420,7 +378,7 @@ def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBas
]
def compute_windows_user_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
def compute_windows_user_path_for_package(pkg):
"""Given a package attempt to compute its user scoped
install location, return list of potential locations based
on common heuristics. For more info on Windows user specific

View File

@@ -2,17 +2,15 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Detection of software installed in the system, based on paths inspections
"""Detection of software installed in the system based on paths inspections
and running executables.
"""
import collections
import concurrent.futures
import os
import os.path
import re
import sys
import warnings
from typing import Dict, List, Optional, Set, Tuple
import llnl.util.filesystem
import llnl.util.tty
@@ -20,7 +18,7 @@
import spack.util.environment
import spack.util.ld_so_conf
from .common import (
from .common import ( # find_windows_compiler_bundled_packages,
DetectedPackage,
WindowsCompilerExternalPaths,
WindowsKitExternalPaths,
@@ -33,13 +31,8 @@
path_to_dict,
)
#: Timeout used for package detection (seconds)
DETECTION_TIMEOUT = 60
if sys.platform == "win32":
DETECTION_TIMEOUT = 120
def common_windows_package_paths() -> List[str]:
def common_windows_package_paths():
paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
paths.extend(find_win32_additional_install_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
@@ -48,7 +41,7 @@ def common_windows_package_paths() -> List[str]:
return paths
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
def executables_in_path(path_hints):
"""Get the paths of all executables available from the current PATH.
For convenience, this is constructed as a dictionary where the keys are
@@ -59,7 +52,7 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
assumed there are two different instances of the executable.
Args:
path_hints: list of paths to be searched. If None the list will be
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
if sys.platform == "win32":
@@ -68,9 +61,7 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
return path_to_dict(search_paths)
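A minimal, cross-platform sketch of the dictionary this function describes, keyed by absolute path with the file name as value, using only the standard library (the real helper delegates to path_to_dict and prepends Windows package paths):

import os

def executables_in_path_sketch(path_hints=None):
    path_hints = path_hints or os.environ.get("PATH", "").split(os.pathsep)
    result = {}
    for directory in filter(os.path.isdir, path_hints):
        for name in os.listdir(directory):
            candidate = os.path.join(directory, name)
            if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
                # Keys are absolute paths, so two same-named executables in
                # different directories are both kept, as the docstring requires.
                result[candidate] = name
    return result

print(len(executables_in_path_sketch()))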
def libraries_in_ld_and_system_library_path(
path_hints: Optional[List[str]] = None,
) -> Dict[str, str]:
def libraries_in_ld_and_system_library_path(path_hints=None):
"""Get the paths of all libraries available from LD_LIBRARY_PATH,
LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
standard system library paths.
@@ -83,7 +74,7 @@ def libraries_in_ld_and_system_library_path(
assumed there are two different instances of the library.
Args:
path_hints: list of paths to be searched. If None the list will be
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
variables as well as the standard system library paths.
@@ -99,7 +90,7 @@ def libraries_in_ld_and_system_library_path(
return path_to_dict(search_paths)
def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
def libraries_in_windows_paths(path_hints):
path_hints.extend(spack.util.environment.get_path("PATH"))
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
@@ -115,253 +106,218 @@ def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
return path_to_dict(search_paths)
def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
def _group_by_prefix(paths):
groups = collections.defaultdict(set)
for p in paths:
groups[os.path.dirname(p)].add(p)
return groups
return groups.items()
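A short usage example of the helper above, restated here so the demo runs standalone; the two variants differ only in returning the dict itself versus its items() view:

import collections
import os

def _group_by_prefix(paths):
    groups = collections.defaultdict(set)
    for p in paths:
        groups[os.path.dirname(p)].add(p)
    return groups

paths = {"/usr/bin/gcc", "/usr/bin/g++", "/opt/bin/gcc"}
for prefix, files in sorted(_group_by_prefix(paths).items()):
    print(prefix, sorted(files))
# /opt/bin ['/opt/bin/gcc']
# /usr/bin ['/usr/bin/g++', '/usr/bin/gcc']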
class Finder:
"""Inspects the file-system looking for packages. Guesses places where to look using PATH."""
# TODO consolidate this with by_executable
# Packages should be able to define both .libraries and .executables in the future
# determine_spec_details should get all relevant libraries and executables in one call
def by_library(packages_to_check, path_hints=None):
# The technique for finding libraries is determined on a per-recipe basis in
# the determine_version class method. Some packages extract the version
# number from a shared library's filename; others could use the strings
# utility to extract it as described in
# https://unix.stackexchange.com/questions/58846/viewing-linux-library-executable-version-info
"""Return the list of packages that have been detected on the system,
searching by LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH,
DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths.
def path_hints(
self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
) -> List[str]:
"""Returns the list of paths to be searched.
Args:
packages_to_check (list): list of packages to be detected
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH environment variables
and standard system library paths.
"""
# If no path hints from command line, initialize to empty list so
# we can add default hints on a per-package basis
path_hints = [] if path_hints is None else path_hints
Args:
pkg: package being detected
initial_guess: initial list of paths from caller
"""
result = initial_guess or []
result.extend(compute_windows_user_path_for_package(pkg))
result.extend(compute_windows_program_path_for_package(pkg))
return result
lib_pattern_to_pkgs = collections.defaultdict(list)
for pkg in packages_to_check:
if hasattr(pkg, "libraries"):
for lib in pkg.libraries:
lib_pattern_to_pkgs[lib].append(pkg)
path_hints.extend(compute_windows_user_path_for_package(pkg))
path_hints.extend(compute_windows_program_path_for_package(pkg))
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
"""Returns the list of patterns used to match candidate files.
path_to_lib_name = (
libraries_in_ld_and_system_library_path(path_hints=path_hints)
if sys.platform != "win32"
else libraries_in_windows_paths(path_hints)
)
Args:
pkg: package being detected
"""
raise NotImplementedError("must be implemented by derived classes")
pkg_to_found_libs = collections.defaultdict(set)
for lib_pattern, pkgs in lib_pattern_to_pkgs.items():
compiled_re = re.compile(lib_pattern)
for path, lib in path_to_lib_name.items():
if compiled_re.search(lib):
for pkg in pkgs:
pkg_to_found_libs[pkg].add(path)
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
"""Returns a list of candidate files found on the system.
pkg_to_entries = collections.defaultdict(list)
resolved_specs = {} # spec -> lib found for the spec
Args:
patterns: search patterns to be used for matching files
paths: paths where to search for files
"""
raise NotImplementedError("must be implemented by derived classes")
def prefix_from_path(self, *, path: str) -> str:
"""Given a path where a file was found, returns the corresponding prefix.
Args:
path: path of a detected file
"""
raise NotImplementedError("must be implemented by derived classes")
def detect_specs(
self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
) -> List[DetectedPackage]:
"""Given a list of files matching the search patterns, returns a list of detected specs.
Args:
pkg: package being detected
paths: files matching the package search patterns
"""
for pkg, libs in pkg_to_found_libs.items():
if not hasattr(pkg, "determine_spec_details"):
warnings.warn(
f"{pkg.name} must define 'determine_spec_details' in order"
f" for Spack to detect externally-provided instances"
f" of the package."
llnl.util.tty.warn(
"{0} must define 'determine_spec_details' in order"
" for Spack to detect externally-provided instances"
" of the package.".format(pkg.name)
)
return []
continue
result = []
for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
for prefix, libs_in_prefix in sorted(_group_by_prefix(libs)):
try:
specs = _convert_to_iterable(pkg.determine_spec_details(prefix, libs_in_prefix))
except Exception as e:
specs = []
msg = 'error detecting "{0}" from prefix {1} [{2}]'
warnings.warn(msg.format(pkg.name, prefix, str(e)))
if not specs:
llnl.util.tty.debug(
"The following libraries in {0} were decidedly not "
"part of the package {1}: {2}".format(
prefix, pkg.name, ", ".join(_convert_to_iterable(libs_in_prefix))
)
)
for spec in specs:
pkg_prefix = library_prefix(prefix)
if not pkg_prefix:
msg = "no lib/ or lib64/ dir found in {0}. Cannot "
"add it as a Spack package"
llnl.util.tty.debug(msg.format(prefix))
continue
if spec in resolved_specs:
prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
llnl.util.tty.debug(
"Libraries in {0} and {1} are both associated"
" with the same spec {2}".format(prefix, prior_prefix, str(spec))
)
continue
else:
resolved_specs[spec] = prefix
try:
spec.validate_detection()
except Exception as e:
msg = (
'"{0}" has been detected on the system but will '
"not be added to packages.yaml [reason={1}]"
)
llnl.util.tty.warn(msg.format(spec, str(e)))
continue
if spec.external_path:
pkg_prefix = spec.external_path
pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
return pkg_to_entries
def by_executable(packages_to_check, path_hints=None):
"""Return the list of packages that have been detected on the system,
searching by path.
Args:
packages_to_check (list): list of package classes to be detected
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
path_hints = spack.util.environment.get_path("PATH") if path_hints is None else path_hints
exe_pattern_to_pkgs = collections.defaultdict(list)
for pkg in packages_to_check:
if hasattr(pkg, "executables"):
for exe in pkg.platform_executables():
exe_pattern_to_pkgs[exe].append(pkg)
# Add Windows specific, package related paths to the search paths
path_hints.extend(compute_windows_user_path_for_package(pkg))
path_hints.extend(compute_windows_program_path_for_package(pkg))
path_to_exe_name = executables_in_path(path_hints=path_hints)
pkg_to_found_exes = collections.defaultdict(set)
for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
compiled_re = re.compile(exe_pattern)
for path, exe in path_to_exe_name.items():
if compiled_re.search(exe):
for pkg in pkgs:
pkg_to_found_exes[pkg].add(path)
pkg_to_entries = collections.defaultdict(list)
resolved_specs = {} # spec -> exe found for the spec
for pkg, exes in pkg_to_found_exes.items():
if not hasattr(pkg, "determine_spec_details"):
llnl.util.tty.warn(
"{0} must define 'determine_spec_details' in order"
" for Spack to detect externally-provided instances"
" of the package.".format(pkg.name)
)
continue
for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
# TODO: multiple instances of a package can live in the same
# prefix, and a package implementation can return multiple specs
# for one prefix, but without additional details (e.g. about the
# naming scheme which differentiates them), the spec won't be
# usable.
try:
specs = _convert_to_iterable(
pkg.determine_spec_details(candidate_path, items_in_prefix)
)
specs = _convert_to_iterable(pkg.determine_spec_details(prefix, exes_in_prefix))
except Exception as e:
specs = []
warnings.warn(
f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
)
msg = 'error detecting "{0}" from prefix {1} [{2}]'
warnings.warn(msg.format(pkg.name, prefix, str(e)))
if not specs:
files = ", ".join(_convert_to_iterable(items_in_prefix))
llnl.util.tty.debug(
f"The following files in {candidate_path} were decidedly not "
f"part of the package {pkg.name}: {files}"
"The following executables in {0} were decidedly not "
"part of the package {1}: {2}".format(
prefix, pkg.name, ", ".join(_convert_to_iterable(exes_in_prefix))
)
)
resolved_specs: Dict[spack.spec.Spec, str] = {} # spec -> exe found for the spec
for spec in specs:
prefix = self.prefix_from_path(path=candidate_path)
if not prefix:
pkg_prefix = executable_prefix(prefix)
if not pkg_prefix:
msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
llnl.util.tty.debug(msg.format(prefix))
continue
if spec in resolved_specs:
prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
llnl.util.tty.debug(
f"Files in {candidate_path} and {prior_prefix} are both associated"
f" with the same spec {str(spec)}"
"Executables in {0} and {1} are both associated"
" with the same spec {2}".format(prefix, prior_prefix, str(spec))
)
continue
else:
resolved_specs[spec] = prefix
resolved_specs[spec] = candidate_path
try:
spec.validate_detection()
except Exception as e:
msg = (
f'"{spec}" has been detected on the system but will '
f"not be added to packages.yaml [reason={str(e)}]"
'"{0}" has been detected on the system but will '
"not be added to packages.yaml [reason={1}]"
)
warnings.warn(msg)
llnl.util.tty.warn(msg.format(spec, str(e)))
continue
if spec.external_path:
prefix = spec.external_path
pkg_prefix = spec.external_path
result.append(DetectedPackage(spec=spec, prefix=prefix))
pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
return result
def find(
self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
) -> List[DetectedPackage]:
"""For a given package, returns a list of detected specs.
Args:
pkg_name: package being detected
initial_guess: initial list of paths to search from the caller
"""
import spack.repo
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
patterns = self.search_patterns(pkg=pkg_cls)
if not patterns:
return []
path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
candidates = self.candidate_files(patterns=patterns, paths=path_hints)
result = self.detect_specs(pkg=pkg_cls, paths=candidates)
return result
class ExecutablesFinder(Finder):
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
result = []
if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
result = pkg.platform_executables()
return result
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
executables_by_path = executables_in_path(path_hints=paths)
patterns = [re.compile(x) for x in patterns]
result = []
for compiled_re in patterns:
for path, exe in executables_by_path.items():
if compiled_re.search(exe):
result.append(path)
return list(sorted(set(result)))
def prefix_from_path(self, *, path: str) -> str:
result = executable_prefix(path)
if not result:
msg = f"no bin/ dir found in {path}. Cannot add it as a Spack package"
llnl.util.tty.debug(msg)
return result
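The finder splits detection into three hooks: search_patterns supplies regexes, candidate_files matches them against a path-to-name mapping, and prefix_from_path walks back to an installation prefix. A minimal sketch of the matching step in isolation, with hypothetical patterns and an in-memory scan result:

import re

patterns = [r"^gcc(-\d+)?$", r"^clang$"]  # hypothetical package-declared patterns
executables_by_path = {
    "/usr/bin/gcc": "gcc",
    "/usr/bin/gcc-12": "gcc-12",
    "/usr/bin/clang": "clang",
    "/usr/bin/gfortran": "gfortran",
}

compiled = [re.compile(p) for p in patterns]
matches = {
    path
    for regex in compiled
    for path, name in executables_by_path.items()
    if regex.search(name)
}
print(sorted(matches))  # ['/usr/bin/clang', '/usr/bin/gcc', '/usr/bin/gcc-12']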
class LibrariesFinder(Finder):
"""Finds libraries on the system, searching by LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
"""
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
result = []
if hasattr(pkg, "libraries"):
result = pkg.libraries
return result
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
libraries_by_path = (
libraries_in_ld_and_system_library_path(path_hints=paths)
if sys.platform != "win32"
else libraries_in_windows_paths(paths)
)
patterns = [re.compile(x) for x in patterns]
result = []
for compiled_re in patterns:
for path, exe in libraries_by_path.items():
if compiled_re.search(exe):
result.append(path)
return result
def prefix_from_path(self, *, path: str) -> str:
result = library_prefix(path)
if not result:
msg = f"no lib/ or lib64/ dir found in {path}. Cannot add it as a Spack package"
llnl.util.tty.debug(msg)
return result
def by_path(
packages_to_search: List[str],
*,
path_hints: Optional[List[str]] = None,
max_workers: Optional[int] = None,
) -> Dict[str, List[DetectedPackage]]:
"""Return the list of packages that have been detected on the system, keyed by
unqualified package name.
Args:
packages_to_search: list of packages to be detected. Each package can be either unqualified
or fully qualified
path_hints: initial list of paths to be searched
max_workers: maximum number of workers to search for packages in parallel
"""
# TODO: Packages should be able to define both .libraries and .executables in the future
# TODO: determine_spec_details should get all relevant libraries and executables in one call
executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
executables_path_guess = (
spack.util.environment.get_path("PATH") if path_hints is None else path_hints
)
libraries_path_guess = [] if path_hints is None else path_hints
detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
result = collections.defaultdict(list)
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
for pkg in packages_to_search:
executable_future = executor.submit(
executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
)
library_future = executor.submit(
libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
)
detected_specs_by_package[pkg] = executable_future, library_future
for pkg_name, futures in detected_specs_by_package.items():
for future in futures:
try:
detected = future.result(timeout=DETECTION_TIMEOUT)
if detected:
_, unqualified_name = spack.repo.partition_package_name(pkg_name)
result[unqualified_name].extend(detected)
except Exception:
llnl.util.tty.debug(
f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
)
return result
return pkg_to_entries
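by_path fans the two finders out over a process pool and bounds each result with DETECTION_TIMEOUT. A minimal, self-contained sketch of that submit-and-collect pattern, with a stand-in worker instead of Spack's finders:

import concurrent.futures

TIMEOUT = 60  # seconds; the real code doubles this on Windows

def detect(pkg_name):
    # Stand-in for Finder.find: pretend every package yields one entry.
    return [pkg_name + "@1.0"]

def by_path_sketch(packages):
    results = {}
    with concurrent.futures.ProcessPoolExecutor(max_workers=2) as executor:
        futures = {pkg: executor.submit(detect, pkg) for pkg in packages}
        for pkg, future in futures.items():
            try:
                results[pkg] = future.result(timeout=TIMEOUT)
            except Exception:
                pass  # the real code logs a debug message and skips the package
    return results

if __name__ == "__main__":
    print(by_path_sketch(["cmake", "ninja"]))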

View File

@@ -1,187 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Create and run mock e2e tests for package detection."""
import collections
import contextlib
import pathlib
import tempfile
from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple
import jinja2
from llnl.util import filesystem
import spack.repo
import spack.spec
from spack.util import spack_yaml
from .path import by_path
class MockExecutables(NamedTuple):
"""Mock executables to be used in detection tests"""
#: Relative paths for mock executables to be created
executables: List[str]
#: Shell script for the mock executable
script: str
class ExpectedTestResult(NamedTuple):
"""Data structure to model assertions on detection tests"""
#: Spec to be detected
spec: str
class DetectionTest(NamedTuple):
"""Data structure to construct detection tests by PATH inspection.
Packages may have a YAML file containing the description of one or more detection tests
to be performed. Each test creates a few mock executable scripts in a temporary folder,
and checks that detection by PATH gives the expected results.
"""
pkg_name: str
layout: List[MockExecutables]
results: List[ExpectedTestResult]
class Runner:
"""Runs an external detection test"""
def __init__(self, *, test: DetectionTest, repository: spack.repo.RepoPath) -> None:
self.test = test
self.repository = repository
self.tmpdir = tempfile.TemporaryDirectory()
def execute(self) -> List[spack.spec.Spec]:
"""Executes a test and returns the specs that have been detected.
This function sets up a test in a temporary directory, according to the prescriptions
in the test layout, then performs a detection by executables and returns the specs that
have been detected.
"""
with self._mock_layout() as path_hints:
entries = by_path([self.test.pkg_name], path_hints=path_hints)
_, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
specs = set(x.spec for x in entries[unqualified_name])
return list(specs)
@contextlib.contextmanager
def _mock_layout(self) -> Generator[List[str], None, None]:
hints = set()
try:
for entry in self.test.layout:
exes = self._create_executable_scripts(entry)
for mock_executable in exes:
hints.add(str(mock_executable.parent))
yield list(hints)
finally:
self.tmpdir.cleanup()
def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
relative_paths = mock_executables.executables
script = mock_executables.script
script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
result = []
for mock_exe_path in relative_paths:
rel_path = pathlib.Path(mock_exe_path)
abs_path = pathlib.Path(self.tmpdir.name) / rel_path
abs_path.parent.mkdir(parents=True, exist_ok=True)
abs_path.write_text(script_template.render(script=script))
filesystem.set_executable(abs_path)
result.append(abs_path)
return result
@property
def expected_specs(self) -> List[spack.spec.Spec]:
return [spack.spec.Spec(r.spec) for r in self.test.results]
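Each layout entry above becomes a small bash script on disk before PATH is scanned. A minimal sketch of that setup step using only the standard library (the real code renders the script through jinja2 and marks it executable with llnl's filesystem helper):

import pathlib
import stat
import tempfile

def create_mock_executable(tmpdir, rel_path, script):
    abs_path = pathlib.Path(tmpdir) / rel_path
    abs_path.parent.mkdir(parents=True, exist_ok=True)
    abs_path.write_text("#!/bin/bash\n" + script + "\n")
    abs_path.chmod(abs_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
    return abs_path

with tempfile.TemporaryDirectory() as tmpdir:
    exe = create_mock_executable(tmpdir, "bin/gcc", 'echo "gcc (GCC) 9.4.0"')
    print(exe.parent)  # the directory that would be handed to detection as a path hint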
def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
"""Returns a list of test runners for a given package.
Currently, detection tests are specified in a YAML file, called ``detection_test.yaml``,
alongside the ``package.py`` file.
This function reads that file to create a list of ``Runner`` objects.
Args:
pkg_name: name of the package to test
repository: repository where the package lives
"""
result = []
detection_tests_content = read_detection_tests(pkg_name, repository)
tests_by_path = detection_tests_content.get("paths", [])
for single_test_data in tests_by_path:
mock_executables = []
for layout in single_test_data["layout"]:
mock_executables.append(
MockExecutables(executables=layout["executables"], script=layout["script"])
)
expected_results = []
for assertion in single_test_data["results"]:
expected_results.append(ExpectedTestResult(spec=assertion["spec"]))
current_test = DetectionTest(
pkg_name=pkg_name, layout=mock_executables, results=expected_results
)
result.append(Runner(test=current_test, repository=repository))
return result
def read_detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> Dict[str, Any]:
"""Returns the normalized content of the detection_tests.yaml associated with the package
passed in input.
The content is merged with that of any package that is transitively included using the
"includes" attribute.
Args:
pkg_name: name of the package to test
repository: repository in which to search for packages
"""
content_stack, seen = [], set()
included_packages: Deque[str] = collections.deque()
root_detection_yaml, result = _detection_tests_yaml(pkg_name, repository)
included_packages.extend(result.get("includes", []))
seen |= set(result.get("includes", []))
while included_packages:
current_package = included_packages.popleft()
try:
current_detection_yaml, content = _detection_tests_yaml(current_package, repository)
except FileNotFoundError as e:
msg = (
f"cannot read the detection tests from the '{current_package}' package, "
f"included by {root_detection_yaml}"
)
raise FileNotFoundError(msg + f"\n\n\t{e}\n")
content_stack.append((current_package, content))
included_packages.extend(x for x in content.get("includes", []) if x not in seen)
seen |= set(content.get("includes", []))
result.setdefault("paths", [])
for pkg_name, content in content_stack:
result["paths"].extend(content.get("paths", []))
return result
def _detection_tests_yaml(
pkg_name: str, repository: spack.repo.RepoPath
) -> Tuple[pathlib.Path, Dict[str, Any]]:
pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
detection_tests_yaml = pkg_dir / "detection_test.yaml"
with open(str(detection_tests_yaml)) as f:
content = spack_yaml.load(f)
return detection_tests_yaml, content
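read_detection_tests merges the root YAML with everything it transitively includes, deduplicating with a seen set. A minimal sketch of that breadth-first merge, with hypothetical in-memory dicts standing in for the parsed YAML files:

import collections

files = {  # hypothetical parsed detection_test.yaml contents, keyed by package
    "llvm": {"includes": ["clang"], "paths": [{"spec": "llvm@14"}]},
    "clang": {"includes": ["llvm-base"], "paths": [{"spec": "clang@14"}]},
    "llvm-base": {"paths": [{"spec": "llvm-base@14"}]},
}

def read_detection_tests_sketch(pkg_name):
    root = files[pkg_name]
    result = {"paths": list(root.get("paths", []))}
    queue = collections.deque(root.get("includes", []))
    seen = set(queue)
    while queue:
        content = files[queue.popleft()]  # the real code raises FileNotFoundError with context
        result["paths"].extend(content.get("paths", []))
        for inc in content.get("includes", []):
            if inc not in seen:
                seen.add(inc)
                queue.append(inc)
    return result

print(read_detection_tests_sketch("llvm"))  # paths from llvm, clang and llvm-base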

View File

@@ -38,14 +38,12 @@ class OpenMpi(Package):
import llnl.util.lang
import llnl.util.tty.color
import spack.deptypes as dt
import spack.error
import spack.patch
import spack.spec
import spack.url
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency
from spack.dependency import Dependency, canonical_deptype, default_deptype
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -409,7 +407,10 @@ def version(
def _execute_version(pkg, ver, **kwargs):
if (
(any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
any(
s in kwargs
for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
)
and hasattr(pkg, "has_code")
and not pkg.has_code
):
@@ -437,7 +438,7 @@ def _execute_version(pkg, ver, **kwargs):
pkg.versions[version] = kwargs
def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
when_spec = make_when_spec(when)
if not when_spec:
return
@@ -448,7 +449,7 @@ def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
if pkg.name == dep_spec.name:
raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)
depflag = dt.canonicalize(type)
type = canonical_deptype(type)
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
# call this patches here for clarity -- we want patch to be a list,
@@ -478,12 +479,12 @@ def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
# this is where we actually add the dependency to this package
if when_spec not in conditions:
dependency = Dependency(pkg, dep_spec, depflag=depflag)
dependency = Dependency(pkg, dep_spec, type=type)
conditions[when_spec] = dependency
else:
dependency = conditions[when_spec]
dependency.spec.constrain(dep_spec, deps=False)
dependency.depflag |= depflag
dependency.type |= set(type)
# apply patches to the dependency
for execute_patch in patches:
@@ -526,7 +527,7 @@ def _execute_conflicts(pkg):
@directive(("dependencies"))
def depends_on(spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
def depends_on(spec, when=None, type=default_deptype, patches=None):
"""Creates a dict of deps with specs defining when they apply.
Args:
@@ -759,7 +760,7 @@ def _execute_variant(pkg):
when_spec = make_when_spec(when)
when_specs = [when_spec]
if not re.match(spack.spec.IDENTIFIER_RE, name):
if not re.match(spack.spec.identifier_re, name):
directive = "variant"
msg = "Invalid variant name in {0}: '{1}'"
raise DirectiveError(directive, msg.format(pkg.name, name))

View File

@@ -120,8 +120,10 @@ def write_host_environment(self, spec):
versioning. We use it in the case that an analysis later needs to
easily access this information.
"""
from spack.util.environment import get_host_environment_metadata
env_file = self.env_metadata_path(spec)
environ = spack.spec.get_host_environment_metadata()
environ = get_host_environment_metadata()
with open(env_file, "w") as fd:
sjson.dump(environ, fd)

View File

@@ -12,7 +12,6 @@
from enum import Enum
from typing import List, Optional
import spack.deptypes as dt
import spack.environment.environment as ev
import spack.spec
import spack.traverse as traverse
@@ -37,9 +36,7 @@ def from_string(s: str) -> "UseBuildCache":
def _deptypes(use_buildcache: UseBuildCache):
"""What edges should we follow for a given node? If it's a cache-only
node, then we can drop build type deps."""
return (
dt.LINK | dt.RUN if use_buildcache == UseBuildCache.ONLY else dt.BUILD | dt.LINK | dt.RUN
)
return ("link", "run") if use_buildcache == UseBuildCache.ONLY else ("build", "link", "run")
class DepfileNode:
@@ -72,13 +69,13 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
self.adjacency_list: List[DepfileNode] = []
self.pkg_buildcache = pkg_buildcache
self.deps_buildcache = deps_buildcache
self.depflag_root = _deptypes(pkg_buildcache)
self.depflag_deps = _deptypes(deps_buildcache)
self.deptypes_root = _deptypes(pkg_buildcache)
self.deptypes_deps = _deptypes(deps_buildcache)
def neighbors(self, node):
"""Produce a list of spec to follow from node"""
depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))
deptypes = self.deptypes_root if node.depth == 0 else self.deptypes_deps
return traverse.sort_edges(node.edge.spec.edges_to_dependencies(deptype=deptypes))
def accept(self, node):
self.adjacency_list.append(

View File

@@ -28,7 +28,6 @@
import spack.compilers
import spack.concretize
import spack.config
import spack.deptypes as dt
import spack.error
import spack.fetch_strategy
import spack.hash_types as ht
@@ -404,7 +403,7 @@ def _write_yaml(data, str_or_file):
def _eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
valid_variables = spack.spec.get_host_environment()
valid_variables = spack.util.environment.get_host_environment()
valid_variables.update({"re": re, "env": os.environ})
return eval(string, valid_variables)
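A minimal sketch of the restricted-eval pattern used by _eval_conditional, with a hand-built scope (the real scope comes from the host environment and is passed as the globals of eval, exactly as here):

import os
import re

def eval_conditional_sketch(string):
    # Hypothetical host facts; the real code derives these from the platform.
    valid_variables = {"platform": "linux", "target": "x86_64"}
    valid_variables.update({"re": re, "env": os.environ})
    return eval(string, valid_variables)

print(eval_conditional_sketch("platform == 'linux'"))                   # True
print(eval_conditional_sketch("re.match(r'x86', target) is not None"))  # True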
@@ -1396,10 +1395,7 @@ def _concretize_together_where_possible(
result_by_user_spec = {}
solver = spack.solver.asp.Solver()
allow_deprecated = spack.config.get("config:deprecated", False)
for result in solver.solve_in_rounds(
specs_to_concretize, tests=tests, allow_deprecated=allow_deprecated
):
for result in solver.solve_in_rounds(specs_to_concretize, tests=tests):
result_by_user_spec.update(result.specs_by_input)
result = []
@@ -1508,7 +1504,7 @@ def _concretize_separately(self, tests=False):
start = time.time()
max_processes = min(
len(arguments), # Number of specs
spack.util.cpus.determine_number_of_jobs(parallel=True),
spack.config.get("config:build_jobs"), # Cap on build jobs
)
# TODO: revisit this print as soon as darwin is parallel too
@@ -1540,13 +1536,13 @@ def _concretize_separately(self, tests=False):
for h in self.specs_by_hash:
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
for node in computed_spec.traverse():
test_edges = node.edges_to_dependencies(depflag=dt.TEST)
test_edges = node.edges_to_dependencies(deptype="test")
for current_edge in test_edges:
test_dependency = current_edge.spec
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
)
results = [
@@ -2062,7 +2058,7 @@ def matching_spec(self, spec):
# If multiple root specs match, it is assumed that the abstract
# spec will most-succinctly summarize the difference between them
# (and the user can enter one of these to disambiguate)
fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
fmt_str = "{hash:7} " + spack.spec.default_format
color = clr.get_color_when()
match_strings = [
f"Root spec {abstract.format(color=color)}\n {concrete.format(fmt_str, color=color)}"
@@ -2194,7 +2190,7 @@ def _read_lockfile_dict(self, d):
name, data = reader.name_and_data(node_dict)
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
specs_by_hash[lockfile_key]._add_dependency(
specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
)
# Traverse the root specs one at a time in the order they appear.
@@ -2370,7 +2366,7 @@ def display_specs(concretized_specs):
def _tree_to_display(spec):
return spec.tree(
recurse_dependencies=True,
format=spack.spec.DISPLAY_FORMAT,
format=spack.spec.display_format,
status_fn=spack.spec.Spec.install_status,
hashlen=7,
hashes=True,
@@ -2668,26 +2664,6 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
self.yaml_content = with_defaults_added
self.changed = False
def _all_matches(self, user_spec: str) -> List[str]:
"""Maps the input string to the first equivalent user spec in the manifest,
and returns it.
Args:
user_spec: user spec to be found
Raises:
ValueError: if no equivalent match is found
"""
result = []
for yaml_spec_str in self.pristine_configuration["specs"]:
if Spec(yaml_spec_str) == Spec(user_spec):
result.append(yaml_spec_str)
if not result:
raise ValueError(f"cannot find a spec equivalent to {user_spec}")
return result
def add_user_spec(self, user_spec: str) -> None:
"""Appends the user spec passed as input to the list of root specs.
@@ -2708,9 +2684,8 @@ def remove_user_spec(self, user_spec: str) -> None:
SpackEnvironmentError: when the user spec is not in the list
"""
try:
for key in self._all_matches(user_spec):
self.pristine_configuration["specs"].remove(key)
self.configuration["specs"].remove(key)
self.pristine_configuration["specs"].remove(user_spec)
self.configuration["specs"].remove(user_spec)
except ValueError as e:
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
raise SpackEnvironmentError(msg) from e

View File

@@ -43,7 +43,7 @@ def activate_header(env, shell, prompt=None):
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "$Env:SPACK_ENV='%s'\n" % env.path
cmds += "$Env:SPACK_ENV=%s\n" % env.path
else:
if "color" in os.getenv("TERM", "") and prompt:
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
@@ -82,7 +82,7 @@ def deactivate_header(shell):
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "Set-Item -Path Env:SPACK_ENV\n"
cmds += "Remove-Item Env:SPACK_ENV"
else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"

View File

@@ -128,7 +128,3 @@ def __init__(self, provided, required, constraint_type):
self.provided = provided
self.required = required
self.constraint_type = constraint_type
class FetchError(SpackError):
"""Superclass for fetch-related errors."""

View File

@@ -31,11 +31,9 @@
import urllib.parse
from typing import List, Optional
import llnl.url
import llnl.util
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.string import comma_and, quote
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
from llnl.util.symlink import symlink
@@ -48,8 +46,9 @@
import spack.util.web as web_util
import spack.version
import spack.version.git_ref_lookup
from spack.util.compression import decompressor_for
from spack.util.compression import decompressor_for, extension_from_path
from spack.util.executable import CommandNotFoundError, which
from spack.util.string import comma_and, quote
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
@@ -401,7 +400,7 @@ def _fetch_curl(self, url):
try:
web_util.check_curl_code(curl.returncode)
except spack.error.FetchError as err:
except web_util.FetchError as err:
raise spack.fetch_strategy.FailedDownloadError(url, str(err))
self._check_headers(headers)
@@ -442,7 +441,7 @@ def expand(self):
# TODO: replace this by mime check.
if not self.extension:
self.extension = llnl.url.determine_url_file_extension(self.url)
self.extension = spack.url.determine_url_file_extension(self.url)
if self.stage.expanded:
tty.debug("Source already staged to %s" % self.stage.source_path)
@@ -571,7 +570,7 @@ def expand(self):
@_needs_stage
def archive(self, destination, **kwargs):
assert llnl.url.extension_from_path(destination) == "tar.gz"
assert extension_from_path(destination) == "tar.gz"
assert self.stage.source_path.startswith(self.stage.path)
tar = which("tar", required=True)
@@ -734,11 +733,7 @@ def version_from_git(git_exe):
@property
def git(self):
if not self._git:
try:
self._git = spack.util.git.git(required=True)
except CommandNotFoundError as exc:
tty.error(str(exc))
raise
self._git = spack.util.git.git()
# Disable advice for a quieter fetch
# https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
@@ -1294,7 +1289,7 @@ def fetch(self):
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "s3":
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
raise web_util.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
tty.debug("Fetching {0}".format(self.url))
@@ -1341,7 +1336,7 @@ def fetch(self):
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "gs":
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
raise web_util.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
tty.debug("Fetching {0}".format(self.url))
@@ -1435,7 +1430,7 @@ def from_kwargs(**kwargs):
on attribute names (e.g., ``git``, ``hg``, etc.)
Raises:
spack.error.FetchError: If no ``fetch_strategy`` matches the args.
spack.util.web.FetchError: If no ``fetch_strategy`` matches the args.
"""
for fetcher in all_strategies:
if fetcher.matches(kwargs):
@@ -1542,7 +1537,7 @@ def for_package_version(pkg, version=None):
# if it's a commit, we must use a GitFetchStrategy
if isinstance(version, spack.version.GitVersion):
if not hasattr(pkg, "git"):
raise spack.error.FetchError(
raise web_util.FetchError(
f"Cannot fetch git version for {pkg.name}. Package has no 'git' attribute"
)
# Populate the version with comparisons to other commits
@@ -1692,11 +1687,11 @@ def destroy(self):
shutil.rmtree(self.root, ignore_errors=True)
class NoCacheError(spack.error.FetchError):
class NoCacheError(web_util.FetchError):
"""Raised when there is no cached archive for a package."""
class FailedDownloadError(spack.error.FetchError):
class FailedDownloadError(web_util.FetchError):
"""Raised when a download fails."""
def __init__(self, url, msg=""):
@@ -1704,23 +1699,23 @@ def __init__(self, url, msg=""):
self.url = url
class NoArchiveFileError(spack.error.FetchError):
class NoArchiveFileError(web_util.FetchError):
"""Raised when an archive file is expected but none exists."""
class NoDigestError(spack.error.FetchError):
class NoDigestError(web_util.FetchError):
"""Raised after attempt to checksum when URL has no digest."""
class ExtrapolationError(spack.error.FetchError):
class ExtrapolationError(web_util.FetchError):
"""Raised when we can't extrapolate a version for a package."""
class FetcherConflict(spack.error.FetchError):
class FetcherConflict(web_util.FetchError):
"""Raised for packages with invalid fetch attributes."""
class InvalidArgsError(spack.error.FetchError):
class InvalidArgsError(web_util.FetchError):
"""Raised when a version can't be deduced from a set of arguments."""
def __init__(self, pkg=None, version=None, **args):
@@ -1733,11 +1728,11 @@ def __init__(self, pkg=None, version=None, **args):
super().__init__(msg, long_msg)
class ChecksumError(spack.error.FetchError):
class ChecksumError(web_util.FetchError):
"""Raised when archive fails to checksum."""
class NoStageError(spack.error.FetchError):
class NoStageError(web_util.FetchError):
"""Raised when fetch operations are called before set_stage()."""
def __init__(self, method):

View File

@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
print()
header = "%s{%s} / %s{%s}" % (
spack.spec.ARCHITECTURE_COLOR,
spack.spec.architecture_color,
architecture,
spack.spec.COMPILER_COLOR,
spack.spec.compiler_color,
compiler,
)
tty.hline(colorize(header), char="-")

View File

@@ -0,0 +1,28 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import urllib.parse
import urllib.response
from urllib.error import URLError
from urllib.request import BaseHandler
def gcs_open(req, *args, **kwargs):
"""Open a reader stream to a blob object on GCS"""
import spack.util.gcs as gcs_util
url = urllib.parse.urlparse(req.get_full_url())
gcsblob = gcs_util.GCSBlob(url)
if not gcsblob.exists():
raise URLError("GCS blob {0} does not exist".format(gcsblob.blob_path))
stream = gcsblob.get_blob_byte_stream()
headers = gcsblob.get_blob_headers()
return urllib.response.addinfourl(stream, headers, url)
class GCSHandler(BaseHandler):
def gs_open(self, req):
return gcs_open(req)

View File

@@ -38,12 +38,11 @@
"""
import enum
import sys
from typing import List, Optional, Set, TextIO, Tuple
from typing import List, Optional, Set, TextIO, Tuple, Union
import llnl.util.tty.color
import spack.deptypes as dt
import spack.repo
import spack.dependency
import spack.spec
import spack.tengine
@@ -79,7 +78,7 @@ def __init__(self):
self.node_character = "o"
self.debug = False
self.indent = 0
self.depflag = dt.ALL
self.deptype = spack.dependency.all_deptypes
# These are colors in the order they'll be used for edges.
# See llnl.util.tty.color for details on color characters.
@@ -327,7 +326,7 @@ def write(self, spec, color=None, out=None):
nodes_in_topological_order = [
edge.spec
for edge in spack.traverse.traverse_edges_topo(
[spec], direction="children", deptype=self.depflag
[spec], direction="children", deptype=self.deptype
)
]
nodes_in_topological_order.reverse()
@@ -425,7 +424,7 @@ def write(self, spec, color=None, out=None):
# Replace node with its dependencies
self._frontier.pop(i)
edges = sorted(node.edges_to_dependencies(depflag=self.depflag), reverse=True)
edges = sorted(node.edges_to_dependencies(deptype=self.deptype), reverse=True)
if edges:
deps = [e.spec.dag_hash() for e in edges]
self._connect_deps(i, deps, "new-deps") # anywhere.
@@ -434,14 +433,13 @@ def write(self, spec, color=None, out=None):
self._collapse_line(i)
def graph_ascii(
spec, node="o", out=None, debug=False, indent=0, color=None, depflag: dt.DepFlag = dt.ALL
):
def graph_ascii(spec, node="o", out=None, debug=False, indent=0, color=None, deptype="all"):
graph = AsciiGraph()
graph.debug = debug
graph.indent = indent
graph.node_character = node
graph.depflag = depflag
if deptype:
graph.deptype = spack.dependency.canonical_deptype(deptype)
graph.write(spec, color=color, out=out)
@@ -515,7 +513,7 @@ def __init__(self):
def visit(self, edge):
if edge.parent is None:
for node in spack.traverse.traverse_nodes([edge.spec], deptype=dt.LINK | dt.RUN):
for node in spack.traverse.traverse_nodes([edge.spec], deptype=("link", "run")):
self.main_unified_space.add(node.dag_hash())
super().visit(edge)
@@ -531,38 +529,40 @@ def edge_entry(self, edge):
return (
edge.parent.dag_hash(),
edge.spec.dag_hash(),
f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\"]",
f"[color=\"{':'.join(colormap[x] for x in edge.deptypes)}\"]",
)
def _static_edges(specs, depflag):
def _static_edges(specs, deptype):
for spec in specs:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)
for parent_name, dependencies in possible.items():
for dependency_name in dependencies:
yield spack.spec.DependencySpec(
spack.spec.Spec(parent_name),
spack.spec.Spec(dependency_name),
depflag=depflag,
deptypes=deptype,
virtuals=(),
)
def static_graph_dot(
specs: List[spack.spec.Spec], depflag: dt.DepFlag = dt.ALL, out: Optional[TextIO] = None
specs: List[spack.spec.Spec],
deptype: Optional[Union[str, Tuple[str, ...]]] = "all",
out: Optional[TextIO] = None,
):
"""Static DOT graph with edges to all possible dependencies.
Args:
specs: abstract specs to be represented
depflag: dependency types to consider
deptype: dependency types to consider
out: optional output stream. If None sys.stdout is used
"""
out = out or sys.stdout
builder = StaticDag()
for edge in _static_edges(specs, depflag):
for edge in _static_edges(specs, deptype):
builder.visit(edge)
out.write(builder.render())
@@ -570,7 +570,7 @@ def static_graph_dot(
def graph_dot(
specs: List[spack.spec.Spec],
builder: Optional[DotGraphBuilder] = None,
depflag: dt.DepFlag = dt.ALL,
deptype: spack.dependency.DependencyArgument = "all",
out: Optional[TextIO] = None,
):
"""DOT graph of the concrete specs passed as input.
@@ -578,7 +578,7 @@ def graph_dot(
Args:
specs: specs to be represented
builder: builder to use to render the graph
depflag: dependency types to consider
deptype: dependency types to consider
out: optional output stream. If None sys.stdout is used
"""
if not specs:
@@ -587,9 +587,10 @@ def graph_dot(
if out is None:
out = sys.stdout
deptype = spack.dependency.canonical_deptype(deptype)
builder = builder or SimpleDAG()
for edge in spack.traverse.traverse_edges(
specs, cover="edges", order="breadth", deptype=depflag
specs, cover="edges", order="breadth", deptype=deptype
):
builder.visit(edge)

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Definitions that control how Spack creates Spec hashes."""
import spack.deptypes as dt
import spack.dependency as dp
import spack.repo
hashes = []
@@ -20,8 +20,8 @@ class SpecHashDescriptor:
We currently use different hashes for different use cases."""
def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
self.depflag = depflag
def __init__(self, deptype, package_hash, name, override=None):
self.deptype = dp.canonical_deptype(deptype)
self.package_hash = package_hash
self.name = name
hashes.append(self)
@@ -39,12 +39,12 @@ def __call__(self, spec):
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")
dag_hash = SpecHashDescriptor(deptype=("build", "link", "run"), package_hash=True, name="hash")
#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
deptype=("build", "link", "run", "test"), package_hash=True, name="process_hash"
)
@@ -56,7 +56,7 @@ def _content_hash_override(spec):
#: Package hash used as part of dag hash
package_hash = SpecHashDescriptor(
depflag=0, package_hash=True, name="package_hash", override=_content_hash_override
deptype=(), package_hash=True, name="package_hash", override=_content_hash_override
)
@@ -64,10 +64,10 @@ def _content_hash_override(spec):
# spec formats
full_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="full_hash"
deptype=("build", "link", "run"), package_hash=True, name="full_hash"
)
build_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=False, name="build_hash"
deptype=("build", "link", "run"), package_hash=False, name="build_hash"
)

View File

@@ -79,7 +79,8 @@ class ElfFilesWithRPathVisitor(BaseDirectoryVisitor):
"""Visitor that collects all elf files that have an rpath"""
def __init__(self):
# Keep track of what hardlinked files we've already visited.
# Set of (ino, dev) identifiers: one entry per file, so that hardlinked
# copies are not parsed multiple times.
self.visited = set()
def visit_file(self, root, rel_path, depth):
@@ -88,10 +89,10 @@ def visit_file(self, root, rel_path, depth):
identifier = (s.st_ino, s.st_dev)
# We're hitting a hardlink or symlink of an excluded lib, no need to parse.
if s.st_nlink > 1:
if identifier in self.visited:
return
self.visited.add(identifier)
if identifier in self.visited:
return
self.visited.add(identifier)
result = drop_redundant_rpaths(filepath)
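The two orderings above agree on the core idea: hardlinked copies are collapsed by their (st_ino, st_dev) identifier so each file is parsed once; they differ in whether files with a single link are recorded at all. A minimal sketch of the dedup, with a small demo:

import os
import tempfile

def iter_unique_files(paths):
    """Yield each distinct file once, collapsing hardlinks via (inode, device)."""
    visited = set()
    for path in paths:
        s = os.lstat(path)
        identifier = (s.st_ino, s.st_dev)
        # Tracking only when st_nlink > 1 keeps the set small: a file with a
        # single link can never be reached again under another name.
        if s.st_nlink > 1:
            if identifier in visited:
                continue
            visited.add(identifier)
        yield path

with tempfile.TemporaryDirectory() as d:
    a = os.path.join(d, "a")
    open(a, "w").close()
    b = os.path.join(d, "b")
    os.link(a, b)  # hardlink: same inode, two names
    print(len(list(iter_unique_files([a, b]))))  # 1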

View File

@@ -17,7 +17,6 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.lang import nullcontext
from llnl.util.tty.color import colorize
@@ -27,6 +26,7 @@
from spack.installer import InstallError
from spack.spec import Spec
from spack.util.prefix import Prefix
from spack.util.string import plural
#: Stand-alone test failure info type
TestFailureType = Tuple[BaseException, str]

View File

@@ -50,7 +50,6 @@
import spack.compilers
import spack.config
import spack.database
import spack.deptypes as dt
import spack.error
import spack.hooks
import spack.mirror
@@ -91,16 +90,6 @@
STATUS_REMOVED = "removed"
def _write_timer_json(pkg, timer, cache):
extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
try:
with open(pkg.times_log_path, "w") as timelog:
timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:
tty.debug(str(e))
return
class InstallAction:
#: Don't perform an install
NONE = 0
@@ -314,7 +303,7 @@ def _packages_needed_to_bootstrap_compiler(
# mark compiler as depended-on by the packages that use it
for pkg in pkgs:
dep._dependents.add(
spack.spec.DependencySpec(pkg.spec, dep, depflag=dt.BUILD, virtuals=())
spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
)
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
@@ -410,8 +399,6 @@ def _install_from_cache(
return False
t.stop()
tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
_write_timer_json(pkg, t, True)
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
_print_installed_pkg(pkg.spec.prefix)
spack.hooks.post_install(pkg.spec, explicit)
@@ -494,7 +481,7 @@ def _process_binary_cache_tarball(
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(
pkg.spec, download_result, unsigned=unsigned, force=False, timer=timer
pkg.spec, download_result, unsigned=unsigned, force=False
)
pkg.installed_from_binary_cache = True
@@ -605,9 +592,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
if node is spec:
spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
elif source_pkg_dir:
fs.install_tree(
source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
)
fs.install_tree(source_pkg_dir, dest_pkg_dir)
def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
@@ -789,9 +774,10 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# Save off dependency package ids for quick checks since traversals
# are not able to return full dependents for all packages across
# environment specs.
deptypes = self.get_deptypes(self.pkg)
self.dependencies = set(
package_id(d.package)
for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
for d in self.pkg.spec.dependencies(deptype=deptypes)
if package_id(d.package) != self.pkg_id
)
@@ -830,7 +816,7 @@ def _add_default_args(self) -> None:
]:
_ = self.install_args.setdefault(arg, default)
def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]:
"""Determine the required dependency types for the associated package.
Args:
@@ -839,7 +825,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
Returns:
tuple: required dependency type(s) for the package
"""
depflag = dt.LINK | dt.RUN
deptypes = ["link", "run"]
include_build_deps = self.install_args.get("include_build_deps")
if self.pkg_id == package_id(pkg):
@@ -847,15 +833,14 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
else:
cache_only = self.install_args.get("dependencies_cache_only")
# Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested.
if include_build_deps or not (
cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
):
depflag |= dt.BUILD
# Include build dependencies if pkg is not installed and cache_only
# is False, or if build dependencies are explicitly called for
# by include_build_deps.
if include_build_deps or not (cache_only or pkg.spec.installed):
deptypes.append("build")
if self.run_tests(pkg):
depflag |= dt.TEST
return depflag
deptypes.append("test")
return tuple(sorted(deptypes))
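Both versions of the method start from link and run dependencies and widen conditionally; the newer one returns a bitmask, the older a sorted tuple. A minimal sketch of the same decision logic over plain strings:

def required_deptypes(cache_only, include_build_deps, run_tests):
    deptypes = {"link", "run"}
    # Build deps are needed when building from source or when explicitly requested.
    if include_build_deps or not cache_only:
        deptypes.add("build")
    if run_tests:
        deptypes.add("test")
    return tuple(sorted(deptypes))

print(required_deptypes(cache_only=True, include_build_deps=False, run_tests=False))
# ('link', 'run')
print(required_deptypes(cache_only=False, include_build_deps=False, run_tests=True))
# ('build', 'link', 'run', 'test')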
def has_dependency(self, dep_id) -> bool:
"""Returns ``True`` if the package id represents a known dependency
@@ -888,8 +873,9 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec
spec = self.spec
if visited is None:
visited = set()
deptype = self.get_deptypes(spec.package)
for dep in spec.dependencies(deptype=self.get_depflags(spec.package)):
for dep in spec.dependencies(deptype=deptype):
hash = dep.dag_hash()
if hash in visited:
continue
@@ -973,9 +959,10 @@ def __init__(
# Be consistent wrt use of dependents and dependencies. That is,
# if use traverse for transitive dependencies, then must remove
# transitive dependents on failure.
deptypes = self.request.get_deptypes(self.pkg)
self.dependencies = set(
package_id(d.package)
for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
for d in self.pkg.spec.dependencies(deptype=deptypes)
if package_id(d.package) != self.pkg_id
)
@@ -1329,6 +1316,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
"""
Check the database and leftover installation directories/files and
prepare for a new install attempt for an uninstalled package.
Preparation includes cleaning up installation and stage directories
and ensuring the database is up-to-date.
@@ -2104,6 +2092,7 @@ def install(self) -> None:
# another process has a write lock so must be (un)installing
# the spec (or that process is hung).
ltype, lock = self._ensure_locked("read", pkg)
# Requeue the spec if we cannot get at least a read lock so we
# can check the status presumably established by another process
# -- failed, installed, or uninstalled -- on the next pass.
@@ -2383,7 +2372,8 @@ def run(self) -> bool:
# Stop the timer and save results
self.timer.stop()
_write_timer_json(self.pkg, self.timer, False)
with open(self.pkg.times_log_path, "w") as timelog:
self.timer.write_json(timelog)
print_install_test_log(self.pkg)
_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
@@ -2404,9 +2394,7 @@ def _install_source(self) -> None:
src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
tty.debug("{0} Copying source to {1}".format(self.pre, src_target))
fs.install_tree(
pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
)
fs.install_tree(pkg.stage.source_path, src_target)
def _real_install(self) -> None:
import spack.builder

View File

@@ -30,6 +30,7 @@
import llnl.util.tty.color as color
from llnl.util.tty.log import log_output
import spack
import spack.cmd
import spack.config
import spack.environment as ev
@@ -50,7 +51,7 @@
stat_names = pstats.Stats.sort_arg_dict_default
#: top-level aliases for Spack commands
aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}
aliases = {"rm": "remove"}
#: help levels in order of detail (i.e., number of commands shown)
levels = ["short", "long"]
@@ -715,7 +716,7 @@ def __call__(self, *argv, **kwargs):
out = io.StringIO()
try:
with log_output(out, echo=True):
with log_output(out):
self.returncode = _invoke_command(self.command, self.parser, args, unknown)
except SystemExit as e:
@@ -774,7 +775,7 @@ def _profile_wrapper(command, parser, args, unknown_args):
pr.disable()
# print out profile stats.
stats = pstats.Stats(pr, stream=sys.stderr)
stats = pstats.Stats(pr)
stats.sort_stats(*sortby)
stats.print_stats(nlines)

View File

@@ -20,7 +20,6 @@
import urllib.parse
from typing import Optional, Union
import llnl.url
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -30,6 +29,7 @@
import spack.fetch_strategy as fs
import spack.mirror
import spack.spec
import spack.url as url
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
@@ -375,7 +375,7 @@ def _determine_extension(fetcher):
if isinstance(fetcher, fs.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = llnl.url.determine_url_file_extension(fetcher.url)
ext = url.determine_url_file_extension(fetcher.url)
if ext:
# Remove any leading dots

View File

@@ -178,7 +178,7 @@ def merge_config_rules(configuration, spec):
if spec.satisfies(constraint):
if hasattr(constraint, "override") and constraint.override:
spec_configuration = {}
update_dictionary_extending_lists(spec_configuration, copy.deepcopy(action))
update_dictionary_extending_lists(spec_configuration, action)
# Transform keywords for dependencies or prerequisites into a list of spec

View File

@@ -142,7 +142,6 @@ def __init__(self):
"11": "bigsur",
"12": "monterey",
"13": "ventura",
"14": "sonoma",
}
version = macos_version()

View File

@@ -67,7 +67,7 @@
from spack.build_systems.waf import WafPackage
from spack.build_systems.xorg import XorgPackage
from spack.builder import run_after, run_before
from spack.deptypes import ALL_TYPES as all_deptypes
from spack.dependency import all_deptypes
from spack.directives import *
from spack.install_test import (
SkipTest,
@@ -96,7 +96,6 @@
on_package_attributes,
)
from spack.spec import InvalidSpecDetected, Spec
from spack.util.cpus import determine_number_of_jobs
from spack.util.executable import *
from spack.variant import (
any_combination_of,

Some files were not shown because too many files have changed in this diff.