Compare commits


1 Commit

Author SHA1 Message Date
Gregory Becker
54e5439dd6 Spec.format: conditional format strings 2023-08-22 11:22:36 -07:00
600 changed files with 4276 additions and 11781 deletions

View File

@@ -22,7 +22,7 @@ jobs:
matrix:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: ${{inputs.python_version}}

View File

@@ -24,7 +24,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup repo
@@ -158,7 +158,7 @@ jobs:
run: |
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
run: |
brew install tree
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap clingo
run: |
set -ex
@@ -204,7 +204,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup repo
@@ -247,7 +247,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh

View File

@@ -56,7 +56,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Set Container Tag Normal (Nightly)
run: |
@@ -86,7 +86,7 @@ jobs:
fi
- name: Upload Dockerfile
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
with:
name: dockerfiles
path: dockerfiles
@@ -95,7 +95,7 @@ jobs:
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # @v1
uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
- name: Log in to GitHub Container Registry
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
@@ -112,7 +112,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@0a97817b6ade9f46837855d676c4cca3a2471fc9 # @v2
uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}

View File

@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0

View File

@@ -14,7 +14,7 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1

View File

@@ -47,7 +47,7 @@ jobs:
on_develop: false
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -94,7 +94,7 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -133,7 +133,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -152,7 +152,7 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -187,7 +187,7 @@ jobs:
matrix:
python-version: ["3.10"]
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2

View File

@@ -18,7 +18,7 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -68,7 +68,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Setup repo and non-root user
run: |
git --version

View File

@@ -15,7 +15,7 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -39,7 +39,7 @@ jobs:
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -63,7 +63,7 @@ jobs:
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -75,5 +75,6 @@ jobs:
- name: Build Test
run: |
spack compiler find
spack -d external find cmake ninja
spack external find cmake
spack external find ninja
spack -d install abseil-cpp

View File

@@ -2,26 +2,24 @@
## Supported Versions
We provide security updates for `develop` and for the last two
stable (`0.x`) release series of Spack. Security updates will be
made available as patch (`0.x.1`, `0.x.2`, etc.) releases.
We provide security updates for the following releases.
For more on Spack's release structure, see
[`README.md`](https://github.com/spack/spack#releases).
| Version | Supported |
| ------- | ------------------ |
| develop | :white_check_mark: |
| 0.19.x | :white_check_mark: |
| 0.18.x | :white_check_mark: |
## Reporting a Vulnerability
You can report a vulnerability using GitHub's private reporting
feature:
To report a vulnerability or other security
issue, email maintainers@spack.io.
1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
2. Click "Report a vulnerability" in the upper right corner of that page.
3. Fill out the form and submit your draft security advisory.
More details are available in
[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
You can expect to hear back about security issues within two days.
If your security issue is accepted, we will do our best to release
a fix within a week. If fixing the issue will take longer than
this, we will discuss timeline options with you.
You can expect to hear back within two days.
If your security issue is accepted, we will do
our best to release a fix within a week. If
fixing the issue will take longer than this,
we will discuss timeline options with you.

View File

@@ -14,7 +14,7 @@
::
@echo off
set spack="%SPACK_ROOT%"\bin\spack
set spack=%SPACK_ROOT%\bin\spack
::#######################################################################
:: This is a wrapper around the spack command that forwards calls to

View File

@@ -39,26 +39,12 @@ function Read-SpackArgs {
return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
}
function Set-SpackEnv {
# This method is responsible
# for processing the return from $(spack <command>)
# which is returned as a System.Object[] containing
# a list of env commands
# Invoke-Expression can only handle one command at a time
# so we iterate over the list to invoke the env modification
# expressions one at a time
foreach($envop in $args[0]){
Invoke-Expression $envop
}
}
function Invoke-SpackCD {
if (Compare-CommonArgs $SpackSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" cd -h
python $Env:SPACK_ROOT/bin/spack cd -h
}
else {
$LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
$LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
if (($NULL -ne $LOC)){
if ( Test-Path -Path $LOC){
Set-Location $LOC
@@ -75,7 +61,7 @@ function Invoke-SpackCD {
function Invoke-SpackEnv {
if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
python "$Env:SPACK_ROOT/bin/spack" env -h
python $Env:SPACK_ROOT/bin/spack env -h
}
else {
$SubCommandSubCommand = $SpackSubCommandArgs[0]
@@ -83,46 +69,46 @@ function Invoke-SpackEnv {
switch ($SubCommandSubCommand) {
"activate" {
if (Compare-CommonArgs $SubCommandSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
}
elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
}
elseif (!$SubCommandSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
}
else {
$SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
Set-SpackEnv $SpackEnv
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
$ExecutionContext.InvokeCommand($SpackEnv)
}
}
"deactivate" {
if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
python"$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
}
elseif($SubCommandSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
python $Env:SPACK_ROOT/bin/spack env deactivate -h
}
else {
$SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
Set-SpackEnv $SpackEnv
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
$ExecutionContext.InvokeCommand($SpackEnv)
}
}
default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}
}
}
function Invoke-SpackLoad {
if (Compare-CommonArgs $SpackSubCommandArgs) {
python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
}
elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
}
else {
$SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
Set-SpackEnv $SpackEnv
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
$ExecutionContext.InvokeCommand($SpackEnv)
}
}
@@ -130,7 +116,7 @@ function Invoke-SpackLoad {
$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args
if (Compare-CommonArgs $SpackCMD_params) {
python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
exit $LASTEXITCODE
}
@@ -142,5 +128,5 @@ switch($SpackSubCommand)
"env" {Invoke-SpackEnv}
"load" {Invoke-SpackLoad}
"unload" {Invoke-SpackLoad}
default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}

View File

@@ -9,32 +9,9 @@
Bundle
------
``BundlePackage`` represents a set of packages that are expected to work
well together, such as a collection of commonly used software libraries.
The associated software is specified as dependencies.
If it makes sense, variants, conflicts, and requirements can be added to
the package. :ref:`Variants <variants>` ensure that common build options
are consistent across the packages supporting them. :ref:`Conflicts
and requirements <packaging_conflicts>` prevent attempts to build with known
bugs or limitations.
For example, if ``MyBundlePackage`` is known to only build on ``linux``,
it could use the ``require`` directive as follows:
.. code-block:: python
require("platform=linux", msg="MyBundlePackage only builds on linux")
Spack has a number of built-in bundle packages, such as:
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
``Libc`` is a virtual bundle package for the C standard library.
``BundlePackage`` represents a set of packages that are expected to work well
together, such as a collection of commonly used software libraries. The
associated software is specified as bundle dependencies.
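For illustration, a minimal bundle package might look like the following
sketch (the class name and dependency list are hypothetical, not taken from
the Spack repository):

.. code-block:: python

   class MySdk(BundlePackage):
       """Suite of libraries expected to work well together."""

       homepage = "https://example.com/my-sdk"

       # Bundle packages have no source, so versions carry no checksum.
       version("1.0")

       # The bundled software is expressed purely as dependencies.
       depends_on("zlib")
       depends_on("hdf5")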
^^^^^^^^

View File

@@ -214,7 +214,6 @@ def setup(sphinx):
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"),
("py:class", "spack.spec.InstallStatus"),
("py:class", "spack.spec.SpecfileReaderBase"),
("py:class", "spack.install_test.Pb"),
]

View File

@@ -1,113 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
==========================
Using External GPU Support
==========================
Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.
-----------------------------------
Using an External ROCm Installation
-----------------------------------
Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:
.. code-block:: yaml
packages:
all:
compiler: [rocmcc@=5.3.0]
variants: amdgpu_target=gfx90a
hip:
buildable: false
externals:
- spec: hip@5.3.0
prefix: /opt/rocm-5.3.0/hip
hsa-rocr-dev:
buildable: false
externals:
- spec: hsa-rocr-dev@5.3.0
prefix: /opt/rocm-5.3.0/
llvm-amdgpu:
buildable: false
externals:
- spec: llvm-amdgpu@5.3.0
prefix: /opt/rocm-5.3.0/llvm/
comgr:
buildable: false
externals:
- spec: comgr@5.3.0
prefix: /opt/rocm-5.3.0/
hipsparse:
buildable: false
externals:
- spec: hipsparse@5.3.0
prefix: /opt/rocm-5.3.0/
hipblas:
buildable: false
externals:
- spec: hipblas@5.3.0
prefix: /opt/rocm-5.3.0/
rocblas:
buildable: false
externals:
- spec: rocblas@5.3.0
prefix: /opt/rocm-5.3.0/
rocprim:
buildable: false
externals:
- spec: rocprim@5.3.0
prefix: /opt/rocm-5.3.0/rocprim/
This is in combination with the following compiler definition:
.. code-block:: yaml
compilers:
- compiler:
spec: rocmcc@=5.3.0
paths:
cc: /opt/rocm-5.3.0/bin/amdclang
cxx: /opt/rocm-5.3.0/bin/amdclang++
f77: null
fc: /opt/rocm-5.3.0/bin/amdflang
operating_system: rhel8
target: x86_64
This includes the following considerations:
- Each of the listed externals specifies ``buildable: false`` to force Spack
to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
packages, but not all of them, and furthermore not in a manner that
guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
require listing one of the subdirectories as a prefix.
-----------------------------------
Using an External CUDA Installation
-----------------------------------
CUDA is split into fewer components and is simpler to specify:
.. code-block:: yaml
packages:
all:
variants:
- cuda_arch=70
cuda:
buildable: false
externals:
- spec: cuda@11.0.2
prefix: /opt/cuda/cuda-11.0.2/
where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.

View File

@@ -77,7 +77,6 @@ or refer to the full manual below.
extensions
pipelines
signing
gpu_configuration
.. toctree::
:maxdepth: 2

View File

@@ -363,42 +363,6 @@ one of these::
If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
``nano``, and ``notepad``, in that order.
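A rough sketch of that resolution order (illustrative only, assuming for the
sake of the sketch that the variables checked are ``VISUAL`` and ``EDITOR``;
this is not Spack's actual implementation):

.. code-block:: python

   import os
   import shutil

   def choose_editor():
       # Explicitly configured editors take precedence.
       for var in ("VISUAL", "EDITOR"):
           value = os.environ.get(var)
           if value:
               return value
       # Otherwise fall back to the first editor found on PATH.
       for exe in ("vim", "vi", "emacs", "nano", "notepad"):
           if shutil.which(exe):
               return exe
       return None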
^^^^^^^^^^^^^^^^^
Bundling software
^^^^^^^^^^^^^^^^^
If you have a collection of software expected to work well together with
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
Examples where bundle packages can be useful include defining suites of
applications (e.g., `EcpProxyApps
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
These versioned packages primarily consist of dependencies on the associated
software packages. They can include :ref:`variants <variants>` to ensure
common build options are consistently applied to dependencies. Known build
failures, such as not building on a platform or when certain compilers or
variants are used, can be flagged with :ref:`conflicts <packaging_conflicts>`.
Build requirements, such as only building with specific compilers, can similarly
be flagged with :ref:`requires <packaging_conflicts>`.
The ``spack create --template bundle`` command will create a skeleton
``BundlePackage`` ``package.py`` for you:
.. code-block:: console
$ spack create --template bundle --name coolsdk
Now you can fill in the basic package documentation, version(s), and software
package dependencies along with any other relevant customizations.
.. note::
Remember that bundle packages have no software of their own so there
is nothing to download.
^^^^^^^^^^^^^^^^^^^^^^^^^
Non-downloadable software
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -646,16 +610,7 @@ add a line like this in the package class:
version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")
.. note::
By convention, we list versions in descending order, from newest to oldest.
.. note::
:ref:`Bundle packages <bundlepackage>` do not have source code so
there is nothing to fetch. Consequently, their version directives
consist solely of the version name (e.g., ``version("202309")``).
Versions should be listed in descending order, from newest to oldest.
^^^^^^^^^^^^^
Date Versions
@@ -2723,7 +2678,7 @@ Conflicts and requirements
--------------------------
Sometimes packages have known bugs, or limitations, that would prevent them
from building e.g. against other dependencies or with certain compilers. Spack
to build e.g. against other dependencies or with certain compilers. Spack
makes it possible to express such constraints with the ``conflicts`` directive.
Adding the following to a package:
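A minimal, hypothetical example of the directive:

.. code-block:: python

   conflicts("%gcc@:4.7", msg="requires a C++14-capable compiler")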
@@ -4818,17 +4773,17 @@ For example, running:
results in spack checking that the installation created the following **file**:
* ``self.prefix.bin.reframe``
* ``self.prefix/bin/reframe``
and the following **directories**:
* ``self.prefix.bin``
* ``self.prefix.config``
* ``self.prefix.docs``
* ``self.prefix.reframe``
* ``self.prefix.tutorials``
* ``self.prefix.unittests``
* ``self.prefix.cscs-checks``
* ``self.prefix/bin``
* ``self.prefix/config``
* ``self.prefix/docs``
* ``self.prefix/reframe``
* ``self.prefix/tutorials``
* ``self.prefix/unittests``
* ``self.prefix/cscs-checks``
If **any** of these paths are missing, then Spack considers the installation
to have failed.
@@ -4972,7 +4927,7 @@ installed executable. The check is implemented as follows:
@on_package_attributes(run_tests=True)
def check_list(self):
with working_dir(self.stage.source_path):
reframe = Executable(self.prefix.bin.reframe)
reframe = Executable(join_path(self.prefix, "bin", "reframe"))
reframe("-l")
.. warning::
@@ -5192,8 +5147,8 @@ embedded test parts.
for example in ["ex1", "ex2"]:
with test_part(
self,
f"test_example_{example}",
purpose=f"run installed {example}",
"test_example_{0}".format(example),
purpose="run installed {0}".format(example),
):
exe = which(join_path(self.prefix.bin, example))
exe()
@@ -5271,10 +5226,11 @@ Below illustrates using this feature to compile an example.
...
cxx = which(os.environ["CXX"])
cxx(
f"-L{self.prefix.lib}",
f"-I{self.prefix.include}",
f"{exe}.cpp",
"-o", exe
"-L{0}".format(self.prefix.lib),
"-I{0}".format(self.prefix.include),
"{0}.cpp".format(exe),
"-o",
exe
)
cxx_example = which(exe)
cxx_example()
@@ -5291,14 +5247,14 @@ Saving build-time files
We highly recommend re-using build-time test sources and pared down
input files for testing installed software. These files are easier
to keep synchronized with software capabilities since they reside
within the software's repository.
within the software's repository.
If that is not possible, you can add test-related files to the package
repository (see :ref:`adding custom files <cache_custom_files>`). It
will be important to maintain them so they work across listed or supported
versions of the package.
You can use the ``cache_extra_test_sources`` helper to copy directories
You can use the ``cache_extra_test_sources`` method to copy directories
and/or files from the source build stage directory to the package's
installation directory.
@@ -5306,15 +5262,10 @@ The signature for ``cache_extra_test_sources`` is:
.. code-block:: python
def cache_extra_test_sources(pkg, srcs):
where each argument has the following meaning:
* ``pkg`` is an instance of the package for the spec under test.
* ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and/or files needed for stand-alone testing.
def cache_extra_test_sources(self, srcs):
where ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and/or files needed for stand-alone testing.
The paths must be relative to the staged source directory. Contents of
subdirectories and files are copied to a special test cache subdirectory
of the installation prefix. They are automatically copied to the appropriate
@@ -5335,18 +5286,21 @@ and using ``foo.c`` in a test method is illustrated below.
srcs = ["tests",
join_path("examples", "foo.c"),
join_path("examples", "bar.c")]
cache_extra_test_sources(self, srcs)
self.cache_extra_test_sources(srcs)
def test_foo(self):
exe = "foo"
src_dir = self.test_suite.current_test_cache_dir.examples
src_dir = join_path(
self.test_suite.current_test_cache_dir, "examples"
)
with working_dir(src_dir):
cc = which(os.environ["CC"])
cc(
f"-L{self.prefix.lib}",
f"-I{self.prefix.include}",
f"{exe}.c",
"-o", exe
"-L{0}".format(self.prefix.lib),
"-I{0}".format(self.prefix.include),
"{0}.c".format(exe),
"-o",
exe
)
foo = which(exe)
foo()
@@ -5372,9 +5326,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
In our example above, test methods can use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:
* ``self.test_suite.current_test_cache_dir.tests``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
.. admonition:: Library packages should build stand-alone tests
@@ -5393,7 +5347,7 @@ the copy of each entry listed in ``srcs``, respectively:
If one or more of the copied files needs to be modified to reference
the installed software, it is recommended that those changes be made
to the cached files **once** in the ``copy_test_sources`` method and
**after** the call to ``cache_extra_test_sources()``. This will
**after** the call to ``self.cache_extra_test_sources()``. This will
reduce the amount of unnecessary work in the test method **and** avoid
problems testing in shared instances and facility deployments.
@@ -5440,7 +5394,7 @@ property as shown below.
"""build and run custom-example"""
data_dir = self.test_suite.current_test_data_dir
exe = "custom-example"
src = datadir.join(f"{exe}.cpp")
src = datadir.join("{0}.cpp".format(exe))
...
# TODO: Build custom-example using src and exe
...
@@ -5456,7 +5410,7 @@ Reading expected output from a file
The helper function ``get_escaped_text_output`` is available for packages
to retrieve and properly format the text from a file that contains the
expected output from running an executable that may contain special
expected output from running an executable that may contain special
characters.
The signature for ``get_escaped_text_output`` is:
@@ -5490,7 +5444,7 @@ added to the package's ``test`` subdirectory.
db_filename, ".dump", output=str.split, error=str.split
)
for exp in expected:
assert re.search(exp, out), f"Expected '{exp}' in output"
assert re.search(exp, out), "Expected '{0}' in output".format(exp)
If the file was instead copied from the ``tests`` subdirectory of the staged
source code, the path would be obtained as shown below.
@@ -5503,7 +5457,7 @@ source code, the path would be obtained as shown below.
db_filename = test_cache_dir.join("packages.db")
Alternatively, if the file was copied to the ``share/tests`` subdirectory
as part of the installation process, the test could access the path as
as part of the installation process, the test could access the path as
follows:
.. code-block:: python
@@ -5540,12 +5494,9 @@ Invoking the method is the equivalent of:
.. code-block:: python
errors = []
for check in expected:
if not re.search(check, actual):
errors.append(f"Expected '{check}' in output '{actual}'")
if errors:
raise RuntimeError("\n ".join(errors))
raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
.. _accessing-files:
@@ -5585,7 +5536,7 @@ repository, and installation.
- ``self.test_suite.test_dir_for_spec(self.spec)``
* - Current Spec's Build-time Files
- ``self.test_suite.current_test_cache_dir``
- ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
- ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
* - Current Spec's Custom Test Files
- ``self.test_suite.current_test_data_dir``
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
@@ -5600,7 +5551,7 @@ Inheriting stand-alone tests
Stand-alone tests defined in parent (e.g., :ref:`build-systems`) and
virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
packages that inherit from or provide interface implementations for those
packages, respectively.
packages, respectively.
The table below summarizes the stand-alone tests that will be executed along
with those implemented in the package itself.
@@ -5670,7 +5621,7 @@ for ``openmpi``:
SKIPPED: test_version_oshcc: oshcc is not installed
...
==> [2023-03-10-16:04:02.215227] Completed testing
==> [2023-03-10-16:04:02.215597]
==> [2023-03-10-16:04:02.215597]
======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
Openmpi::test_bin_mpirun .. PASSED
Openmpi::test_bin_ompi_info .. PASSED
@@ -6120,7 +6071,7 @@ in the extra attributes can implement this method like this:
@classmethod
def validate_detected_spec(cls, spec, extra_attributes):
"""Check that "compilers" is in the extra attributes."""
msg = ("the extra attribute 'compilers' must be set for "
msg = ("the extra attribute "compilers" must be set for "
"the detected spec '{0}'".format(spec))
assert "compilers" in extra_attributes, msg

View File

@@ -1,4 +1,4 @@
sphinx==7.2.6
sphinx==7.2.2
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
@@ -6,8 +6,8 @@ python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.4
pytest==7.4.2
pytest==7.4.0
isort==5.12.0
black==23.9.1
black==23.7.0
flake8==6.1.0
mypy==1.5.1

View File

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)
* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)
astunparse
----------------

View File

@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.1"
__version__ = "0.2.0"

View File

@@ -79,18 +79,14 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.features = features
self.compilers = compilers
self.generation = generation
# Cache the ancestor computation
self._ancestors = None
@property
def ancestors(self):
"""All the ancestors of this microarchitecture."""
if self._ancestors is None:
value = self.parents[:]
for parent in self.parents:
value.extend(a for a in parent.ancestors if a not in value)
self._ancestors = value
return self._ancestors
value = self.parents[:]
for parent in self.parents:
value.extend(a for a in parent.ancestors if a not in value)
return value
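# For example, in a chain like x86_64 <- nocona <- core2 (a hypothetical
# excerpt of the DAG), core2.ancestors would be [nocona, x86_64].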
def _to_set(self):
"""Returns a set of the nodes in this microarchitecture DAG."""

View File

@@ -145,13 +145,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
}
],
"intel": [
{
"versions": "16.0:",
"name": "corei7",
"flags": "-march={name} -mtune=generic -mpopcnt"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -224,13 +217,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
}
],
"intel": [
{
"versions": "16.0:",
"name": "core-avx2",
"flags": "-march={name} -mtune={name} -fma -mf16c"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -314,13 +300,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
}
],
"intel": [
{
"versions": "16.0:",
"name": "skylake-avx512",
"flags": "-march={name} -mtune={name}"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -1433,92 +1412,6 @@
]
}
},
"sapphirerapids": {
"from": [
"icelake"
],
"vendor": "GenuineIntel",
"features": [
"mmx",
"sse",
"sse2",
"ssse3",
"sse4_1",
"sse4_2",
"popcnt",
"aes",
"pclmulqdq",
"avx",
"rdrand",
"f16c",
"movbe",
"fma",
"avx2",
"bmi1",
"bmi2",
"rdseed",
"adx",
"clflushopt",
"xsavec",
"xsaveopt",
"avx512f",
"avx512vl",
"avx512bw",
"avx512dq",
"avx512cd",
"avx512vbmi",
"avx512ifma",
"sha_ni",
"clwb",
"rdpid",
"gfni",
"avx512_vbmi2",
"avx512_vpopcntdq",
"avx512_bitalg",
"avx512_vnni",
"vpclmulqdq",
"vaes",
"avx512_bf16",
"cldemote",
"movdir64b",
"movdiri",
"pdcm",
"serialize",
"waitpkg"
],
"compilers": {
"gcc": [
{
"versions": "11.0:",
"flags": "-march={name} -mtune={name}"
}
],
"clang": [
{
"versions": "12.0:",
"flags": "-march={name} -mtune={name}"
}
],
"intel": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
],
"oneapi": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
],
"dpcpp": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
]
}
},
"k10": {
"from": ["x86_64"],
"vendor": "AuthenticAMD",
@@ -2172,6 +2065,8 @@
"pku",
"gfni",
"flush_l1d",
"erms",
"avic",
"avx512f",
"avx512dq",
"avx512ifma",
@@ -2188,12 +2083,12 @@
"compilers": {
"gcc": [
{
"versions": "10.3:12.2",
"versions": "10.3:13.0",
"name": "znver3",
"flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
},
{
"versions": "12.3:",
"versions": "13.1:",
"name": "znver4",
"flags": "-march={name} -mtune={name}"
}

View File

@@ -1,459 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""URL primitives that just require Python standard library."""
import itertools
import os.path
import re
from typing import Optional, Set, Tuple
from urllib.parse import urlsplit, urlunsplit
# Archive extensions allowed in Spack
PREFIX_EXTENSIONS = ("tar", "TAR")
EXTENSIONS = ("gz", "bz2", "xz", "Z")
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
ALLOWED_ARCHIVE_TYPES = (
tuple(".".join(ext) for ext in itertools.product(PREFIX_EXTENSIONS, EXTENSIONS))
+ PREFIX_EXTENSIONS
+ EXTENSIONS
+ NO_TAR_EXTENSIONS
)
CONTRACTION_MAP = {"tgz": "tar.gz", "txz": "tar.xz", "tbz": "tar.bz2", "tbz2": "tar.bz2"}
def find_list_urls(url: str) -> Set[str]:
r"""Find good list URLs for the supplied URL.
By default, returns the dirname of the archive path.
Provides special treatment for the following websites, which have a
unique list URL different from the dirname of the download URL:
========= =======================================================
GitHub https://github.com/<repo>/<name>/releases
GitLab https://gitlab.\*/<repo>/<name>/tags
BitBucket https://bitbucket.org/<repo>/<name>/downloads/?tab=tags
CRAN https://\*.r-project.org/src/contrib/Archive/<name>
PyPI https://pypi.org/simple/<name>/
LuaRocks https://luarocks.org/modules/<repo>/<name>
========= =======================================================
Note: this function is called by `spack versions`, `spack checksum`,
and `spack create`, but not by `spack fetch` or `spack install`.
Parameters:
url (str): The download URL for the package
Returns:
set: One or more list URLs for the package
"""
url_types = [
# GitHub
# e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
(r"(.*github\.com/[^/]+/[^/]+)", lambda m: m.group(1) + "/releases"),
# GitLab API endpoint
# e.g. https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2
(
r"(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)",
lambda m: m.group(1) + "/" + m.group(2) + "/" + m.group(3) + "/tags",
),
# GitLab non-API endpoint
# e.g. https://gitlab.dkrz.de/k202009/libaec/uploads/631e85bcf877c2dcaca9b2e6d6526339/libaec-1.0.0.tar.gz
(r"(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)", lambda m: m.group(1) + "/tags"),
# BitBucket
# e.g. https://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2
(r"(.*bitbucket.org/[^/]+/[^/]+)", lambda m: m.group(1) + "/downloads/?tab=tags"),
# CRAN
# e.g. https://cran.r-project.org/src/contrib/Rcpp_0.12.9.tar.gz
# e.g. https://cloud.r-project.org/src/contrib/rgl_0.98.1.tar.gz
(
r"(.*\.r-project\.org/src/contrib)/([^_]+)",
lambda m: m.group(1) + "/Archive/" + m.group(2),
),
# PyPI
# e.g. https://pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://www.pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://pypi.org/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://pypi.python.org/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://files.pythonhosted.org/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://pypi.io/packages/py2.py3/o/opencensus-context/opencensus_context-0.1.1-py2.py3-none-any.whl
(
r"(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)",
lambda m: "https://pypi.org/simple/" + m.group(1) + "/",
),
# LuaRocks
# e.g. https://luarocks.org/manifests/gvvaughan/lpeg-1.0.2-1.src.rock
# e.g. https://luarocks.org/manifests/openresty/lua-cjson-2.1.0-1.src.rock
(
r"luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/"
+ r"(?P<name>.+?)-[0-9.-]*\.src\.rock",
lambda m: "https://luarocks.org/modules/"
+ m.group("org")
+ "/"
+ m.group("name")
+ "/",
),
]
list_urls = {os.path.dirname(url)}
for pattern, fun in url_types:
match = re.search(pattern, url)
if match:
list_urls.add(fun(match))
return list_urls
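# Illustrative call, with values derived from the patterns above:
#   find_list_urls("https://github.com/llnl/callpath/archive/v1.0.1.tar.gz")
#   == {"https://github.com/llnl/callpath/archive",
#       "https://github.com/llnl/callpath/releases"}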
def strip_query_and_fragment(url: str) -> Tuple[str, str]:
"""Strips query and fragment from a url, then returns the base url and the suffix.
Args:
url: URL to be stripped
Raises:
ValueError: when there is any error parsing the URL
"""
components = urlsplit(url)
stripped = components[:3] + (None, None)
query, frag = components[3:5]
suffix = ""
if query:
suffix += "?" + query
if frag:
suffix += "#" + frag
return urlunsplit(stripped), suffix
SOURCEFORGE_RE = re.compile(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$")
def split_url_on_sourceforge_suffix(url: str) -> Tuple[str, ...]:
"""If the input is a sourceforge URL, returns base URL and "/download" suffix. Otherwise,
returns the input URL and an empty string.
"""
match = SOURCEFORGE_RE.search(url)
if match is not None:
return match.groups()
return url, ""
def has_extension(path_or_url: str, ext: str) -> bool:
"""Returns true if the extension in input is present in path, false otherwise."""
prefix, _ = split_url_on_sourceforge_suffix(path_or_url)
if not ext.startswith(r"\."):
ext = rf"\.{ext}$"
if re.search(ext, prefix):
return True
return False
def extension_from_path(path_or_url: Optional[str]) -> Optional[str]:
"""Tries to match an allowed archive extension to the input. Returns the first match,
or None if no match was found.
Raises:
ValueError: if the input is None
"""
if path_or_url is None:
raise ValueError("Can't call extension() on None")
for t in ALLOWED_ARCHIVE_TYPES:
if has_extension(path_or_url, t):
return t
return None
def remove_extension(path_or_url: str, *, extension: str) -> str:
"""Returns the input with the extension removed"""
suffix = rf"\.{extension}$"
return re.sub(suffix, "", path_or_url)
def check_and_remove_ext(path: str, *, extension: str) -> str:
"""Returns the input path with the extension removed, if the extension is present in path.
Otherwise, returns the input unchanged.
"""
if not has_extension(path, extension):
return path
path, _ = split_url_on_sourceforge_suffix(path)
return remove_extension(path, extension=extension)
def strip_extension(path_or_url: str, *, extension: Optional[str] = None) -> str:
"""If a path contains the extension in input, returns the path stripped of the extension.
Otherwise, returns the input path.
If extension is None, attempts to strip any allowed extension from path.
"""
if extension is None:
for t in ALLOWED_ARCHIVE_TYPES:
if has_extension(path_or_url, ext=t):
extension = t
break
else:
return path_or_url
return check_and_remove_ext(path_or_url, extension=extension)
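# For example: strip_extension("foo-1.0.tar.gz") returns "foo-1.0", while
# strip_extension("foo-1.0.tar.gz", extension="gz") returns "foo-1.0.tar".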
def split_url_extension(url: str) -> Tuple[str, ...]:
"""Some URLs have a query string, e.g.:
1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
In (1), the query string needs to be stripped to get at the
extension, but in (2) & (3), the filename is IN a single final query
argument.
This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
The suffix contains anything that was stripped off the URL to
get at the file extension. In (1), it will be ``'?raw=true'``, but
in (2), it will be empty. In (3) the suffix is a parameter that follows
after the file extension, e.g.:
1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
"""
# Strip off sourceforge download suffix.
# e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
prefix, suffix = split_url_on_sourceforge_suffix(url)
ext = extension_from_path(prefix)
if ext is not None:
prefix = strip_extension(prefix)
return prefix, ext, suffix
try:
prefix, suf = strip_query_and_fragment(prefix)
except ValueError:
# FIXME: tty.debug("Got error parsing path %s" % path)
# Ignore URL parse errors here
return url, ""
ext = extension_from_path(prefix)
prefix = strip_extension(prefix)
suffix = suf + suffix
if ext is None:
ext = ""
return prefix, ext, suffix
def strip_version_suffixes(path_or_url: str) -> str:
"""Some tarballs contain extraneous information after the version:
* ``bowtie2-2.2.5-source``
* ``libevent-2.0.21-stable``
* ``cuda_8.0.44_linux.run``
These strings are not part of the version number and should be ignored.
This function strips those suffixes off and returns the remaining string.
The goal is that the version is always the last thing in ``path``:
* ``bowtie2-2.2.5``
* ``libevent-2.0.21``
* ``cuda_8.0.44``
Args:
path_or_url: The filename or URL for the package
Returns:
The ``path`` with any extraneous suffixes removed
"""
# NOTE: This could be done with complicated regexes in parse_version_offset
# NOTE: The problem is that we would have to add these regexes to the end
# NOTE: of every single version regex. Easier to just strip them off
# NOTE: permanently
suffix_regexes = [
# Download type
r"[Ii]nstall",
r"all",
r"code",
r"[Ss]ources?",
r"file",
r"full",
r"single",
r"with[a-zA-Z_-]+",
r"rock",
r"src(_0)?",
r"public",
r"bin",
r"binary",
r"run",
r"[Uu]niversal",
r"jar",
r"complete",
r"dynamic",
r"oss",
r"gem",
r"tar",
r"sh",
# Download version
r"release",
r"bin",
r"stable",
r"[Ff]inal",
r"rel",
r"orig",
r"dist",
r"\+",
# License
r"gpl",
# Arch
# Needs to come before and after OS, appears in both orders
r"ia32",
r"intel",
r"amd64",
r"linux64",
r"x64",
r"64bit",
r"x86[_-]64",
r"i586_64",
r"x86",
r"i[36]86",
r"ppc64(le)?",
r"armv?(7l|6l|64)",
# Other
r"cpp",
r"gtk",
r"incubating",
# OS
r"[Ll]inux(_64)?",
r"LINUX",
r"[Uu]ni?x",
r"[Ss]un[Oo][Ss]",
r"[Mm]ac[Oo][Ss][Xx]?",
r"[Oo][Ss][Xx]",
r"[Dd]arwin(64)?",
r"[Aa]pple",
r"[Ww]indows",
r"[Ww]in(64|32)?",
r"[Cc]ygwin(64|32)?",
r"[Mm]ingw",
r"centos",
# Arch
# Needs to come before and after OS, appears in both orders
r"ia32",
r"intel",
r"amd64",
r"linux64",
r"x64",
r"64bit",
r"x86[_-]64",
r"i586_64",
r"x86",
r"i[36]86",
r"ppc64(le)?",
r"armv?(7l|6l|64)?",
# PyPI
r"[._-]py[23].*\.whl",
r"[._-]cp[23].*\.whl",
r"[._-]win.*\.exe",
]
for regex in suffix_regexes:
# Remove the suffix from the end of the path
# This may be done multiple times
path_or_url = re.sub(r"[._-]?" + regex + "$", "", path_or_url)
return path_or_url
def expand_contracted_extension(extension: str) -> str:
"""Returns the expanded version of a known contracted extension.
This function maps extensions like ".tgz" to ".tar.gz". On unknown extensions,
return the input unmodified.
"""
extension = extension.strip(".")
return CONTRACTION_MAP.get(extension, extension)
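# For example, expand_contracted_extension(".tgz") returns "tar.gz", while an
# unknown extension such as "zip" is returned unchanged (minus leading dots).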
def expand_contracted_extension_in_path(
path_or_url: str, *, extension: Optional[str] = None
) -> str:
"""Returns the input path or URL with any contraction extension expanded.
Args:
path_or_url: path or URL to be expanded
extension: if specified, only attempt to expand that extension
"""
extension = extension or extension_from_path(path_or_url)
if extension is None:
return path_or_url
expanded = expand_contracted_extension(extension)
if expanded != extension:
return re.sub(rf"{extension}", rf"{expanded}", path_or_url)
return path_or_url
def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
"""Returns compression extension for a compressed archive"""
extension = expand_contracted_extension(extension)
for ext in [*EXTENSIONS]:
if ext in extension:
return ext
return None
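# For example, compression_ext_from_compressed_archive("tgz") returns "gz":
# "tgz" expands to "tar.gz", and "gz" is a known compression extension.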
def strip_compression_extension(path_or_url: str, ext: Optional[str] = None) -> str:
"""Strips the compression extension from the input, and returns it. For instance,
"foo.tgz" becomes "foo.tar".
If no extension is given, try a default list of extensions.
Args:
path_or_url: input to be stripped
ext: if given, extension to be stripped
"""
if not extension_from_path(path_or_url):
return path_or_url
expanded_path = expand_contracted_extension_in_path(path_or_url)
candidates = [ext] if ext is not None else EXTENSIONS
for current_extension in candidates:
modified_path = check_and_remove_ext(expanded_path, extension=current_extension)
if modified_path != expanded_path:
return modified_path
return expanded_path
def allowed_archive(path_or_url: str) -> bool:
"""Returns true if the input is a valid archive, False otherwise."""
return (
False if not path_or_url else any(path_or_url.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
)
def determine_url_file_extension(path: str) -> str:
"""This returns the type of archive a URL refers to. This is
sometimes confusing because of URLs like:
(1) https://github.com/petdance/ack/tarball/1.93_02
Where the URL doesn't actually contain the filename. We need
to know what type it is so that we can appropriately name files
in mirrors.
"""
match = re.search(r"github.com/.+/(zip|tar)ball/", path)
if match:
if match.group(1) == "zip":
return "zip"
elif match.group(1) == "tar":
return "tar.gz"
prefix, ext, suffix = split_url_extension(path)
return ext

View File

@@ -11,7 +11,6 @@
import itertools
import numbers
import os
import pathlib
import posixpath
import re
import shutil
@@ -19,13 +18,11 @@
import sys
import tempfile
from contextlib import contextmanager
from itertools import accumulate
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
import llnl.util.symlink
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
from llnl.util.symlink import islink, symlink
from spack.util.executable import Executable, which
from spack.util.path import path_to_os_path, system_path_filter
@@ -104,7 +101,7 @@ def _nop(args, ns=None, follow_symlinks=None):
pass
# follow symlinks (aka don't not follow symlinks)
follow = follow_symlinks or not (islink(src) and islink(dst))
follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
if follow:
# use the real function if it exists
def lookup(name):
@@ -172,7 +169,7 @@ def rename(src, dst):
if sys.platform == "win32":
# Windows path existence checks will sometimes fail on junctions/links/symlinks
# so check for that case
if os.path.exists(dst) or islink(dst):
if os.path.exists(dst) or os.path.islink(dst):
os.remove(dst)
os.rename(src, dst)
@@ -569,7 +566,7 @@ def set_install_permissions(path):
# If this points to a file maintained in a Spack prefix, it is assumed that
# this function will be invoked on the target. If the file is outside a
# Spack-maintained prefix, the permissions should not be modified.
if islink(path):
if os.path.islink(path):
return
if os.path.isdir(path):
os.chmod(path, 0o755)
@@ -638,7 +635,7 @@ def chmod_x(entry, perms):
@system_path_filter
def copy_mode(src, dest):
"""Set the mode of dest to that of src unless it is a link."""
if islink(dest):
if os.path.islink(dest):
return
src_mode = os.stat(src).st_mode
dest_mode = os.stat(dest).st_mode
@@ -724,12 +721,26 @@ def install(src, dest):
copy(src, dest, _permissions=True)
@system_path_filter
def resolve_link_target_relative_to_the_link(link):
"""
os.path.isdir uses os.path.exists, which for links will check
the existence of the link target. If the link target is relative to
the link, we need to construct a pathname that is valid from
our cwd (which may not be the same as the link's directory)
"""
target = os.readlink(link)
if os.path.isabs(target):
return target
link_dir = os.path.dirname(os.path.abspath(link))
return os.path.join(link_dir, target)
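# For example (hypothetical paths): a link /a/b/link pointing at "../c"
# resolves to "/a/b/../c", which is valid regardless of the caller's cwd.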
@system_path_filter
def copy_tree(
src: str,
dest: str,
symlinks: bool = True,
allow_broken_symlinks: bool = sys.platform != "win32",
ignore: Optional[Callable[[str], bool]] = None,
_permissions: bool = False,
):
@@ -752,8 +763,6 @@ def copy_tree(
src (str): the directory to copy
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
On Windows, setting this to True will raise an exception. Defaults to true on unix.
ignore (typing.Callable): function indicating which files to ignore
_permissions (bool): for internal use only
@@ -761,8 +770,6 @@ def copy_tree(
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
if allow_broken_symlinks and sys.platform == "win32":
raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
if _permissions:
tty.debug("Installing {0} to {1}".format(src, dest))
else:
@@ -776,11 +783,6 @@ def copy_tree(
if not files:
raise IOError("No such file or directory: '{0}'".format(src))
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
# all symlinks to this list while traversing the tree, then when finished, make all
# symlinks at the end.
links = []
for src in files:
abs_src = os.path.abspath(src)
if not abs_src.endswith(os.path.sep):
@@ -803,7 +805,7 @@ def copy_tree(
ignore=ignore,
follow_nonexisting=True,
):
if islink(s):
if os.path.islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
target = os.readlink(s)
@@ -817,9 +819,7 @@ def escaped_path(path):
tty.debug("Redirecting link {0} to {1}".format(target, new_target))
target = new_target
links.append((target, d, s))
continue
symlink(target, d)
elif os.path.isdir(link_target):
mkdirp(d)
else:
@@ -834,17 +834,9 @@ def escaped_path(path):
set_install_permissions(d)
copy_mode(s, d)
for target, d, s in links:
symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
if _permissions:
set_install_permissions(d)
copy_mode(s, d)
@system_path_filter
def install_tree(
src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
):
def install_tree(src, dest, symlinks=True, ignore=None):
"""Recursively install an entire directory tree rooted at *src*.
Same as :py:func:`copy_tree` with the addition of setting proper
@@ -855,21 +847,12 @@ def install_tree(
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (typing.Callable): function indicating which files to ignore
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
On Windows, setting this to True will raise an exception.
Raises:
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
copy_tree(
src,
dest,
symlinks=symlinks,
allow_broken_symlinks=allow_broken_symlinks,
ignore=ignore,
_permissions=True,
)
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
@system_path_filter
@@ -1273,12 +1256,7 @@ def traverse_tree(
Keyword Arguments:
order (str): Whether to do pre- or post-order traversal. Accepted
values are 'pre' and 'post'
ignore (typing.Callable): function indicating which files to ignore. This will also
ignore symlinks if they point to an ignored file (regardless of whether the symlink
is explicitly ignored); note this only supports one layer of indirection (i.e. if
you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
but not x). To avoid this, make sure the ignore function also ignores the symlink
paths too.
ignore (typing.Callable): function indicating which files to ignore
follow_nonexisting (bool): Whether to descend into directories in
``src`` that do not exit in ``dest``. Default is True
follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1305,24 +1283,11 @@ def traverse_tree(
dest_child = os.path.join(dest_path, f)
rel_child = os.path.join(rel_path, f)
# If the source path is a link and the link's source is ignored, then ignore the link too,
# but only do this if the ignore is defined.
if ignore is not None:
if islink(source_child) and not follow_links:
target = readlink(source_child)
all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
if any(map(ignore, all_parents)):
tty.warn(
f"Skipping {source_path} because the source or a part of the source's "
f"path is included in the ignores."
)
continue
# Treat as a directory
# TODO: for symlinks, os.path.isdir looks for the link target. If the
# target is relative to the link, then that may not resolve properly
# relative to our cwd - see resolve_link_target_relative_to_the_link
if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
@@ -1348,11 +1313,7 @@ def traverse_tree(
def lexists_islink_isdir(path):
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
if sys.platform == "win32":
if not os.path.lexists(path):
return False, False, False
return os.path.lexists(path), islink(path), os.path.isdir(path)
number of stat calls."""
# First try to lstat, so we know if it's a link or not.
try:
lst = os.lstat(path)
@@ -1567,7 +1528,7 @@ def remove_if_dead_link(path):
Parameters:
path (str): The potential dead link
"""
if islink(path) and not os.path.exists(path):
if os.path.islink(path) and not os.path.exists(path):
os.unlink(path)
@@ -1626,7 +1587,7 @@ def remove_linked_tree(path):
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
if os.path.exists(path):
if islink(path):
if os.path.islink(path):
shutil.rmtree(os.path.realpath(path), **kwargs)
os.unlink(path)
else:
@@ -2427,7 +2388,7 @@ def library_dependents(self):
"""
Set of directories where package binaries/libraries are located.
"""
return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
return set([self.pkg.prefix.bin]) | self._additional_library_dependents
def add_library_dependent(self, *dest):
"""
@@ -2440,9 +2401,9 @@ def add_library_dependent(self, *dest):
"""
for pth in dest:
if os.path.isfile(pth):
self._additional_library_dependents.add(pathlib.Path(pth).parent)
self._additional_library_dependents.add(os.path.dirname(pth))
else:
self._additional_library_dependents.add(pathlib.Path(pth))
self._additional_library_dependents.add(pth)
@property
def rpaths(self):
@@ -2455,7 +2416,7 @@ def rpaths(self):
dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
for extra_path in self._addl_rpaths:
dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
return set([pathlib.Path(x) for x in dependent_libs])
return set(dependent_libs)
def add_rpath(self, *paths):
"""
@@ -2471,7 +2432,7 @@ def add_rpath(self, *paths):
"""
self._addl_rpaths = self._addl_rpaths | set(paths)
def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
def _link(self, path, dest_dir):
"""Perform link step of simulated rpathing, installing
symlinks of the file in path to the dest_dir
location. This method deliberately prevents
@@ -2479,35 +2440,27 @@ def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
This is because it is both meaningless from an rpath
perspective, and will cause an error when Developer
mode is not enabled"""
def report_already_linked():
# We have either already symlinked or we are encountering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(str(dest_file))
tty.debug(
"Linking library %s to %s failed, " % (str(path), str(dest_file))
+ "already linked."
if already_linked
else "library with name %s already exists at location %s."
% (str(file_name), str(dest_dir))
)
file_name = path.name
dest_file = dest_dir / file_name
if not dest_file.exists() and dest_dir.exists() and not dest_file == path:
file_name = os.path.basename(path)
dest_file = os.path.join(dest_dir, file_name)
if os.path.exists(dest_dir) and not dest_file == path:
try:
symlink(str(path), str(dest_file))
symlink(path, dest_file)
# For py2 compatibility, we have to catch the specific Windows error code
# associated with trying to create a file that already exists (winerror 183)
# Catch OSErrors missed by the SymlinkError checks
except OSError as e:
if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
report_already_linked()
# We have either already symlinked or we are encountering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(dest_file)
tty.debug(
"Linking library %s to %s failed, " % (path, dest_file) + "already linked."
if already_linked
else "library with name %s already exists at location %s."
% (file_name, dest_dir)
)
pass
else:
raise e
# catch errors we raise ourselves from Spack
except llnl.util.symlink.AlreadyExistsError:
report_already_linked()
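The `_link` step above refuses to overwrite anything already present at the destination. A stripped-down sketch of that idempotent "link unless present" pattern, assuming plain `os.symlink` rather than Spack's symlink wrapper:

```python
import os

def link_library(path: str, dest_dir: str) -> None:
    dest_file = os.path.join(dest_dir, os.path.basename(path))
    if not os.path.isdir(dest_dir) or os.path.exists(dest_file) or dest_file == path:
        return  # nothing to do: missing dir, naming clash, or self-link
    try:
        os.symlink(path, dest_file)
    except FileExistsError:
        # Raced with another writer; existing libraries are never overwritten.
        pass
```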
def establish_link(self):
"""
@@ -2740,7 +2693,7 @@ def remove_directory_contents(dir):
"""Remove all contents of a directory."""
if os.path.exists(dir):
for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
if os.path.isfile(entry) or islink(entry):
if os.path.isfile(entry) or os.path.islink(entry):
os.unlink(entry)
else:
shutil.rmtree(entry)

View File

@@ -2,190 +2,77 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import os
import re
import shutil
import subprocess
import sys
import tempfile
from os.path import exists, join
from llnl.util import lang, tty
from spack.error import SpackError
from spack.util.path import system_path_filter
from llnl.util import lang
if sys.platform == "win32":
from win32file import CreateHardLink
is_windows = sys.platform == "win32"
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
def symlink(real_path, link_path):
"""
Create a link.
Create a symbolic link.
On non-Windows and Windows with System Administrator
privileges this will be a normal symbolic link via
os.symlink.
On Windows without privileges the link will be a
junction for a directory and a hardlink for a file.
On Windows the various link types are:
Symbolic Link: A link to a file or directory on the
same or different volume (drive letter) or even to
a remote file or directory (using UNC in its path).
Need System Administrator privileges to make these.
Hard Link: A link to a file on the same volume (drive
letter) only. Every file (file's data) has at least 1
hard link (file's name). But when this method creates
a new hard link there will be 2. Deleting all hard
links effectively deletes the file. Don't need System
Administrator privileges.
Junction: A link to a directory on the same or different
volume (drive letter) but not to a remote directory. Don't
need System Administrator privileges.
Parameters:
source_path (str): The real file or directory that the link points to.
Must be absolute OR relative to the link.
link_path (str): The path where the link will exist.
allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
doesn't exist. This will still raise an exception on Windows.
On Windows, use junctions if os.symlink fails.
"""
source_path = os.path.normpath(source_path)
win_source_path = source_path
link_path = os.path.normpath(link_path)
# Never allow broken links on Windows.
if sys.platform == "win32" and allow_broken_symlinks:
raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
if not allow_broken_symlinks:
# Perform basic checks to make sure symlinking will succeed
if os.path.lexists(link_path):
raise AlreadyExistsError(
f"Link path ({link_path}) already exists. Cannot create link."
)
if not os.path.exists(source_path):
if os.path.isabs(source_path) and not allow_broken_symlinks:
# An absolute source path that does not exist will result in a broken link.
raise SymlinkError(
f"Source path ({source_path}) is absolute but does not exist. Resulting "
f"link would be broken so not making link."
)
else:
# os.symlink can create a link when the given source path is relative to
# the link path. Emulate this behavior and check to see if the source exists
# relative to the link path ahead of link creation to prevent broken
# links from being made.
link_parent_dir = os.path.dirname(link_path)
relative_path = os.path.join(link_parent_dir, source_path)
if os.path.exists(relative_path):
# In order to work on windows, the source path needs to be modified to be
# relative because hardlink/junction don't resolve relative paths the same
# way as os.symlink. This is ignored on other operating systems.
win_source_path = relative_path
elif not allow_broken_symlinks:
raise SymlinkError(
f"The source path ({source_path}) is not relative to the link path "
f"({link_path}). Resulting link would be broken so not making link."
)
# Create the symlink
if sys.platform == "win32" and not _windows_can_symlink():
_windows_create_link(win_source_path, link_path)
if sys.platform != "win32":
os.symlink(real_path, link_path)
elif _win32_can_symlink():
# Windows requires target_is_directory=True when the target is a dir.
os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
else:
os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
try:
# Try to use junctions
_win32_junction(real_path, link_path)
except OSError as e:
if e.errno == errno.EEXIST:
# EEXIST indicates that the file we're trying to "link" is
# already present; copying would fail for the same reason, so
# just raise
raise
else:
# If all else fails, fall back to copying files
shutil.copyfile(real_path, link_path)
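The older `symlink` above falls back from a real symlink to a junction and finally to a plain copy. The same fallback chain in isolation, with the junction step stubbed out so the sketch imports anywhere; `_junction` is a placeholder, not a real implementation:

```python
import os
import shutil
import sys

def make_link(real_path, link_path):
    if sys.platform != "win32":
        os.symlink(real_path, link_path)
        return
    try:
        os.symlink(real_path, link_path,
                   target_is_directory=os.path.isdir(real_path))
    except OSError:
        try:
            _junction(real_path, link_path)   # stand-in for mklink /J
        except OSError:
            shutil.copyfile(real_path, link_path)  # last resort: copy

def _junction(path, link):
    raise OSError("junctions need a real Windows implementation")
```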
def islink(path: str) -> bool:
"""Override os.islink to give correct answer for spack logic.
For non-Windows: a link can be determined with the os.path.islink method.
Windows-only methods will return false for other operating systems.
For Windows: spack considers symlinks, hard links, and junctions to
all be links, so if any of those are True, return True.
Args:
path (str): path to check if it is a link.
Returns:
bool - whether the path is any kind of link or not.
"""
return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])
def islink(path):
return os.path.islink(path) or _win32_is_junction(path)
def _windows_is_hardlink(path: str) -> bool:
"""Determines if a path is a windows hard link. This is accomplished
by looking at the number of links using os.stat. A non-hard-linked file
will have a st_nlink value of 1, whereas a hard link will have a value
larger than 1. Note that both the original and hard-linked file will
return True because they share the same inode.
# '_win32' functions based on
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
def _win32_junction(path, link):
# junctions require absolute paths
if not os.path.isabs(link):
link = os.path.abspath(link)
Args:
path (str): Windows path to check for a hard link
# os.symlink will fail if link exists, emulate the behavior here
if exists(link):
raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
Returns:
bool - Whether the path is a hard link or not.
"""
if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
return False
if not os.path.isabs(path):
parent = os.path.join(link, os.pardir)
path = os.path.join(parent, path)
path = os.path.abspath(path)
return os.stat(path).st_nlink > 1
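The `st_nlink` heuristic is easy to demonstrate: a freshly created file has one name, and adding a hard link raises the count to two for both names. A quick demo using only the standard library:

```python
import os
import tempfile

d = tempfile.mkdtemp()
a = os.path.join(d, "a.txt")
b = os.path.join(d, "b.txt")
open(a, "w").close()
print(os.stat(a).st_nlink)  # 1: only one name for this inode
os.link(a, b)               # hard link: same inode, second name
print(os.stat(a).st_nlink)  # 2: both a.txt and b.txt now report this
```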
def _windows_is_junction(path: str) -> bool:
"""Determines if a path is a windows junction. A junction can be
determined using a bitwise AND operation between the file's
attribute bitmask and the known junction bitmask (0x400).
Args:
path (str): A non-file path
Returns:
bool - whether the path is a junction or not.
"""
if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
return False
import ctypes.wintypes
get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW # type: ignore[attr-defined]
get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
get_file_attributes.restype = ctypes.wintypes.DWORD
invalid_file_attributes = 0xFFFFFFFF
reparse_point = 0x400
file_attr = get_file_attributes(str(path))
if file_attr == invalid_file_attributes:
return False
return file_attr & reparse_point > 0
CreateHardLink(link, path)
@lang.memoized
def _windows_can_symlink() -> bool:
"""
Determines if windows is able to make a symlink depending on
the system configuration and the level of the user's permissions.
"""
if sys.platform != "win32":
tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
return False
def _win32_can_symlink():
tempdir = tempfile.mkdtemp()
dpath = os.path.join(tempdir, "dpath")
fpath = os.path.join(tempdir, "fpath.txt")
dpath = join(tempdir, "dpath")
fpath = join(tempdir, "fpath.txt")
dlink = os.path.join(tempdir, "dlink")
flink = os.path.join(tempdir, "flink.txt")
dlink = join(tempdir, "dlink")
flink = join(tempdir, "flink.txt")
import llnl.util.filesystem as fs
@@ -209,140 +96,24 @@ def _windows_can_symlink() -> bool:
return can_symlink_directories and can_symlink_files
def _windows_create_link(source: str, link: str):
def _win32_is_junction(path):
"""
Attempts to create a Hard Link or Junction as an alternative
to a symbolic link. This is called when symbolic links cannot
be created.
Determines if a path is a win32 junction
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
elif os.path.isdir(source):
_windows_create_junction(source=source, link=link)
elif os.path.isfile(source):
_windows_create_hard_link(path=source, link=link)
else:
raise SymlinkError(
f"Cannot create link from {source}. It is neither a file nor a directory."
)
if os.path.islink(path):
return False
if sys.platform == "win32":
import ctypes.wintypes
def _windows_create_junction(source: str, link: str):
"""Duly verify that the path and link are eligible to create a junction,
then create the junction.
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
elif not os.path.exists(source):
raise SymlinkError("Source path does not exist, cannot create a junction.")
elif os.path.lexists(link):
raise AlreadyExistsError("Link path already exists, cannot create a junction.")
elif not os.path.isdir(source):
raise SymlinkError("Source path is not a directory, cannot create a junction.")
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
GetFileAttributes.restype = ctypes.wintypes.DWORD
import subprocess
INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
cmd = ["cmd", "/C", "mklink", "/J", link, source]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
tty.debug(out.decode())
if proc.returncode != 0:
err = err.decode()
tty.error(err)
raise SymlinkError("Make junction command returned a non-zero return code.", err)
res = GetFileAttributes(path)
return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
def _windows_create_hard_link(path: str, link: str):
"""Duly verify that the path and link are eligible to create a hard
link, then create the hard link.
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
elif not os.path.exists(path):
raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
elif os.path.lexists(link):
raise AlreadyExistsError(f"Link path ({link}) already exists. Cannot create hard link.")
elif not os.path.isfile(path):
raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
else:
tty.debug(f"Creating hard link {link} pointing to {path}")
CreateHardLink(link, path)
def readlink(path: str):
"""Spack utility to override of os.readlink method to work cross platform"""
if _windows_is_hardlink(path):
return _windows_read_hard_link(path)
elif _windows_is_junction(path):
return _windows_read_junction(path)
else:
return os.readlink(path)
def _windows_read_hard_link(link: str) -> str:
"""Find all of the files that point to the same inode as the link"""
if sys.platform != "win32":
raise SymlinkError("Can't read hard link on non-Windows OS.")
link = os.path.abspath(link)
fsutil_cmd = ["fsutil", "hardlink", "list", link]
proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
out, err = proc.communicate()
if proc.returncode != 0:
raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")
# fsutil response does not include the drive name, so append it back to each linked file.
drive, link_tail = os.path.splitdrive(os.path.abspath(link))
links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
links.remove(link)
if len(links) == 1:
return links.pop()
elif len(links) > 1:
# TODO: How best to handle the case where 3 or more paths point to a single inode?
raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
else:
raise SymlinkError("Cannot determine hard link source path.")
def _windows_read_junction(link: str):
"""Find the path that a junction points to."""
if sys.platform != "win32":
raise SymlinkError("Can't read junction on non-Windows OS.")
link = os.path.abspath(link)
link_basename = os.path.basename(link)
link_parent = os.path.dirname(link)
fsutil_cmd = ["dir", "/a:l", link_parent]
proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
out, err = proc.communicate()
if proc.returncode != 0:
raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
if matches:
return matches.group(1)
else:
raise SymlinkError("Could not find junction path.")
@system_path_filter
def resolve_link_target_relative_to_the_link(link):
"""
os.path.isdir uses os.path.exists, which for links will check
the existence of the link target. If the link target is relative to
the link, we need to construct a pathname that is valid from
our cwd (which may not be the same as the link's directory)
"""
target = readlink(link)
if os.path.isabs(target):
return target
link_dir = os.path.dirname(os.path.abspath(link))
return os.path.join(link_dir, target)
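The pitfall this helper guards against is easy to reproduce: a relative link target only resolves correctly from the link's own directory, not from the current working directory. A short POSIX demo with temporary, illustrative paths:

```python
import os
import tempfile

tmp = tempfile.mkdtemp()
os.makedirs(os.path.join(tmp, "subdir"))
open(os.path.join(tmp, "subdir", "real.txt"), "w").close()
link = os.path.join(tmp, "subdir", "alias.txt")
os.symlink("real.txt", link)           # target is relative to the link itself

os.chdir(tempfile.gettempdir())        # cwd no longer matches the link dir
target = os.readlink(link)             # -> "real.txt"
print(os.path.exists(target))          # False unless cwd happens to match
resolved = os.path.join(os.path.dirname(os.path.abspath(link)), target)
print(os.path.exists(resolved))        # True: resolved against the link dir
```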
class SymlinkError(SpackError):
"""Exception class for errors raised while creating symlinks,
junctions and hard links
"""
class AlreadyExistsError(SymlinkError):
"""Link path already exists."""
return False

View File

@@ -9,6 +9,7 @@
import io
import itertools
import json
import multiprocessing.pool
import os
import re
import shutil
@@ -48,7 +49,6 @@
import spack.util.gpg
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
@@ -876,18 +876,32 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
db: A spack database used for adding specs and then writing the index.
temp_dir (str): Location to write index.json and hash for pushing
concurrency (int): Number of parallel processes to use when fetching
"""
for file in file_list:
contents = read_method(file)
# Need full spec.json name or this gets confused with index.json.
if file.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(contents)
fetched_spec = Spec.from_dict(specfile_json)
elif file.endswith(".json"):
fetched_spec = Spec.from_json(contents)
else:
continue
Return:
None
"""
def _fetch_spec_from_mirror(spec_url):
spec_file_contents = read_method(spec_url)
if spec_file_contents:
# Need full spec.json name or this gets confused with index.json.
if spec_url.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
return Spec.from_dict(specfile_json)
if spec_url.endswith(".json"):
return Spec.from_json(spec_file_contents)
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
fetched_specs = tp.map(
llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
)
finally:
tp.terminate()
tp.join()
for fetched_spec in fetched_specs:
db.add(fetched_spec, None)
db.mark(fetched_spec, "in_buildcache", True)
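The rewritten loop above fans the spec fetches out over a thread pool and keeps the database updates sequential. A minimal sketch of that fan-out/fold pattern, with a placeholder fetch function standing in for `read_method`:

```python
import multiprocessing.pool

def fetch(url):
    return f"contents of {url}"  # placeholder for a network read

urls = ["a.json", "b.json.sig", "c.txt"]
tp = multiprocessing.pool.ThreadPool(processes=4)
try:
    results = tp.map(fetch, urls)  # parallel, order-preserving
finally:
    tp.terminate()
    tp.join()
for r in results:
    print(r)  # sequential post-processing (e.g. db.add) stays single-threaded
```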
@@ -1799,11 +1813,10 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
m.linkname = m.linkname[result.end() :]
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
def extract_tarball(spec, download_result, unsigned=False, force=False):
"""
extract binary tarball for given package into install area
"""
timer.start("extract")
if os.path.exists(spec.prefix):
if force:
shutil.rmtree(spec.prefix)
@@ -1883,9 +1896,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
os.remove(tarfile_path)
os.remove(specfile_path)
timer.stop("extract")
timer.start("relocate")
try:
relocate_package(spec)
except Exception as e:
@@ -1906,7 +1917,6 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
if os.path.exists(filename):
os.remove(filename)
_delete_staged_downloads(download_result)
timer.stop("relocate")
def _ensure_common_prefix(tar: tarfile.TarFile) -> str:

View File

@@ -476,16 +476,16 @@ def ensure_executables_in_path_or_raise(
def _add_externals_if_missing() -> None:
search_list = [
# clingo
"cmake",
"bison",
spack.repo.PATH.get_pkg_class("cmake"),
spack.repo.PATH.get_pkg_class("bison"),
# GnuPG
"gawk",
spack.repo.PATH.get_pkg_class("gawk"),
# develop deps
"git",
spack.repo.PATH.get_pkg_class("git"),
]
if IS_WINDOWS:
search_list.append("winbison")
externals = spack.detection.by_path(search_list)
search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
externals = spack.detection.by_executable(search_list)
# System git is typically deprecated, so mark as non-buildable to force it as external
non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)

View File

@@ -15,9 +15,9 @@
from llnl.util import tty
import spack.build_environment
import spack.environment
import spack.tengine
import spack.util.cpus
import spack.util.executable
from spack.environment import depfile
@@ -137,7 +137,7 @@ def _install_with_depfile(self) -> None:
"-C",
str(self.environment_root()),
"-j",
str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
str(spack.build_environment.determine_number_of_jobs(parallel=True)),
**kwargs,
)

View File

@@ -68,7 +68,7 @@
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
from spack.util.cpus import determine_number_of_jobs
from spack.util.cpus import cpus_available
from spack.util.environment import (
SYSTEM_DIRS,
EnvironmentModifications,
@@ -537,6 +537,39 @@ def update_compiler_args_for_dep(dep):
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
def determine_number_of_jobs(
parallel=False, command_line=None, config_default=None, max_cpus=None
):
"""
Packages that require sequential builds need 1 job. Otherwise we use the
number of jobs set on the command line. If not set, then we use the config
defaults (which is usually set through the builtin config scope), but we
cap to the number of CPUs available to avoid oversubscription.
Parameters:
parallel (bool or None): true when package supports parallel builds
command_line (int or None): command line override
config_default (int or None): config default number of jobs
max_cpus (int or None): maximum number of CPUs available. When None, this
value is automatically determined.
"""
if not parallel:
return 1
if command_line is None and "command_line" in spack.config.scopes():
command_line = spack.config.get("config:build_jobs", scope="command_line")
if command_line is not None:
return command_line
max_cpus = max_cpus or cpus_available()
# in some rare cases _builtin config may not be set, so default to max 16
config_default = config_default or spack.config.get("config:build_jobs", 16)
return min(max_cpus, config_default)
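The precedence in `determine_number_of_jobs` is: sequential builds get one job, an explicit command-line value wins outright, and otherwise the config default is capped by the CPU count. A standalone sketch of that logic with illustrative values (the config lookup is elided):

```python
import os

def jobs(parallel, command_line=None, config_default=16, max_cpus=None):
    if not parallel:
        return 1                      # sequential build
    if command_line is not None:
        return command_line           # explicit override wins
    max_cpus = max_cpus or os.cpu_count() or 1
    return min(max_cpus, config_default)  # cap to avoid oversubscription

assert jobs(parallel=False) == 1
assert jobs(parallel=True, command_line=4) == 4
assert jobs(parallel=True, config_default=64, max_cpus=8) == 8
```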
def set_module_variables_for_package(pkg):
"""Populate the Python module of a package with some useful global names.
This makes things easier for package writers.

View File

@@ -274,6 +274,7 @@ def std_args(pkg, generator=None):
generator,
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
define("CMAKE_BUILD_TYPE", build_type),
define("BUILD_TESTING", pkg.run_tests),
]
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
@@ -450,6 +451,7 @@ def cmake_args(self):
* CMAKE_INSTALL_PREFIX
* CMAKE_BUILD_TYPE
* BUILD_TESTING
which will be set automatically.
"""

View File

@@ -154,7 +154,7 @@ def cuda_flags(arch_list):
conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")

View File

@@ -95,7 +95,7 @@ def makefile_root(self):
return self.stage.source_path
@property
def makefile_name(self):
def nmakefile_name(self):
"""Name of the current makefile. This is currently an empty value.
If a project defines this value, it will be used with the /f argument
to provide nmake an explicit makefile. This is useful in scenarios where
@@ -126,8 +126,8 @@ def build(self, pkg, spec, prefix):
"""Run "nmake" on the build targets specified by the builder."""
opts = self.std_nmake_args
opts += self.nmake_args()
if self.makefile_name:
opts.append("/F{}".format(self.makefile_name))
if self.nmakefile_name:
opts.append("/f {}".format(self.nmakefile_name))
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake(
*opts, *self.build_targets, ignore_quotes=self.ignore_quotes
@@ -139,8 +139,8 @@ def install(self, pkg, spec, prefix):
opts = self.std_nmake_args
opts += self.nmake_args()
opts += self.nmake_install_args()
if self.makefile_name:
opts.append("/F{}".format(self.makefile_name))
if self.nmakefile_name:
opts.append("/f {}".format(self.nmakefile_name))
opts.append(self.define("PREFIX", prefix))
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake(

View File

@@ -16,7 +16,6 @@
import spack.builder
import spack.config
import spack.deptypes as dt
import spack.detection
import spack.multimethod
import spack.package_base
@@ -227,48 +226,7 @@ def update_external_dependencies(self, extendee_spec=None):
python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
def get_external_python_for_prefix(self):
"""
For an external package that extends python, find the most likely spec for the python
it depends on.
First search: an "installed" external that shares a prefix with this package
Second search: a configured external that shares a prefix with this package
Third search: search this prefix for a python package
Returns:
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
"""
python_externals_installed = [
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
]
if python_externals_installed:
return python_externals_installed[0]
python_external_config = spack.config.get("packages:python:externals", [])
python_externals_configured = [
spack.spec.parse_with_version_concrete(item["spec"])
for item in python_external_config
if item["prefix"] == self.spec.external_path
]
if python_externals_configured:
return python_externals_configured[0]
python_externals_detection = spack.detection.by_path(
["python"], path_hints=[self.spec.external_path]
)
python_externals_detected = [
d.spec
for d in python_externals_detection.get("python", [])
if d.prefix == self.spec.external_path
]
if python_externals_detected:
return python_externals_detected[0]
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
class PythonPackage(PythonExtension):
@@ -315,6 +273,47 @@ def list_url(cls):
name = cls.pypi.split("/")[0]
return "https://pypi.org/simple/" + name + "/"
def get_external_python_for_prefix(self):
"""
For an external package that extends python, find the most likely spec for the python
it depends on.
First search: an "installed" external that shares a prefix with this package
Second search: a configured external that shares a prefix with this package
Third search: search this prefix for a python package
Returns:
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
"""
python_externals_installed = [
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
]
if python_externals_installed:
return python_externals_installed[0]
python_external_config = spack.config.get("packages:python:externals", [])
python_externals_configured = [
spack.spec.parse_with_version_concrete(item["spec"])
for item in python_external_config
if item["prefix"] == self.spec.external_path
]
if python_externals_configured:
return python_externals_configured[0]
python_externals_detection = spack.detection.by_executable(
[spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
)
python_externals_detected = [
d.spec
for d in python_externals_detection.get("python", [])
if d.prefix == self.spec.external_path
]
if python_externals_detected:
return python_externals_detected[0]
raise StopIteration("No external python could be detected for %s to depend on" % self.spec)
@property
def headers(self):
"""Discover header files in platlib."""

View File

@@ -10,10 +10,9 @@
import llnl.util.tty as tty
import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import build_system, extends, maintainers
from spack.package_base import PackageBase
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError
@@ -93,7 +92,7 @@ def install(self, pkg, spec, prefix):
"--copy",
"-i",
"-j",
str(determine_number_of_jobs(parallel=parallel)),
str(determine_number_of_jobs(parallel)),
"--",
os.getcwd(),
]

View File

@@ -308,7 +308,7 @@ def append_dep(s, d):
dependencies.append({"spec": s, "depends": d})
for spec in spec_list:
for s in spec.traverse(deptype="all"):
for s in spec.traverse(deptype=all):
if s.external:
tty.msg("Will not stage external pkg: {0}".format(s))
continue
@@ -316,7 +316,7 @@ def append_dep(s, d):
skey = _spec_deps_key(s)
spec_labels[skey] = s
for d in s.dependencies(deptype="all"):
for d in s.dependencies(deptype=all):
dkey = _spec_deps_key(d)
if d.external:
tty.msg("Will not stage external dep: {0}".format(d))
@@ -1029,18 +1029,13 @@ def main_script_replacements(cmd):
job_vars = job_object.setdefault("variables", {})
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
job_object["needs"] = []
if spec_label in dependencies:
if enable_artifacts_buildcache:
# Get dependencies transitively, so they're all
# available in the artifacts buildcache.
dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
else:
# In this case, "needs" is only used for scheduling
# purposes, so we only get the direct dependencies.

View File

@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
print()
header = "%s{%s} / %s{%s}" % (
spack.spec.ARCHITECTURE_COLOR,
spack.spec.architecture_color,
architecture if architecture else "no arch",
spack.spec.COMPILER_COLOR,
spack.spec.compiler_color,
f"{compiler.display_str}" if compiler else "no compiler",
)

View File

@@ -20,7 +20,6 @@
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.error
import spack.mirror
import spack.relocate
import spack.repo
@@ -79,11 +78,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
"Alternatively, one can decide to build a cache for only the package or only the "
"dependencies",
)
push.add_argument(
"--fail-fast",
action="store_true",
help="stop pushing on first failure (default is best effort)",
)
arguments.add_common_arguments(push, ["specs"])
push.set_defaults(func=push_fn)
@@ -302,7 +296,6 @@ def push_fn(args):
tty.info(f"Selected {len(specs)} specs to push to {url}")
skipped = []
failed = []
# tty printing
color = clr.get_color_when()
@@ -333,17 +326,11 @@ def push_fn(args):
except bindist.NoOverwriteException:
skipped.append(format_spec(spec))
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
raise
failed.append((format_spec(spec), e))
if skipped:
if len(specs) == 1:
tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
elif len(skipped) == len(specs):
tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
tty.info("All specs are already in the buildcache. Use --force to overwite them.")
else:
tty.info(
"The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -353,17 +340,6 @@ def push_fn(args):
)
)
if failed:
if len(failed) == 1:
raise failed[0][1]
raise spack.error.SpackError(
f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
"\n".join(
elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
),
)
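The removed `--fail-fast` logic implements best-effort pushing with error aggregation: each failure is recorded and a single summary error is raised at the end, unless fail-fast short-circuits. A condensed sketch; `push_one` and the specs are placeholders:

```python
def push_all(specs, push_one, fail_fast=False):
    failed = []
    for spec in specs:
        try:
            push_one(spec)
        except Exception as e:
            if fail_fast:
                raise               # stop on first failure
            failed.append((spec, e))  # best effort: record and continue
    if failed:
        lines = "\n".join(f"  {s}: {e.__class__.__name__}: {e}" for s, e in failed)
        raise RuntimeError(f"{len(failed)} push(es) failed:\n{lines}")
```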
def install_fn(args):
"""install from a binary package"""

View File

@@ -19,7 +19,6 @@
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
@@ -254,8 +253,6 @@ def ci_rebuild(args):
check a single spec against the remote mirror, and rebuild it from source if the mirror does
not contain the hash
"""
rebuild_timer = timer.Timer()
env = spack.cmd.require_active_env(cmd_name="ci rebuild")
# Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -739,14 +736,6 @@ def ci_rebuild(args):
print(reproduce_msg)
rebuild_timer.stop()
try:
with open("install_timers.json", "w") as timelog:
extra_attributes = {"name": ".ci-rebuild"}
rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:
tty.debug(str(e))
# Tie job success/failure to the success/failure of building the spec
return install_exit_code
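The removed timing hook records named phases and dumps them as JSON alongside the job. A minimal stand-in for that pattern; the Timer class here is a sketch, not spack.util.timer's actual API:

```python
import io
import json
import time

class Timer:
    def __init__(self):
        self.phases = {}
        self._starts = {}
    def start(self, name):
        self._starts[name] = time.monotonic()
    def stop(self, name):
        self.phases[name] = time.monotonic() - self._starts.pop(name)
    def write_json(self, stream):
        json.dump(self.phases, stream)

t = Timer()
t.start("extract"); time.sleep(0.01); t.stop("extract")
buf = io.StringIO()
t.write_json(buf)
print(buf.getvalue())  # e.g. {"extract": 0.0101...}
```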

View File

@@ -812,9 +812,6 @@ def bash(args: Namespace, out: IO) -> None:
parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)
aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
out.write(f'SPACK_ALIASES="{aliases}"\n\n')
writer = BashCompletionWriter(parser.prog, out, args.aliases)
writer.write(parser)

View File

@@ -12,7 +12,7 @@
import spack.cmd
import spack.config
import spack.deptypes as dt
import spack.dependency as dep
import spack.environment as ev
import spack.mirror
import spack.modules
@@ -114,13 +114,16 @@ def __call__(self, parser, namespace, jobs, option_string):
class DeptypeAction(argparse.Action):
"""Creates a flag of valid dependency types from a deptype argument."""
"""Creates a tuple of valid dependency types from a deptype argument."""
def __call__(self, parser, namespace, values, option_string=None):
if not values or values == "all":
deptype = dt.ALL
else:
deptype = dt.canonicalize(values.split(","))
deptype = dep.all_deptypes
if values:
deptype = tuple(x.strip() for x in values.split(","))
if deptype == ("all",):
deptype = "all"
deptype = dep.canonical_deptype(deptype)
setattr(namespace, self.dest, deptype)
@@ -282,8 +285,9 @@ def deptype():
return Args(
"--deptype",
action=DeptypeAction,
default=dt.ALL,
help="comma-separated list of deptypes to traverse (default=%s)" % ",".join(dt.ALL_TYPES),
default=dep.all_deptypes,
help="comma-separated list of deptypes to traverse\n\ndefault=%s"
% ",".join(dep.all_deptypes),
)
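Under the new scheme the comma-separated `--deptype` string is canonicalized into a bit flag once, at parse time. A self-contained sketch of such an argparse Action; the flag values are local stand-ins for spack.deptypes, not its real constants:

```python
import argparse

BUILD, LINK, RUN, TEST = 1, 2, 4, 8
ALL = BUILD | LINK | RUN | TEST
NAMES = {"build": BUILD, "link": LINK, "run": RUN, "test": TEST}

class DeptypeAction(argparse.Action):
    """Turn 'build,link' into a bit flag at parse time."""
    def __call__(self, parser, namespace, values, option_string=None):
        if not values or values == "all":
            flag = ALL
        else:
            flag = 0
            for name in values.split(","):
                flag |= NAMES[name.strip()]
        setattr(namespace, self.dest, flag)

p = argparse.ArgumentParser()
p.add_argument("--deptype", action=DeptypeAction, default=ALL)
print(p.parse_args(["--deptype", "build,link"]).deptype)  # -> 3
```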

View File

@@ -10,7 +10,6 @@
import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.error
import spack.paths
import spack.spec
@@ -47,9 +46,9 @@ def __init__(self, context="build"):
raise ValueError("context can only be build or test")
if context == "build":
self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
self.direct_deps = ("build", "link", "run")
else:
self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
self.direct_deps = ("build", "test", "link", "run")
self.has_uninstalled_deps = False
@@ -72,8 +71,8 @@ def accept(self, item):
def neighbors(self, item):
# Direct deps: follow build & test edges.
# Transitive deps: follow link / run.
depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
return item.edge.spec.edges_to_dependencies(depflag=depflag)
deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
return item.edge.spec.edges_to_dependencies(deptype=deptypes)
def emulate_env_utility(cmd_name, context, args):

View File

@@ -185,7 +185,7 @@ def compiler_list(args):
os_str = os
if target:
os_str += "-%s" % target
cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
tty.hline(colorize(cname), char="-")
colify(reversed(sorted(c.spec.display_str for c in compilers)))

View File

@@ -822,7 +822,7 @@ def get_versions(args, name):
if args.url is not None and args.template != "bundle" and valid_url:
# Find available versions
try:
url_dict = spack.url.find_versions_of_archive(args.url)
url_dict = spack.util.web.find_versions_of_archive(args.url)
except UndetectableVersionError:
# Use fake versions
tty.warn("Couldn't detect version in: {0}".format(args.url))

View File

@@ -74,7 +74,7 @@ def dependencies(parser, args):
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,
depflag=args.deptype,
deptype=args.deptype,
)
if spec.name in dependencies:

View File

@@ -239,13 +239,6 @@ def env_deactivate_setup_parser(subparser):
const="bat",
help="print bat commands to activate the environment",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to activate the environment",
)
def env_deactivate(args):

View File

@@ -6,7 +6,6 @@
import errno
import os
import sys
from typing import List, Optional
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@@ -55,7 +54,7 @@ def setup_parser(subparser):
find_parser.add_argument(
"--all", action="store_true", help="search for all packages that Spack knows about"
)
spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags"])
find_parser.add_argument("packages", nargs=argparse.REMAINDER)
find_parser.epilog = (
'The search is by default on packages tagged with the "build-tools" or '
@@ -121,23 +120,46 @@ def external_find(args):
else:
tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)
# Outside the Cray manifest, the search is done by tag for performance reasons,
# since tags are cached.
# If the user specified both --all and --tag, then --all has precedence
if args.all or args.packages:
# Each detectable package has at least the detectable tag
args.tags = ["detectable"]
elif not args.tags:
# If the user didn't specify anything, search for build tools by default
if not args.tags and not args.all and not args.packages:
args.tags = ["core-packages", "build-tools"]
candidate_packages = packages_to_search_for(
names=args.packages, tags=args.tags, exclude=args.exclude
)
detected_packages = spack.detection.by_path(
candidate_packages, path_hints=args.path, max_workers=args.jobs
)
# If the user specified both --all and --tag, then --all has precedence
if args.all and args.tags:
args.tags = []
# Construct the list of possible packages to be detected
pkg_cls_to_check = []
# Add the packages that have been required explicitly
if args.packages:
pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
if args.tags:
allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
if args.tags and not pkg_cls_to_check:
# If we arrived here we didn't have any explicit package passed
# as argument, which means to search all packages.
# Since tags are cached it's much faster to construct what we need
# to search directly, rather than filtering after the fact
pkg_cls_to_check = [
spack.repo.PATH.get_pkg_class(pkg_name)
for tag in args.tags
for pkg_name in spack.repo.PATH.packages_with_tags(tag)
]
pkg_cls_to_check = list(set(pkg_cls_to_check))
# If the list of packages is empty, search for every possible package
if not args.tags and not pkg_cls_to_check:
pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())
# If the user specified any packages to exclude from external find, add them here
if args.exclude:
pkg_cls_to_check = [pkg for pkg in pkg_cls_to_check if pkg.name not in args.exclude]
detected_packages = spack.detection.by_executable(pkg_cls_to_check, path_hints=args.path)
detected_packages.update(spack.detection.by_library(pkg_cls_to_check, path_hints=args.path))
new_entries = spack.detection.update_configuration(
detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -151,19 +173,6 @@ def external_find(args):
tty.msg("No new external packages detected")
def packages_to_search_for(
*, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
):
result = []
for current_tag in tags:
result.extend(spack.repo.PATH.packages_with_tags(current_tag))
if names:
result = [x for x in result if x in names]
if exclude:
result = [x for x in result if x not in exclude]
return result
def external_read_cray_manifest(args):
_collect_and_consume_cray_manifest_files(
manifest_file=args.file,

View File

@@ -74,19 +74,19 @@ def graph(parser, args):
if args.static:
args.dot = True
static_graph_dot(specs, depflag=args.deptype)
static_graph_dot(specs, deptype=args.deptype)
return
if args.dot:
builder = SimpleDAG()
if args.color:
builder = DAGWithDependencyTypes()
graph_dot(specs, builder=builder, depflag=args.deptype)
graph_dot(specs, builder=builder, deptype=args.deptype)
return
# ascii is default: user doesn't need to provide it explicitly
debug = spack.config.get("config:debug")
graph_ascii(specs[0], debug=debug, depflag=args.deptype)
graph_ascii(specs[0], debug=debug, deptype=args.deptype)
for spec in specs[1:]:
print() # extra line between independent graphs
graph_ascii(spec, debug=debug)

View File

@@ -11,7 +11,6 @@
from llnl.util.tty.colify import colify
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test
import spack.repo
@@ -65,11 +64,11 @@ def section_title(s):
def version(s):
return spack.spec.VERSION_COLOR + s + plain_format
return spack.spec.version_color + s + plain_format
def variant(s):
return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format
return spack.spec.enabled_variant_color + s + plain_format
class VariantFormatter:
@@ -161,7 +160,7 @@ def print_dependencies(pkg):
for deptype in ("build", "link", "run"):
color.cprint("")
color.cprint(section_title("%s Dependencies:" % deptype.capitalize()))
deps = sorted(pkg.dependencies_of_type(dt.flag_from_string(deptype)))
deps = sorted(pkg.dependencies_of_type(deptype))
if deps:
colify(deps, indent=4)
else:

View File

@@ -16,7 +16,7 @@
from llnl.util.tty.colify import colify
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.dependency
import spack.repo
from spack.version import VersionList
@@ -149,8 +149,8 @@ def rows_for_ncols(elts, ncols):
def get_dependencies(pkg):
all_deps = {}
for deptype in dt.ALL_TYPES:
deps = pkg.dependencies_of_type(dt.flag_from_string(deptype))
for deptype in spack.dependency.all_deptypes:
deps = pkg.dependencies_of_type(deptype)
all_deps[deptype] = [d for d in deps]
return all_deps
@@ -275,8 +275,8 @@ def head(n, span_id, title, anchor=None):
out.write("\n")
out.write("</dd>\n")
for deptype in dt.ALL_TYPES:
deps = pkg_cls.dependencies_of_type(dt.flag_from_string(deptype))
for deptype in spack.dependency.all_deptypes:
deps = pkg_cls.dependencies_of_type(deptype)
if deps:
out.write("<dt>%s Dependencies:</dt>\n" % deptype.capitalize())
out.write("<dd>\n")

View File

@@ -52,13 +52,6 @@ def setup_parser(subparser):
const="bat",
help="print bat commands to load the package",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to load the package",
)
subparser.add_argument(
"--first",

View File

@@ -443,7 +443,7 @@ def mirror_create(args):
)
# When no directory is provided, the source dir is used
path = args.directory or spack.caches.fetch_cache_location()
path = args.directory or spack.caches.FETCH_CACHE_location()
if args.all and not ev.active_environment():
create_mirror_for_all_specs(

View File

@@ -137,7 +137,7 @@ def solve(parser, args):
# these are the same options as `spack spec`
install_status_fn = spack.spec.Spec.install_status
fmt = spack.spec.DISPLAY_FORMAT
fmt = spack.spec.display_format
if args.namespaces:
fmt = "{namespace}." + fmt

View File

@@ -77,7 +77,7 @@ def setup_parser(subparser):
def spec(parser, args):
install_status_fn = spack.spec.Spec.install_status
fmt = spack.spec.DISPLAY_FORMAT
fmt = spack.spec.display_format
if args.namespaces:
fmt = "{namespace}." + fmt

View File

@@ -51,13 +51,6 @@ def setup_parser(subparser):
const="bat",
help="print bat commands to load the package",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to load the package",
)
subparser.add_argument(
"-a", "--all", action="store_true", help="unload all loaded Spack packages"

View File

@@ -12,7 +12,6 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
import spack.url
import spack.util.crypto as crypto
from spack.url import (
UndetectableNameError,
@@ -27,6 +26,7 @@
substitution_offsets,
)
from spack.util.naming import simplify_name
from spack.util.web import find_versions_of_archive
description = "debugging tool for url parsing"
section = "developer"
@@ -139,7 +139,7 @@ def url_parse(args):
if args.spider:
print()
tty.msg("Spidering for versions:")
versions = spack.url.find_versions_of_archive(url)
versions = find_versions_of_archive(url)
if not versions:
print(" Found no versions for {0}".format(name))

View File

@@ -99,28 +99,6 @@ def cxx17_flag(self):
else:
return "-std=c++17"
@property
def cxx20_flag(self):
if self.real_version < Version("8.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++20 standard", "cxx20_flag", "< 8.0"
)
elif self.real_version < Version("11.0"):
return "-std=c++2a"
else:
return "-std=c++20"
@property
def cxx23_flag(self):
if self.real_version < Version("11.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++23 standard", "cxx23_flag", "< 11.0"
)
elif self.real_version < Version("14.0"):
return "-std=c++2b"
else:
return "-std=c++23"
@property
def c99_flag(self):
if self.real_version < Version("4.5"):

View File

@@ -29,90 +29,6 @@
}
class CmdCall:
"""Compose a call to `cmd` for an ordered series of cmd commands/scripts"""
def __init__(self, *cmds):
if not cmds:
raise RuntimeError(
"""Attempting to run commands from CMD without specifying commands.
Please add commands to be run."""
)
self._cmds = cmds
def __call__(self):
out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT) # novermin
return out.decode("utf-16le", errors="replace") # novermin
@property
def cmd_line(self):
base_call = "cmd /u /c "
commands = " && ".join([x.command_str() for x in self._cmds])
# If multiple commands are being invoked by a single subshell
# they must be encapsulated by a double quote. Always double
# quote to be sure of proper handling
# cmd will properly resolve nested double quotes as needed
#
# `set`` writes out the active env to the subshell stdout,
# and in this context we are always trying to obtain env
# state so it should always be appended
return base_call + f'"{commands} && set"'
class VarsInvocation:
def __init__(self, script):
self._script = script
def command_str(self):
return f'"{self._script}"'
@property
def script(self):
return self._script
class VCVarsInvocation(VarsInvocation):
def __init__(self, script, arch, msvc_version):
super(VCVarsInvocation, self).__init__(script)
self._arch = arch
self._msvc_version = msvc_version
@property
def sdk_ver(self):
"""Accessor for Windows SDK version property
Note: This property may not be set by
the calling context, in which case this property will
return an empty string
This property will ONLY be set if the SDK package
is a dependency somewhere in the Spack DAG of the package
for which we are constructing an MSVC compiler env.
Otherwise this property should be unset to allow the VCVARS
script to use its internal heuristics to determine appropriate
SDK version
"""
if getattr(self, "_sdk_ver", None):
return self._sdk_ver + ".0"
return ""
@sdk_ver.setter
def sdk_ver(self, val):
self._sdk_ver = val
@property
def arch(self):
return self._arch
@property
def vcvars_ver(self):
return f"-vcvars_ver={self._msvc_version}"
def command_str(self):
script = super(VCVarsInvocation, self).command_str()
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
def get_valid_fortran_pth(comp_ver):
cl_ver = str(comp_ver)
sort_fn = lambda fc_ver: StrictVersion(fc_ver)
@@ -159,48 +75,22 @@ class Msvc(Compiler):
# file based on compiler executable path.
def __init__(self, *args, **kwargs):
# This positional argument "paths" is later parsed and process by the base class
# via the call to `super` later in this method
paths = args[3]
# This positional argument "cspec" is also parsed and handled by the base class
# constructor
cspec = args[0]
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
paths[:] = new_pth
new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
args[3][:] = new_pth
super().__init__(*args, **kwargs)
# To use the MSVC compilers, VCVARS must be invoked
# VCVARS is located at a fixed location, referenceable
# idiomatically by the following relative path from the
# compiler.
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
env_cmds = []
compiler_root = os.path.join(self.cc, "../../../../../../..")
vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
env_cmds.append(self.vcvars_call)
# Below is a check for a valid fortran path
# paths has c, cxx, fc, and f77 paths in that order
# paths[2] refers to the fc path and is a generic check
# for a fortran compiler
if paths[2]:
if os.getenv("ONEAPI_ROOT"):
# If this found, it sets all the vars
oneapi_root = os.getenv("ONEAPI_ROOT")
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
oneapi_version_setvars = os.path.join(
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
)
# order matters here, the specific version env must be invoked first,
# otherwise it will be ignored if the root setvars sets up the oneapi
# env first
env_cmds.extend(
[VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
)
self.msvc_compiler_environment = CmdCall(*env_cmds)
self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
else:
# To use the MSVC compilers, VCVARS must be invoked
# VCVARS is located at a fixed location, referenceable
# idiomatically by the following relative path from the
# compiler.
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
@property
def msvc_version(self):
@@ -229,29 +119,15 @@ def platform_toolset_ver(self):
"""
return self.msvc_version[:2].joined.string[:3]
def _compiler_version(self, compiler):
"""Returns version object for given compiler"""
# ignore_errors below is true here due to ifx's
# non-zero return code when it is not provided
# an input file
return Version(
re.search(
Msvc.version_regex,
spack.compiler.get_compiler_version_output(
compiler, version_arg=None, ignore_errors=True
),
).group(1)
)
@property
def cl_version(self):
"""Cl toolset version"""
return self._compiler_version(self.cc)
@property
def ifx_version(self):
"""Ifx compiler version associated with this version of MSVC"""
return self._compiler_version(self.fc)
return Version(
re.search(
Msvc.version_regex,
spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
).group(1)
)
@property
def vs_root(self):
@@ -270,12 +146,27 @@ def setup_custom_environment(self, pkg, env):
# output, sort into dictionary, use that to make the build
# environment.
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
# vcvars can target specific sdk versions, force it to pick up concretized sdk
# version, if needed by spec
if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string
sdk_ver = (
""
if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
else pkg.spec["win-sdk"].version.string + ".0"
)
# provide vcvars with msvc version selected by concretization,
# not whatever it happens to pick up on the system (highest available version)
out = subprocess.check_output( # novermin
'cmd /u /c "{}" {} {} {} && set'.format(
self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
),
stderr=subprocess.STDOUT,
)
if sys.version_info[0] >= 3:
out = out.decode("utf-16le", errors="replace") # novermin
out = self.msvc_compiler_environment()
int_env = dict(
(key, value)
for key, _, value in (line.partition("=") for line in out.splitlines())

View File

@@ -857,12 +857,12 @@ def add_from_file(filename, scope=None):
def add(fullpath, scope=None):
"""Add the given configuration to the specified config scope.
Add accepts a path. If you want to add from a filename, use add_from_file"""
components = process_config_path(fullpath)
has_existing_value = True
path = ""
override = False
value = syaml.load_config(components[-1])
for idx, name in enumerate(components[:-1]):
# First handle double colons in constructing path
colon = "::" if override else ":" if path else ""
@@ -883,14 +883,14 @@ def add(fullpath, scope=None):
existing = get_valid_type(path)
# construct value from this point down
value = syaml.load_config(components[-1])
for component in reversed(components[idx + 1 : -1]):
value = {component: value}
break
if override:
path += "::"
if has_existing_value:
path, _, value = fullpath.rpartition(":")
value = syaml.load_config(value)
existing = get(path, scope=scope)
# append values to lists
@@ -1231,17 +1231,11 @@ def they_are(t):
return copy.copy(source)
#
# Process a path argument to config.set() that may contain overrides ('::' or
# trailing ':')
#
def process_config_path(path):
"""Process a path argument to config.set() that may contain overrides ('::' or
trailing ':')
Note: quoted value path components will be processed as a single value (escaping
colons); quoted path components outside of the value are considered ill-formed and
will raise.
e.g. `this:is:a:path:'value:with:colon'` will yield:
[this, is, a, path, value:with:colon]
"""
result = []
if path.startswith(":"):
raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
@@ -1269,17 +1263,6 @@ def process_config_path(path):
front.append = True
result.append(front)
quote = "['\"]"
not_quote = "[^'\"]"
if re.match(f"^{quote}", path):
m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
if not m:
raise ValueError("Quotes indicate value, but there are additional path entries")
result.append(m.group(1))
break
return result
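The quoted-component rule documented above can be illustrated with a toy splitter: a quoted final component is kept as one value, so colons inside it do not split, while quotes anywhere else raise. This is a simplified stand-in for `process_config_path`, not its real implementation:

```python
import re

def split_config_path(path):
    parts, rest = [], path
    while rest:
        if re.match(r"^['\"]", rest):
            # Quotes are only legal around the entire final component.
            m = re.match(r"^(['\"][^'\"]+['\"])$", rest)
            if not m:
                raise ValueError("quotes must wrap the entire final component")
            parts.append(m.group(1).strip("'\""))
            break
        head, _, rest = rest.partition(":")
        parts.append(head)
    return parts

print(split_config_path("this:is:a:path:'value:with:colon'"))
# ['this', 'is', 'a', 'path', 'value:with:colon']
```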

View File

@@ -11,7 +11,6 @@
import llnl.util.tty as tty
import spack.cmd
import spack.deptypes as dt
import spack.error
import spack.hash_types as hash_types
import spack.platforms
@@ -159,13 +158,13 @@ def entries_to_specs(entries):
dependencies = entry["dependencies"]
for name, properties in dependencies.items():
dep_hash = properties["hash"]
depflag = dt.canonicalize(properties["type"])
deptypes = properties["type"]
if dep_hash in spec_dict:
if entry["hash"] not in spec_dict:
continue
parent_spec = spec_dict[entry["hash"]]
dep_spec = spec_dict[dep_hash]
parent_spec._add_dependency(dep_spec, depflag=depflag, virtuals=())
parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
for spec in spec_dict.values():
spack.spec.reconstruct_virtuals_on_edges(spec)

View File

@@ -27,8 +27,6 @@
import time
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union
import spack.deptypes as dt
try:
import uuid
@@ -91,7 +89,7 @@
#: Types of dependencies tracked by the database
#: We store by DAG hash, so we track the dependencies that the DAG hash includes.
_TRACKED_DEPENDENCIES = ht.dag_hash.depflag
_TRACKED_DEPENDENCIES = ht.dag_hash.deptype
#: Default list of fields written for each install record
DEFAULT_INSTALL_RECORD_FIELDS = (
@@ -797,7 +795,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
tty.warn(msg)
continue
spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)
spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
def _read_from_file(self, filename):
"""Fill database from file, do not maintain old data.
@@ -1148,7 +1146,7 @@ def _add(
# Retrieve optional arguments
installation_time = installation_time or _now()
for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
for edge in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
if edge.spec.dag_hash() in self._data:
continue
# allow missing build-only deps. This prevents excessive
@@ -1156,7 +1154,7 @@ def _add(
# is missing a build dep; there's no need to install the
# build dep's build dep first, and there's no need to warn
# about it missing.
dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
dep_allow_missing = allow_missing or edge.deptypes == ("build",)
self._add(
edge.spec,
directory_layout,
@@ -1200,10 +1198,10 @@ def _add(
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
# Connect dependencies from the DB to the new copy.
for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
for dep in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
if not upstream:
record.ref_count += 1
@@ -1373,13 +1371,7 @@ def deprecate(self, spec, deprecator):
return self._deprecate(spec, deprecator)
@_autospec
def installed_relatives(
self,
spec,
direction="children",
transitive=True,
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
):
def installed_relatives(self, spec, direction="children", transitive=True, deptype="all"):
"""Return installed specs related to this one."""
if direction not in ("parents", "children"):
raise ValueError("Invalid direction: %s" % direction)
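Both sides of the _add hunk above test for a build-only edge; with the flag representation, an edge that also carries other types no longer compares equal to dt.BUILD. A minimal sketch using the flag values defined in spack.deptypes:

# hedged sketch of the build-only edge check
LINK, RUN, BUILD, TEST = 0b0001, 0b0010, 0b0100, 0b1000
pure_build = BUILD
mixed = BUILD | LINK
assert pure_build == BUILD  # a build-only edge may be allowed to be missing
assert mixed != BUILD       # a build+link edge does not qualify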

View File

@@ -3,11 +3,64 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's dependency relationships."""
from typing import Dict, List
from typing import Dict, List, Optional, Set, Tuple, Union
import spack.deptypes as dt
import spack.spec
#: The types of dependency relationships that Spack understands.
all_deptypes = ("build", "link", "run", "test")
#: Default dependency type if none is specified
default_deptype = ("build", "link")
#: Type hint for the arguments accepting a dependency type
DependencyArgument = Union[str, List[str], Tuple[str, ...]]
def deptype_chars(*type_tuples: str) -> str:
"""Create a string representing deptypes for many dependencies.
The string will be some subset of 'blrt', like 'bl ', 'b t', or
' lr ' where each letter in 'blrt' stands for 'build', 'link',
'run', and 'test' (the dependency types).
For a single dependency, this just indicates that the dependency has
the indicated deptypes. For a list of dependencies, this shows
whether ANY dependency in the list has the deptypes (so the deptypes
are merged).
"""
types: Set[str] = set()
for t in type_tuples:
if t:
types.update(t)
return "".join(t[0] if t in types else " " for t in all_deptypes)
def canonical_deptype(deptype: DependencyArgument) -> Tuple[str, ...]:
"""Convert deptype to a canonical sorted tuple, or raise ValueError.
Args:
deptype: string representing dependency type, or a list/tuple of such strings.
Can also be the builtin function ``all`` or the string 'all', which result in
a tuple of all dependency types known to Spack.
"""
if deptype in ("all", all):
return all_deptypes
elif isinstance(deptype, str):
if deptype not in all_deptypes:
raise ValueError("Invalid dependency type: %s" % deptype)
return (deptype,)
elif isinstance(deptype, (tuple, list, set)):
bad = [d for d in deptype if d not in all_deptypes]
if bad:
raise ValueError("Invalid dependency types: %s" % ",".join(str(t) for t in bad))
return tuple(sorted(set(deptype)))
raise ValueError("Invalid dependency type: %s" % repr(deptype))
class Dependency:
"""Class representing metadata for a dependency on a package.
@@ -40,7 +93,7 @@ def __init__(
self,
pkg: "spack.package_base.PackageBase",
spec: "spack.spec.Spec",
depflag: dt.DepFlag = dt.DEFAULT,
type: Optional[Tuple[str, ...]] = default_deptype,
):
"""Create a new Dependency.
@@ -57,7 +110,11 @@ def __init__(
# This dict maps condition specs to lists of Patch objects, just
# as the patches dict on packages does.
self.patches: Dict[spack.spec.Spec, "List[spack.patch.Patch]"] = {}
self.depflag = depflag
if type is None:
self.type = set(default_deptype)
else:
self.type = set(type)
@property
def name(self) -> str:
@@ -67,7 +124,7 @@ def name(self) -> str:
def merge(self, other: "Dependency"):
"""Merge constraints, deptypes, and patches of other into self."""
self.spec.constrain(other.spec)
self.depflag |= other.depflag
self.type |= other.type
# concatenate patch lists, or just copy them in
for cond, p in other.patches.items():
@@ -78,5 +135,5 @@ def merge(self, other: "Dependency"):
self.patches[cond] = other.patches[cond]
def __repr__(self) -> str:
types = dt.flag_to_chars(self.depflag)
types = deptype_chars(*self.type)
return f"<Dependency: {self.pkg.name} -> {self.spec} [{types}]>"

View File

@@ -1,123 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's edge types."""
from typing import Iterable, List, Tuple, Union
#: Type hint for the low-level dependency input (enum.Flag is too slow)
DepFlag = int
#: Type hint for the high-level dependency input
DepTypes = Union[str, List[str], Tuple[str, ...]]
#: Individual dependency types
DepType = str # Python 3.8: Literal["build", "link", "run", "test"]
# Flag values. NOTE: these values are not arbitrary, since hash computation imposes
# the order (link, run, build, test) when depending on the same package multiple times,
# and we rely on default integer comparison to sort dependency types.
# New dependency types should be appended.
LINK = 0b0001
RUN = 0b0010
BUILD = 0b0100
TEST = 0b1000
#: The types of dependency relationships that Spack understands.
ALL_TYPES: Tuple[DepType, ...] = ("build", "link", "run", "test")
#: Default dependency type if none is specified
DEFAULT_TYPES: Tuple[DepType, ...] = ("build", "link")
#: A flag with all dependency types set
ALL: DepFlag = BUILD | LINK | RUN | TEST
#: Default dependency type if none is specified
DEFAULT: DepFlag = BUILD | LINK
#: An iterator of all flag components
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)
def flag_from_string(s: str) -> DepFlag:
if s == "build":
return BUILD
elif s == "link":
return LINK
elif s == "run":
return RUN
elif s == "test":
return TEST
else:
raise ValueError(f"Invalid dependency type: {s}")
def flag_from_strings(deptype: Iterable[str]) -> DepFlag:
"""Transform an iterable of deptype strings into a flag."""
flag = 0
for deptype_str in deptype:
flag |= flag_from_string(deptype_str)
return flag
def canonicalize(deptype: DepTypes) -> DepFlag:
"""Convert deptype user input to a DepFlag, or raise ValueError.
Args:
deptype: string representing dependency type, or a list/tuple of such strings.
Can also be the builtin function ``all`` or the string 'all', which result in
a tuple of all dependency types known to Spack.
"""
if deptype in ("all", all):
return ALL
if isinstance(deptype, str):
return flag_from_string(deptype)
if isinstance(deptype, (tuple, list, set)):
return flag_from_strings(deptype)
raise ValueError(f"Invalid dependency type: {deptype!r}")
def flag_to_tuple(x: DepFlag) -> Tuple[DepType, ...]:
deptype: List[DepType] = []
if x & BUILD:
deptype.append("build")
if x & LINK:
deptype.append("link")
if x & RUN:
deptype.append("run")
if x & TEST:
deptype.append("test")
return tuple(deptype)
def flag_to_string(x: DepFlag) -> DepType:
if x == BUILD:
return "build"
elif x == LINK:
return "link"
elif x == RUN:
return "run"
elif x == TEST:
return "test"
else:
raise ValueError(f"Invalid dependency type flag: {x}")
def flag_to_chars(depflag: DepFlag) -> str:
"""Create a string representing deptypes for many dependencies.
The string will be some subset of 'blrt', like 'bl ', 'b t', or
' lr ' where each letter in 'blrt' stands for 'build', 'link',
'run', and 'test' (the dependency types).
For a single dependency, this just indicates that the dependency has
the indicated deptypes. For a list of dependencies, this shows
whether ANY dependency in the list has the deptypes (so the deptypes
are merged)."""
return "".join(
t_str[0] if t_flag & depflag else " " for t_str, t_flag in zip(ALL_TYPES, ALL_FLAGS)
)
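Taken together, these helpers round-trip between the string and flag representations. A minimal sketch of the expected behavior, grounded in the definitions above:

flag = canonicalize(("run", "build"))       # BUILD | RUN == 0b0110
assert flag_to_tuple(flag) == ("build", "run")
assert flag_to_chars(flag) == "b r "
assert canonicalize("all") == ALL == 0b1111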

View File

@@ -3,11 +3,12 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import DetectedPackage, executable_prefix, update_configuration
from .path import by_path, executables_in_path
from .path import by_executable, by_library, executables_in_path
__all__ = [
"DetectedPackage",
"by_path",
"by_library",
"by_executable",
"executables_in_path",
"executable_prefix",
"update_configuration",

View File

@@ -13,13 +13,13 @@
The module also contains other functions that might be useful across different
detection mechanisms.
"""
import collections
import glob
import itertools
import os
import os.path
import re
import sys
from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union
import llnl.util.tty
@@ -29,28 +29,12 @@
import spack.util.spack_yaml
import spack.util.windows_registry
class DetectedPackage(NamedTuple):
"""Information on a package that has been detected."""
#: Spec that was detected
spec: spack.spec.Spec
#: Prefix of the spec
prefix: str
def __reduce__(self):
return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)
@staticmethod
def restore(
spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
) -> "DetectedPackage":
spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
return DetectedPackage(spec=spec, prefix=prefix)
#: Information on a package that has been detected
DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])
def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
"""Returns all the specs mentioned as externals in packages.yaml"""
def _externals_in_packages_yaml():
"""Return all the specs mentioned as externals in packages.yaml"""
packages_yaml = spack.config.get("packages")
already_defined_specs = set()
for pkg_name, package_configuration in packages_yaml.items():
@@ -59,12 +43,7 @@ def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
return already_defined_specs
ExternalEntryType = Union[str, Dict[str, str]]
def _pkg_config_dict(
external_pkg_entries: List[DetectedPackage],
) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
def _pkg_config_dict(external_pkg_entries):
"""Generate a package specific config dict according to the packages.yaml schema.
This does not generate the entire packages.yaml. For example, given some
@@ -86,10 +65,7 @@ def _pkg_config_dict(
if not _spec_is_valid(e.spec):
continue
external_items: List[Tuple[str, ExternalEntryType]] = [
("spec", str(e.spec)),
("prefix", e.prefix),
]
external_items = [("spec", str(e.spec)), ("prefix", e.prefix)]
if e.spec.external_modules:
external_items.append(("modules", e.spec.external_modules))
@@ -107,14 +83,15 @@ def _pkg_config_dict(
return pkg_dict
def _spec_is_valid(spec: spack.spec.Spec) -> bool:
def _spec_is_valid(spec):
try:
str(spec)
except spack.error.SpackError:
# It is assumed here that we can at least extract the package name from the spec so we
# can look up the implementation of determine_spec_details
msg = f"Constructed spec for {spec.name} does not have a string representation"
llnl.util.tty.warn(msg)
# It is assumed here that we can at least extract the package name from
# the spec so we can look up the implementation of
# determine_spec_details
msg = "Constructed spec for {0} does not have a string representation"
llnl.util.tty.warn(msg.format(spec.name))
return False
try:
@@ -129,7 +106,7 @@ def _spec_is_valid(spec: spack.spec.Spec) -> bool:
return True
def path_to_dict(search_paths: List[str]):
def path_to_dict(search_paths):
"""Return dictionary[fullpath]: basename from list of paths"""
path_to_lib = {}
# Reverse order of search directories so that a lib in the first
@@ -147,7 +124,7 @@ def path_to_dict(search_paths: List[str]):
return path_to_lib
def is_executable(file_path: str) -> bool:
def is_executable(file_path):
"""Return True if the path passed as argument is that of an executable"""
return os.path.isfile(file_path) and os.access(file_path, os.X_OK)
@@ -169,7 +146,7 @@ def _convert_to_iterable(single_val_or_multiple):
return [x]
def executable_prefix(executable_dir: str) -> str:
def executable_prefix(executable_dir):
"""Given a directory where an executable is found, guess the prefix
(i.e. the "root" directory of that installation) and return it.
@@ -190,12 +167,12 @@ def executable_prefix(executable_dir: str) -> str:
return os.sep.join(components[:idx])
def library_prefix(library_dir: str) -> str:
"""Given a directory where a library is found, guess the prefix
def library_prefix(library_dir):
"""Given a directory where an library is found, guess the prefix
(i.e. the "root" directory of that installation) and return it.
Args:
library_dir: directory where a library is found
library_dir: directory where a library is found
"""
# Given a prefix where a library is found, assuming that prefix
# contains /lib/ or /lib64/, strip off the 'lib' or 'lib64' directory
@@ -218,17 +195,13 @@ def library_prefix(library_dir: str) -> str:
return library_dir
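Both prefix helpers walk an installation path upward to the directory that contains bin/ (or lib/ and lib64/). A self-contained sketch of the bin/ case, assuming the same heuristic as executable_prefix; guess_prefix is a hypothetical stand-in:

import os

def guess_prefix(executable_dir):
    # strip everything from the first path component named "bin" onward
    components = executable_dir.split(os.sep)
    if "bin" not in components:
        return executable_dir
    idx = components.index("bin")
    return os.sep.join(components[:idx])

assert guess_prefix("/opt/mytool/bin") == "/opt/mytool"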
def update_configuration(
detected_packages: Dict[str, List[DetectedPackage]],
scope: Optional[str] = None,
buildable: bool = True,
) -> List[spack.spec.Spec]:
def update_configuration(detected_packages, scope=None, buildable=True):
"""Add the packages passed as arguments to packages.yaml
Args:
detected_packages: list of DetectedPackage objects to be added
scope: configuration scope where to add the detected packages
buildable: whether the detected packages are buildable or not
detected_packages (list): list of DetectedPackage objects to be added
scope (str): configuration scope where to add the detected packages
buildable (bool): whether the detected packages are buildable or not
"""
predefined_external_specs = _externals_in_packages_yaml()
pkg_to_cfg, all_new_specs = {}, []
@@ -236,10 +209,7 @@ def update_configuration(
new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]
pkg_config = _pkg_config_dict(new_entries)
external_entries = pkg_config.get("externals", [])
assert not isinstance(external_entries, bool), "unexpected value for external entry"
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in pkg_config.get("externals", [])])
if buildable is False:
pkg_config["buildable"] = False
pkg_to_cfg[package_name] = pkg_config
@@ -252,19 +222,16 @@ def update_configuration(
return all_new_specs
def _windows_drive() -> str:
"""Return Windows drive string extracted from the PROGRAMFILES environment variable,
which is guaranteed to be defined for all logins.
"""
match = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"])
if match is None:
raise RuntimeError("cannot read the PROGRAMFILES environment variable")
return match.group(1)
def _windows_drive():
"""Return Windows drive string extracted from PROGRAMFILES
env var, which is guaranteed to be defined for all logins"""
drive = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"]).group(1)
return drive
class WindowsCompilerExternalPaths:
@staticmethod
def find_windows_compiler_root_paths() -> List[str]:
def find_windows_compiler_root_paths():
"""Helper for Windows compiler installation root discovery
At the moment simply returns location of VS install paths from VSWhere
@@ -272,7 +239,7 @@ def find_windows_compiler_root_paths() -> List[str]:
return list(winOs.WindowsOs.vs_install_paths)
@staticmethod
def find_windows_compiler_cmake_paths() -> List[str]:
def find_windows_compiler_cmake_paths():
"""Semi hard-coded search path for cmake bundled with MSVC"""
return [
os.path.join(
@@ -282,7 +249,7 @@ def find_windows_compiler_cmake_paths() -> List[str]:
]
@staticmethod
def find_windows_compiler_ninja_paths() -> List[str]:
def find_windows_compiler_ninja_paths():
"""Semi hard-coded search heuristic for locating ninja bundled with MSVC"""
return [
os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
@@ -290,7 +257,7 @@ def find_windows_compiler_ninja_paths() -> List[str]:
]
@staticmethod
def find_windows_compiler_bundled_packages() -> List[str]:
def find_windows_compiler_bundled_packages():
"""Return all MSVC compiler bundled packages"""
return (
WindowsCompilerExternalPaths.find_windows_compiler_cmake_paths()
@@ -299,15 +266,14 @@ def find_windows_compiler_bundled_packages() -> List[str]:
class WindowsKitExternalPaths:
plat_major_ver = None
if sys.platform == "win32":
plat_major_ver = str(winOs.windows_version()[0])
@staticmethod
def find_windows_kit_roots() -> Optional[str]:
def find_windows_kit_roots():
"""Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
if sys.platform != "win32":
return None
return []
program_files = os.environ["PROGRAMFILES(x86)"]
kit_base = os.path.join(
program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
@@ -315,23 +281,21 @@ def find_windows_kit_roots() -> Optional[str]:
return kit_base
@staticmethod
def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
def find_windows_kit_bin_paths(kit_base=None):
"""Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
assert kit_base is not None, "unexpected value for kit_base"
kit_bin = os.path.join(kit_base, "bin")
return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
@staticmethod
def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
def find_windows_kit_lib_paths(kit_base=None):
"""Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
assert kit_base is not None, "unexpected value for kit_base"
kit_lib = os.path.join(kit_base, "Lib")
return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
@staticmethod
def find_windows_driver_development_kit_paths() -> List[str]:
def find_windows_driver_development_kit_paths():
"""Provides a list of all installation paths
for the WDK by version and architecture
"""
@@ -339,7 +303,7 @@ def find_windows_driver_development_kit_paths() -> List[str]:
return WindowsKitExternalPaths.find_windows_kit_lib_paths(wdk_content_root)
@staticmethod
def find_windows_kit_reg_installed_roots_paths() -> List[str]:
def find_windows_kit_reg_installed_roots_paths():
reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots",
root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
@@ -352,7 +316,7 @@ def find_windows_kit_reg_installed_roots_paths() -> List[str]:
)
@staticmethod
def find_windows_kit_reg_sdk_paths() -> List[str]:
def find_windows_kit_reg_sdk_paths():
reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
% WindowsKitExternalPaths.plat_major_ver,
@@ -366,7 +330,7 @@ def find_windows_kit_reg_sdk_paths() -> List[str]:
)
def find_win32_additional_install_paths() -> List[str]:
def find_win32_additional_install_paths():
"""Not all programs on Windows live on the PATH
Return a list of other potential install locations.
"""
@@ -393,12 +357,13 @@ def find_win32_additional_install_paths() -> List[str]:
return windows_search_ext
def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
"""Given a package, attempts to compute its Windows program files location,
and returns the list of best guesses.
def compute_windows_program_path_for_package(pkg):
"""Given a package, attempt to compute its Windows
program files location, return list of best guesses
Args:
pkg: package for which Program Files location is to be computed
pkg (spack.package_base.PackageBase): package for which
Program Files location is to be computed
"""
if sys.platform != "win32":
return []
@@ -413,7 +378,7 @@ def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBas
]
def compute_windows_user_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
def compute_windows_user_path_for_package(pkg):
"""Given a package attempt to compute its user scoped
install location, return list of potential locations based
on common heuristics. For more info on Windows user specific

View File

@@ -6,13 +6,11 @@
and running executables.
"""
import collections
import concurrent.futures
import os
import os.path
import re
import sys
import warnings
from typing import Dict, List, Optional, Set, Tuple
import llnl.util.filesystem
import llnl.util.tty
@@ -20,7 +18,7 @@
import spack.util.environment
import spack.util.ld_so_conf
from .common import (
from .common import ( # find_windows_compiler_bundled_packages,
DetectedPackage,
WindowsCompilerExternalPaths,
WindowsKitExternalPaths,
@@ -33,13 +31,8 @@
path_to_dict,
)
#: Timeout used for package detection (seconds)
DETECTION_TIMEOUT = 60
if sys.platform == "win32":
DETECTION_TIMEOUT = 120
def common_windows_package_paths() -> List[str]:
def common_windows_package_paths():
paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
paths.extend(find_win32_additional_install_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
@@ -48,7 +41,7 @@ def common_windows_package_paths() -> List[str]:
return paths
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
def executables_in_path(path_hints):
"""Get the paths of all executables available from the current PATH.
For convenience, this is constructed as a dictionary where the keys are
@@ -59,7 +52,7 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
assumed there are two different instances of the executable.
Args:
path_hints: list of paths to be searched. If None the list will be
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
if sys.platform == "win32":
@@ -68,9 +61,7 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
return path_to_dict(search_paths)
def libraries_in_ld_and_system_library_path(
path_hints: Optional[List[str]] = None,
) -> Dict[str, str]:
def libraries_in_ld_and_system_library_path(path_hints=None):
"""Get the paths of all libraries available from LD_LIBRARY_PATH,
LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
standard system library paths.
@@ -83,7 +74,7 @@ def libraries_in_ld_and_system_library_path(
assumed there are two different instances of the library.
Args:
path_hints: list of paths to be searched. If None the list will be
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
variables as well as the standard system library paths.
@@ -99,7 +90,7 @@ def libraries_in_ld_and_system_library_path(
return path_to_dict(search_paths)
def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
def libraries_in_windows_paths(path_hints):
path_hints.extend(spack.util.environment.get_path("PATH"))
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
@@ -115,250 +106,218 @@ def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
return path_to_dict(search_paths)
def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
def _group_by_prefix(paths):
groups = collections.defaultdict(set)
for p in paths:
groups[os.path.dirname(p)].add(p)
return groups
return groups.items()
class Finder:
"""Inspects the file-system looking for packages. Guesses places where to look using PATH."""
# TODO consolidate this with by_executable
# Packages should be able to define both .libraries and .executables in the future
# determine_spec_details should get all relevant libraries and executables in one call
def by_library(packages_to_check, path_hints=None):
# Techniques for finding libraries are determined on a per-recipe basis in
# the determine_version class method. Some packages will extract the
# version number from a shared library's filename.
# Other libraries could use the strings function to extract it as described
# in https://unix.stackexchange.com/questions/58846/viewing-linux-library-executable-version-info
"""Return the list of packages that have been detected on the system,
searching by LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH,
DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths.
def path_hints(
self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
) -> List[str]:
"""Returns the list of paths to be searched.
Args:
packages_to_check (list): list of packages to be detected
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH environment variables
and standard system library paths.
"""
# If no path hints from command line, initialize to empty list so
# we can add default hints on a per package basis
path_hints = [] if path_hints is None else path_hints
Args:
pkg: package being detected
initial_guess: initial list of paths from caller
"""
result = initial_guess or []
result.extend(compute_windows_user_path_for_package(pkg))
result.extend(compute_windows_program_path_for_package(pkg))
return result
lib_pattern_to_pkgs = collections.defaultdict(list)
for pkg in packages_to_check:
if hasattr(pkg, "libraries"):
for lib in pkg.libraries:
lib_pattern_to_pkgs[lib].append(pkg)
path_hints.extend(compute_windows_user_path_for_package(pkg))
path_hints.extend(compute_windows_program_path_for_package(pkg))
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
"""Returns the list of patterns used to match candidate files.
path_to_lib_name = (
libraries_in_ld_and_system_library_path(path_hints=path_hints)
if sys.platform != "win32"
else libraries_in_windows_paths(path_hints)
)
Args:
pkg: package being detected
"""
raise NotImplementedError("must be implemented by derived classes")
pkg_to_found_libs = collections.defaultdict(set)
for lib_pattern, pkgs in lib_pattern_to_pkgs.items():
compiled_re = re.compile(lib_pattern)
for path, lib in path_to_lib_name.items():
if compiled_re.search(lib):
for pkg in pkgs:
pkg_to_found_libs[pkg].add(path)
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
"""Returns a list of candidate files found on the system.
pkg_to_entries = collections.defaultdict(list)
resolved_specs = {} # spec -> lib found for the spec
Args:
patterns: search patterns to be used for matching files
paths: paths where to search for files
"""
raise NotImplementedError("must be implemented by derived classes")
def prefix_from_path(self, *, path: str) -> str:
"""Given a path where a file was found, returns the corresponding prefix.
Args:
path: path of a detected file
"""
raise NotImplementedError("must be implemented by derived classes")
def detect_specs(
self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
) -> List[DetectedPackage]:
"""Given a list of files matching the search patterns, returns a list of detected specs.
Args:
pkg: package being detected
paths: files matching the package search patterns
"""
for pkg, libs in pkg_to_found_libs.items():
if not hasattr(pkg, "determine_spec_details"):
warnings.warn(
f"{pkg.name} must define 'determine_spec_details' in order"
f" for Spack to detect externally-provided instances"
f" of the package."
llnl.util.tty.warn(
"{0} must define 'determine_spec_details' in order"
" for Spack to detect externally-provided instances"
" of the package.".format(pkg.name)
)
return []
continue
result = []
for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
for prefix, libs_in_prefix in sorted(_group_by_prefix(libs)):
try:
specs = _convert_to_iterable(pkg.determine_spec_details(prefix, libs_in_prefix))
except Exception as e:
specs = []
msg = 'error detecting "{0}" from prefix {1} [{2}]'
warnings.warn(msg.format(pkg.name, prefix, str(e)))
if not specs:
llnl.util.tty.debug(
"The following libraries in {0} were decidedly not "
"part of the package {1}: {2}".format(
prefix, pkg.name, ", ".join(_convert_to_iterable(libs_in_prefix))
)
)
for spec in specs:
pkg_prefix = library_prefix(prefix)
if not pkg_prefix:
msg = "no lib/ or lib64/ dir found in {0}. Cannot "
"add it as a Spack package"
llnl.util.tty.debug(msg.format(prefix))
continue
if spec in resolved_specs:
prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
llnl.util.tty.debug(
"Libraries in {0} and {1} are both associated"
" with the same spec {2}".format(prefix, prior_prefix, str(spec))
)
continue
else:
resolved_specs[spec] = prefix
try:
spec.validate_detection()
except Exception as e:
msg = (
'"{0}" has been detected on the system but will '
"not be added to packages.yaml [reason={1}]"
)
llnl.util.tty.warn(msg.format(spec, str(e)))
continue
if spec.external_path:
pkg_prefix = spec.external_path
pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
return pkg_to_entries
def by_executable(packages_to_check, path_hints=None):
"""Return the list of packages that have been detected on the system,
searching by path.
Args:
packages_to_check (list): list of package classes to be detected
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
path_hints = spack.util.environment.get_path("PATH") if path_hints is None else path_hints
exe_pattern_to_pkgs = collections.defaultdict(list)
for pkg in packages_to_check:
if hasattr(pkg, "executables"):
for exe in pkg.platform_executables():
exe_pattern_to_pkgs[exe].append(pkg)
# Add Windows specific, package related paths to the search paths
path_hints.extend(compute_windows_user_path_for_package(pkg))
path_hints.extend(compute_windows_program_path_for_package(pkg))
path_to_exe_name = executables_in_path(path_hints=path_hints)
pkg_to_found_exes = collections.defaultdict(set)
for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
compiled_re = re.compile(exe_pattern)
for path, exe in path_to_exe_name.items():
if compiled_re.search(exe):
for pkg in pkgs:
pkg_to_found_exes[pkg].add(path)
pkg_to_entries = collections.defaultdict(list)
resolved_specs = {} # spec -> exe found for the spec
for pkg, exes in pkg_to_found_exes.items():
if not hasattr(pkg, "determine_spec_details"):
llnl.util.tty.warn(
"{0} must define 'determine_spec_details' in order"
" for Spack to detect externally-provided instances"
" of the package.".format(pkg.name)
)
continue
for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
# TODO: multiple instances of a package can live in the same
# prefix, and a package implementation can return multiple specs
# for one prefix, but without additional details (e.g. about the
# naming scheme which differentiates them), the spec won't be
# usable.
try:
specs = _convert_to_iterable(
pkg.determine_spec_details(candidate_path, items_in_prefix)
)
specs = _convert_to_iterable(pkg.determine_spec_details(prefix, exes_in_prefix))
except Exception as e:
specs = []
warnings.warn(
f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
)
msg = 'error detecting "{0}" from prefix {1} [{2}]'
warnings.warn(msg.format(pkg.name, prefix, str(e)))
if not specs:
files = ", ".join(_convert_to_iterable(items_in_prefix))
llnl.util.tty.debug(
f"The following files in {candidate_path} were decidedly not "
f"part of the package {pkg.name}: {files}"
"The following executables in {0} were decidedly not "
"part of the package {1}: {2}".format(
prefix, pkg.name, ", ".join(_convert_to_iterable(exes_in_prefix))
)
)
resolved_specs: Dict[spack.spec.Spec, str] = {} # spec -> exe found for the spec
for spec in specs:
prefix = self.prefix_from_path(path=candidate_path)
if not prefix:
pkg_prefix = executable_prefix(prefix)
if not pkg_prefix:
msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
llnl.util.tty.debug(msg.format(prefix))
continue
if spec in resolved_specs:
prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
llnl.util.tty.debug(
f"Files in {candidate_path} and {prior_prefix} are both associated"
f" with the same spec {str(spec)}"
"Executables in {0} and {1} are both associated"
" with the same spec {2}".format(prefix, prior_prefix, str(spec))
)
continue
else:
resolved_specs[spec] = prefix
resolved_specs[spec] = candidate_path
try:
spec.validate_detection()
except Exception as e:
msg = (
f'"{spec}" has been detected on the system but will '
f"not be added to packages.yaml [reason={str(e)}]"
'"{0}" has been detected on the system but will '
"not be added to packages.yaml [reason={1}]"
)
warnings.warn(msg)
llnl.util.tty.warn(msg.format(spec, str(e)))
continue
if spec.external_path:
prefix = spec.external_path
pkg_prefix = spec.external_path
result.append(DetectedPackage(spec=spec, prefix=prefix))
pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
return result
def find(
self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
) -> List[DetectedPackage]:
"""For a given package, returns a list of detected specs.
Args:
pkg_name: package being detected
initial_guess: initial list of paths to search from the caller
"""
import spack.repo
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
patterns = self.search_patterns(pkg=pkg_cls)
if not patterns:
return []
path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
candidates = self.candidate_files(patterns=patterns, paths=path_hints)
result = self.detect_specs(pkg=pkg_cls, paths=candidates)
return result
class ExecutablesFinder(Finder):
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
result = []
if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
result = pkg.platform_executables()
return result
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
executables_by_path = executables_in_path(path_hints=paths)
patterns = [re.compile(x) for x in patterns]
result = []
for compiled_re in patterns:
for path, exe in executables_by_path.items():
if compiled_re.search(exe):
result.append(path)
return list(sorted(set(result)))
def prefix_from_path(self, *, path: str) -> str:
result = executable_prefix(path)
if not result:
msg = f"no bin/ dir found in {path}. Cannot add it as a Spack package"
llnl.util.tty.debug(msg)
return result
class LibrariesFinder(Finder):
"""Finds libraries on the system, searching by LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
"""
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
result = []
if hasattr(pkg, "libraries"):
result = pkg.libraries
return result
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
libraries_by_path = (
libraries_in_ld_and_system_library_path(path_hints=paths)
if sys.platform != "win32"
else libraries_in_windows_paths(paths)
)
patterns = [re.compile(x) for x in patterns]
result = []
for compiled_re in patterns:
for path, exe in libraries_by_path.items():
if compiled_re.search(exe):
result.append(path)
return result
def prefix_from_path(self, *, path: str) -> str:
result = library_prefix(path)
if not result:
msg = f"no lib/ or lib64/ dir found in {path}. Cannot add it as a Spack package"
llnl.util.tty.debug(msg)
return result
def by_path(
packages_to_search: List[str],
*,
path_hints: Optional[List[str]] = None,
max_workers: Optional[int] = None,
) -> Dict[str, List[DetectedPackage]]:
"""Return the list of packages that have been detected on the system,
searching by path.
Args:
packages_to_search: list of package classes to be detected
path_hints: initial list of paths to be searched
"""
# TODO: Packages should be able to define both .libraries and .executables in the future
# TODO: determine_spec_details should get all relevant libraries and executables in one call
executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
executables_path_guess = (
spack.util.environment.get_path("PATH") if path_hints is None else path_hints
)
libraries_path_guess = [] if path_hints is None else path_hints
detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
result = collections.defaultdict(list)
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
for pkg in packages_to_search:
executable_future = executor.submit(
executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
)
library_future = executor.submit(
libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
)
detected_specs_by_package[pkg] = executable_future, library_future
for pkg_name, futures in detected_specs_by_package.items():
for future in futures:
try:
detected = future.result(timeout=DETECTION_TIMEOUT)
if detected:
result[pkg_name].extend(detected)
except Exception:
llnl.util.tty.debug(
f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
)
return result
return pkg_to_entries
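On the refactored side, by_path fans each package's executable and library searches out to a process pool and collects the futures with a timeout. A minimal sketch of that submit-then-collect pattern, with a placeholder detect() standing in for Finder.find:

import concurrent.futures

def detect(pkg_name):
    return []  # placeholder for Finder.find(pkg_name=...)

if __name__ == "__main__":  # guard needed for process pools on some platforms
    results = {}
    with concurrent.futures.ProcessPoolExecutor(max_workers=2) as executor:
        futures = {p: executor.submit(detect, p) for p in ["cmake", "ninja"]}
        for pkg_name, future in futures.items():
            try:
                detected = future.result(timeout=60)
                if detected:
                    results[pkg_name] = detected
            except Exception:
                pass  # a timeout or detection error just skips the package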

View File

@@ -38,14 +38,12 @@ class OpenMpi(Package):
import llnl.util.lang
import llnl.util.tty.color
import spack.deptypes as dt
import spack.error
import spack.patch
import spack.spec
import spack.url
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency
from spack.dependency import Dependency, canonical_deptype, default_deptype
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -409,7 +407,10 @@ def version(
def _execute_version(pkg, ver, **kwargs):
if (
(any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
any(
s in kwargs
for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
)
and hasattr(pkg, "has_code")
and not pkg.has_code
):
@@ -437,7 +438,7 @@ def _execute_version(pkg, ver, **kwargs):
pkg.versions[version] = kwargs
def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
when_spec = make_when_spec(when)
if not when_spec:
return
@@ -448,7 +449,7 @@ def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
if pkg.name == dep_spec.name:
raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)
depflag = dt.canonicalize(type)
type = canonical_deptype(type)
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
# call this patches here for clarity -- we want patch to be a list,
@@ -478,12 +479,12 @@ def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
# this is where we actually add the dependency to this package
if when_spec not in conditions:
dependency = Dependency(pkg, dep_spec, depflag=depflag)
dependency = Dependency(pkg, dep_spec, type=type)
conditions[when_spec] = dependency
else:
dependency = conditions[when_spec]
dependency.spec.constrain(dep_spec, deps=False)
dependency.depflag |= depflag
dependency.type |= set(type)
# apply patches to the dependency
for execute_patch in patches:
@@ -526,7 +527,7 @@ def _execute_conflicts(pkg):
@directive(("dependencies"))
def depends_on(spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
def depends_on(spec, when=None, type=default_deptype, patches=None):
"""Creates a dict of deps with specs defining when they apply.
Args:
@@ -759,7 +760,7 @@ def _execute_variant(pkg):
when_spec = make_when_spec(when)
when_specs = [when_spec]
if not re.match(spack.spec.IDENTIFIER_RE, name):
if not re.match(spack.spec.identifier_re, name):
directive = "variant"
msg = "Invalid variant name in {0}: '{1}'"
raise DirectiveError(directive, msg.format(pkg.name, name))
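For context, depends_on is invoked from package recipes; a hypothetical recipe exercising the default type and an explicit one:

from spack.package import *  # recipe-style import; Example is hypothetical

class Example(Package):
    depends_on("cmake", type="build")  # build-time only
    depends_on("zlib")                 # defaults to type=("build", "link")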

View File

@@ -12,7 +12,6 @@
from enum import Enum
from typing import List, Optional
import spack.deptypes as dt
import spack.environment.environment as ev
import spack.spec
import spack.traverse as traverse
@@ -37,9 +36,7 @@ def from_string(s: str) -> "UseBuildCache":
def _deptypes(use_buildcache: UseBuildCache):
"""What edges should we follow for a given node? If it's a cache-only
node, then we can drop build type deps."""
return (
dt.LINK | dt.RUN if use_buildcache == UseBuildCache.ONLY else dt.BUILD | dt.LINK | dt.RUN
)
return ("link", "run") if use_buildcache == UseBuildCache.ONLY else ("build", "link", "run")
class DepfileNode:
@@ -72,13 +69,13 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
self.adjacency_list: List[DepfileNode] = []
self.pkg_buildcache = pkg_buildcache
self.deps_buildcache = deps_buildcache
self.depflag_root = _deptypes(pkg_buildcache)
self.depflag_deps = _deptypes(deps_buildcache)
self.deptypes_root = _deptypes(pkg_buildcache)
self.deptypes_deps = _deptypes(deps_buildcache)
def neighbors(self, node):
"""Produce a list of spec to follow from node"""
depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))
deptypes = self.deptypes_root if node.depth == 0 else self.deptypes_deps
return traverse.sort_edges(node.edge.spec.edges_to_dependencies(deptype=deptypes))
def accept(self, node):
self.adjacency_list.append(

View File

@@ -28,7 +28,6 @@
import spack.compilers
import spack.concretize
import spack.config
import spack.deptypes as dt
import spack.error
import spack.fetch_strategy
import spack.hash_types as ht
@@ -1505,7 +1504,7 @@ def _concretize_separately(self, tests=False):
start = time.time()
max_processes = min(
len(arguments), # Number of specs
spack.util.cpus.determine_number_of_jobs(parallel=True),
spack.config.get("config:build_jobs"), # Cap on build jobs
)
# TODO: revisit this print as soon as darwin is parallel too
@@ -1537,13 +1536,13 @@ def _concretize_separately(self, tests=False):
for h in self.specs_by_hash:
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
for node in computed_spec.traverse():
test_edges = node.edges_to_dependencies(depflag=dt.TEST)
test_edges = node.edges_to_dependencies(deptype="test")
for current_edge in test_edges:
test_dependency = current_edge.spec
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
)
results = [
@@ -2059,7 +2058,7 @@ def matching_spec(self, spec):
# If multiple root specs match, it is assumed that the abstract
# spec will most-succinctly summarize the difference between them
# (and the user can enter one of these to disambiguate)
fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
fmt_str = "{hash:7} " + spack.spec.default_format
color = clr.get_color_when()
match_strings = [
f"Root spec {abstract.format(color=color)}\n {concrete.format(fmt_str, color=color)}"
@@ -2191,7 +2190,7 @@ def _read_lockfile_dict(self, d):
name, data = reader.name_and_data(node_dict)
for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
specs_by_hash[lockfile_key]._add_dependency(
specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
)
# Traverse the root specs one at a time in the order they appear.
@@ -2367,7 +2366,7 @@ def display_specs(concretized_specs):
def _tree_to_display(spec):
return spec.tree(
recurse_dependencies=True,
format=spack.spec.DISPLAY_FORMAT,
format=spack.spec.display_format,
status_fn=spack.spec.Spec.install_status,
hashlen=7,
hashes=True,
@@ -2665,26 +2664,6 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
self.yaml_content = with_defaults_added
self.changed = False
def _all_matches(self, user_spec: str) -> List[str]:
"""Maps the input string to the first equivalent user spec in the manifest,
and returns it.
Args:
user_spec: user spec to be found
Raises:
ValueError: if no equivalent match is found
"""
result = []
for yaml_spec_str in self.pristine_configuration["specs"]:
if Spec(yaml_spec_str) == Spec(user_spec):
result.append(yaml_spec_str)
if not result:
raise ValueError(f"cannot find a spec equivalent to {user_spec}")
return result
def add_user_spec(self, user_spec: str) -> None:
"""Appends the user spec passed as input to the list of root specs.
@@ -2705,9 +2684,8 @@ def remove_user_spec(self, user_spec: str) -> None:
SpackEnvironmentError: when the user spec is not in the list
"""
try:
for key in self._all_matches(user_spec):
self.pristine_configuration["specs"].remove(key)
self.configuration["specs"].remove(key)
self.pristine_configuration["specs"].remove(user_spec)
self.configuration["specs"].remove(user_spec)
except ValueError as e:
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
raise SpackEnvironmentError(msg) from e
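The removal path above matches by Spec equality rather than by raw string, so any equivalent spelling in the manifest is removed. A hedged sketch, assuming Spec parsing normalizes whitespace:

from spack.spec import Spec

manifest_specs = ["zlib @1.2.13", "hdf5 +mpi"]
user_spec = "zlib@1.2.13"
matches = [s for s in manifest_specs if Spec(s) == Spec(user_spec)]
# expected: ["zlib @1.2.13"], despite the spacing difference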

View File

@@ -43,7 +43,7 @@ def activate_header(env, shell, prompt=None):
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "$Env:SPACK_ENV='%s'\n" % env.path
cmds += "$Env:SPACK_ENV=%s\n" % env.path
else:
if "color" in os.getenv("TERM", "") and prompt:
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
@@ -82,7 +82,7 @@ def deactivate_header(shell):
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "Set-Item -Path Env:SPACK_ENV\n"
cmds += "Remove-Item Env:SPACK_ENV"
else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"

View File

@@ -31,7 +31,6 @@
import urllib.parse
from typing import List, Optional
import llnl.url
import llnl.util
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -47,7 +46,7 @@
import spack.util.web as web_util
import spack.version
import spack.version.git_ref_lookup
from spack.util.compression import decompressor_for
from spack.util.compression import decompressor_for, extension_from_path
from spack.util.executable import CommandNotFoundError, which
from spack.util.string import comma_and, quote
@@ -442,7 +441,7 @@ def expand(self):
# TODO: replace this by mime check.
if not self.extension:
self.extension = llnl.url.determine_url_file_extension(self.url)
self.extension = spack.url.determine_url_file_extension(self.url)
if self.stage.expanded:
tty.debug("Source already staged to %s" % self.stage.source_path)
@@ -571,7 +570,7 @@ def expand(self):
@_needs_stage
def archive(self, destination, **kwargs):
assert llnl.url.extension_from_path(destination) == "tar.gz"
assert extension_from_path(destination) == "tar.gz"
assert self.stage.source_path.startswith(self.stage.path)
tar = which("tar", required=True)

View File

@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
print()
header = "%s{%s} / %s{%s}" % (
spack.spec.ARCHITECTURE_COLOR,
spack.spec.architecture_color,
architecture,
spack.spec.COMPILER_COLOR,
spack.spec.compiler_color,
compiler,
)
tty.hline(colorize(header), char="-")

View File

@@ -0,0 +1,28 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import urllib.parse
import urllib.response
from urllib.error import URLError
from urllib.request import BaseHandler
def gcs_open(req, *args, **kwargs):
"""Open a reader stream to a blob object on GCS"""
import spack.util.gcs as gcs_util
url = urllib.parse.urlparse(req.get_full_url())
gcsblob = gcs_util.GCSBlob(url)
if not gcsblob.exists():
raise URLError("GCS blob {0} does not exist".format(gcsblob.blob_path))
stream = gcsblob.get_blob_byte_stream()
headers = gcsblob.get_blob_headers()
return urllib.response.addinfourl(stream, headers, url)
class GCSHandler(BaseHandler):
def gs_open(self, req):
return gcs_open(req)
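urllib dispatches URLs whose scheme is gs to this handler's gs_open method once the handler is installed in an opener chain; a short usage sketch with a hypothetical bucket:

import urllib.request

opener = urllib.request.build_opener(GCSHandler())
# response = opener.open("gs://my-bucket/some/blob")  # hypothetical bucket
# data = response.read()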

View File

@@ -38,12 +38,11 @@
"""
import enum
import sys
from typing import List, Optional, Set, TextIO, Tuple
from typing import List, Optional, Set, TextIO, Tuple, Union
import llnl.util.tty.color
import spack.deptypes as dt
import spack.repo
import spack.dependency
import spack.spec
import spack.tengine
@@ -79,7 +78,7 @@ def __init__(self):
self.node_character = "o"
self.debug = False
self.indent = 0
self.depflag = dt.ALL
self.deptype = spack.dependency.all_deptypes
# These are colors in the order they'll be used for edges.
# See llnl.util.tty.color for details on color characters.
@@ -327,7 +326,7 @@ def write(self, spec, color=None, out=None):
nodes_in_topological_order = [
edge.spec
for edge in spack.traverse.traverse_edges_topo(
[spec], direction="children", deptype=self.depflag
[spec], direction="children", deptype=self.deptype
)
]
nodes_in_topological_order.reverse()
@@ -425,7 +424,7 @@ def write(self, spec, color=None, out=None):
# Replace node with its dependencies
self._frontier.pop(i)
edges = sorted(node.edges_to_dependencies(depflag=self.depflag), reverse=True)
edges = sorted(node.edges_to_dependencies(deptype=self.deptype), reverse=True)
if edges:
deps = [e.spec.dag_hash() for e in edges]
self._connect_deps(i, deps, "new-deps") # anywhere.
@@ -434,14 +433,13 @@ def write(self, spec, color=None, out=None):
self._collapse_line(i)
def graph_ascii(
spec, node="o", out=None, debug=False, indent=0, color=None, depflag: dt.DepFlag = dt.ALL
):
def graph_ascii(spec, node="o", out=None, debug=False, indent=0, color=None, deptype="all"):
graph = AsciiGraph()
graph.debug = debug
graph.indent = indent
graph.node_character = node
graph.depflag = depflag
if deptype:
graph.deptype = spack.dependency.canonical_deptype(deptype)
graph.write(spec, color=color, out=out)
@@ -515,7 +513,7 @@ def __init__(self):
def visit(self, edge):
if edge.parent is None:
for node in spack.traverse.traverse_nodes([edge.spec], deptype=dt.LINK | dt.RUN):
for node in spack.traverse.traverse_nodes([edge.spec], deptype=("link", "run")):
self.main_unified_space.add(node.dag_hash())
super().visit(edge)
@@ -535,34 +533,36 @@ def edge_entry(self, edge):
)
def _static_edges(specs, depflag):
def _static_edges(specs, deptype):
for spec in specs:
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)
for parent_name, dependencies in possible.items():
for dependency_name in dependencies:
yield spack.spec.DependencySpec(
spack.spec.Spec(parent_name),
spack.spec.Spec(dependency_name),
depflag=depflag,
deptypes=deptype,
virtuals=(),
)
def static_graph_dot(
specs: List[spack.spec.Spec], depflag: dt.DepFlag = dt.ALL, out: Optional[TextIO] = None
specs: List[spack.spec.Spec],
deptype: Optional[Union[str, Tuple[str, ...]]] = "all",
out: Optional[TextIO] = None,
):
"""Static DOT graph with edges to all possible dependencies.
Args:
specs: abstract specs to be represented
depflag: dependency types to consider
deptype: dependency types to consider
out: optional output stream. If None sys.stdout is used
"""
out = out or sys.stdout
builder = StaticDag()
for edge in _static_edges(specs, depflag):
for edge in _static_edges(specs, deptype):
builder.visit(edge)
out.write(builder.render())
@@ -570,7 +570,7 @@ def static_graph_dot(
def graph_dot(
specs: List[spack.spec.Spec],
builder: Optional[DotGraphBuilder] = None,
depflag: dt.DepFlag = dt.ALL,
deptype: spack.dependency.DependencyArgument = "all",
out: Optional[TextIO] = None,
):
"""DOT graph of the concrete specs passed as input.
@@ -578,7 +578,7 @@ def graph_dot(
Args:
specs: specs to be represented
builder: builder to use to render the graph
depflag: dependency types to consider
deptype: dependency types to consider
out: optional output stream. If None sys.stdout is used
"""
if not specs:
@@ -587,9 +587,10 @@ def graph_dot(
if out is None:
out = sys.stdout
deptype = spack.dependency.canonical_deptype(deptype)
builder = builder or SimpleDAG()
for edge in spack.traverse.traverse_edges(
specs, cover="edges", order="breadth", deptype=depflag
specs, cover="edges", order="breadth", deptype=deptype
):
builder.visit(edge)
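A hedged usage sketch of the two entry points above, assuming a Spack session where a spec can be concretized:

import sys
import spack.deptypes as dt
import spack.spec
from spack.graph import graph_ascii, graph_dot

spec = spack.spec.Spec("zlib").concretized()
graph_ascii(spec, depflag=dt.LINK | dt.RUN)        # ASCII tree on stdout
graph_dot([spec], depflag=dt.ALL, out=sys.stdout)  # DOT rendering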

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Definitions that control how Spack creates Spec hashes."""
import spack.deptypes as dt
import spack.dependency as dp
import spack.repo
hashes = []
@@ -20,8 +20,8 @@ class SpecHashDescriptor:
We currently use different hashes for different use cases."""
def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
self.depflag = depflag
def __init__(self, deptype, package_hash, name, override=None):
self.deptype = dp.canonical_deptype(deptype)
self.package_hash = package_hash
self.name = name
hashes.append(self)
@@ -39,12 +39,12 @@ def __call__(self, spec):
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")
dag_hash = SpecHashDescriptor(deptype=("build", "link", "run"), package_hash=True, name="hash")
#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
deptype=("build", "link", "run", "test"), package_hash=True, name="process_hash"
)
@@ -56,7 +56,7 @@ def _content_hash_override(spec):
#: Package hash used as part of dag hash
package_hash = SpecHashDescriptor(
depflag=0, package_hash=True, name="package_hash", override=_content_hash_override
deptype=(), package_hash=True, name="package_hash", override=_content_hash_override
)
@@ -64,10 +64,10 @@ def _content_hash_override(spec):
# spec formats
full_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="full_hash"
deptype=("build", "link", "run"), package_hash=True, name="full_hash"
)
build_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=False, name="build_hash"
deptype=("build", "link", "run"), package_hash=False, name="build_hash"
)

View File

@@ -50,7 +50,6 @@
import spack.compilers
import spack.config
import spack.database
import spack.deptypes as dt
import spack.error
import spack.hooks
import spack.mirror
@@ -91,16 +90,6 @@
STATUS_REMOVED = "removed"
def _write_timer_json(pkg, timer, cache):
extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
try:
with open(pkg.times_log_path, "w") as timelog:
timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:
tty.debug(str(e))
return
class InstallAction:
#: Don't perform an install
NONE = 0
@@ -314,7 +303,7 @@ def _packages_needed_to_bootstrap_compiler(
# mark compiler as depended-on by the packages that use it
for pkg in pkgs:
dep._dependents.add(
spack.spec.DependencySpec(pkg.spec, dep, depflag=dt.BUILD, virtuals=())
spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
)
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
@@ -410,8 +399,6 @@ def _install_from_cache(
return False
t.stop()
tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
_write_timer_json(pkg, t, True)
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
_print_installed_pkg(pkg.spec.prefix)
spack.hooks.post_install(pkg.spec, explicit)
@@ -494,7 +481,7 @@ def _process_binary_cache_tarball(
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(
pkg.spec, download_result, unsigned=unsigned, force=False, timer=timer
pkg.spec, download_result, unsigned=unsigned, force=False
)
pkg.installed_from_binary_cache = True
@@ -605,9 +592,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
if node is spec:
spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
elif source_pkg_dir:
fs.install_tree(
source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
)
fs.install_tree(source_pkg_dir, dest_pkg_dir)
def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
@@ -789,9 +774,10 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# Save off dependency package ids for quick checks since traversals
# are not able to return full dependents for all packages across
# environment specs.
deptypes = self.get_deptypes(self.pkg)
self.dependencies = set(
package_id(d.package)
for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
for d in self.pkg.spec.dependencies(deptype=deptypes)
if package_id(d.package) != self.pkg_id
)
@@ -830,7 +816,7 @@ def _add_default_args(self) -> None:
]:
_ = self.install_args.setdefault(arg, default)
def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]:
"""Determine the required dependency types for the associated package.
Args:
@@ -839,7 +825,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
Returns:
tuple: required dependency type(s) for the package
"""
depflag = dt.LINK | dt.RUN
deptypes = ["link", "run"]
include_build_deps = self.install_args.get("include_build_deps")
if self.pkg_id == package_id(pkg):
@@ -851,10 +837,10 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
# is False, or if build dependencies are explicitly called for
# by include_build_deps.
if include_build_deps or not (cache_only or pkg.spec.installed):
depflag |= dt.BUILD
deptypes.append("build")
if self.run_tests(pkg):
depflag |= dt.TEST
return depflag
deptypes.append("test")
return tuple(sorted(deptypes))
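The deptype computation above is a small decision table. A simplified sketch with the relevant conditions passed in as plain booleans (omitting the package-identity check from the real method):

    def required_deptypes(include_build_deps, cache_only, installed, run_tests):
        # Link and run dependencies are always required.
        deptypes = ["link", "run"]
        # Build deps are needed unless installing from cache or already installed.
        if include_build_deps or not (cache_only or installed):
            deptypes.append("build")
        if run_tests:
            deptypes.append("test")
        return tuple(sorted(deptypes))

    assert required_deptypes(False, True, False, False) == ("link", "run")
    assert required_deptypes(True, True, False, True) == ("build", "link", "run", "test")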
def has_dependency(self, dep_id) -> bool:
"""Returns ``True`` if the package id represents a known dependency
@@ -887,8 +873,9 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec
spec = self.spec
if visited is None:
visited = set()
deptype = self.get_deptypes(spec.package)
for dep in spec.dependencies(deptype=self.get_depflags(spec.package)):
for dep in spec.dependencies(deptype=deptype):
hash = dep.dag_hash()
if hash in visited:
continue
@@ -972,9 +959,10 @@ def __init__(
# Be consistent w.r.t. the use of dependents and dependencies. That is,
# if we use traverse for transitive dependencies, then we must remove
# transitive dependents on failure.
deptypes = self.request.get_deptypes(self.pkg)
self.dependencies = set(
package_id(d.package)
for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
for d in self.pkg.spec.dependencies(deptype=deptypes)
if package_id(d.package) != self.pkg_id
)
@@ -1328,6 +1316,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
"""
Check the database and leftover installation directories/files and
prepare for a new install attempt for an uninstalled package.
Preparation includes cleaning up installation and stage directories
and ensuring the database is up-to-date.
@@ -2103,6 +2092,7 @@ def install(self) -> None:
# another process has a write lock so must be (un)installing
# the spec (or that process is hung).
ltype, lock = self._ensure_locked("read", pkg)
# Requeue the spec if we cannot get at least a read lock so we
# can check the status presumably established by another process
# -- failed, installed, or uninstalled -- on the next pass.
@@ -2382,7 +2372,8 @@ def run(self) -> bool:
# Stop the timer and save results
self.timer.stop()
_write_timer_json(self.pkg, self.timer, False)
with open(self.pkg.times_log_path, "w") as timelog:
self.timer.write_json(timelog)
print_install_test_log(self.pkg)
_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
@@ -2403,9 +2394,7 @@ def _install_source(self) -> None:
src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
tty.debug("{0} Copying source to {1}".format(self.pre, src_target))
fs.install_tree(
pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
)
fs.install_tree(pkg.stage.source_path, src_target)
def _real_install(self) -> None:
import spack.builder

View File

@@ -30,6 +30,7 @@
import llnl.util.tty.color as color
from llnl.util.tty.log import log_output
import spack
import spack.cmd
import spack.config
import spack.environment as ev
@@ -50,7 +51,7 @@
stat_names = pstats.Stats.sort_arg_dict_default
#: top-level aliases for Spack commands
aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}
aliases = {"rm": "remove"}
#: help levels in order of detail (i.e., number of commands shown)
levels = ["short", "long"]
@@ -715,7 +716,7 @@ def __call__(self, *argv, **kwargs):
out = io.StringIO()
try:
with log_output(out, echo=True):
with log_output(out):
self.returncode = _invoke_command(self.command, self.parser, args, unknown)
except SystemExit as e:

View File

@@ -20,7 +20,6 @@
import urllib.parse
from typing import Optional, Union
import llnl.url
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -30,6 +29,7 @@
import spack.fetch_strategy as fs
import spack.mirror
import spack.spec
import spack.url as url
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
@@ -375,7 +375,7 @@ def _determine_extension(fetcher):
if isinstance(fetcher, fs.URLFetchStrategy):
if fetcher.expand_archive:
# If we fetch with a URLFetchStrategy, use URL's archive type
ext = llnl.url.determine_url_file_extension(fetcher.url)
ext = url.determine_url_file_extension(fetcher.url)
if ext:
# Remove any leading dots

View File

@@ -178,7 +178,7 @@ def merge_config_rules(configuration, spec):
if spec.satisfies(constraint):
if hasattr(constraint, "override") and constraint.override:
spec_configuration = {}
update_dictionary_extending_lists(spec_configuration, copy.deepcopy(action))
update_dictionary_extending_lists(spec_configuration, action)
# Transform keywords for dependencies or prerequisites into a list of spec

View File

@@ -67,7 +67,7 @@
from spack.build_systems.waf import WafPackage
from spack.build_systems.xorg import XorgPackage
from spack.builder import run_after, run_before
from spack.deptypes import ALL_TYPES as all_deptypes
from spack.dependency import all_deptypes
from spack.directives import *
from spack.install_test import (
SkipTest,
@@ -96,7 +96,6 @@
on_package_attributes,
)
from spack.spec import InvalidSpecDetected, Spec
from spack.util.cpus import determine_number_of_jobs
from spack.util.executable import *
from spack.variant import (
any_combination_of,

View File

@@ -34,7 +34,7 @@
import spack.compilers
import spack.config
import spack.deptypes as dt
import spack.dependency
import spack.directives
import spack.directory_layout
import spack.environment
@@ -175,13 +175,11 @@ def windows_establish_runtime_linkage(self):
detectable_packages = collections.defaultdict(list)
class DetectablePackageMeta(type):
class DetectablePackageMeta:
"""Check if a package is detectable and add default implementations
for the detection function.
"""
TAG = "detectable"
def __init__(cls, name, bases, attr_dict):
if hasattr(cls, "executables") and hasattr(cls, "libraries"):
msg = "a package can have either an 'executables' or 'libraries' attribute"
@@ -197,11 +195,6 @@ def __init__(cls, name, bases, attr_dict):
# If a package has the executables or libraries attribute then it's
# assumed to be detectable
if hasattr(cls, "executables") or hasattr(cls, "libraries"):
# Append a tag to each detectable package, so that finding them is faster
if hasattr(cls, "tags"):
getattr(cls, "tags").append(DetectablePackageMeta.TAG)
else:
setattr(cls, "tags", [DetectablePackageMeta.TAG])
@classmethod
def platform_executables(cls):
@@ -525,9 +518,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
# This allows analysis tools to correctly interpret the class attributes.
versions: dict
# Same for dependencies
dependencies: dict
#: By default, packages are not virtual
#: Virtual packages override this attribute
virtual = False
@@ -685,7 +675,7 @@ def possible_dependencies(
cls,
transitive=True,
expand_virtuals=True,
depflag: dt.DepFlag = dt.ALL,
deptype="all",
visited=None,
missing=None,
virtuals=None,
@@ -697,7 +687,7 @@ def possible_dependencies(
True, only direct dependencies if False (default True).
expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True)
depflag: dependency types to consider
deptype (str or tuple or None): dependency types to consider
visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict or None): dict to populate with packages and their
@@ -723,6 +713,8 @@ def possible_dependencies(
Note: the returned dict *includes* the package itself.
"""
deptype = spack.dependency.canonical_deptype(deptype)
visited = {} if visited is None else visited
missing = {} if missing is None else missing
@@ -730,10 +722,9 @@ def possible_dependencies(
for name, conditions in cls.dependencies.items():
# check whether this dependency could be of the type asked for
depflag_union = 0
for dep in conditions.values():
depflag_union |= dep.depflag
if not (depflag & depflag_union):
deptypes = [dep.type for cond, dep in conditions.items()]
deptypes = set.union(*deptypes)
if not any(d in deptypes for d in deptype):
continue
# expand virtuals if enabled, otherwise just stop at virtuals
@@ -772,7 +763,7 @@ def possible_dependencies(
continue
dep_cls.possible_dependencies(
transitive, expand_virtuals, depflag, visited, missing, virtuals
transitive, expand_virtuals, deptype, visited, missing, virtuals
)
return visited
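The filter above unions the declared types across every condition for a dependency, then skips the dependency when that union shares nothing with the requested types. A standalone sketch with a hypothetical conditions dict:

    conditions = {"cond-a": {"build"}, "cond-b": {"build", "link"}}
    requested = ("link", "run")

    declared = set.union(*conditions.values())
    skip = not any(d in declared for d in requested)
    assert declared == {"build", "link"} and not skip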
@@ -1205,7 +1196,7 @@ def fetcher(self, f):
self._fetcher.set_package(self)
@classmethod
def dependencies_of_type(cls, deptypes: dt.DepFlag):
def dependencies_of_type(cls, *deptypes):
"""Get dependencies that can possibly have these deptypes.
This analyzes the package and determines which dependencies *can*
@@ -1217,7 +1208,7 @@ def dependencies_of_type(cls, deptypes: dt.DepFlag):
return dict(
(name, conds)
for name, conds in cls.dependencies.items()
if any(deptypes & cls.dependencies[name][cond].depflag for cond in conds)
if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
)
# TODO: allow more than one active extendee.
@@ -2377,7 +2368,7 @@ def fetch_remote_versions(self, concurrency=128):
return {}
try:
return spack.url.find_versions_of_archive(
return spack.util.web.find_versions_of_archive(
self.all_urls, self.list_url, self.list_depth, concurrency, reference_package=self
)
except spack.util.web.NoNetworkConnectionError as e:

View File

@@ -288,7 +288,10 @@ def next_spec(
)
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
root_spec._add_dependency(dependency, depflag=0, virtuals=())
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
root_spec._add_dependency(dependency, deptypes=(), virtuals=())
else:
break
@@ -303,12 +306,13 @@ def all_specs(self) -> List[spack.spec.Spec]:
class SpecNodeParser:
"""Parse a single spec node from a stream of tokens"""
__slots__ = "ctx", "has_compiler", "has_version"
__slots__ = "ctx", "has_compiler", "has_version", "has_hash"
def __init__(self, ctx):
self.ctx = ctx
self.has_compiler = False
self.has_version = False
self.has_hash = False
def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
"""Parse a single spec node from a stream of tokens
@@ -339,6 +343,7 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
while True:
if self.ctx.accept(TokenType.COMPILER):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
if self.has_compiler:
raise spack.spec.DuplicateCompilerSpecError(
f"{initial_spec} cannot have multiple compilers"
@@ -348,6 +353,7 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
self.has_compiler = True
elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
if self.has_compiler:
raise spack.spec.DuplicateCompilerSpecError(
f"{initial_spec} cannot have multiple compilers"
@@ -361,6 +367,7 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
TokenType.VERSION_HASH_PAIR
):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
if self.has_version:
raise spack.spec.MultipleVersionError(
f"{initial_spec} cannot have multiple versions"
@@ -371,21 +378,25 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
initial_spec.attach_git_version_lookup()
self.has_version = True
elif self.ctx.accept(TokenType.BOOL_VARIANT):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
variant_value = self.ctx.current_token.value[0] == "+"
initial_spec._add_flag(
self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
)
elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
variant_value = self.ctx.current_token.value[0:2] == "++"
initial_spec._add_flag(
self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
)
elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
name, value = self.ctx.current_token.value.split("=", maxsplit=1)
name = name.strip("'\" ")
value = value.strip("'\" ")
initial_spec._add_flag(name, value, propagate=False)
elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
name, value = self.ctx.current_token.value.split("==", maxsplit=1)
name = name.strip("'\" ")
value = value.strip("'\" ")
@@ -400,6 +411,12 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
return initial_spec
def hash_not_parsed_or_raise(self, spec, addition):
if not self.has_hash:
return
raise spack.spec.RedundantSpecError(spec, addition)
class FileParser:
"""Parse a single spec from a JSON or YAML file"""

View File

@@ -11,7 +11,6 @@
import llnl.util.filesystem
import llnl.util.lang
from llnl.url import allowed_archive
import spack
import spack.error
@@ -20,6 +19,7 @@
import spack.repo
import spack.stage
import spack.util.spack_json as sjson
from spack.util.compression import allowed_archive
from spack.util.crypto import Checker, checksum
from spack.util.executable import which, which_string

View File

@@ -139,8 +139,6 @@ def craype_type_and_version(cls):
# If no default version, sort available versions and return latest
versions_available = [spack.version.Version(v) for v in os.listdir(craype_dir)]
versions_available.sort(reverse=True)
if not versions_available:
return (craype_type, None)
return (craype_type, versions_available[0])
@classmethod

View File

@@ -387,7 +387,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
# Warn about invalid names that look like packages.
if not nm.valid_module_name(pkg_name):
if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
if not pkg_name.startswith("."):
tty.warn(
'Skipping package at {0}. "{1}" is not '
"a valid Spack module name.".format(pkg_dir, pkg_name)

View File

@@ -0,0 +1,80 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import urllib.error
import urllib.parse
import urllib.request
import urllib.response
from io import BufferedReader, BytesIO, IOBase
import spack.util.s3 as s3_util
# NOTE(opadron): Workaround issue in boto where its StreamingBody
# implementation is missing several APIs expected from IOBase. These missing
# APIs prevent the streams returned by boto from being passed as-is along to
# urllib.
#
# https://github.com/boto/botocore/issues/879
# https://github.com/python/cpython/pull/3249
class WrapStream(BufferedReader):
def __init__(self, raw):
# In botocore >=1.23.47, StreamingBody inherits from IOBase, so we
# only add missing attributes in older versions.
# https://github.com/boto/botocore/commit/a624815eabac50442ed7404f3c4f2664cd0aa784
if not isinstance(raw, IOBase):
raw.readable = lambda: True
raw.writable = lambda: False
raw.seekable = lambda: False
raw.closed = False
raw.flush = lambda: None
super().__init__(raw)
def detach(self):
self.raw = None
def read(self, *args, **kwargs):
return self.raw.read(*args, **kwargs)
def __getattr__(self, key):
return getattr(self.raw, key)
def _s3_open(url, method="GET"):
parsed = urllib.parse.urlparse(url)
s3 = s3_util.get_s3_session(url, method="fetch")
bucket = parsed.netloc
key = parsed.path
if key.startswith("/"):
key = key[1:]
if method not in ("GET", "HEAD"):
raise urllib.error.URLError(
"Only GET and HEAD verbs are currently supported for the s3:// scheme"
)
try:
if method == "GET":
obj = s3.get_object(Bucket=bucket, Key=key)
# NOTE(opadron): Apply workaround here (see above)
stream = WrapStream(obj["Body"])
elif method == "HEAD":
obj = s3.head_object(Bucket=bucket, Key=key)
stream = BytesIO()
except s3.ClientError as e:
raise urllib.error.URLError(e) from e
headers = obj["ResponseMetadata"]["HTTPHeaders"]
return url, headers, stream
class UrllibS3Handler(urllib.request.BaseHandler):
def s3_open(self, req):
orig_url = req.get_full_url()
url, headers, stream = _s3_open(orig_url, method=req.get_method())
return urllib.response.addinfourl(stream, headers, url)
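A hypothetical usage sketch for the handler above: registered with an opener, it routes s3:// URLs through the standard urllib machinery. The bucket and key are placeholders, and an actual fetch requires working boto3 credentials:

    import urllib.request

    opener = urllib.request.build_opener(UrllibS3Handler())
    # response = opener.open("s3://my-bucket/path/to/object")
    # print(response.headers)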

View File

@@ -13,12 +13,10 @@
import re
import types
import warnings
from typing import List, NamedTuple, Tuple, Union
from typing import List, NamedTuple
import archspec.cpu
import spack.deptypes as dt
try:
import clingo # type: ignore[import]
@@ -36,6 +34,7 @@
import spack.cmd
import spack.compilers
import spack.config
import spack.dependency
import spack.directives
import spack.environment as ev
import spack.error
@@ -45,18 +44,15 @@
import spack.repo
import spack.spec
import spack.store
import spack.util.crypto
import spack.traverse
import spack.util.path
import spack.util.timer
import spack.variant
import spack.version as vn
import spack.version.git_ref_lookup
from spack import traverse
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]
# these are from clingo.ast and bootstrapped later
ASTType = None
parse_files = None
@@ -573,41 +569,6 @@ def keyfn(x):
return normalized_yaml
def _is_checksummed_git_version(v):
return isinstance(v, vn.GitVersion) and v.is_commit
def _is_checksummed_version(version_info: Tuple[GitOrStandardVersion, dict]):
"""Returns true iff the version is not a moving target"""
version, info = version_info
if isinstance(version, spack.version.StandardVersion):
if any(h in info for h in spack.util.crypto.hashes.keys()) or "checksum" in info:
return True
return "commit" in info and len(info["commit"]) == 40
return _is_checksummed_git_version(version)
def _concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict]):
"""Version order key for concretization, where preferred > not preferred,
not deprecated > deprecated, finite > any infinite component; only if all are
the same, do we use default version ordering."""
version, info = version_info
return (
info.get("preferred", False),
not info.get("deprecated", False),
not version.isdevelop(),
version,
)
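A standalone sketch of that ordering on toy (version, info, is_develop) triples; the real key calls isdevelop() on version objects instead of carrying a boolean:

    versions = [
        ("2.0-dev", {"deprecated": False}, True),   # develop version
        ("1.9",     {"preferred": True},  False),
        ("1.8",     {"deprecated": True}, False),
        ("1.10",    {},                   False),
    ]

    def order_key(item):
        version, info, is_develop = item
        return (info.get("preferred", False),
                not info.get("deprecated", False),
                not is_develop,
                version)

    # Most-preferred first: preferred beats non-deprecated beats non-develop.
    print([v for v, _, _ in sorted(versions, key=order_key, reverse=True)])
    # -> ['1.9', '1.10', '2.0-dev', '1.8']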
def _spec_with_default_name(spec_str, name):
"""Return a spec with a default name if none is provided, used for requirement specs"""
spec = spack.spec.Spec(spec_str)
if not spec.name:
spec.name = name
return spec
def bootstrap_clingo():
global clingo, ASTType, parse_files
@@ -847,8 +808,6 @@ def visit(node):
# Load the file itself
self.control.load(os.path.join(parent_dir, "concretize.lp"))
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
self.control.load(os.path.join(parent_dir, "display.lp"))
if not setup.concretize_everything:
@@ -1463,18 +1422,18 @@ def package_dependencies_rules(self, pkg):
"""Translate 'depends_on' directives into ASP logic."""
for _, conditions in sorted(pkg.dependencies.items()):
for cond, dep in sorted(conditions.items()):
depflag = dep.depflag
deptypes = dep.type.copy()
# Skip test dependencies if they're not requested
if not self.tests:
depflag &= ~dt.TEST
deptypes.discard("test")
# ... or if they are requested only for certain packages
elif not isinstance(self.tests, bool) and pkg.name not in self.tests:
depflag &= ~dt.TEST
if not isinstance(self.tests, bool) and pkg.name not in self.tests:
deptypes.discard("test")
# if there are no dependency types to be considered
# anymore, don't generate the dependency
if not depflag:
if not deptypes:
continue
msg = "%s depends on %s" % (pkg.name, dep.spec.name)
@@ -1488,10 +1447,9 @@ def package_dependencies_rules(self, pkg):
fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name))
)
for t in dt.ALL_FLAGS:
if t & depflag:
# there is a declared dependency of type t
self.gen.fact(fn.dependency_type(condition_id, dt.flag_to_string(t)))
for t in sorted(deptypes):
# there is a declared dependency of type t
self.gen.fact(fn.dependency_type(condition_id, t))
self.gen.newline()
@@ -1865,21 +1823,45 @@ class Body:
if spec.concrete:
# We know dependencies are real for concrete specs. For abstract
# specs they just mean the dep is somehow in the DAG.
for dtype in dt.ALL_FLAGS:
if not dspec.depflag & dtype:
continue
for dtype in dspec.deptypes:
# skip build dependencies of already-installed specs
if concrete_build_deps or dtype != dt.BUILD:
if concrete_build_deps or dtype != "build":
clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))
for virtual_name in dspec.virtuals:
clauses.append(
fn.attr("virtual_on_edge", spec.name, dep.name, virtual_name)
)
clauses.append(fn.attr("virtual_node", virtual_name))
# TODO: We have to look up info from package.py here, but we'd
# TODO: like to avoid this entirely. We should not need to look
# TODO: up potentially wrong info if we have virtual edge info.
try:
try:
pkg = dep.package
except spack.repo.UnknownNamespaceError:
# Try to look up the package of the same name and use its
# providers. This is as good as we can do without edge info.
pkg_class = spack.repo.PATH.get_pkg_class(dep.name)
spec = spack.spec.Spec(f"{dep.name}@{dep.version}")
pkg = pkg_class(spec)
virtuals = pkg.virtuals_provided
except spack.repo.UnknownPackageError:
# Skip virtual node constraints for renamed/deleted packages,
# so their binaries can still be installed.
# NOTE: with current specs (which lack edge attributes) this
# can allow concretizations with two providers, but it's unlikely.
continue
# Don't concretize with two providers of the same virtual.
# See above for exception for unknown packages.
# TODO: we will eventually record provider information on edges,
# TODO: which avoids the need for the package lookup above.
for virtual in virtuals:
clauses.append(fn.attr("virtual_node", virtual.name))
clauses.append(fn.provider(dep.name, virtual.name))
# imposing hash constraints for all but pure build deps of
# already-installed concrete specs.
if concrete_build_deps or dspec.depflag != dt.BUILD:
if concrete_build_deps or dspec.deptypes != ("build",):
clauses.append(fn.attr("hash", dep.name, dep.dag_hash()))
# if the spec is abstract, descend into dependencies.
@@ -1897,27 +1879,30 @@ class Body:
return clauses
def define_package_versions_and_validate_preferences(
self, possible_pkgs, require_checksum: bool
):
def build_version_dict(self, possible_pkgs):
"""Declare any versions in specs not declared in packages."""
packages_yaml = spack.config.get("packages")
packages_yaml = _normalize_packages_yaml(packages_yaml)
for pkg_name in possible_pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
# All the versions from the corresponding package.py file. Since concepts
# like being a "develop" version or being preferred exist only at a
# package.py level, sort them in this partial list here
package_py_versions = sorted(
pkg_cls.versions.items(), key=_concretization_version_order, reverse=True
)
def key_fn(item):
version, info = item
# When COMPARING VERSIONS, the '@develop' version is always
# larger than other versions. BUT when CONCRETIZING, the largest
# NON-develop version is selected by default.
return (
info.get("preferred", False),
not info.get("deprecated", False),
not version.isdevelop(),
version,
)
if require_checksum and pkg_cls.has_code:
package_py_versions = [
x for x in package_py_versions if _is_checksummed_version(x)
]
for idx, (v, version_info) in enumerate(package_py_versions):
for idx, item in enumerate(sorted(pkg_cls.versions.items(), key=key_fn, reverse=True)):
v, version_info = item
self.possible_versions[pkg_name].add(v)
self.declared_versions[pkg_name].append(
DeclaredVersion(version=v, idx=idx, origin=Provenance.PACKAGE_PY)
@@ -1926,26 +1911,22 @@ def define_package_versions_and_validate_preferences(
if deprecated:
self.deprecated_versions[pkg_name].add(v)
if pkg_name not in packages_yaml or "version" not in packages_yaml[pkg_name]:
continue
# All the preferred versions from packages.yaml; versions in external
# specs will be computed later
version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
version_defs = []
for vstr in packages_yaml[pkg_name]["version"]:
pkg_class = spack.repo.PATH.get_pkg_class(pkg_name)
for vstr in version_preferences:
v = vn.ver(vstr)
if isinstance(v, vn.GitVersion):
if not require_checksum or v.is_commit:
version_defs.append(v)
version_defs.append(v)
else:
matches = [x for x in self.possible_versions[pkg_name] if x.satisfies(v)]
matches.sort(reverse=True)
if not matches:
raise spack.config.ConfigError(
f"Preference for version {v} does not match any known "
f"version of {pkg_name} (in its package.py or any external)"
)
version_defs.extend(matches)
satisfying_versions = self._check_for_defined_matching_versions(pkg_class, v)
# Amongst all defined versions satisfying this specific
# preference, the highest-numbered version is the
# most-preferred: therefore sort satisfying versions
# from greatest to least
version_defs.extend(sorted(satisfying_versions, reverse=True))
for weight, vdef in enumerate(llnl.util.lang.dedupe(version_defs)):
self.declared_versions[pkg_name].append(
@@ -1953,9 +1934,31 @@ def define_package_versions_and_validate_preferences(
)
self.possible_versions[pkg_name].add(vdef)
def define_ad_hoc_versions_from_specs(self, specs, origin, require_checksum: bool):
def _check_for_defined_matching_versions(self, pkg_class, v):
"""Given a version specification (which may be a concrete version,
range, etc.), determine if any package.py version declarations
or externals define a version which satisfies it.
This is primarily for determining whether a version request (e.g.
version preferences, which should not themselves define versions)
refers to a defined version.
This function raises an exception if no satisfying versions are
found.
"""
pkg_name = pkg_class.name
satisfying_versions = list(x for x in pkg_class.versions if x.satisfies(v))
satisfying_versions.extend(x for x in self.possible_versions[pkg_name] if x.satisfies(v))
if not satisfying_versions:
raise spack.config.ConfigError(
"Preference for version {0} does not match any version"
" defined for {1} (in its package.py or any external)".format(str(v), pkg_name)
)
return satisfying_versions
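A loose standalone sketch of the matching step: gather every defined version that satisfies the preference and fail loudly when none do. A simple string-prefix test stands in for Spack's satisfies():

    defined = ["4.0.1", "4.1.0", "3.9"]

    def matching(versions, prefix):
        found = [x for x in versions if x.startswith(prefix)]
        if not found:
            raise ValueError(f"preference {prefix!r} matches no defined version")
        return found

    # The highest-numbered match is most preferred, hence the reverse sort.
    assert sorted(matching(defined, "4."), reverse=True) == ["4.1.0", "4.0.1"]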
def add_concrete_versions_from_specs(self, specs, origin):
"""Add concrete versions to possible versions from lists of CLI/dev specs."""
for s in traverse.traverse_nodes(specs):
for s in spack.traverse.traverse_nodes(specs):
# If there is a concrete version on the CLI *that we know nothing
# about*, add it to the known versions. Use idx=0, which is the
# best possible, so they're guaranteed to be used preferentially.
@@ -1964,13 +1967,9 @@ def define_ad_hoc_versions_from_specs(self, specs, origin, require_checksum: boo
if version is None or any(v == version for v in self.possible_versions[s.name]):
continue
if require_checksum and not _is_checksummed_git_version(version):
raise UnsatisfiableSpecError(
s.format("No matching version for constraint {name}{@versions}")
)
declared = DeclaredVersion(version=version, idx=0, origin=origin)
self.declared_versions[s.name].append(declared)
self.declared_versions[s.name].append(
DeclaredVersion(version=version, idx=0, origin=origin)
)
self.possible_versions[s.name].add(version)
def _supported_targets(self, compiler_name, compiler_version, targets):
@@ -2167,7 +2166,7 @@ def generate_possible_compilers(self, specs):
# add compiler specs from the input line to possibilities if we
# don't require compilers to exist.
strict = spack.concretize.Concretizer().check_for_compiler_existence
for s in traverse.traverse_nodes(specs):
for s in spack.traverse.traverse_nodes(specs):
# we don't need to validate compilers for already-built specs
if s.concrete or not s.compiler:
continue
@@ -2417,12 +2416,13 @@ def setup(self, driver, specs, reuse=None):
self.provider_requirements()
self.external_packages()
# TODO: make a config option for this undocumented feature
require_checksum = "SPACK_CONCRETIZER_REQUIRE_CHECKSUM" in os.environ
self.define_package_versions_and_validate_preferences(self.pkgs, require_checksum)
self.define_ad_hoc_versions_from_specs(specs, Provenance.SPEC, require_checksum)
self.define_ad_hoc_versions_from_specs(dev_specs, Provenance.DEV_SPEC, require_checksum)
self.validate_and_define_versions_from_requirements(require_checksum)
# traverse all specs and packages to build dict of possible versions
self.build_version_dict(self.pkgs)
self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
req_version_specs = self._get_versioned_specs_from_pkg_requirements()
self.add_concrete_versions_from_specs(req_version_specs, Provenance.PACKAGE_REQUIREMENT)
self.gen.h1("Package Constraints")
for pkg in sorted(self.pkgs):
@@ -2471,68 +2471,78 @@ def literal_specs(self, specs):
if self.concretize_everything:
self.gen.fact(fn.solve_literal(idx))
def validate_and_define_versions_from_requirements(self, require_checksum: bool):
"""If package requirements mention concrete versions that are not mentioned
def _get_versioned_specs_from_pkg_requirements(self):
"""If package requirements mention versions that are not mentioned
elsewhere, then we need to collect those to mark them as possible
versions. If they are abstract and statically have no match, then we
need to throw an error. This function assumes all possible versions are already
registered in self.possible_versions."""
for pkg_name, d in spack.config.get("packages").items():
if pkg_name == "all" or "require" not in d:
versions.
"""
req_version_specs = list()
config = spack.config.get("packages")
for pkg_name, d in config.items():
if pkg_name == "all":
continue
for s in traverse.traverse_nodes(self._specs_from_requires(pkg_name, d["require"])):
name, versions = s.name, s.versions
if name not in self.pkgs or versions == spack.version.any_version:
continue
s.attach_git_version_lookup()
v = versions.concrete
if not v:
# If the version is not concrete, check that it is statically concretizable. If
# not, throw an error, which is just so that users know they need to change
# their config, instead of getting a hard-to-decipher concretization error.
if not any(x for x in self.possible_versions[name] if x.satisfies(versions)):
raise spack.config.ConfigError(
f"Version requirement {versions} on {pkg_name} for {name} "
f"cannot match any known version from package.py or externals"
)
continue
if v in self.possible_versions[name]:
continue
# If concrete and not yet defined, conditionally define it, like we do for specs
# from the command line.
if not require_checksum or _is_checksummed_git_version(v):
self.declared_versions[name].append(
DeclaredVersion(version=v, idx=0, origin=Provenance.PACKAGE_REQUIREMENT)
)
self.possible_versions[name].add(v)
if "require" in d:
req_version_specs.extend(self._specs_from_requires(pkg_name, d["require"]))
return req_version_specs
def _specs_from_requires(self, pkg_name, section):
"""Collect specs from a requirement rule"""
"""Collect specs from requirements which define versions (i.e. those that
have a concrete version). Requirements can define *new* versions if
they are included as part of an equivalence (hash=number) but not
otherwise.
"""
if isinstance(section, str):
yield _spec_with_default_name(section, pkg_name)
return
spec = spack.spec.Spec(section)
if not spec.name:
spec.name = pkg_name
extracted_specs = [spec]
else:
spec_strs = []
for spec_group in section:
if isinstance(spec_group, str):
spec_strs.append(spec_group)
else:
# Otherwise it is an object. The object can contain a single
# "spec" constraint, or a list of them with "any_of" or
# "one_of" policy.
if "spec" in spec_group:
new_constraints = [spec_group["spec"]]
else:
key = "one_of" if "one_of" in spec_group else "any_of"
new_constraints = spec_group[key]
spec_strs.extend(new_constraints)
for spec_group in section:
if isinstance(spec_group, str):
yield _spec_with_default_name(spec_group, pkg_name)
extracted_specs = []
for spec_str in spec_strs:
spec = spack.spec.Spec(spec_str)
if not spec.name:
spec.name = pkg_name
extracted_specs.append(spec)
version_specs = []
for spec in extracted_specs:
if spec.versions.concrete:
# Note: this includes git versions
version_specs.append(spec)
continue
# Otherwise it is an object. The object can contain a single
# "spec" constraint, or a list of them with "any_of" or
# "one_of" policy.
if "spec" in spec_group:
yield _spec_with_default_name(spec_group["spec"], pkg_name)
continue
# Prefer spec's name if it exists, in case the spec is
# requiring a specific implementation inside of a virtual section
# e.g. packages:mpi:require:openmpi@4.0.1
pkg_class = spack.repo.PATH.get_pkg_class(spec.name or pkg_name)
satisfying_versions = self._check_for_defined_matching_versions(
pkg_class, spec.versions
)
key = "one_of" if "one_of" in spec_group else "any_of"
for s in spec_group[key]:
yield _spec_with_default_name(s, pkg_name)
# Version ranges ("@1.3" without the "=", "@1.2:1.4") and lists
# will end up here
ordered_satisfying_versions = sorted(satisfying_versions, reverse=True)
vspecs = list(spack.spec.Spec("@{0}".format(x)) for x in ordered_satisfying_versions)
version_specs.extend(vspecs)
for spec in version_specs:
spec.attach_git_version_lookup()
return version_specs
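The shapes accepted here mirror the packages.yaml require schema. A standalone sketch with hypothetical data, showing how constraint strings are pulled out of the three shapes handled by the loop above (plain string, single "spec" object, grouped policy object):

    section = [
        "@3.2:",                           # plain constraint string
        {"spec": "+shared"},               # single "spec" object
        {"one_of": ["@4.0.1", "@4.1.0"]},  # grouped policy object
    ]

    spec_strs = []
    for spec_group in section:
        if isinstance(spec_group, str):
            spec_strs.append(spec_group)
        elif "spec" in spec_group:
            spec_strs.append(spec_group["spec"])
        else:
            key = "one_of" if "one_of" in spec_group else "any_of"
            spec_strs.extend(spec_group[key])

    assert spec_strs == ["@3.2:", "+shared", "@4.0.1", "@4.1.0"]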
class SpecBuilder:
@@ -2662,14 +2672,13 @@ def depends_on(self, parent_node, dependency_node, type):
dependency_spec = self._specs[dependency_node]
edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
edges = [x for x in edges if id(x.spec) == id(dependency_spec)]
depflag = dt.flag_from_string(type)
if not edges:
self._specs[parent_node].add_dependency_edge(
self._specs[dependency_node], depflag=depflag, virtuals=()
self._specs[dependency_node], deptypes=(type,), virtuals=()
)
else:
edges[0].update_deptypes(depflag=depflag)
edges[0].update_deptypes(deptypes=(type,))
def virtual_on_edge(self, parent_node, provider_node, virtual):
dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
@@ -2735,8 +2744,9 @@ def reorder_flags(self):
spec.compiler_flags.update({flag_type: ordered_compiler_flags})
def deprecated(self, node: NodeArgument, version: str) -> None:
tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version')
def deprecated(self, pkg, version):
msg = 'using "{0}@{1}" which is a deprecated version'
tty.warn(msg.format(pkg, version))
@staticmethod
def sort_fn(function_tuple):

View File

@@ -113,7 +113,6 @@ unification_set(SetID, VirtualNode)
% Node attributes that have multiple node arguments (usually, only the first argument is a node)
multiple_nodes_attribute("node_flag_source").
multiple_nodes_attribute("depends_on").
multiple_nodes_attribute("virtual_on_edge").
% Map constraint on the literal ID to facts on the node
attr(Name, node(min_dupe_id, A1)) :- literal(LiteralID, Name, A1), solve_literal(LiteralID).
@@ -368,18 +367,6 @@ attr("node_flag_source", node(X, A1), A2, node(Y, A3))
:- impose(ID, node(X, A1)),
imposed_constraint(ID, "depends_on", A1, A2, A3).
% Reconstruct virtual dependencies for reused specs
attr("virtual_on_edge", node(X, A1), node(Y, A2), Virtual)
:- impose(ID, node(X, A1)),
depends_on(node(X, A1), node(Y, A2)),
imposed_constraint(ID, "virtual_on_edge", A1, A2, Virtual),
not build(node(X, A1)).
virtual_condition_holds(node(Y, A2), Virtual)
:- impose(ID, node(X, A1)),
attr("virtual_on_edge", node(X, A1), node(Y, A2), Virtual),
not build(node(X, A1)).
% we cannot have additional variant values when we are working with concrete specs
:- attr("node", node(ID, Package)),
attr("hash", node(ID, Package), Hash),

View File

@@ -3,11 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set
from typing import List, Set, Tuple
import spack.deptypes as dt
import spack.dependency
import spack.package_base
import spack.repo
PossibleDependencies = Set[str]
@@ -24,11 +23,11 @@ class Counter:
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
self.specs = specs
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self.link_run_types = ("link", "run")
self.all_types = ("link", "run", "build")
self._possible_dependencies: PossibleDependencies = set()
self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)
@@ -60,7 +59,7 @@ def _compute_cache_values(self):
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self):
result = spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.all_types
*self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
)
self._possible_dependencies = set(result)
@@ -90,17 +89,17 @@ def __init__(self, specs, tests):
def _compute_cache_values(self):
self._link_run = set(
spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.link_run_types
*self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
)
)
self._link_run_virtuals.update(self._possible_virtuals)
for x in self._link_run:
current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD)
current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type("build")
self._direct_build.update(current)
self._total_build = set(
spack.package_base.possible_dependencies(
*self._direct_build, virtuals=self._possible_virtuals, depflag=self.all_types
*self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
)
)
self._possible_dependencies = set(self._link_run) | set(self._total_build)

View File

@@ -4,7 +4,7 @@
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% Heuristic to speed-up solves (node with ID 0)
% Heuristic to speed-up solves
%=============================================================================
@@ -27,3 +27,18 @@
% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]
% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic version_weight(node(ID, Package), ID) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), ID) : attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(ID, ID), compiler_id(ID), attr("node", node(ID, Package)). [25-5*ID, true]
% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic version_weight(node(ID, Package), ID) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic node_target_weight(node(ID, Package), ID) : attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(ID, ID), compiler_id(ID), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]

View File

@@ -1,24 +0,0 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% Heuristic to speed-up solves (node with ID > 0)
%=============================================================================
% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]

View File

@@ -56,7 +56,7 @@
import os
import re
import warnings
from typing import Callable, List, Optional, Tuple, Union
from typing import List, Tuple, Union
import llnl.util.filesystem as fs
import llnl.util.lang as lang
@@ -67,7 +67,6 @@
import spack.compilers
import spack.config
import spack.dependency as dp
import spack.deptypes as dt
import spack.error
import spack.hash_types as ht
import spack.paths
@@ -113,49 +112,50 @@
"UnsatisfiableDependencySpecError",
"AmbiguousHashError",
"InvalidHashError",
"RedundantSpecError",
"SpecDeprecatedError",
]
#: Valid pattern for an identifier in Spack
IDENTIFIER_RE = r"\w[\w-]*"
identifier_re = r"\w[\w-]*"
COMPILER_COLOR = "@g" #: color for highlighting compilers
VERSION_COLOR = "@c" #: color for highlighting versions
ARCHITECTURE_COLOR = "@m" #: color for highlighting architectures
ENABLED_VARIANT_COLOR = "@B" #: color for highlighting enabled variants
DISABLED_VARIANT_COLOR = "r" #: color for highlighting disabled variants
DEPENDENCY_COLOR = "@." #: color for highlighting dependencies
HASH_COLOR = "@K" #: color for highlighting package hashes
compiler_color = "@g" #: color for highlighting compilers
version_color = "@c" #: color for highlighting versions
architecture_color = "@m" #: color for highlighting architectures
enabled_variant_color = "@B" #: color for highlighting enabled variants
disabled_variant_color = "r" #: color for highlighting disabled variants
dependency_color = "@." #: color for highlighting dependencies
hash_color = "@K" #: color for highlighting package hashes
#: This map determines the coloring of specs when using color output.
#: We make the fields different colors to enhance readability.
#: See llnl.util.tty.color for descriptions of the color codes.
COLOR_FORMATS = {
"%": COMPILER_COLOR,
"@": VERSION_COLOR,
"=": ARCHITECTURE_COLOR,
"+": ENABLED_VARIANT_COLOR,
"~": DISABLED_VARIANT_COLOR,
"^": DEPENDENCY_COLOR,
"#": HASH_COLOR,
color_formats = {
"%": compiler_color,
"@": version_color,
"=": architecture_color,
"+": enabled_variant_color,
"~": disabled_variant_color,
"^": dependency_color,
"#": hash_color,
}
#: Regex used for splitting by spec field separators.
#: These need to be escaped to avoid metacharacters in
#: ``COLOR_FORMATS.keys()``.
_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())
#: ``color_formats.keys()``.
_separators = "[\\%s]" % "\\".join(color_formats.keys())
#: Default format for Spec.format(). This format can be round-tripped, so that:
#: Spec(Spec("string").format()) == Spec("string")
DEFAULT_FORMAT = (
default_format = (
"{name}{@versions}"
"{%compiler.name}{@compiler.versions}{compiler_flags}"
"{variants}{arch=architecture}{/abstract_hash}"
)
#: Display format, which eliminates extra `@=` in the output, for readability.
DISPLAY_FORMAT = (
display_format = (
"{name}{@version}"
"{%compiler.name}{@compiler.version}{compiler_flags}"
"{variants}{arch=architecture}{/abstract_hash}"
@@ -175,12 +175,9 @@
SPECFILE_FORMAT_VERSION = 4
# InstallStatus is used to map install statuses to symbols for display
# Options are artificially disjoint for display purposes
class InstallStatus(enum.Enum):
"""Maps install statuses to symbols for display.
Options are artificially disjoint for display purposes
"""
installed = "@g{[+]} "
upstream = "@g{[^]} "
external = "@g{[e]} "
@@ -190,7 +187,7 @@ class InstallStatus(enum.Enum):
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
COLOR_FORMATS."""
color_formats."""
class insert_color:
def __init__(self):
@@ -203,9 +200,9 @@ def __call__(self, match):
return clr.cescape(sep)
self.last = sep
return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))
return "%s%s" % (color_formats[sep], clr.cescape(sep))
return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")
return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + "@.")
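A minimal standalone sketch of the coloring approach, which is the same on both sides of the rename: build a regex character class from the separator table, then colorize with a callable substitution. The color codes below are placeholders, not llnl.util.tty.color sequences:

    import re

    color_table = {"%": "<g>", "@": "<c>", "^": "<.>"}
    separators = "[\\%s]" % "\\".join(color_table.keys())

    def insert(match):
        sep = match.group(0)
        return color_table[sep] + sep  # prefix each separator with its color

    print(re.sub(separators, insert, "hdf5@1.12%gcc^mpi"))
    # -> hdf5<c>@1.12<g>%gcc<.>^mpi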
@lang.lazy_lexicographic_ordering
@@ -728,54 +725,81 @@ class DependencySpec:
Args:
parent: starting node of the edge
spec: ending node of the edge.
depflag: represents dependency relationships.
deptypes: list of strings, representing dependency relationships.
virtuals: virtual packages provided from child to parent node.
"""
__slots__ = "parent", "spec", "depflag", "virtuals"
__slots__ = "parent", "spec", "parameters"
def __init__(
self, parent: "Spec", spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]
self,
parent: "Spec",
spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
):
self.parent = parent
self.spec = spec
self.depflag = depflag
self.virtuals = virtuals
self.parameters = {
"deptypes": dp.canonical_deptype(deptypes),
"virtuals": tuple(sorted(set(virtuals))),
}
def update_deptypes(self, depflag: dt.DepFlag) -> bool:
"""Update the current dependency types"""
old = self.depflag
new = depflag | old
if new == old:
@property
def deptypes(self) -> Tuple[str, ...]:
return self.parameters["deptypes"]
@property
def virtuals(self) -> Tuple[str, ...]:
return self.parameters["virtuals"]
def _update_edge_multivalued_property(
self, property_name: str, value: Tuple[str, ...]
) -> bool:
current = self.parameters[property_name]
update = set(current) | set(value)
update = tuple(sorted(update))
changed = current != update
if not changed:
return False
self.depflag = new
self.parameters[property_name] = update
return True
def update_deptypes(self, deptypes: Tuple[str, ...]) -> bool:
"""Update the current dependency types"""
return self._update_edge_multivalued_property("deptypes", deptypes)
def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
"""Update the list of provided virtuals"""
old = self.virtuals
self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals)))
return old != self.virtuals
return self._update_edge_multivalued_property("virtuals", virtuals)
def copy(self) -> "DependencySpec":
"""Return a copy of this edge"""
return DependencySpec(self.parent, self.spec, depflag=self.depflag, virtuals=self.virtuals)
return DependencySpec(
self.parent, self.spec, deptypes=self.deptypes, virtuals=self.virtuals
)
def _cmp_iter(self):
yield self.parent.name if self.parent else None
yield self.spec.name if self.spec else None
yield self.depflag
yield self.deptypes
yield self.virtuals
def __str__(self) -> str:
parent = self.parent.name if self.parent else None
child = self.spec.name if self.spec else None
return f"{parent} {self.depflag}[virtuals={','.join(self.virtuals)}] --> {child}"
return f"{parent} {self.deptypes}[virtuals={','.join(self.virtuals)}] --> {child}"
def canonical(self) -> Tuple[str, str, Tuple[str, ...], Tuple[str, ...]]:
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes, self.virtuals
def flip(self) -> "DependencySpec":
"""Flip the dependency, and drop virtual information"""
return DependencySpec(
parent=self.spec, spec=self.parent, depflag=self.depflag, virtuals=()
parent=self.spec, spec=self.parent, deptypes=self.deptypes, virtuals=()
)
@@ -920,8 +944,9 @@ def __str__(self):
)
def _sort_by_dep_types(dspec: DependencySpec):
return dspec.depflag
def _sort_by_dep_types(dspec):
# Use negation since False < True for sorting
return tuple(t not in dspec.deptypes for t in ("link", "run", "build", "test"))
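A tiny demonstration of the negation trick in the tuple-based key: since False < True, negating membership sorts edges that have a type before edges that lack it. Plain tuples stand in for DependencySpec objects:

    edges = [("test",), ("build", "link"), ("link", "run")]
    key = lambda deptypes: tuple(t not in deptypes for t in ("link", "run", "build", "test"))
    print(sorted(edges, key=key))
    # -> [('link', 'run'), ('build', 'link'), ('test',)]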
#: Enum for edge directions
@@ -987,7 +1012,7 @@ def copy(self):
return clone
def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
def select(self, parent=None, child=None, deptypes=dp.all_deptypes):
"""Select a list of edges and return them.
If an edge:
@@ -995,18 +1020,18 @@ def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
- Matches the parent and/or child name, if passed
then it is selected.
The deptypes argument needs to be a flag, since the method won't
The deptypes argument needs to be canonical, since the method won't
convert it for performance reasons.
Args:
parent (str): name of the parent package
child (str): name of the child package
depflag: allowed dependency types in flag form
deptypes (tuple): allowed dependency types in canonical form
Returns:
List of DependencySpec objects
"""
if not depflag:
if not deptypes:
return []
# Start from all the edges we store
@@ -1021,7 +1046,12 @@ def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
selected = (d for d in selected if d.spec.name == child)
# Filter by allowed dependency types
selected = (dep for dep in selected if not dep.depflag or (depflag & dep.depflag))
if deptypes:
selected = (
dep
for dep in selected
if not dep.deptypes or any(d in deptypes for d in dep.deptypes)
)
return list(selected)
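The final filter in select() passes an edge when it declares no deptypes at all, or shares at least one type with the request. A standalone sketch:

    def overlaps(edge_deptypes, requested):
        return not edge_deptypes or any(d in requested for d in edge_deptypes)

    assert overlaps((), ("link",))                  # untyped edges always pass
    assert overlaps(("build", "link"), ("link",))
    assert not overlaps(("test",), ("link", "run"))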
@@ -1441,49 +1471,47 @@ def _get_dependency(self, name):
raise spack.error.SpecError(err_msg.format(name, len(deps)))
return deps[0]
def edges_from_dependents(self, name=None, depflag: dt.DepFlag = dt.ALL):
def edges_from_dependents(self, name=None, deptype="all"):
"""Return a list of edges connecting this node in the DAG
to parents.
Args:
name (str): filter dependents by package name
depflag: allowed dependency types
deptype (str or tuple): allowed dependency types
"""
return [d for d in self._dependents.select(parent=name, depflag=depflag)]
deptype = dp.canonical_deptype(deptype)
return [d for d in self._dependents.select(parent=name, deptypes=deptype)]
def edges_to_dependencies(self, name=None, depflag: dt.DepFlag = dt.ALL):
def edges_to_dependencies(self, name=None, deptype="all"):
"""Return a list of edges connecting this node in the DAG
to children.
Args:
name (str): filter dependencies by package name
depflag: allowed dependency types
deptype (str or tuple): allowed dependency types
"""
return [d for d in self._dependencies.select(child=name, depflag=depflag)]
deptype = dp.canonical_deptype(deptype)
return [d for d in self._dependencies.select(child=name, deptypes=deptype)]
def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
def dependencies(self, name=None, deptype="all"):
"""Return a list of direct dependencies (nodes in the DAG).
Args:
name (str): filter dependencies by package name
deptype: allowed dependency types
deptype (str or tuple): allowed dependency types
"""
if not isinstance(deptype, dt.DepFlag):
deptype = dt.canonicalize(deptype)
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
return [d.spec for d in self.edges_to_dependencies(name, deptype=deptype)]
def dependents(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
def dependents(self, name=None, deptype="all"):
"""Return a list of direct dependents (nodes in the DAG).
Args:
name (str): filter dependents by package name
deptype: allowed dependency types
deptype (str or tuple): allowed dependency types
"""
if not isinstance(deptype, dt.DepFlag):
deptype = dt.canonicalize(deptype)
return [d.parent for d in self.edges_from_dependents(name, depflag=deptype)]
return [d.parent for d in self.edges_from_dependents(name, deptype=deptype)]
def _dependencies_dict(self, depflag: dt.DepFlag = dt.ALL):
def _dependencies_dict(self, deptype="all"):
"""Return a dictionary, keyed by package name, of the direct
dependencies.
@@ -1492,9 +1520,10 @@ def _dependencies_dict(self, depflag: dt.DepFlag = dt.ALL):
Args:
deptype: allowed dependency types
"""
_sort_fn = lambda x: (x.spec.name, _sort_by_dep_types(x))
_sort_fn = lambda x: (x.spec.name,) + _sort_by_dep_types(x)
_group_fn = lambda x: x.spec.name
selected_edges = self._dependencies.select(depflag=depflag)
deptype = dp.canonical_deptype(deptype)
selected_edges = self._dependencies.select(deptypes=deptype)
result = {}
for key, group in itertools.groupby(sorted(selected_edges, key=_sort_fn), key=_group_fn):
result[key] = list(group)
@@ -1590,17 +1619,19 @@ def _set_compiler(self, compiler):
)
self.compiler = compiler
def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]):
def _add_dependency(
self, spec: "Spec", *, deptypes: dp.DependencyArgument, virtuals: Tuple[str, ...]
):
"""Called by the parser to add another spec as a dependency."""
if spec.name not in self._dependencies or not spec.name:
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
self.add_dependency_edge(spec, deptypes=deptypes, virtuals=virtuals)
return
# Keep the intersection of constraints when a dependency is added
# multiple times. Currently, we only allow identical edge types.
orig = self._dependencies[spec.name]
try:
dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes)
except StopIteration:
raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec)
@@ -1612,7 +1643,11 @@ def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[
)
def add_dependency_edge(
self, dependency_spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[str, ...]
self,
dependency_spec: "Spec",
*,
deptypes: dp.DependencyArgument,
virtuals: Tuple[str, ...],
):
"""Add a dependency edge to this spec.
@@ -1621,17 +1656,19 @@ def add_dependency_edge(
deptypes: dependency types for this edge
virtuals: virtuals provided by this edge
"""
deptypes = dp.canonical_deptype(deptypes)
# Check if we need to update edges that are already present
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
has_errors, details = False, []
msg = f"cannot update the edge from {edge.parent.name} to {edge.spec.name}"
if edge.depflag & depflag:
if any(d in edge.deptypes for d in deptypes):
has_errors = True
details.append(
(
f"{edge.parent.name} has already an edge matching any"
f" of these types {depflag}"
f" of these types {str(deptypes)}"
)
)
@@ -1640,7 +1677,7 @@ def add_dependency_edge(
details.append(
(
f"{edge.parent.name} has already an edge matching any"
f" of these virtuals {virtuals}"
f" of these virtuals {str(virtuals)}"
)
)
@@ -1652,11 +1689,11 @@ def add_dependency_edge(
# If we are here, it means the edge object was previously added to
# both the parent and the child. When we update this object they'll
# both see the deptype modification.
edge.update_deptypes(depflag=depflag)
edge.update_deptypes(deptypes=deptypes)
edge.update_virtuals(virtuals=virtuals)
return
edge = DependencySpec(self, dependency_spec, depflag=depflag, virtuals=virtuals)
edge = DependencySpec(self, dependency_spec, deptypes=deptypes, virtuals=virtuals)
self._dependencies.add(edge)
dependency_spec._dependents.add(edge)
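Both versions of add_dependency_edge refuse to update an existing edge whose dependency types overlap the new ones: a bitwise AND in the flag form, an any() membership test in the tuple form. A toy version of the two overlap tests:

def overlaps_flag(existing: int, new: int) -> bool:
    return bool(existing & new)

def overlaps_tuple(existing: tuple, new: tuple) -> bool:
    return any(d in existing for d in new)

assert overlaps_flag(0b0011, 0b0010)                  # build|link vs link
assert overlaps_tuple(("build", "link"), ("link",))
assert not overlaps_tuple(("build",), ("run",))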
@@ -1923,12 +1960,12 @@ def lookup_hash(self):
# Get dependencies that need to be replaced
for node in self.traverse(root=False):
if node.abstract_hash:
spec._add_dependency(node._lookup_hash(), depflag=0, virtuals=())
spec._add_dependency(node._lookup_hash(), deptypes=(), virtuals=())
# reattach nodes that were not otherwise satisfied by new dependencies
for node in self.traverse(root=False):
if not any(n.satisfies(node) for n in spec.traverse()):
spec._add_dependency(node.copy(), depflag=0, virtuals=())
spec._add_dependency(node.copy(), deptypes=(), virtuals=())
return spec
@@ -2054,7 +2091,7 @@ def to_node_dict(self, hash=ht.dag_hash):
d["package_hash"] = package_hash
# Note: Relies on sorting dict by keys later in algorithm.
deps = self._dependencies_dict(depflag=hash.depflag)
deps = self._dependencies_dict(deptype=hash.deptype)
if deps:
deps_list = []
for name, edges_for_name in sorted(deps.items()):
@@ -2064,10 +2101,7 @@ def to_node_dict(self, hash=ht.dag_hash):
parameters_tuple = (
"parameters",
syaml.syaml_dict(
(
("deptypes", dt.flag_to_tuple(dspec.depflag)),
("virtuals", dspec.virtuals),
)
(key, dspec.parameters[key]) for key in sorted(dspec.parameters)
),
)
ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
@@ -2165,7 +2199,7 @@ def to_dict(self, hash=ht.dag_hash):
"""
node_list = [] # Using a list to preserve preorder traversal for hash.
hash_set = set()
for s in self.traverse(order="pre", deptype=hash.depflag):
for s in self.traverse(order="pre", deptype=hash.deptype):
spec_hash = s._cached_hash(hash)
if spec_hash not in hash_set:
@@ -2349,12 +2383,13 @@ def spec_builder(d):
if dep_like is None:
return spec
def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
def name_and_dependency_types(s):
"""Given a key in the dictionary containing the literal,
extracts the name of the spec and its dependency types.
Args:
s: key in the dictionary containing the literal
s (str): key in the dictionary containing the literal
"""
t = s.split(":")
@@ -2362,37 +2397,39 @@ def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
msg = 'more than one ":" separator in key "{0}"'
raise KeyError(msg.format(s))
name = t[0]
n = t[0]
if len(t) == 2:
depflag = dt.flag_from_strings(dep_str.strip() for dep_str in t[1].split(","))
dtypes = tuple(dt.strip() for dt in t[1].split(","))
else:
depflag = 0
return name, depflag
dtypes = ()
def spec_and_dependency_types(
s: Union[Spec, Tuple[Spec, str]]
) -> Tuple[Spec, dt.DepFlag]:
return n, dtypes
def spec_and_dependency_types(s):
"""Given a non-string key in the literal, extracts the spec
and its dependency types.
Args:
s: either a Spec object, or a tuple of Spec and string of dependency types
s (spec or tuple): either a Spec object or a tuple
composed of a Spec object and a string with the
dependency types
"""
if isinstance(s, Spec):
return s, 0
return s, ()
spec_obj, dtypes = s
return spec_obj, dt.flag_from_strings(dt.strip() for dt in dtypes.split(","))
return spec_obj, tuple(dt.strip() for dt in dtypes.split(","))
# Recurse on dependencies
for s, s_dependencies in dep_like.items():
if isinstance(s, str):
dag_node, dep_flag = name_and_dependency_types(s)
dag_node, dependency_types = name_and_dependency_types(s)
else:
dag_node, dep_flag = spec_and_dependency_types(s)
dag_node, dependency_types = spec_and_dependency_types(s)
dependency_spec = spec_builder({dag_node: s_dependencies})
spec._add_dependency(dependency_spec, depflag=dep_flag, virtuals=())
spec._add_dependency(dependency_spec, deptypes=dependency_types, virtuals=())
return spec
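Keys in the spec_builder literal may carry dependency types after a colon, e.g. "callpath:build,link". A standalone sketch of the key parsing that both versions implement (split once on ":", then split the tail on ","):

def name_and_dependency_types(key):
    parts = key.split(":")
    if len(parts) > 2:
        raise KeyError(f'more than one ":" separator in key "{key}"')
    name = parts[0]
    dtypes = tuple(p.strip() for p in parts[1].split(",")) if len(parts) == 2 else ()
    return name, dtypes

assert name_and_dependency_types("callpath:build, link") == ("callpath", ("build", "link"))
assert name_and_dependency_types("mpich") == ("mpich", ())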
@@ -2565,7 +2602,7 @@ def _replace_with(self, concrete):
virtuals = (self.name,)
for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
dependent = dep_spec.parent
depflag = dep_spec.depflag
deptypes = dep_spec.deptypes
# remove self from all dependents, unless it is already removed
if self.name in dependent._dependencies:
@@ -2573,7 +2610,7 @@ def _replace_with(self, concrete):
# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent._dependencies:
dependent._add_dependency(concrete, depflag=depflag, virtuals=virtuals)
dependent._add_dependency(concrete, deptypes=deptypes, virtuals=virtuals)
else:
dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
virtuals=virtuals
@@ -3135,7 +3172,7 @@ def _evaluate_dependency_conditions(self, name):
for when_spec, dependency in conditions.items():
if self.satisfies(when_spec):
if dep is None:
dep = dp.Dependency(self.name, Spec(name), depflag=0)
dep = dp.Dependency(self.name, Spec(name), type=())
try:
dep.merge(dependency)
except spack.error.UnsatisfiableSpecError as e:
@@ -3279,7 +3316,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
# Add merged spec to my deps and recurse
spec_dependency = spec_deps[dep.name]
if dep.name not in self._dependencies:
self._add_dependency(spec_dependency, depflag=dependency.depflag, virtuals=virtuals)
self._add_dependency(spec_dependency, deptypes=dependency.type, virtuals=virtuals)
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed
@@ -3320,7 +3357,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index, tests):
or (tests and self.name in tests)
or
# this is not a test-only dependency
(dep.depflag & ~dt.TEST)
dep.type - set(["test"])
)
if merge:
@@ -3614,7 +3651,9 @@ def _constrain_dependencies(self, other):
# WARNING: using index 0 i.e. we assume that we have only
# WARNING: one edge from package "name"
edges_from_name = self._dependencies[name]
changed |= edges_from_name[0].update_deptypes(other._dependencies[name][0].depflag)
changed |= edges_from_name[0].update_deptypes(
other._dependencies[name][0].deptypes
)
changed |= edges_from_name[0].update_virtuals(
other._dependencies[name][0].virtuals
)
@@ -3626,7 +3665,7 @@ def _constrain_dependencies(self, other):
dep_spec_copy = other._get_dependency(name)
self._add_dependency(
dep_spec_copy.spec.copy(),
depflag=dep_spec_copy.depflag,
deptypes=dep_spec_copy.deptypes,
virtuals=dep_spec_copy.virtuals,
)
changed = True
@@ -3901,7 +3940,7 @@ def patches(self):
return self._patches
def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, cleardeps=True):
def _dup(self, other, deps=True, cleardeps=True):
"""Copy the spec other into self. This is an overwriting
copy. It does not copy any dependents (parents), but by default
copies dependencies.
@@ -3910,8 +3949,9 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
Args:
other (Spec): spec to be copied onto ``self``
deps: if True, copies all the dependencies. If
False, copies none. If a deptype/depflag is given, copy only matching types.
deps (bool or Sequence): if True, copies all the dependencies. If
False, copies none. If a sequence of dependency types is given,
copy only those types.
cleardeps (bool): if True clears the dependencies of ``self``,
before possibly copying the dependencies of ``other`` onto
``self``
@@ -3971,10 +4011,10 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
if deps:
# If caller restricted deptypes to be copied, adjust that here.
# By default, just copy all deptypes
depflag = dt.ALL
if isinstance(deps, (tuple, list, str)):
depflag = dt.canonicalize(deps)
self._dup_deps(other, depflag)
deptypes = dp.all_deptypes
if isinstance(deps, (tuple, list)):
deptypes = deps
self._dup_deps(other, deptypes)
self._concrete = other._concrete
@@ -3995,13 +4035,13 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
return changed
def _dup_deps(self, other, depflag: dt.DepFlag):
def _dup_deps(self, other, deptypes):
def spid(spec):
return id(spec)
new_specs = {spid(other): self}
for edge in other.traverse_edges(cover="edges", root=False):
if edge.depflag and not depflag & edge.depflag:
if edge.deptypes and not any(d in deptypes for d in edge.deptypes):
continue
if spid(edge.parent) not in new_specs:
@@ -4011,16 +4051,17 @@ def spid(spec):
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
new_specs[spid(edge.parent)].add_dependency_edge(
new_specs[spid(edge.spec)], depflag=edge.depflag, virtuals=edge.virtuals
new_specs[spid(edge.spec)], deptypes=edge.deptypes, virtuals=edge.virtuals
)
def copy(self, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, **kwargs):
def copy(self, deps=True, **kwargs):
"""Make a copy of this spec.
Args:
deps: Defaults to True. If boolean, controls
deps (bool or tuple): Defaults to True. If boolean, controls
whether dependencies are copied (copied if True). If a
DepTypes or DepFlag is provided, *only* matching dependencies are copied.
tuple is provided, *only* dependencies of types matching
those in the tuple are copied.
kwargs: additional arguments for internal use (passed to ``_dup``).
Returns:
@@ -4080,7 +4121,7 @@ def __getitem__(self, name):
# only when we don't find the package do we consider the full DAG.
order = lambda: itertools.chain(
self.traverse(deptype="link"),
self.dependencies(deptype=dt.BUILD | dt.RUN | dt.TEST),
self.dependencies(deptype=("build", "run", "test")),
self.traverse(), # fall back to a full search
)
@@ -4138,7 +4179,7 @@ def eq_dag(self, other, deptypes=True, vs=None, vo=None):
for s_dspec, o_dspec in zip(
itertools.chain.from_iterable(ssorted), itertools.chain.from_iterable(osorted)
):
if deptypes and s_dspec.depflag != o_dspec.depflag:
if deptypes and s_dspec.deptypes != o_dspec.deptypes:
return False
s, o = s_dspec.spec, o_dspec.spec
@@ -4196,7 +4237,7 @@ def _cmp_iter(self):
def deps():
for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
yield dep.spec.name
yield dep.depflag
yield tuple(sorted(dep.deptypes))
yield hash(dep.spec)
yield deps
@@ -4204,7 +4245,7 @@ def deps():
def colorized(self):
return colorize_spec(self)
def format(self, format_string=DEFAULT_FORMAT, **kwargs):
def format(self, format_string=default_format, **kwargs):
r"""Prints out particular pieces of a spec, depending on what is
in the format string.
@@ -4266,6 +4307,19 @@ def format(self, format_string=DEFAULT_FORMAT, **kwargs):
``\{`` and ``\}`` for literal braces, and ``\\`` for the
literal ``\`` character.
The ``?`` sigil conditionally adds a value. A conditional
format value is rendered only if it can be constructed
without raising an error, and is silently skipped otherwise.
For example, ``{?^mpi.name}`` will print
``Spec["mpi"].name`` if such a node exists, and otherwise
prints nothing.
The ``?`` sigil may also be combined with a conditional
separator, which is prepended whenever anything is printed
for the conditional attribute. The syntax for this is
``?sep?attribute``, e.g.
``{name}-{version}{?/?^mpi.name}{?-?^mpi.version}``.
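Illustrative calls for the conditional syntax described above, assuming a Spack session where mpileaks concretizes with mpich as its MPI provider (the exact version strings are hypothetical):

from spack.spec import Spec

s = Spec("mpileaks").concretized()
s.format("{name}-{version}{?/?^mpi.name}{?-?^mpi.version}")
# -> e.g. "mpileaks-1.0/mpich-3.4.3": the mpi node exists, so the "/" and "-"
#    separators are prepended to the conditional values
Spec("zlib").concretized().format("{name}{?/?^mpi.name}")
# -> "zlib": no mpi node, so the conditional attribute and its separator vanish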
Args:
format_string (str): string containing the format to be expanded
@@ -4283,12 +4337,21 @@ def format(self, format_string=DEFAULT_FORMAT, **kwargs):
def write(s, c=None):
f = clr.cescape(s)
if c is not None:
f = COLOR_FORMATS[c] + f + "@."
f = color_formats[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
def write_attribute(spec, attribute, color):
attribute = attribute.lower()
conditional = False
conditional_sep = ""
matches_conditional_sep = re.match(r"^\?(.*)\?", attribute)
if matches_conditional_sep:
conditional = True
conditional_sep = matches_conditional_sep.group(1)
if attribute.startswith("?"):
conditional = True
sig = ""
if attribute.startswith(("@", "%", "/")):
# color sigils that are inside braces
@@ -4320,6 +4383,9 @@ def write_attribute(spec, attribute, color):
elif sig == " arch=" and attribute not in ("architecture", "arch"):
raise SpecFormatSigilError(sig, "the architecture", attribute)
# Now that we're done testing sig, combine it with conditional sep
sig = conditional_sep + sig
# find the morph function for our attribute
morph = transform.get(attribute, lambda s, x: x)
@@ -4349,7 +4415,12 @@ def write_attribute(spec, attribute, color):
else:
if isinstance(current, vt.VariantMap):
# subscript instead of getattr for variant names
current = current[part]
try:
current = current[part]
except KeyError:
if conditional:
return
raise
else:
# aliases
if part == "arch":
@@ -4365,6 +4436,8 @@ def write_attribute(spec, attribute, color):
try:
current = getattr(current, part)
except AttributeError:
if conditional:
return
parent = ".".join(parts[:idx])
m = "Attempted to format attribute %s." % attribute
m += "Spec %s has no attribute %s" % (parent, part)
@@ -4469,50 +4542,28 @@ def _installed_explicitly(self):
except KeyError:
return None
def tree(
self,
*,
color: Optional[bool] = None,
depth: bool = False,
hashes: bool = False,
hashlen: Optional[int] = None,
cover: str = "nodes",
indent: int = 0,
format: str = DEFAULT_FORMAT,
deptypes: Union[Tuple[str, ...], str] = "all",
show_types: bool = False,
depth_first: bool = False,
recurse_dependencies: bool = True,
status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
prefix: Optional[Callable[["Spec"], str]] = None,
) -> str:
def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
with indentation.
The status function may return either a boolean or an InstallStatus
Args:
color: if True, always colorize the tree. If False, don't colorize the tree. If None,
use the default from llnl.tty.color
depth: print the depth from the root
hashes: if True, print the hash of each node
hashlen: length of the hash to be printed
cover: either "nodes" or "edges"
indent: extra indentation for the tree being printed
format: format to be used to print each node
deptypes: dependency types to be represented in the tree
show_types: if True, show the (merged) dependency type of a node
depth_first: if True, traverse the DAG depth first when representing it as a tree
recurse_dependencies: if True, recurse on dependencies
status_fn: optional callable that takes a node as an argument and return its
installation status
prefix: optional callable that takes a node as an argument and return its
installation prefix
"""
out = ""
color = kwargs.pop("color", clr.get_color_when())
depth = kwargs.pop("depth", False)
hashes = kwargs.pop("hashes", False)
hlen = kwargs.pop("hashlen", None)
status_fn = kwargs.pop("status_fn", False)
cover = kwargs.pop("cover", "nodes")
indent = kwargs.pop("indent", 0)
fmt = kwargs.pop("format", default_format)
prefix = kwargs.pop("prefix", None)
show_types = kwargs.pop("show_types", False)
deptypes = kwargs.pop("deptypes", "all")
recurse_dependencies = kwargs.pop("recurse_dependencies", True)
depth_first = kwargs.pop("depth_first", False)
lang.check_kwargs(kwargs, self.tree)
if color is None:
color = clr.get_color_when()
out = ""
for d, dep_spec in traverse.traverse_tree(
[self], cover=cover, deptype=deptypes, depth_first=depth_first
@@ -4536,27 +4587,25 @@ def tree(
out += clr.colorize("@r{[-]} ", color=color)
if hashes:
out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)
out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hlen)
if show_types:
if cover == "nodes":
# when only covering nodes, we merge dependency types
# from all dependents before showing them.
depflag = 0
for ds in node.edges_from_dependents():
depflag |= ds.depflag
types = [ds.deptypes for ds in node.edges_from_dependents()]
else:
# when covering edges or paths, we show dependency
# types only for the edge through which we visited
depflag = dep_spec.depflag
types = [dep_spec.deptypes]
type_chars = dt.flag_to_chars(depflag)
type_chars = dp.deptype_chars(*types)
out += "[%s] " % type_chars
out += " " * d
if d > 0:
out += "^"
out += node.format(format, color=color) + "\n"
out += node.format(fmt, color=color) + "\n"
# Check if we wanted just the first line
if not recurse_dependencies:
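An illustrative call against the kwargs-style signature shown above (run in a Spack session; the spec name is arbitrary):

from spack.spec import Spec

s = Spec("mpileaks").concretized()
print(s.tree(hashes=True, hashlen=7, show_types=True, deptypes=("link", "run")))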
@@ -4712,14 +4761,14 @@ def from_self(name, transitive):
for edge in self[name].edges_to_dependencies():
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
nodes[name].add_dependency_edge(
nodes[dep_name], depflag=edge.depflag, virtuals=edge.virtuals
nodes[dep_name], deptypes=edge.deptypes, virtuals=edge.virtuals
)
if any(dep not in self_nodes for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec
else:
for edge in other[name].edges_to_dependencies():
nodes[name].add_dependency_edge(
nodes[edge.spec.name], depflag=edge.depflag, virtuals=edge.virtuals
nodes[edge.spec.name], deptypes=edge.deptypes, virtuals=edge.virtuals
)
if any(dep not in other_nodes for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec
@@ -4810,9 +4859,8 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
# Update with additional constraints from other spec
for name in current_spec_constraint.direct_dep_difference(merged_spec):
edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
merged_spec._add_dependency(
edge.spec.copy(), depflag=edge.depflag, virtuals=edge.virtuals
edge.spec.copy(), deptypes=edge.deptypes, virtuals=edge.virtuals
)
return merged_spec
@@ -4959,11 +5007,9 @@ def _load(cls, data):
# Pass 2: Finish construction of all DAG edges (including build specs)
for node_hash, node in hash_dict.items():
node_spec = node["node_spec"]
for _, dhash, dtype, _, virtuals in cls.dependencies_from_node_dict(node):
for _, dhash, dtypes, _, virtuals in cls.dependencies_from_node_dict(node):
node_spec._add_dependency(
hash_dict[dhash]["node_spec"],
depflag=dt.canonicalize(dtype),
virtuals=virtuals,
hash_dict[dhash]["node_spec"], deptypes=dtypes, virtuals=virtuals
)
if "build_spec" in node.keys():
_, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
@@ -4999,9 +5045,7 @@ def load(cls, data):
# get dependency dict from the node.
name, data = cls.name_and_data(node)
for dname, _, dtypes, _, virtuals in cls.dependencies_from_node_dict(data):
deps[name]._add_dependency(
deps[dname], depflag=dt.canonicalize(dtypes), virtuals=virtuals
)
deps[name]._add_dependency(deps[dname], deptypes=dtypes, virtuals=virtuals)
reconstruct_virtuals_on_edges(result)
return result
@@ -5319,6 +5363,14 @@ class NoSuchSpecFileError(SpecFilenameError):
"""Raised when a spec file doesn't exist."""
class RedundantSpecError(spack.error.SpecError):
def __init__(self, spec, addition):
super().__init__(
"Attempting to add %s to spec %s which is already concrete."
" This is likely the result of adding to a spec specified by hash." % (addition, spec)
)
class SpecFormatStringError(spack.error.SpecError):
"""Called for errors in Spec format strings."""

View File

@@ -97,10 +97,8 @@ def remove(self, spec):
msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
msg += "expanded from a matrix and cannot be removed directly."
raise SpecListError(msg)
# Remove may contain more than one string representation of the same spec
for item in remove:
self.yaml_list.remove(item)
assert len(remove) == 1
self.yaml_list.remove(remove[0])
# invalidate cache variables when we change the list
self._expanded_list = None

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import sys
import pytest
@@ -224,7 +223,6 @@ def test_concretize_target_ranges(root_target_range, dep_target_range, result, m
(["21.11", "21.9"], None, False),
],
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cray does not use windows")
def test_cray_platform_detection(versions, default, expected, tmpdir, monkeypatch, working_env):
ex_path = str(tmpdir.join("fake_craype_dir"))
fs.mkdirp(ex_path)

View File

@@ -16,9 +16,8 @@
import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml
from spack.build_environment import _static_to_shared_library, dso_suffix
from spack.build_environment import _static_to_shared_library, determine_number_of_jobs, dso_suffix
from spack.paths import build_env_path
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.util.path import Path, convert_to_platform_path
@@ -443,7 +442,7 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
spack.build_environment.set_module_variables_for_package(s["b"].package)
assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
parallel=s["b"].package.parallel
s["b"].package.parallel
)
@@ -475,62 +474,28 @@ def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mo
def test_build_jobs_sequential_is_sequential():
assert (
determine_number_of_jobs(
parallel=False,
max_cpus=8,
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
),
)
== 1
determine_number_of_jobs(parallel=False, command_line=8, config_default=8, max_cpus=8) == 1
)
def test_build_jobs_command_line_overrides():
assert (
determine_number_of_jobs(
parallel=True,
max_cpus=1,
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
),
)
determine_number_of_jobs(parallel=True, command_line=10, config_default=1, max_cpus=1)
== 10
)
assert (
determine_number_of_jobs(
parallel=True,
max_cpus=100,
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
),
)
determine_number_of_jobs(parallel=True, command_line=10, config_default=100, max_cpus=100)
== 10
)
def test_build_jobs_defaults():
assert (
determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.Configuration(
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
),
)
determine_number_of_jobs(parallel=True, command_line=None, config_default=1, max_cpus=10)
== 1
)
assert (
determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.Configuration(
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
),
)
determine_number_of_jobs(parallel=True, command_line=None, config_default=100, max_cpus=10)
== 10
)
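The assertions above pin down the behavior of the old determine_number_of_jobs signature completely; a standalone sketch consistent with them (not the real implementation, which reads Spack's configuration):

import os

def determine_number_of_jobs(parallel, command_line=None, config_default=16, max_cpus=None):
    if not parallel:               # sequential builds always get one job
        return 1
    if command_line is not None:   # an explicit CLI value wins, even over max_cpus
        return command_line
    if max_cpus is None:
        max_cpus = os.cpu_count()
    return min(config_default, max_cpus)

assert determine_number_of_jobs(parallel=False, command_line=8, config_default=8, max_cpus=8) == 1
assert determine_number_of_jobs(parallel=True, command_line=10, config_default=1, max_cpus=1) == 10
assert determine_number_of_jobs(parallel=True, command_line=None, config_default=100, max_cpus=10) == 10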

View File

@@ -5,7 +5,6 @@
import pytest
import spack.deptypes as dt
import spack.installer as inst
import spack.repo
import spack.spec
@@ -60,10 +59,10 @@ def test_build_request_strings(install_mockery):
@pytest.mark.parametrize(
"package_cache_only,dependencies_cache_only,package_deptypes,dependencies_deptypes",
[
(False, False, dt.BUILD | dt.LINK | dt.RUN, dt.BUILD | dt.LINK | dt.RUN),
(True, False, dt.LINK | dt.RUN, dt.BUILD | dt.LINK | dt.RUN),
(False, True, dt.BUILD | dt.LINK | dt.RUN, dt.LINK | dt.RUN),
(True, True, dt.LINK | dt.RUN, dt.LINK | dt.RUN),
(False, False, ["build", "link", "run"], ["build", "link", "run"]),
(True, False, ["link", "run"], ["build", "link", "run"]),
(False, True, ["build", "link", "run"], ["link", "run"]),
(True, True, ["link", "run"], ["link", "run"]),
],
)
def test_build_request_deptypes(
@@ -83,8 +82,8 @@ def test_build_request_deptypes(
},
)
actual_package_deptypes = build_request.get_depflags(s.package)
actual_dependency_deptypes = build_request.get_depflags(s["dependency-install"].package)
actual_package_deptypes = build_request.get_deptypes(s.package)
actual_dependency_deptypes = build_request.get_deptypes(s["dependency-install"].package)
assert actual_package_deptypes == package_deptypes
assert actual_dependency_deptypes == dependencies_deptypes
assert sorted(actual_package_deptypes) == package_deptypes
assert sorted(actual_dependency_deptypes) == dependencies_deptypes
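The parametrization encodes a single rule: a package installed from a binary cache skips its "build"-type dependencies. In the flag form that is a bitmask with BUILD cleared; in the tuple form, a tuple without "build". A sketch of both (flag values illustrative):

BUILD, LINK, RUN = 1, 2, 4

def get_depflags(cache_only):
    flags = BUILD | LINK | RUN
    return flags & ~BUILD if cache_only else flags

def get_deptypes(cache_only):
    return ("link", "run") if cache_only else ("build", "link", "run")

assert get_depflags(True) == LINK | RUN
assert get_deptypes(False) == ("build", "link", "run")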

View File

@@ -31,16 +31,13 @@ def test_fetch_missing_cache(tmpdir, _fetch_method):
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_fetch(tmpdir, _fetch_method):
"""Ensure a fetch after expanding is effectively a no-op."""
cache_dir = tmpdir.join("cache")
stage_dir = tmpdir.join("stage")
mkdirp(cache_dir)
mkdirp(stage_dir)
cache = os.path.join(cache_dir, "cache.tar.gz")
testpath = str(tmpdir)
cache = os.path.join(testpath, "cache.tar.gz")
touch(cache)
url = url_util.path_to_file_url(cache)
with spack.config.override("config:url_fetch_method", _fetch_method):
fetcher = CacheURLFetchStrategy(url=url)
with Stage(fetcher, path=str(stage_dir)) as stage:
with Stage(fetcher, path=testpath) as stage:
source_path = stage.source_path
mkdirp(source_path)
fetcher.fetch()

View File

@@ -42,36 +42,45 @@ def define_plat_exe(exe):
return exe
@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
def test_find_external_single_package(mock_executable):
def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
pkgs_to_check = [spack.repo.PATH.get_pkg_class("cmake")]
cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
search_dir = cmake_path.parent.parent
executables_found({str(cmake_path): define_plat_exe("cmake")})
specs_by_package = spack.detection.by_path(["cmake"], path_hints=[str(search_dir)])
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
assert len(specs_by_package) == 1 and "cmake" in specs_by_package
detected_spec = specs_by_package["cmake"]
assert len(detected_spec) == 1 and detected_spec[0].spec == Spec("cmake@1.foo")
pkg, entries = next(iter(pkg_to_entries.items()))
single_entry = next(iter(entries))
assert single_entry.spec == Spec("cmake@1.foo")
def test_find_external_two_instances_same_package(mock_executable, _platform_executables):
def test_find_external_two_instances_same_package(
mock_executable, executables_found, _platform_executables
):
pkgs_to_check = [spack.repo.PATH.get_pkg_class("cmake")]
# Each of these cmake instances is created in a different prefix
# On Windows, quoted strings are echoed with the quotes included;
# we need to avoid that for proper regex matching.
cmake1 = mock_executable("cmake", output="echo cmake version 1.foo", subdir=("base1", "bin"))
cmake2 = mock_executable("cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin"))
search_paths = [str(cmake1.parent.parent), str(cmake2.parent.parent)]
cmake_path1 = mock_executable(
"cmake", output="echo cmake version 1.foo", subdir=("base1", "bin")
)
cmake_path2 = mock_executable(
"cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
)
cmake_exe = define_plat_exe("cmake")
executables_found({str(cmake_path1): cmake_exe, str(cmake_path2): cmake_exe})
finder = spack.detection.path.ExecutablesFinder()
detected_specs = finder.find(pkg_name="cmake", initial_guess=search_paths)
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
assert len(detected_specs) == 2
spec_to_path = {e.spec: e.prefix for e in detected_specs}
pkg, entries = next(iter(pkg_to_entries.items()))
spec_to_path = dict((e.spec, e.prefix) for e in entries)
assert spec_to_path[Spec("cmake@1.foo")] == (
spack.detection.executable_prefix(str(cmake1.parent))
spack.detection.executable_prefix(os.path.dirname(cmake_path1))
)
assert spec_to_path[Spec("cmake@3.17.2")] == (
spack.detection.executable_prefix(str(cmake2.parent))
spack.detection.executable_prefix(os.path.dirname(cmake_path2))
)
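Both assertions derive the external prefix by walking up from the executable's bin directory. A minimal standalone equivalent of that path logic (a hypothetical helper; Spack's real executable_prefix also inspects the directory layout):

import os

def executable_prefix(bin_dir):
    # <prefix>/bin/cmake -> the prefix is the parent of the bin directory
    return os.path.dirname(bin_dir)

assert executable_prefix(os.path.join("base1", "bin")) == "base1"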
@@ -103,6 +112,23 @@ def test_get_executables(working_env, mock_executable):
external = SpackCommand("external")
def test_find_external_cmd(mutable_config, working_env, mock_executable, _platform_executables):
"""Test invoking 'spack external find' with additional package arguments,
which restricts the set of packages that Spack looks for.
"""
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
prefix = os.path.dirname(os.path.dirname(cmake_path1))
os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
external("find", "cmake")
pkgs_cfg = spack.config.get("packages")
cmake_cfg = pkgs_cfg["cmake"]
cmake_externals = cmake_cfg["externals"]
assert {"spec": "cmake@1.foo", "prefix": prefix} in cmake_externals
def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
"""When the user invokes 'spack external find --not-buildable', the config
for any package where Spack finds an external version should be marked as
@@ -112,29 +138,50 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
external("find", "--not-buildable", "cmake")
pkgs_cfg = spack.config.get("packages")
assert "cmake" in pkgs_cfg
assert not pkgs_cfg["cmake"]["buildable"]
@pytest.mark.parametrize(
"names,tags,exclude,expected",
[
# find --all
(None, ["detectable"], [], ["find-externals1"]),
# find --all --exclude find-externals1
(None, ["detectable"], ["find-externals1"], []),
# find cmake (and cmake is not detectable)
(["cmake"], ["detectable"], [], []),
],
)
def test_package_selection(names, tags, exclude, expected, mutable_mock_repo):
"""Tests various cases of selecting packages"""
# In the mock repo we only have 'find-externals1' that is detectable
result = spack.cmd.external.packages_to_search_for(names=names, tags=tags, exclude=exclude)
assert set(result) == set(expected)
def test_find_external_cmd_full_repo(
mutable_config, working_env, mock_executable, mutable_mock_repo, _platform_executables
):
"""Test invoking 'spack external find' with no additional arguments, which
iterates through each package in the repository.
"""
exe_path1 = mock_executable("find-externals1-exe", output="echo find-externals1 version 1.foo")
prefix = os.path.dirname(os.path.dirname(exe_path1))
os.environ["PATH"] = os.pathsep.join([os.path.dirname(exe_path1)])
external("find", "--all")
pkgs_cfg = spack.config.get("packages")
pkg_cfg = pkgs_cfg["find-externals1"]
pkg_externals = pkg_cfg["externals"]
assert {"spec": "find-externals1@1.foo", "prefix": prefix} in pkg_externals
def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_repo, monkeypatch):
def test_find_external_cmd_exclude(
mutable_config, working_env, mock_executable, mutable_mock_repo, _platform_executables
):
"""Test invoking 'spack external find --all --exclude', to ensure arbitary
external packages can be ignored.
"""
exe_path1 = mock_executable("find-externals1-exe", output="echo find-externals1 version 1.foo")
os.environ["PATH"] = os.pathsep.join([os.path.dirname(exe_path1)])
external("find", "--all", "--exclude=find-externals1")
pkgs_cfg = spack.config.get("packages")
assert "find-externals1" not in pkgs_cfg.keys()
def test_find_external_no_manifest(
mutable_config,
working_env,
mock_executable,
mutable_mock_repo,
_platform_executables,
monkeypatch,
):
"""The user runs 'spack external find'; the default path for storing
manifest files does not exist. Ensure that the command does not
fail.
@@ -147,7 +194,13 @@ def test_find_external_no_manifest(mutable_config, working_env, mutable_mock_rep
def test_find_external_empty_default_manifest_dir(
mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
mutable_config,
working_env,
mock_executable,
mutable_mock_repo,
_platform_executables,
tmpdir,
monkeypatch,
):
"""The user runs 'spack external find'; the default path for storing
manifest files exists but is empty. Ensure that the command does not
@@ -162,7 +215,13 @@ def test_find_external_empty_default_manifest_dir(
@pytest.mark.not_on_windows("Can't chmod on Windows")
@pytest.mark.skipif(getuid() == 0, reason="user is root")
def test_find_external_manifest_with_bad_permissions(
mutable_config, working_env, mutable_mock_repo, tmpdir, monkeypatch
mutable_config,
working_env,
mock_executable,
mutable_mock_repo,
_platform_executables,
tmpdir,
monkeypatch,
):
"""The user runs 'spack external find'; the default path for storing
manifest files exists but with insufficient permissions. Check that
@@ -203,7 +262,12 @@ def fail():
def test_find_external_nonempty_default_manifest_dir(
mutable_database, mutable_mock_repo, tmpdir, monkeypatch, directory_with_manifest
mutable_database,
mutable_mock_repo,
_platform_executables,
tmpdir,
monkeypatch,
directory_with_manifest,
):
"""The user runs 'spack external find'; the default manifest directory
contains a manifest file. Ensure that the specs are read.
@@ -248,7 +312,6 @@ def test_list_detectable_packages(mutable_config, mutable_mock_repo):
assert external.returncode == 0
@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _platform_executables):
# Prepare an environment to detect a fake gcc
gcc_exe = mock_executable("gcc", output="echo 4.2.1")
@@ -274,8 +337,11 @@ def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _pla
def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
# Prepare an environment to detect a fake gcc that
# override its external prefix
gcc_exe = mock_executable("gcc", output="echo 4.2.1")
search_dir = gcc_exe.parent
prefix = os.path.dirname(gcc_exe)
monkeypatch.setenv("PATH", prefix)
@classmethod
def _determine_variants(cls, exes, version_str):
@@ -284,17 +350,18 @@ def _determine_variants(cls, exes, version_str):
gcc_cls = spack.repo.PATH.get_pkg_class("gcc")
monkeypatch.setattr(gcc_cls, "determine_variants", _determine_variants)
finder = spack.detection.path.ExecutablesFinder()
detected_specs = finder.find(pkg_name="gcc", initial_guess=[str(search_dir)])
# Find the external spec
external("find", "gcc")
assert len(detected_specs) == 1
gcc = detected_specs[0].spec
assert gcc.name == "gcc"
assert gcc.external_path == os.path.sep + os.path.join("opt", "gcc", "bin")
# Check entries in 'packages.yaml'
packages_yaml = spack.config.get("packages")
assert "gcc" in packages_yaml
assert "externals" in packages_yaml["gcc"]
externals = packages_yaml["gcc"]["externals"]
assert len(externals) == 1
assert externals[0]["prefix"] == os.path.sep + os.path.join("opt", "gcc", "bin")
@pytest.mark.xfail(sys.platform == "win32", reason="https://github.com/spack/spack/pull/39850")
def test_new_entries_are_reported_correctly(
mock_executable, mutable_config, monkeypatch, _platform_executables
):

View File

@@ -16,17 +16,15 @@
import spack.compilers
import spack.concretize
import spack.config
import spack.deptypes as dt
import spack.detection
import spack.error
import spack.hash_types as ht
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.variant as vt
from spack.concretize import find_spec
from spack.spec import CompilerSpec, Spec
from spack.version import Version, ver
from spack.version import ver
def check_spec(abstract, concrete):
@@ -236,13 +234,13 @@ def test_concretize_mention_build_dep(self):
# Check parent's perspective of child
to_dependencies = spec.edges_to_dependencies(name="cmake")
assert len(to_dependencies) == 1
assert to_dependencies[0].depflag == dt.BUILD
assert set(to_dependencies[0].deptypes) == set(["build"])
# Check child's perspective of parent
cmake = spec["cmake"]
from_dependents = cmake.edges_from_dependents(name="cmake-client")
assert len(from_dependents) == 1
assert from_dependents[0].depflag == dt.BUILD
assert set(from_dependents[0].deptypes) == set(["build"])
def test_concretize_preferred_version(self):
spec = check_concretize("python")
@@ -1600,6 +1598,8 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, monkeypatch):
)
@pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
def test_best_effort_coconcretize(self, specs, expected):
import spack.solver.asp
specs = [Spec(s) for s in specs]
solver = spack.solver.asp.Solver()
solver.reuse = False
@@ -1643,6 +1643,8 @@ def test_best_effort_coconcretize(self, specs, expected):
@pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occurances):
"""Test package preferences during coconcretization."""
import spack.solver.asp
specs = [Spec(s) for s in specs]
solver = spack.solver.asp.Solver()
solver.reuse = False
@@ -1658,6 +1660,8 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occura
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_coconcretize_reuse_and_virtuals(self):
import spack.solver.asp
reusable_specs = []
for s in ["mpileaks ^mpich", "zmpi"]:
reusable_specs.extend(Spec(s).concretized().traverse(root=True))
@@ -1678,6 +1682,8 @@ def test_misleading_error_message_on_version(self, mutable_database):
# For this bug to be triggered we need a reusable dependency
# that is not optimal in terms of optimization scores.
# We pick an old version of "b"
import spack.solver.asp
reusable_specs = [Spec("non-existing-conditional-dep@1.0").concretized()]
root_spec = Spec("non-existing-conditional-dep@2.0")
@@ -1693,6 +1699,8 @@ def test_misleading_error_message_on_version(self, mutable_database):
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_version_weight_and_provenance(self):
"""Test package preferences during coconcretization."""
import spack.solver.asp
reusable_specs = [Spec(spec_str).concretized() for spec_str in ("b@0.9", "b@1.0")]
root_spec = Spec("a foobar=bar")
@@ -1724,6 +1732,8 @@ def test_version_weight_and_provenance(self):
@pytest.mark.regression("31169")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_not_reusing_incompatible_os_or_compiler(self):
import spack.solver.asp
root_spec = Spec("b")
s = root_spec.concretized()
wrong_compiler, wrong_os = s.copy(), s.copy()
@@ -1944,7 +1954,7 @@ def test_external_python_extension_find_dependency_from_detection(self, monkeypa
def find_fake_python(classes, path_hints):
return {"python": [spack.detection.DetectedPackage(python_spec, prefix=path_hints[0])]}
monkeypatch.setattr(spack.detection, "by_path", find_fake_python)
monkeypatch.setattr(spack.detection, "by_executable", find_fake_python)
external_conf = {
"py-extension1": {
"buildable": False,
@@ -2089,29 +2099,6 @@ def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretiza
edges = spec.edges_to_dependencies(name="callpath")
assert len(edges) == 1 and edges[0].virtuals == ()
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
@pytest.mark.db
@pytest.mark.parametrize(
"spec_str,mpi_name",
[("mpileaks", "mpich"), ("mpileaks ^mpich2", "mpich2"), ("mpileaks ^zmpi", "zmpi")],
)
def test_virtuals_are_reconstructed_on_reuse(self, spec_str, mpi_name, database):
"""Tests that when we reuse a spec, virtual on edges are reconstructed correctly"""
with spack.config.override("concretizer:reuse", True):
spec = Spec(spec_str).concretized()
assert spec.installed
mpi_edges = spec.edges_to_dependencies(mpi_name)
assert len(mpi_edges) == 1
assert "mpi" in mpi_edges[0].virtuals
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_dont_define_new_version_from_input_if_checksum_required(self, working_env):
os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
with pytest.raises(spack.error.UnsatisfiableSpecError):
# normally spack concretizes to @=3.0 if it's not defined in package.py, except
# when checksums are required
Spec("a@=3.0").concretized()
@pytest.fixture()
def duplicates_test_repository():
@@ -2206,39 +2193,3 @@ def test_solution_without_cycles(self):
s = Spec("cycle-b").concretized()
assert s["cycle-a"].satisfies("~cycle")
assert s["cycle-b"].satisfies("+cycle")
@pytest.mark.parametrize(
"v_str,v_opts,checksummed",
[
("1.2.3", {"sha256": f"{1:064x}"}, True),
# it's not about the version being "infinite",
# but whether it has a digest
("develop", {"sha256": f"{1:064x}"}, True),
# other hash types
("1.2.3", {"checksum": f"{1:064x}"}, True),
("1.2.3", {"md5": f"{1:032x}"}, True),
("1.2.3", {"sha1": f"{1:040x}"}, True),
("1.2.3", {"sha224": f"{1:056x}"}, True),
("1.2.3", {"sha384": f"{1:096x}"}, True),
("1.2.3", {"sha512": f"{1:0128x}"}, True),
# no digest key
("1.2.3", {"bogus": f"{1:064x}"}, False),
# git version with full commit sha
("1.2.3", {"commit": f"{1:040x}"}, True),
(f"{1:040x}=1.2.3", {}, True),
# git version with short commit sha
("1.2.3", {"commit": f"{1:07x}"}, False),
(f"{1:07x}=1.2.3", {}, False),
# git tag is a moving target
("1.2.3", {"tag": "v1.2.3"}, False),
("1.2.3", {"tag": "v1.2.3", "commit": f"{1:07x}"}, False),
# git branch is a moving target
("1.2.3", {"branch": "releases/1.2"}, False),
# git ref is a moving target
("git.branch=1.2.3", {}, False),
],
)
def test_drop_moving_targets(v_str, v_opts, checksummed):
v = Version(v_str)
assert spack.solver.asp._is_checksummed_version((v, v_opts)) == checksummed
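The parametrized table fully determines the predicate for these cases; a standalone sketch that reproduces it (the real _is_checksummed_version consults Spack's version machinery):

DIGEST_KEYS = {"sha256", "checksum", "md5", "sha1", "sha224", "sha384", "sha512"}
FULL_SHA_LEN = 40  # a full git commit sha pins the version; short shas do not

def is_checksummed(version_str, opts):
    if DIGEST_KEYS & set(opts):    # any recognized digest key counts
        return True
    if len(opts.get("commit", "")) == FULL_SHA_LEN:
        return True
    if "=" in version_str:         # e.g. "<sha>=1.2.3" git versions
        return len(version_str.split("=")[0]) == FULL_SHA_LEN
    return False

assert is_checksummed("1.2.3", {"sha256": "0" * 64})
assert is_checksummed(f"{1:040x}=1.2.3", {})
assert not is_checksummed("1.2.3", {"tag": "v1.2.3"})
assert not is_checksummed("git.branch=1.2.3", {})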

View File

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib
import pytest
@@ -300,14 +299,9 @@ def test_requirement_adds_version_satisfies(
assert s1.satisfies("@2.2")
@pytest.mark.parametrize("require_checksum", (True, False))
def test_requirement_adds_git_hash_version(
require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch, working_env
concretize_scope, test_repo, mock_git_version_info, monkeypatch
):
# A full commit sha is a checksummed version, so this test should pass in both cases
if require_checksum:
os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
repo_path, filename, commits = mock_git_version_info
monkeypatch.setattr(
spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False

View File

@@ -277,25 +277,6 @@ def test_add_config_path(mutable_config):
compilers = spack.config.get("packages")["all"]["compiler"]
assert "gcc" in compilers
# Try quotes to escape brackets
path = "config:install_tree:projections:cmake:\
'{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
spack.config.add(path)
set_value = spack.config.get("config")["install_tree"]["projections"]["cmake"]
assert set_value == "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
# NOTE:
# The config path: "config:install_tree:root:<path>" is unique in that it can accept multiple
# schemas (such as a dropped "root" component), which is atypical and may lead to tests that
# pass even when the behavior is actually incorrect.
# the config path below is such that no subkey accepts a string as a valid entry in our schema
# try quotes to escape colons
path = "config:build_stage:'C:\\path\\to\\config.yaml'"
spack.config.add(path)
set_value = spack.config.get("config")["build_stage"]
assert "C:\\path\\to\\config.yaml" in set_value
@pytest.mark.regression("17543,23259")
def test_add_config_path_with_enumerated_type(mutable_config):

View File

@@ -27,7 +27,7 @@
import llnl.util.lang
import llnl.util.lock
import llnl.util.tty as tty
from llnl.util.filesystem import copy_tree, mkdirp, remove_linked_tree, touchp, working_dir
from llnl.util.filesystem import copy_tree, mkdirp, remove_linked_tree, working_dir
import spack.binary_distribution
import spack.caches
@@ -565,8 +565,6 @@ def mock_repo_path():
def _pkg_install_fn(pkg, spec, prefix):
# sanity_check_prefix requires something in the install directory
mkdirp(prefix.bin)
if not os.path.exists(spec.package.build_log_path):
touchp(spec.package.build_log_path)
@pytest.fixture

Some files were not shown because too many files have changed in this diff.