Compare commits


1 Commit

Author: John Parent
SHA1: b424388d6d
Message: gdal: remove non windows deps from windows build
Date: 2023-08-14 20:21:35 -04:00
586 changed files with 3790 additions and 9907 deletions


@@ -22,7 +22,7 @@ jobs:
matrix:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: ${{inputs.python_version}}


@@ -24,7 +24,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup repo
@@ -158,7 +158,7 @@ jobs:
run: |
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
run: |
brew install tree
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap clingo
run: |
set -ex
@@ -204,7 +204,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup repo
@@ -247,7 +247,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh


@@ -56,7 +56,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Set Container Tag Normal (Nightly)
run: |
@@ -95,7 +95,7 @@ jobs:
uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # @v1
uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
- name: Log in to GitHub Container Registry
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1


@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0


@@ -14,7 +14,7 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1


@@ -47,7 +47,7 @@ jobs:
on_develop: false
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -94,7 +94,7 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -133,7 +133,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -152,7 +152,7 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -187,7 +187,7 @@ jobs:
matrix:
python-version: ["3.10"]
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2


@@ -18,7 +18,7 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -68,7 +68,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
- name: Setup repo and non-root user
run: |
git --version


@@ -15,7 +15,7 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -39,7 +39,7 @@ jobs:
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -63,7 +63,7 @@ jobs:
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
- uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
with:
fetch-depth: 0
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1


@@ -2,26 +2,24 @@
## Supported Versions
We provide security updates for `develop` and for the last two
stable (`0.x`) release series of Spack. Security updates will be
made available as patch (`0.x.1`, `0.x.2`, etc.) releases.
We provide security updates for the following releases.
For more on Spack's release structure, see
[`README.md`](https://github.com/spack/spack#releases).
| Version | Supported |
| ------- | ------------------ |
| develop | :white_check_mark: |
| 0.19.x | :white_check_mark: |
| 0.18.x | :white_check_mark: |
## Reporting a Vulnerability
You can report a vulnerability using GitHub's private reporting
feature:
To report a vulnerability or other security
issue, email maintainers@spack.io.
1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
2. Click "Report a vulnerability" in the upper right corner of that page.
3. Fill out the form and submit your draft security advisory.
More details are available in
[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
You can expect to hear back about security issues within two days.
If your security issue is accepted, we will do our best to release
a fix within a week. If fixing the issue will take longer than
this, we will discuss timeline options with you.
You can expect to hear back within two days.
If your security issue is accepted, we will do
our best to release a fix within a week. If
fixing the issue will take longer than this,
we will discuss timeline options with you.


@@ -51,43 +51,65 @@ setlocal enabledelayedexpansion
:: subcommands will never start with '-'
:: everything after the subcommand is an arg
:: we cannot allow batch "for" loop to directly process CL args
:: a number of batch reserved characters are commonly passed to
:: spack and allowing batch's "for" method to process the raw inputs
:: results in a large number of formatting issues
:: instead, treat the entire CLI as one string
:: and split by space manually
:: capture cl args in variable named cl_args
set cl_args=%*
:process_cl_args
rem Set first cl argument (denoted by %1) to be processed
set t=%1
rem shift moves all cl positional arguments left by one
rem meaning %2 is now %1, this allows us to iterate over each
rem argument
shift
rem assign next "first" cl argument to cl_args, will be null when
rem there are now further arguments to process
set cl_args=%1
if "!t:~0,1!" == "-" (
if defined _sp_subcommand (
rem We already have a subcommand, processing args now
rem tokens=1* returns the first processed token produced
rem by tokenizing the input string cl_args on spaces into
rem the named variable %%g
rem While this make look like a for loop, it only
rem executes a single time for each of the cl args
rem the actual iterative loop is performed by the
rem goto process_cl_args stanza
rem we are simply leveraging the "for" method's string
rem tokenization
for /f "tokens=1*" %%g in ("%cl_args%") do (
set t=%%~g
rem remainder of string is composed into %%h
rem these are the cl args yet to be processed
rem assign cl_args var to only the args to be processed
rem effectively discarding the current arg %%g
rem this will be nul when we have no further tokens to process
set cl_args=%%h
rem process the first space delineated cl arg
rem of this iteration
if "!t:~0,1!" == "-" (
if defined _sp_subcommand (
rem We already have a subcommand, processing args now
if not defined _sp_args (
set "_sp_args=!t!"
) else (
set "_sp_args=!_sp_args! !t!"
)
) else (
if not defined _sp_flags (
set "_sp_flags=!t!"
shift
) else (
set "_sp_flags=!_sp_flags! !t!"
shift
)
)
) else if not defined _sp_subcommand (
set "_sp_subcommand=!t!"
shift
) else (
if not defined _sp_args (
set "_sp_args=!t!"
shift
) else (
set "_sp_args=!_sp_args! !t!"
shift
)
) else (
if not defined _sp_flags (
set "_sp_flags=!t!"
) else (
set "_sp_flags=!_sp_flags! !t!"
)
)
) else if not defined _sp_subcommand (
set "_sp_subcommand=!t!"
) else (
if not defined _sp_args (
set "_sp_args=!t!"
) else (
set "_sp_args=!_sp_args! !t!"
)
)
rem if this is not nu;ll, we have more tokens to process
rem if this is not nil, we have more tokens to process
rem start above process again with remaining unprocessed cl args
if defined cl_args goto :process_cl_args
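
The rem comments above walk through the batch script's manual head/tail tokenization of the command line. As a rough illustration only, the same classification loop can be sketched in Python (all names here are invented for the example; this is not Spack code):

# Sketch of the batch script's "tokens=1*" head/tail split: peel one
# space-delimited token per iteration and classify it.
def classify_args(cl_args: str):
    sp_flags, sp_subcommand, sp_args = [], None, []
    while cl_args:
        t, _, cl_args = cl_args.partition(" ")  # head token, then remainder
        if t.startswith("-"):
            # flags come before the subcommand, args after it
            (sp_args if sp_subcommand else sp_flags).append(t)
        elif sp_subcommand is None:
            sp_subcommand = t
        else:
            sp_args.append(t)
    return sp_flags, sp_subcommand, sp_args

print(classify_args("-d install -v zlib"))  # (['-d'], 'install', ['-v', 'zlib'])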


@@ -39,20 +39,6 @@ function Read-SpackArgs {
return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
}
function Set-SpackEnv {
# This method is responsible
# for processing the return from $(spack <command>)
# which are returned as System.Object[]'s containing
# a list of env commands
# Invoke-Expression can only handle one command at a time
# so we iterate over the list to invoke the env modification
# expressions one at a time
foreach($envop in $args[0]){
Invoke-Expression $envop
}
}
function Invoke-SpackCD {
if (Compare-CommonArgs $SpackSubCommandArgs) {
python $Env:SPACK_ROOT/bin/spack cd -h
@@ -93,7 +79,7 @@ function Invoke-SpackEnv {
}
else {
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
Set-SpackEnv $SpackEnv
$ExecutionContext.InvokeCommand($SpackEnv)
}
}
"deactivate" {
@@ -104,8 +90,8 @@ function Invoke-SpackEnv {
python $Env:SPACK_ROOT/bin/spack env deactivate -h
}
else {
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate "--pwsh")
Set-SpackEnv $SpackEnv
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
$ExecutionContext.InvokeCommand($SpackEnv)
}
}
default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
@@ -121,9 +107,8 @@ function Invoke-SpackLoad {
python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
}
else {
# python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs
$SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
Set-SpackEnv $SpackEnv
$ExecutionContext.InvokeCommand($SpackEnv)
}
}


@@ -36,9 +36,3 @@ concretizer:
# on each root spec, allowing different versions and variants of the same package in
# an environment.
unify: true
# Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
duplicates:
# "none": allows a single node for any package in the DAG.
# "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: none


@@ -60,7 +60,7 @@ packages:
xxd: [xxd-standalone, vim]
yacc: [bison, byacc]
ziglang: [zig]
zlib-api: [zlib-ng+compat, zlib]
zlib-api: [zlib, zlib-ng+compat]
permissions:
read: world
write: user


@@ -1,113 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
==========================
Using External GPU Support
==========================
Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.
-----------------------------------
Using an External ROCm Installation
-----------------------------------
Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:
.. code-block:: yaml
packages:
all:
compiler: [rocmcc@=5.3.0]
variants: amdgpu_target=gfx90a
hip:
buildable: false
externals:
- spec: hip@5.3.0
prefix: /opt/rocm-5.3.0/hip
hsa-rocr-dev:
buildable: false
externals:
- spec: hsa-rocr-dev@5.3.0
prefix: /opt/rocm-5.3.0/
llvm-amdgpu:
buildable: false
externals:
- spec: llvm-amdgpu@5.3.0
prefix: /opt/rocm-5.3.0/llvm/
comgr:
buildable: false
externals:
- spec: comgr@5.3.0
prefix: /opt/rocm-5.3.0/
hipsparse:
buildable: false
externals:
- spec: hipsparse@5.3.0
prefix: /opt/rocm-5.3.0/
hipblas:
buildable: false
externals:
- spec: hipblas@5.3.0
prefix: /opt/rocm-5.3.0/
rocblas:
buildable: false
externals:
- spec: rocblas@5.3.0
prefix: /opt/rocm-5.3.0/
rocprim:
buildable: false
externals:
- spec: rocprim@5.3.0
prefix: /opt/rocm-5.3.0/rocprim/
This is in combination with the following compiler definition:
.. code-block:: yaml
compilers:
- compiler:
spec: rocmcc@=5.3.0
paths:
cc: /opt/rocm-5.3.0/bin/amdclang
cxx: /opt/rocm-5.3.0/bin/amdclang++
f77: null
fc: /opt/rocm-5.3.0/bin/amdflang
operating_system: rhel8
target: x86_64
This includes the following considerations:
- Each of the listed externals specifies ``buildable: false`` to force Spack
to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
packages, but not all of them, and furthermore not in a manner that
guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
require listing one of the subdirectories as a prefix.
-----------------------------------
Using an External CUDA Installation
-----------------------------------
CUDA is split into fewer components and is simpler to specify:
.. code-block:: yaml
packages:
all:
variants:
- cuda_arch=70
cuda:
buildable: false
externals:
- spec: cuda@11.0.2
prefix: /opt/cuda/cuda-11.0.2/
where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.


@@ -77,7 +77,6 @@ or refer to the full manual below.
extensions
pipelines
signing
gpu_configuration
.. toctree::
:maxdepth: 2


@@ -4773,17 +4773,17 @@ For example, running:
results in spack checking that the installation created the following **file**:
* ``self.prefix.bin.reframe``
* ``self.prefix/bin/reframe``
and the following **directories**:
* ``self.prefix.bin``
* ``self.prefix.config``
* ``self.prefix.docs``
* ``self.prefix.reframe``
* ``self.prefix.tutorials``
* ``self.prefix.unittests``
* ``self.prefix.cscs-checks``
* ``self.prefix/bin``
* ``self.prefix/config``
* ``self.prefix/docs``
* ``self.prefix/reframe``
* ``self.prefix/tutorials``
* ``self.prefix/unittests``
* ``self.prefix/cscs-checks``
If **any** of these paths are missing, then Spack considers the installation
to have failed.
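
For context, the path lists above use Spack prefix objects, where attribute access joins path components. A simplified stand-in that shows only the idea (not Spack's actual Prefix class):

import os

class Prefix(str):
    # Attribute access appends a path component, so prefix.bin.reframe
    # and os.path.join(prefix, "bin", "reframe") name the same path.
    def __getattr__(self, attr):
        return Prefix(os.path.join(self, attr))

prefix = Prefix("/opt/reframe")
assert prefix.bin.reframe == os.path.join(prefix, "bin", "reframe")
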
@@ -4927,7 +4927,7 @@ installed executable. The check is implemented as follows:
@on_package_attributes(run_tests=True)
def check_list(self):
with working_dir(self.stage.source_path):
reframe = Executable(self.prefix.bin.reframe)
reframe = Executable(join_path(self.prefix, "bin", "reframe"))
reframe("-l")
.. warning::
@@ -5147,8 +5147,8 @@ embedded test parts.
for example in ["ex1", "ex2"]:
with test_part(
self,
f"test_example_{example}",
purpose=f"run installed {example}",
"test_example_{0}".format(example),
purpose="run installed {0}".format(example),
):
exe = which(join_path(self.prefix.bin, example))
exe()
@@ -5226,10 +5226,11 @@ Below illustrates using this feature to compile an example.
...
cxx = which(os.environ["CXX"])
cxx(
f"-L{self.prefix.lib}",
f"-I{self.prefix.include}",
f"{exe}.cpp",
"-o", exe
"-L{0}".format(self.prefix.lib),
"-I{0}".format(self.prefix.include),
"{0}.cpp".format(exe),
"-o",
exe
)
cxx_example = which(exe)
cxx_example()
@@ -5253,7 +5254,7 @@ Saving build-time files
will be important to maintain them so they work across listed or supported
versions of the package.
You can use the ``cache_extra_test_sources`` helper to copy directories
You can use the ``cache_extra_test_sources`` method to copy directories
and or files from the source build stage directory to the package's
installation directory.
@@ -5261,15 +5262,10 @@ The signature for ``cache_extra_test_sources`` is:
.. code-block:: python
def cache_extra_test_sources(pkg, srcs):
where each argument has the following meaning:
* ``pkg`` is an instance of the package for the spec under test.
* ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and or files needed for stand-alone testing.
def cache_extra_test_sources(self, srcs):
where ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and or files needed for stand-alone testing.
The paths must be relative to the staged source directory. Contents of
subdirectories and files are copied to a special test cache subdirectory
of the installation prefix. They are automatically copied to the appropriate
@@ -5290,18 +5286,21 @@ and using ``foo.c`` in a test method is illustrated below.
srcs = ["tests",
join_path("examples", "foo.c"),
join_path("examples", "bar.c")]
cache_extra_test_sources(self, srcs)
self.cache_extra_test_sources(srcs)
def test_foo(self):
exe = "foo"
src_dir = self.test_suite.current_test_cache_dir.examples
src_dir = join_path(
self.test_suite.current_test_cache_dir, "examples"
)
with working_dir(src_dir):
cc = which(os.environ["CC"])
cc(
f"-L{self.prefix.lib}",
f"-I{self.prefix.include}",
f"{exe}.c",
"-o", exe
"-L{0}".format(self.prefix.lib),
"-I{0}".format(self.prefix.include),
"{0}.c".format(exe),
"-o",
exe
)
foo = which(exe)
foo()
@@ -5327,9 +5326,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
In our example above, test methods can use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:
* ``self.test_suite.current_test_cache_dir.tests``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
.. admonition:: Library packages should build stand-alone tests
@@ -5348,7 +5347,7 @@ the copy of each entry listed in ``srcs``, respectively:
If one or more of the copied files needs to be modified to reference
the installed software, it is recommended that those changes be made
to the cached files **once** in the ``copy_test_sources`` method and
***after** the call to ``cache_extra_test_sources()``. This will
***after** the call to ``self.cache_extra_test_sources()``. This will
reduce the amount of unnecessary work in the test method **and** avoid
problems testing in shared instances and facility deployments.
@@ -5395,7 +5394,7 @@ property as shown below.
"""build and run custom-example"""
data_dir = self.test_suite.current_test_data_dir
exe = "custom-example"
src = datadir.join(f"{exe}.cpp")
src = datadir.join("{0}.cpp".format(exe))
...
# TODO: Build custom-example using src and exe
...
@@ -5445,7 +5444,7 @@ added to the package's ``test`` subdirectory.
db_filename, ".dump", output=str.split, error=str.split
)
for exp in expected:
assert re.search(exp, out), f"Expected '{exp}' in output"
assert re.search(exp, out), "Expected '{0}' in output".format(exp)
If the file was instead copied from the ``tests`` subdirectory of the staged
source code, the path would be obtained as shown below.
@@ -5495,12 +5494,9 @@ Invoking the method is the equivalent of:
.. code-block:: python
errors = []
for check in expected:
if not re.search(check, actual):
errors.append(f"Expected '{check}' in output '{actual}'")
if errors:
raise RuntimeError("\n ".join(errors))
raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
.. _accessing-files:
@@ -5540,7 +5536,7 @@ repository, and installation.
- ``self.test_suite.test_dir_for_spec(self.spec)``
* - Current Spec's Build-time Files
- ``self.test_suite.current_test_cache_dir``
- ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
- ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
* - Current Spec's Custom Test Files
- ``self.test_suite.current_test_data_dir``
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
@@ -6075,7 +6071,7 @@ in the extra attributes can implement this method like this:
@classmethod
def validate_detected_spec(cls, spec, extra_attributes):
"""Check that "compilers" is in the extra attributes."""
msg = ("the extra attribute 'compilers' must be set for "
msg = ("the extra attribute "compilers" must be set for "
"the detected spec '{0}'".format(spec))
assert "compilers" in extra_attributes, msg


@@ -1,7 +1,7 @@
sphinx==7.2.5
sphinx==6.2.1
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
sphinx-rtd-theme==1.2.2
python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.16.1
@@ -10,4 +10,4 @@ pytest==7.4.0
isort==5.12.0
black==23.7.0
flake8==6.1.0
mypy==1.5.1
mypy==1.5.0


@@ -18,13 +18,11 @@
import sys
import tempfile
from contextlib import contextmanager
from itertools import accumulate
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
import llnl.util.symlink
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
from llnl.util.symlink import islink, symlink
from spack.util.executable import Executable, which
from spack.util.path import path_to_os_path, system_path_filter
@@ -103,7 +101,7 @@ def _nop(args, ns=None, follow_symlinks=None):
pass
# follow symlinks (aka don't not follow symlinks)
follow = follow_symlinks or not (islink(src) and islink(dst))
follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
if follow:
# use the real function if it exists
def lookup(name):
@@ -171,7 +169,7 @@ def rename(src, dst):
if sys.platform == "win32":
# Windows path existence checks will sometimes fail on junctions/links/symlinks
# so check for that case
if os.path.exists(dst) or islink(dst):
if os.path.exists(dst) or os.path.islink(dst):
os.remove(dst)
os.rename(src, dst)
@@ -568,7 +566,7 @@ def set_install_permissions(path):
# If this points to a file maintained in a Spack prefix, it is assumed that
# this function will be invoked on the target. If the file is outside a
# Spack-maintained prefix, the permissions should not be modified.
if islink(path):
if os.path.islink(path):
return
if os.path.isdir(path):
os.chmod(path, 0o755)
@@ -637,7 +635,7 @@ def chmod_x(entry, perms):
@system_path_filter
def copy_mode(src, dest):
"""Set the mode of dest to that of src unless it is a link."""
if islink(dest):
if os.path.islink(dest):
return
src_mode = os.stat(src).st_mode
dest_mode = os.stat(dest).st_mode
@@ -723,12 +721,26 @@ def install(src, dest):
copy(src, dest, _permissions=True)
@system_path_filter
def resolve_link_target_relative_to_the_link(link):
"""
os.path.isdir uses os.path.exists, which for links will check
the existence of the link target. If the link target is relative to
the link, we need to construct a pathname that is valid from
our cwd (which may not be the same as the link's directory)
"""
target = os.readlink(link)
if os.path.isabs(target):
return target
link_dir = os.path.dirname(os.path.abspath(link))
return os.path.join(link_dir, target)
@system_path_filter
def copy_tree(
src: str,
dest: str,
symlinks: bool = True,
allow_broken_symlinks: bool = sys.platform != "win32",
ignore: Optional[Callable[[str], bool]] = None,
_permissions: bool = False,
):
@@ -751,8 +763,6 @@ def copy_tree(
src (str): the directory to copy
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
On Windows, setting this to True will raise an exception. Defaults to true on unix.
ignore (typing.Callable): function indicating which files to ignore
_permissions (bool): for internal use only
@@ -760,8 +770,6 @@ def copy_tree(
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
if allow_broken_symlinks and sys.platform == "win32":
raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
if _permissions:
tty.debug("Installing {0} to {1}".format(src, dest))
else:
@@ -775,11 +783,6 @@ def copy_tree(
if not files:
raise IOError("No such file or directory: '{0}'".format(src))
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
# all symlinks to this list while traversing the tree, then when finished, make all
# symlinks at the end.
links = []
for src in files:
abs_src = os.path.abspath(src)
if not abs_src.endswith(os.path.sep):
@@ -802,7 +805,7 @@ def copy_tree(
ignore=ignore,
follow_nonexisting=True,
):
if islink(s):
if os.path.islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
target = os.readlink(s)
@@ -816,9 +819,7 @@ def escaped_path(path):
tty.debug("Redirecting link {0} to {1}".format(target, new_target))
target = new_target
links.append((target, d, s))
continue
symlink(target, d)
elif os.path.isdir(link_target):
mkdirp(d)
else:
@@ -833,17 +834,9 @@ def escaped_path(path):
set_install_permissions(d)
copy_mode(s, d)
for target, d, s in links:
symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
if _permissions:
set_install_permissions(d)
copy_mode(s, d)
@system_path_filter
def install_tree(
src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
):
def install_tree(src, dest, symlinks=True, ignore=None):
"""Recursively install an entire directory tree rooted at *src*.
Same as :py:func:`copy_tree` with the addition of setting proper
@@ -854,21 +847,12 @@ def install_tree(
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (typing.Callable): function indicating which files to ignore
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
On Windows, setting this to True will raise an exception.
Raises:
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
copy_tree(
src,
dest,
symlinks=symlinks,
allow_broken_symlinks=allow_broken_symlinks,
ignore=ignore,
_permissions=True,
)
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
@system_path_filter
@@ -1272,12 +1256,7 @@ def traverse_tree(
Keyword Arguments:
order (str): Whether to do pre- or post-order traversal. Accepted
values are 'pre' and 'post'
ignore (typing.Callable): function indicating which files to ignore. This will also
ignore symlinks if they point to an ignored file (regardless of whether the symlink
is explicitly ignored); note this only supports one layer of indirection (i.e. if
you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
but not x). To avoid this, make sure the ignore function also ignores the symlink
paths too.
ignore (typing.Callable): function indicating which files to ignore
follow_nonexisting (bool): Whether to descend into directories in
``src`` that do not exit in ``dest``. Default is True
follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1304,24 +1283,11 @@ def traverse_tree(
dest_child = os.path.join(dest_path, f)
rel_child = os.path.join(rel_path, f)
# If the source path is a link and the link's source is ignored, then ignore the link too,
# but only do this if the ignore is defined.
if ignore is not None:
if islink(source_child) and not follow_links:
target = readlink(source_child)
all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
if any(map(ignore, all_parents)):
tty.warn(
f"Skipping {source_path} because the source or a part of the source's "
f"path is included in the ignores."
)
continue
# Treat as a directory
# TODO: for symlinks, os.path.isdir looks for the link target. If the
# target is relative to the link, then that may not resolve properly
# relative to our cwd - see resolve_link_target_relative_to_the_link
if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
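
One side of the hunk above builds every parent prefix of a link target with itertools.accumulate before testing the ignore callable against each of them. That step in isolation:

import os
from itertools import accumulate

target = os.path.join("x", "y", "z")
all_parents = accumulate(target.split(os.sep), lambda a, b: os.path.join(a, b))
print(list(all_parents))  # ['x', 'x/y', 'x/y/z'] (separators are os-specific)
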
@@ -1347,11 +1313,7 @@ def traverse_tree(
def lexists_islink_isdir(path):
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
if sys.platform == "win32":
if not os.path.lexists(path):
return False, False, False
return os.path.lexists(path), islink(path), os.path.isdir(path)
number of stat calls."""
# First try to lstat, so we know if it's a link or not.
try:
lst = os.lstat(path)
@@ -1566,7 +1528,7 @@ def remove_if_dead_link(path):
Parameters:
path (str): The potential dead link
"""
if islink(path) and not os.path.exists(path):
if os.path.islink(path) and not os.path.exists(path):
os.unlink(path)
@@ -1625,7 +1587,7 @@ def remove_linked_tree(path):
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
if os.path.exists(path):
if islink(path):
if os.path.islink(path):
shutil.rmtree(os.path.realpath(path), **kwargs)
os.unlink(path)
else:
@@ -1792,14 +1754,9 @@ def find(root, files, recursive=True):
files = [files]
if recursive:
tty.debug(f"Find (recursive): {root} {str(files)}")
result = _find_recursive(root, files)
return _find_recursive(root, files)
else:
tty.debug(f"Find (not recursive): {root} {str(files)}")
result = _find_non_recursive(root, files)
tty.debug(f"Find complete: {root} {str(files)}")
return result
return _find_non_recursive(root, files)
@system_path_filter
@@ -2731,7 +2688,7 @@ def remove_directory_contents(dir):
"""Remove all contents of a directory."""
if os.path.exists(dir):
for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
if os.path.isfile(entry) or islink(entry):
if os.path.isfile(entry) or os.path.islink(entry):
os.unlink(entry)
else:
shutil.rmtree(entry)


@@ -2,188 +2,77 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import os
import re
import shutil
import subprocess
import sys
import tempfile
from os.path import exists, join
from llnl.util import lang, tty
from spack.error import SpackError
from spack.util.path import system_path_filter
from llnl.util import lang
if sys.platform == "win32":
from win32file import CreateHardLink
is_windows = sys.platform == "win32"
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
def symlink(real_path, link_path):
"""
Create a link.
Create a symbolic link.
On non-Windows and Windows with System Administrator
privleges this will be a normal symbolic link via
os.symlink.
On Windows without privledges the link will be a
junction for a directory and a hardlink for a file.
On Windows the various link types are:
Symbolic Link: A link to a file or directory on the
same or different volume (drive letter) or even to
a remote file or directory (using UNC in its path).
Need System Administrator privileges to make these.
Hard Link: A link to a file on the same volume (drive
letter) only. Every file (file's data) has at least 1
hard link (file's name). But when this method creates
a new hard link there will be 2. Deleting all hard
links effectively deletes the file. Don't need System
Administrator privileges.
Junction: A link to a directory on the same or different
volume (drive letter) but not to a remote directory. Don't
need System Administrator privileges.
Parameters:
source_path (str): The real file or directory that the link points to.
Must be absolute OR relative to the link.
link_path (str): The path where the link will exist.
allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
doesn't exist. This will still raise an exception on Windows.
On Windows, use junctions if os.symlink fails.
"""
source_path = os.path.normpath(source_path)
win_source_path = source_path
link_path = os.path.normpath(link_path)
# Never allow broken links on Windows.
if sys.platform == "win32" and allow_broken_symlinks:
raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
if not allow_broken_symlinks:
# Perform basic checks to make sure symlinking will succeed
if os.path.lexists(link_path):
raise SymlinkError(f"Link path ({link_path}) already exists. Cannot create link.")
if not os.path.exists(source_path):
if os.path.isabs(source_path) and not allow_broken_symlinks:
# An absolute source path that does not exist will result in a broken link.
raise SymlinkError(
f"Source path ({source_path}) is absolute but does not exist. Resulting "
f"link would be broken so not making link."
)
else:
# os.symlink can create a link when the given source path is relative to
# the link path. Emulate this behavior and check to see if the source exists
# relative to the link patg ahead of link creation to prevent broken
# links from being made.
link_parent_dir = os.path.dirname(link_path)
relative_path = os.path.join(link_parent_dir, source_path)
if os.path.exists(relative_path):
# In order to work on windows, the source path needs to be modified to be
# relative because hardlink/junction dont resolve relative paths the same
# way as os.symlink. This is ignored on other operating systems.
win_source_path = relative_path
elif not allow_broken_symlinks:
raise SymlinkError(
f"The source path ({source_path}) is not relative to the link path "
f"({link_path}). Resulting link would be broken so not making link."
)
# Create the symlink
if sys.platform == "win32" and not _windows_can_symlink():
_windows_create_link(win_source_path, link_path)
if sys.platform != "win32":
os.symlink(real_path, link_path)
elif _win32_can_symlink():
# Windows requires target_is_directory=True when the target is a dir.
os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
else:
os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
try:
# Try to use junctions
_win32_junction(real_path, link_path)
except OSError as e:
if e.errno == errno.EEXIST:
# EEXIST error indicates that file we're trying to "link"
# is already present, don't bother trying to copy which will also fail
# just raise
raise
else:
# If all else fails, fall back to copying files
shutil.copyfile(real_path, link_path)
def islink(path: str) -> bool:
"""Override os.islink to give correct answer for spack logic.
For Non-Windows: a link can be determined with the os.path.islink method.
Windows-only methods will return false for other operating systems.
For Windows: spack considers symlinks, hard links, and junctions to
all be links, so if any of those are True, return True.
Args:
path (str): path to check if it is a link.
Returns:
bool - whether the path is any kind link or not.
"""
return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])
def islink(path):
return os.path.islink(path) or _win32_is_junction(path)
def _windows_is_hardlink(path: str) -> bool:
"""Determines if a path is a windows hard link. This is accomplished
by looking at the number of links using os.stat. A non-hard-linked file
will have a st_nlink value of 1, whereas a hard link will have a value
larger than 1. Note that both the original and hard-linked file will
return True because they share the same inode.
# '_win32' functions based on
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
def _win32_junction(path, link):
# junctions require absolute paths
if not os.path.isabs(link):
link = os.path.abspath(link)
Args:
path (str): Windows path to check for a hard link
# os.symlink will fail if link exists, emulate the behavior here
if exists(link):
raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
Returns:
bool - Whether the path is a hard link or not.
"""
if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
return False
if not os.path.isabs(path):
parent = os.path.join(link, os.pardir)
path = os.path.join(parent, path)
path = os.path.abspath(path)
return os.stat(path).st_nlink > 1
def _windows_is_junction(path: str) -> bool:
"""Determines if a path is a windows junction. A junction can be
determined using a bitwise AND operation between the file's
attribute bitmask and the known junction bitmask (0x400).
Args:
path (str): A non-file path
Returns:
bool - whether the path is a junction or not.
"""
if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
return False
import ctypes.wintypes
get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW # type: ignore[attr-defined]
get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
get_file_attributes.restype = ctypes.wintypes.DWORD
invalid_file_attributes = 0xFFFFFFFF
reparse_point = 0x400
file_attr = get_file_attributes(str(path))
if file_attr == invalid_file_attributes:
return False
return file_attr & reparse_point > 0
CreateHardLink(link, path)
@lang.memoized
def _windows_can_symlink() -> bool:
"""
Determines if windows is able to make a symlink depending on
the system configuration and the level of the user's permissions.
"""
if sys.platform != "win32":
tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
return False
def _win32_can_symlink():
tempdir = tempfile.mkdtemp()
dpath = os.path.join(tempdir, "dpath")
fpath = os.path.join(tempdir, "fpath.txt")
dpath = join(tempdir, "dpath")
fpath = join(tempdir, "fpath.txt")
dlink = os.path.join(tempdir, "dlink")
flink = os.path.join(tempdir, "flink.txt")
dlink = join(tempdir, "dlink")
flink = join(tempdir, "flink.txt")
import llnl.util.filesystem as fs
@@ -207,136 +96,24 @@ def _windows_can_symlink() -> bool:
return can_symlink_directories and can_symlink_files
def _windows_create_link(source: str, link: str):
def _win32_is_junction(path):
"""
Attempts to create a Hard Link or Junction as an alternative
to a symbolic link. This is called when symbolic links cannot
be created.
Determines if a path is a win32 junction
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
elif os.path.isdir(source):
_windows_create_junction(source=source, link=link)
elif os.path.isfile(source):
_windows_create_hard_link(path=source, link=link)
else:
raise SymlinkError(
f"Cannot create link from {source}. It is neither a file nor a directory."
)
if os.path.islink(path):
return False
if sys.platform == "win32":
import ctypes.wintypes
def _windows_create_junction(source: str, link: str):
"""Duly verify that the path and link are eligible to create a junction,
then create the junction.
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
elif not os.path.exists(source):
raise SymlinkError("Source path does not exist, cannot create a junction.")
elif os.path.lexists(link):
raise SymlinkError("Link path already exists, cannot create a junction.")
elif not os.path.isdir(source):
raise SymlinkError("Source path is not a directory, cannot create a junction.")
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
GetFileAttributes.restype = ctypes.wintypes.DWORD
import subprocess
INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
cmd = ["cmd", "/C", "mklink", "/J", link, source]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
tty.debug(out.decode())
if proc.returncode != 0:
err = err.decode()
tty.error(err)
raise SymlinkError("Make junction command returned a non-zero return code.", err)
res = GetFileAttributes(path)
return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
def _windows_create_hard_link(path: str, link: str):
"""Duly verify that the path and link are eligible to create a hard
link, then create the hard link.
"""
if sys.platform != "win32":
raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
elif not os.path.exists(path):
raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
elif os.path.lexists(link):
raise SymlinkError(f"Link path ({link}) already exists. Cannot create hard link.")
elif not os.path.isfile(path):
raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
else:
tty.debug(f"Creating hard link {link} pointing to {path}")
CreateHardLink(link, path)
def readlink(path: str):
"""Spack utility to override of os.readlink method to work cross platform"""
if _windows_is_hardlink(path):
return _windows_read_hard_link(path)
elif _windows_is_junction(path):
return _windows_read_junction(path)
else:
return os.readlink(path)
def _windows_read_hard_link(link: str) -> str:
"""Find all of the files that point to the same inode as the link"""
if sys.platform != "win32":
raise SymlinkError("Can't read hard link on non-Windows OS.")
link = os.path.abspath(link)
fsutil_cmd = ["fsutil", "hardlink", "list", link]
proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
out, err = proc.communicate()
if proc.returncode != 0:
raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")
# fsutil response does not include the drive name, so append it back to each linked file.
drive, link_tail = os.path.splitdrive(os.path.abspath(link))
links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
links.remove(link)
if len(links) == 1:
return links.pop()
elif len(links) > 1:
# TODO: How best to handle the case where 3 or more paths point to a single inode?
raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
else:
raise SymlinkError("Cannot determine hard link source path.")
def _windows_read_junction(link: str):
"""Find the path that a junction points to."""
if sys.platform != "win32":
raise SymlinkError("Can't read junction on non-Windows OS.")
link = os.path.abspath(link)
link_basename = os.path.basename(link)
link_parent = os.path.dirname(link)
fsutil_cmd = ["dir", "/a:l", link_parent]
proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
out, err = proc.communicate()
if proc.returncode != 0:
raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
if matches:
return matches.group(1)
else:
raise SymlinkError("Could not find junction path.")
@system_path_filter
def resolve_link_target_relative_to_the_link(link):
"""
os.path.isdir uses os.path.exists, which for links will check
the existence of the link target. If the link target is relative to
the link, we need to construct a pathname that is valid from
our cwd (which may not be the same as the link's directory)
"""
target = readlink(link)
if os.path.isabs(target):
return target
link_dir = os.path.dirname(os.path.abspath(link))
return os.path.join(link_dir, target)
class SymlinkError(SpackError):
"""Exception class for errors raised while creating symlinks,
junctions and hard links
"""
return False


@@ -780,7 +780,7 @@ def __enter__(self):
raise RuntimeError("file argument must be set by __init__ ")
# Open both write and reading on logfile
if isinstance(self.logfile, io.StringIO):
if type(self.logfile) == io.StringIO:
self._ioflag = True
# cannot have two streams on tempfile, so we must make our own
sys.stdout = self.logfile
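
This hunk trades an isinstance check against an exact type comparison. The difference is that isinstance also accepts subclasses:

import io

class MyStringIO(io.StringIO):
    pass

stream = MyStringIO()
print(isinstance(stream, io.StringIO))  # True: subclasses match
print(type(stream) == io.StringIO)      # False: only the exact type matches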


@@ -9,6 +9,7 @@
import io
import itertools
import json
import multiprocessing.pool
import os
import re
import shutil
@@ -875,18 +876,32 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
db: A spack database used for adding specs and then writing the index.
temp_dir (str): Location to write index.json and hash for pushing
concurrency (int): Number of parallel processes to use when fetching
"""
for file in file_list:
contents = read_method(file)
# Need full spec.json name or this gets confused with index.json.
if file.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(contents)
fetched_spec = Spec.from_dict(specfile_json)
elif file.endswith(".json"):
fetched_spec = Spec.from_json(contents)
else:
continue
Return:
None
"""
def _fetch_spec_from_mirror(spec_url):
spec_file_contents = read_method(spec_url)
if spec_file_contents:
# Need full spec.json name or this gets confused with index.json.
if spec_url.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
return Spec.from_dict(specfile_json)
if spec_url.endswith(".json"):
return Spec.from_json(spec_file_contents)
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
fetched_specs = tp.map(
llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
)
finally:
tp.terminate()
tp.join()
for fetched_spec in fetched_specs:
db.add(fetched_spec, None)
db.mark(fetched_spec, "in_buildcache", True)
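
The thread-pool variant in the hunk above maps a fetch helper over the file list and always tears the pool down afterwards. The same pattern as a standalone sketch (the worker is a placeholder, not Spack code):

import multiprocessing.pool

def fetch(url):
    return url.upper()  # placeholder for the real read_method call

tp = multiprocessing.pool.ThreadPool(processes=4)
try:
    results = tp.map(fetch, ["a.json", "b.json.sig"])
finally:
    tp.terminate()
    tp.join()
print(results)  # ['A.JSON', 'B.JSON.SIG']
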
@@ -2368,12 +2383,22 @@ def __init__(self, all_architectures):
self.possible_specs = specs
def __call__(self, spec: Spec, **kwargs):
def __call__(self, spec, **kwargs):
"""
Args:
spec: The spec being searched for
spec (str): The spec being searched for in its string representation or hash.
"""
return [s for s in self.possible_specs if s.satisfies(spec)]
matches = []
if spec.startswith("/"):
# Matching a DAG hash
query_hash = spec.replace("/", "")
for candidate_spec in self.possible_specs:
if candidate_spec.dag_hash().startswith(query_hash):
matches.append(candidate_spec)
else:
# Matching a spec constraint
matches = [s for s in self.possible_specs if s.satisfies(spec)]
return matches
class FetchIndexError(Exception):


@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
config_scopes: MutableSequence["spack.config.ConfigScope"] = [
spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
]
configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
for name, path in configuration_paths:
platform = spack.platforms.host().name
platform_scope = spack.config.ConfigScope(


@@ -480,18 +480,11 @@ def _add_externals_if_missing() -> None:
spack.repo.PATH.get_pkg_class("bison"),
# GnuPG
spack.repo.PATH.get_pkg_class("gawk"),
# develop deps
spack.repo.PATH.get_pkg_class("git"),
]
if IS_WINDOWS:
search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
externals = spack.detection.by_executable(search_list)
# System git is typically deprecated, so mark as non-buildable to force it as external
non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
spack.detection.update_configuration(
non_buildable_externals, scope="bootstrap", buildable=False
)
detected_packages = spack.detection.by_executable(search_list)
spack.detection.update_configuration(detected_packages, scope="bootstrap")
def clingo_root_spec() -> str:


@@ -23,7 +23,6 @@
from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path
from .core import _add_externals_if_missing
class BootstrapEnvironment(spack.environment.Environment):
@@ -186,7 +185,6 @@ def pytest_root_spec() -> str:
def ensure_environment_dependencies() -> None:
"""Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
_add_externals_if_missing()
with BootstrapEnvironment() as env:
env.update_installations()
env.update_syspath_and_environ()


@@ -1027,7 +1027,7 @@ def get_cmake_prefix_path(pkg):
def _setup_pkg_and_run(
serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
):
context = kwargs.get("context", "build")
@@ -1048,12 +1048,12 @@ def _setup_pkg_and_run(
pkg, dirty=kwargs.get("dirty", False), context=context
)
return_value = function(pkg, kwargs)
write_pipe.send(return_value)
child_pipe.send(return_value)
except StopPhase as e:
# Do not create a full ChildError from this, it's not an error
# it's a control statement.
write_pipe.send(e)
child_pipe.send(e)
except BaseException:
# catch ANYTHING that goes wrong in the child process
exc_type, exc, tb = sys.exc_info()
@@ -1102,10 +1102,10 @@ def _setup_pkg_and_run(
context,
package_context,
)
write_pipe.send(ce)
child_pipe.send(ce)
finally:
write_pipe.close()
child_pipe.close()
if input_multiprocess_fd is not None:
input_multiprocess_fd.close()
@@ -1149,7 +1149,7 @@ def child_fun():
For more information on `multiprocessing` child process creation
mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
"""
read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
parent_pipe, child_pipe = multiprocessing.Pipe()
input_multiprocess_fd = None
jobserver_fd1 = None
jobserver_fd2 = None
@@ -1174,7 +1174,7 @@ def child_fun():
serialized_pkg,
function,
kwargs,
write_pipe,
child_pipe,
input_multiprocess_fd,
jobserver_fd1,
jobserver_fd2,
@@ -1183,12 +1183,6 @@ def child_fun():
p.start()
# We close the writable end of the pipe now to be sure that p is the
# only process which owns a handle for it. This ensures that when p
# closes its handle for the writable end, read_pipe.recv() will
# promptly report the readable end as being ready.
write_pipe.close()
except InstallError as e:
e.pkg = pkg
raise
@@ -1198,16 +1192,7 @@ def child_fun():
if input_multiprocess_fd is not None:
input_multiprocess_fd.close()
def exitcode_msg(p):
typ = "exit" if p.exitcode >= 0 else "signal"
return f"{typ} {abs(p.exitcode)}"
try:
child_result = read_pipe.recv()
except EOFError:
p.join()
raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
child_result = parent_pipe.recv()
p.join()
# If returns a StopPhase, raise it
@@ -1227,10 +1212,6 @@ def exitcode_msg(p):
child_result.print_context()
raise child_result
# Fallback. Usually caught beforehand in EOFError above.
if p.exitcode != 0:
raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
return child_result
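
One side of this hunk uses a one-way pipe and has the parent close its copy of the writable end, so that read_pipe.recv() raises EOFError promptly if the child exits without sending a result. A minimal sketch of that pattern, with a trivial stand-in child:

import multiprocessing

def child(write_pipe):
    write_pipe.send("ok")
    write_pipe.close()

if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    write_pipe.close()  # parent drops its handle to the writable end
    try:
        print(read_pipe.recv())  # "ok", or EOFError if the child died first
    except EOFError:
        print("child exited without sending a result")
    p.join()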


@@ -55,8 +55,7 @@ def flags_to_build_system_args(self, flags):
setattr(self, "configure_flag_args", [])
for flag, values in flags.items():
if values:
var_name = "LIBS" if flag == "ldlibs" else flag.upper()
values_str = "{0}={1}".format(var_name, " ".join(values))
values_str = "{0}={1}".format(flag.upper(), " ".join(values))
self.configure_flag_args.append(values_str)
# Spack's fflags are meant for both F77 and FC, therefore we
# additionaly set FCFLAGS if required.
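
One side of the hunk above routes ldlibs into the autotools LIBS variable instead of LDLIBS. The mapping reduces to:

def flag_var(flag: str) -> str:
    # ldlibs feeds LIBS; every other flag maps to its uppercase name
    return "LIBS" if flag == "ldlibs" else flag.upper()

assert flag_var("ldlibs") == "LIBS"
assert flag_var("cflags") == "CFLAGS"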


@@ -30,7 +30,7 @@
class PythonExtension(spack.package_base.PackageBase):
maintainers("adamjstewart")
maintainers("adamjstewart", "pradyunsg")
@property
def import_modules(self):


@@ -20,9 +20,9 @@
def misc_cache_location():
"""The ``MISC_CACHE`` is Spack's cache for small data.
"""The ``misc_cache`` is Spack's cache for small data.
Currently the ``MISC_CACHE`` stores indexes for virtual dependency
Currently the ``misc_cache`` stores indexes for virtual dependency
providers and for which packages provide which tags.
"""
path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():
#: Spack's cache for small data
MISC_CACHE: Union[
misc_cache: Union[
spack.util.file_cache.FileCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_misc_cache)
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[
fetch_cache: Union[
spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_fetch_cache)


@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
print()
header = "%s{%s} / %s{%s}" % (
spack.spec.ARCHITECTURE_COLOR,
spack.spec.architecture_color,
architecture if architecture else "no arch",
spack.spec.COMPILER_COLOR,
spack.spec.compiler_color,
f"{compiler.display_str}" if compiler else "no compiler",
)

View File

@@ -69,10 +69,11 @@
def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
help="configuration scope to read/modify",
)
@@ -169,7 +170,7 @@ def _reset(args):
if not ok_to_continue:
raise RuntimeError("Aborting")
for scope in spack.config.CONFIG.file_scopes:
for scope in spack.config.config.file_scopes:
# The default scope should stay untouched
if scope.name == "defaults":
continue
@@ -186,7 +187,7 @@ def _reset(args):
if os.path.exists(bootstrap_yaml):
shutil.move(bootstrap_yaml, backup_file)
spack.config.CONFIG.clear_caches()
spack.config.config.clear_caches()
def _root(args):

View File

@@ -20,7 +20,6 @@
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.error
import spack.mirror
import spack.relocate
import spack.repo
@@ -79,11 +78,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
"Alternatively, one can decide to build a cache for only the package or only the "
"dependencies",
)
push.add_argument(
"--fail-fast",
action="store_true",
help="stop pushing on first failure (default is best effort)",
)
arguments.add_common_arguments(push, ["specs"])
push.set_defaults(func=push_fn)
@@ -155,11 +149,12 @@ def setup_parser(subparser: argparse.ArgumentParser):
# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
check.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check",
)
@@ -302,7 +297,6 @@ def push_fn(args):
tty.info(f"Selected {len(specs)} specs to push to {url}")
skipped = []
failed = []
# tty printing
color = clr.get_color_when()
@@ -333,17 +327,11 @@ def push_fn(args):
except bindist.NoOverwriteException:
skipped.append(format_spec(spec))
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
raise
failed.append((format_spec(spec), e))
if skipped:
if len(specs) == 1:
tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
elif len(skipped) == len(specs):
tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
tty.info("All specs are already in the buildcache. Use --force to overwite them.")
else:
tty.info(
"The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -353,17 +341,6 @@ def push_fn(args):
)
)
if failed:
if len(failed) == 1:
raise failed[0][1]
raise spack.error.SpackError(
f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
"\n".join(
elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
),
)
def install_fn(args):
"""install from a binary package"""

View File

@@ -289,10 +289,6 @@ def ci_rebuild(args):
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
# If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
if signing_key:
spack_ci.import_signing_key(signing_key)
# Fail early if signing is required but we don't have a signing key
sign_binaries = require_signing is not None and require_signing.lower() == "true"
if sign_binaries and not spack_ci.can_sign_binaries():
@@ -422,6 +418,11 @@ def ci_rebuild(args):
dst_file = os.path.join(repro_dir, file_name)
shutil.copyfile(src_file, dst_file)
# If signing key was provided via "SPACK_SIGNING_KEY", then try to
# import it.
if signing_key:
spack_ci.import_signing_key(signing_key)
# Write this job's spec json into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
@@ -678,7 +679,7 @@ def ci_rebuild(args):
input_spec=job_spec,
buildcache_mirror_url=buildcache_mirror_url,
pipeline_mirror_url=pipeline_mirror_url,
sign_binaries=spack_ci.can_sign_binaries(),
sign_binaries=sign_binaries,
):
msg = tty.msg if result.success else tty.warn
msg(

View File

@@ -118,7 +118,7 @@ def clean(parser, args):
if args.downloads:
tty.msg("Removing cached downloads")
spack.caches.FETCH_CACHE.destroy()
spack.caches.fetch_cache.destroy()
if args.failures:
tty.msg("Removing install failure marks")
@@ -126,7 +126,7 @@ def clean(parser, args):
if args.misc_cache:
tty.msg("Removing cached information on repositories")
spack.caches.MISC_CACHE.destroy()
spack.caches.misc_cache.destroy()
if args.python_cache:
tty.msg("Removing python cache files")

View File

@@ -812,9 +812,6 @@ def bash(args: Namespace, out: IO) -> None:
parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)
aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
out.write(f'SPACK_ALIASES="{aliases}"\n\n')
writer = BashCompletionWriter(parser.prog, out, args.aliases)
writer.write(parser)

View File

@@ -24,6 +24,7 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Find
find_parser = sp.add_parser(
@@ -35,7 +36,7 @@ def setup_parser(subparser):
find_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
@@ -49,7 +50,7 @@ def setup_parser(subparser):
remove_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=None,
help="configuration scope to modify",
)
@@ -59,7 +60,7 @@ def setup_parser(subparser):
list_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)
@@ -70,7 +71,7 @@ def setup_parser(subparser):
info_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)
@@ -92,7 +93,7 @@ def compiler_find(args):
n = len(new_compilers)
s = "s" if n > 1 else ""
config = spack.config.CONFIG
config = spack.config.config
filename = config.get_config_filename(args.scope, "compilers")
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
@@ -185,7 +186,7 @@ def compiler_list(args):
os_str = os
if target:
os_str += "-%s" % target
cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
tty.hline(colorize(cname), char="-")
colify(reversed(sorted(c.spec.display_str for c in compilers)))

View File

@@ -13,11 +13,12 @@
def setup_parser(subparser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
subparser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
help="configuration scope to read/modify",
)

View File

@@ -27,12 +27,13 @@
def setup_parser(subparser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# User can only choose one
subparser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
help="configuration scope to read/modify",
)
@@ -44,7 +45,7 @@ def setup_parser(subparser):
help="configuration section to print\n\noptions: %(choices)s",
nargs="?",
metavar="section",
choices=spack.config.SECTION_SCHEMAS,
choices=spack.config.section_schemas,
)
blame_parser = sp.add_parser(
@@ -54,7 +55,7 @@ def setup_parser(subparser):
"section",
help="configuration section to print\n\noptions: %(choices)s",
metavar="section",
choices=spack.config.SECTION_SCHEMAS,
choices=spack.config.section_schemas,
)
edit_parser = sp.add_parser("edit", help="edit configuration file")
@@ -63,7 +64,7 @@ def setup_parser(subparser):
help="configuration section to edit\n\noptions: %(choices)s",
metavar="section",
nargs="?",
choices=spack.config.SECTION_SCHEMAS,
choices=spack.config.section_schemas,
)
edit_parser.add_argument(
"--print-file", action="store_true", help="print the file name that would be edited"
@@ -145,10 +146,10 @@ def config_get(args):
scope, section = _get_scope_and_section(args)
if section is not None:
spack.config.CONFIG.print_section(section)
spack.config.config.print_section(section)
elif scope and scope.startswith("env:"):
config_file = spack.config.CONFIG.get_config_filename(scope, section)
config_file = spack.config.config.get_config_filename(scope, section)
if os.path.exists(config_file):
with open(config_file) as f:
print(f.read())
@@ -161,7 +162,7 @@ def config_get(args):
def config_blame(args):
"""Print out line-by-line blame of merged YAML."""
spack.config.CONFIG.print_section(args.section, blame=True)
spack.config.config.print_section(args.section, blame=True)
def config_edit(args):
@@ -180,7 +181,7 @@ def config_edit(args):
scope, section = _get_scope_and_section(args)
if not scope and not section:
tty.die("`spack config edit` requires a section argument or an active environment.")
config_file = spack.config.CONFIG.get_config_filename(scope, section)
config_file = spack.config.config.get_config_filename(scope, section)
if args.print_file:
print(config_file)
@@ -193,7 +194,7 @@ def config_list(args):
Used primarily for shell tab completion scripts.
"""
print(" ".join(list(spack.config.SECTION_SCHEMAS)))
print(" ".join(list(spack.config.section_schemas)))
def config_add(args):
@@ -250,19 +251,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
def config_update(args):
# Read the configuration files
spack.config.CONFIG.get_config(args.section, scope=args.scope)
spack.config.config.get_config(args.section, scope=args.scope)
updates: List[spack.config.ConfigScope] = list(
filter(
lambda s: not isinstance(
s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
),
spack.config.CONFIG.format_updates[args.section],
spack.config.config.format_updates[args.section],
)
)
cannot_overwrite, skip_system_scope = [], False
for scope in updates:
cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
can_be_updated = _can_update_config_file(scope, cfg_file)
if not can_be_updated:
if scope.name == "system":
@@ -301,7 +302,7 @@ def config_update(args):
" the latest schema format:\n\n"
)
for scope in updates:
cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
msg += (
"\nIf the configuration files are updated, versions of Spack "
@@ -324,7 +325,7 @@ def config_update(args):
# Make a backup copy and rewrite the file
bkp_file = cfg_file + ".bkp"
shutil.copy(cfg_file, bkp_file)
spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
tty.msg(f'File "{cfg_file}" updated [backup={bkp_file}]')
@@ -336,13 +337,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):
def config_revert(args):
scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]
scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
# Search for backup files in the configuration scopes
Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
to_be_restored, cannot_overwrite = [], []
for scope in scopes:
cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
cfg_file = spack.config.config.get_config_filename(scope, args.section)
bkp_file = cfg_file + ".bkp"
# If the backup file doesn't exist, move to the next scope
@@ -456,7 +457,7 @@ def config_prefer_upstream(args):
existing = spack.config.get("packages", scope=scope)
new = spack.config.merge_yaml(existing, pkgs)
spack.config.set("packages", new, scope)
config_file = spack.config.CONFIG.get_config_filename(scope, section)
config_file = spack.config.config.get_config_filename(scope, section)
tty.msg("Updated config at {0}".format(config_file))

View File

@@ -239,13 +239,6 @@ def env_deactivate_setup_parser(subparser):
const="bat",
help="print bat commands to activate the environment",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to activate the environment",
)
def env_deactivate(args):

View File

@@ -13,7 +13,6 @@
import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
@@ -28,6 +27,7 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
find_parser.add_argument(
@@ -47,7 +47,7 @@ def setup_parser(subparser):
find_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope("packages"),
help="configuration scope to modify",
)
@@ -165,7 +165,7 @@ def external_find(args):
detected_packages, scope=args.scope, buildable=not args.not_buildable
)
if new_entries:
path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
path = spack.config.config.get_config_filename(args.scope, "packages")
msg = "The following specs have been detected on this system and added to {0}"
tty.msg(msg.format(path))
spack.cmd.display_specs(new_entries)

View File

@@ -64,11 +64,11 @@ def section_title(s):
def version(s):
return spack.spec.VERSION_COLOR + s + plain_format
return spack.spec.version_color + s + plain_format
def variant(s):
return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format
return spack.spec.enabled_variant_color + s + plain_format
class VariantFormatter:

View File

@@ -52,13 +52,6 @@ def setup_parser(subparser):
const="bat",
help="print bat commands to load the package",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to load the package",
)
subparser.add_argument(
"--first",

View File

@@ -90,6 +90,7 @@ def setup_parser(subparser):
# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Add
add_parser = sp.add_parser("add", help=mirror_add.__doc__)
@@ -98,7 +99,7 @@ def setup_parser(subparser):
add_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
@@ -118,7 +119,7 @@ def setup_parser(subparser):
remove_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
@@ -137,7 +138,7 @@ def setup_parser(subparser):
set_url_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
@@ -166,7 +167,7 @@ def setup_parser(subparser):
set_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
@@ -177,7 +178,7 @@ def setup_parser(subparser):
list_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)

View File

@@ -20,6 +20,7 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Create
create_parser = sp.add_parser("create", help=repo_create.__doc__)
@@ -44,7 +45,7 @@ def setup_parser(subparser):
list_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)
@@ -55,7 +56,7 @@ def setup_parser(subparser):
add_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
@@ -68,7 +69,7 @@ def setup_parser(subparser):
remove_parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

View File

@@ -137,7 +137,7 @@ def solve(parser, args):
# these are the same options as `spack spec`
install_status_fn = spack.spec.Spec.install_status
fmt = spack.spec.DISPLAY_FORMAT
fmt = spack.spec.display_format
if args.namespaces:
fmt = "{namespace}." + fmt

View File

@@ -77,7 +77,7 @@ def setup_parser(subparser):
def spec(parser, args):
install_status_fn = spack.spec.Spec.install_status
fmt = spack.spec.DISPLAY_FORMAT
fmt = spack.spec.display_format
if args.namespaces:
fmt = "{namespace}." + fmt

View File

@@ -209,11 +209,12 @@ def unit_test(parser, args, unknown_args):
# mock configuration used by unit tests
# Note: skip on Windows here because, for the moment,
# clingo is wholly unsupported from bootstrap
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
if pytest is None:
spack.bootstrap.ensure_environment_dependencies()
import pytest
if sys.platform != "win32":
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
if pytest is None:
spack.bootstrap.ensure_environment_dependencies()
import pytest
if args.pytest_help:
# make the pytest.main help output more accurate

View File

@@ -51,13 +51,6 @@ def setup_parser(subparser):
const="bat",
help="print bat commands to load the package",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to load the package",
)
subparser.add_argument(
"-a", "--all", action="store_true", help="unload all loaded Spack packages"

View File

@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):
def compiler_config_files():
config_files = list()
config = spack.config.CONFIG
config = spack.config.config
for scope in config.file_scopes:
name = scope.name
compiler_config = config.get("compilers", scope=name)
@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
"""
candidate_scopes = [scope]
if scope is None:
candidate_scopes = spack.config.CONFIG.scopes.keys()
candidate_scopes = spack.config.config.scopes.keys()
removal_happened = False
for current_scope in candidate_scopes:
@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):
@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
config = spack.config.CONFIG
config = spack.config.config
scope_to_compilers = {}
for scope in config.scopes:

View File

@@ -29,90 +29,6 @@
}
class CmdCall:
"""Compose a call to `cmd` for an ordered series of cmd commands/scripts"""
def __init__(self, *cmds):
if not cmds:
raise RuntimeError(
"""Attempting to run commands from CMD without specifying commands.
Please add commands to be run."""
)
self._cmds = cmds
def __call__(self):
out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT) # novermin
return out.decode("utf-16le", errors="replace") # novermin
@property
def cmd_line(self):
base_call = "cmd /u /c "
commands = " && ".join([x.command_str() for x in self._cmds])
# If multiple commands are being invoked by a single subshell
# they must be encapsulated by a double quote. Always double
# quote to be sure of proper handling
# cmd will properly resolve nested double quotes as needed
#
# `set` writes out the active env to the subshell stdout,
# and in this context we are always trying to obtain env
# state, so it should always be appended
return base_call + f'"{commands} && set"'
class VarsInvocation:
def __init__(self, script):
self._script = script
def command_str(self):
return f'"{self._script}"'
@property
def script(self):
return self._script
class VCVarsInvocation(VarsInvocation):
def __init__(self, script, arch, msvc_version):
super(VCVarsInvocation, self).__init__(script)
self._arch = arch
self._msvc_version = msvc_version
@property
def sdk_ver(self):
"""Accessor for Windows SDK version property
Note: this property may not be set by
the calling context; in that case it
returns an empty string.
This property will ONLY be set if the SDK package
is a dependency somewhere in the Spack DAG of the package
for which we are constructing an MSVC compiler env.
Otherwise it should be left unset so the VCVARS
script can use its internal heuristics to determine an
appropriate SDK version.
"""
if getattr(self, "_sdk_ver", None):
return self._sdk_ver + ".0"
return ""
@sdk_ver.setter
def sdk_ver(self, val):
self._sdk_ver = val
@property
def arch(self):
return self._arch
@property
def vcvars_ver(self):
return f"-vcvars_ver={self._msvc_version}"
def command_str(self):
script = super(VCVarsInvocation, self).command_str()
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
def get_valid_fortran_pth(comp_ver):
cl_ver = str(comp_ver)
sort_fn = lambda fc_ver: StrictVersion(fc_ver)
@@ -159,48 +75,22 @@ class Msvc(Compiler):
# file based on compiler executable path.
def __init__(self, *args, **kwargs):
# This positional argument "paths" is later parsed and processed by the base class
# via the call to `super` later in this method
paths = args[3]
# This positional argument "cspec" is also parsed and handled by the base class
# constructor
cspec = args[0]
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
paths[:] = new_pth
new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
args[3][:] = new_pth
super().__init__(*args, **kwargs)
# To use the MSVC compilers, VCVARS must be invoked.
# VCVARS is located at a fixed location, referenceable
# idiomatically by the following relative path from the
# compiler.
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
env_cmds = []
compiler_root = os.path.join(self.cc, "../../../../../../..")
vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
env_cmds.append(self.vcvars_call)
# Below is a check for a valid fortran path
# paths has the c, cxx, fc, and f77 paths, in that order
# paths[2] refers to the fc path; checking it is a generic test
# for the presence of a fortran compiler
if paths[2]:
if os.getenv("ONEAPI_ROOT"):
# If this is found, it sets all the vars
oneapi_root = os.getenv("ONEAPI_ROOT")
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
oneapi_version_setvars = os.path.join(
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
)
# order matters here, the specific version env must be invoked first,
# otherwise it will be ignored if the root setvars sets up the oneapi
# env first
env_cmds.extend(
[VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
)
self.msvc_compiler_environment = CmdCall(*env_cmds)
self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
else:
# To use the MSVC compilers, VCVARS must be invoked.
# VCVARS is located at a fixed location, referenceable
# idiomatically by the following relative path from the
# compiler.
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
@property
def msvc_version(self):
@@ -229,29 +119,15 @@ def platform_toolset_ver(self):
"""
return self.msvc_version[:2].joined.string[:3]
def _compiler_version(self, compiler):
"""Returns version object for given compiler"""
# ignore_errors below is true here due to ifx's
# non-zero return code when it is not provided
# an input file
return Version(
re.search(
Msvc.version_regex,
spack.compiler.get_compiler_version_output(
compiler, version_arg=None, ignore_errors=True
),
).group(1)
)
@property
def cl_version(self):
"""Cl toolset version"""
return self._compiler_version(self.cc)
@property
def ifx_version(self):
"""Ifx compiler version associated with this version of MSVC"""
return self._compiler_version(self.fc)
return Version(
re.search(
Msvc.version_regex,
spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
).group(1)
)
@property
def vs_root(self):
@@ -270,12 +146,27 @@ def setup_custom_environment(self, pkg, env):
# output, sort into dictionary, use that to make the build
# environment.
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
# vcvars can target specific sdk versions, force it to pick up concretized sdk
# version, if needed by spec
if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string
sdk_ver = (
""
if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
else pkg.spec["win-sdk"].version.string + ".0"
)
# provide vcvars with msvc version selected by concretization,
# not whatever it happens to pick up on the system (highest available version)
out = subprocess.check_output( # novermin
'cmd /u /c "{}" {} {} {} && set'.format(
self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
),
stderr=subprocess.STDOUT,
)
if sys.version_info[0] >= 3:
out = out.decode("utf-16le", errors="replace") # novermin
out = self.msvc_compiler_environment()
int_env = dict(
(key, value)
for key, _, value in (line.partition("=") for line in out.splitlines())
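
Both versions of this compiler class capture the MSVC environment the same way: run the relevant vars script in a cmd subshell with "&& set" appended, decode the UTF-16LE output produced by cmd /u, and split each KEY=VALUE line on the first equals sign. A hedged sketch of that capture step (Windows-only; the script path and arguments are illustrative):

import subprocess

def capture_env(script_path, *args):
    # /u makes cmd emit Unicode (UTF-16LE); "set" dumps the resulting env
    cmd_line = 'cmd /u /c ""{}" {} && set"'.format(script_path, " ".join(args))
    out = subprocess.check_output(cmd_line, stderr=subprocess.STDOUT)
    out = out.decode("utf-16le", errors="replace")
    # split each "KEY=VALUE" line on the first "=" only
    return dict(
        (key, value)
        for key, _, value in (line.partition("=") for line in out.splitlines())
    )

env = capture_env(r"C:\path\to\vcvars64.bat", "amd64")  # hypothetical path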

View File

@@ -28,7 +28,6 @@
import spack.abi
import spack.compilers
import spack.config
import spack.environment
import spack.error
import spack.platforms
@@ -38,6 +37,7 @@
import spack.tengine
import spack.util.path
import spack.variant as vt
from spack.config import config
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
from spack.version import ClosedOpenRange, VersionList, ver
@@ -76,7 +76,7 @@ class Concretizer:
def __init__(self, abstract_spec=None):
if Concretizer.check_for_compiler_existence is None:
Concretizer.check_for_compiler_existence = not spack.config.get(
Concretizer.check_for_compiler_existence = not config.get(
"config:install_missing_compilers", False
)
self.abstract_spec = abstract_spec

View File

@@ -47,8 +47,6 @@
import spack.platforms
import spack.schema
import spack.schema.bootstrap
import spack.schema.cdash
import spack.schema.ci
import spack.schema.compilers
import spack.schema.concretizer
import spack.schema.config
@@ -66,7 +64,7 @@
from spack.util.cpus import cpus_available
#: Dict from section names -> schema for that section
SECTION_SCHEMAS = {
section_schemas = {
"compilers": spack.schema.compilers.schema,
"concretizer": spack.schema.concretizer.schema,
"mirrors": spack.schema.mirrors.schema,
@@ -82,16 +80,16 @@
# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
all_schemas = copy.deepcopy(section_schemas)
all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
#: Path to the default configuration
CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
#: Hard-coded default values for some key configuration options.
#: This ensures that Spack will still work even if config.yaml in
#: the defaults scope is removed.
CONFIG_DEFAULTS = {
config_defaults = {
"config": {
"debug": False,
"connect_timeout": 10,
@@ -107,10 +105,10 @@
#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
#: Base name for the (internal) overrides scope.
_OVERRIDES_BASE_NAME = "overrides-"
overrides_base_name = "overrides-"
class ConfigScope:
@@ -136,7 +134,7 @@ def get_section_filename(self, section):
def get_section(self, section):
if section not in self.sections:
path = self.get_section_filename(section)
schema = SECTION_SCHEMAS[section]
schema = section_schemas[section]
data = read_config_file(path, schema)
self.sections[section] = data
return self.sections[section]
@@ -147,7 +145,7 @@ def _write_section(self, section):
# We copy data here to avoid adding defaults at write time
validate_data = copy.deepcopy(data)
validate(validate_data, SECTION_SCHEMAS[section])
validate(validate_data, section_schemas[section])
try:
mkdirp(self.path)
@@ -319,7 +317,7 @@ def __init__(self, name, data=None):
data = InternalConfigScope._process_dict_keyname_overrides(data)
for section in data:
dsec = data[section]
validate({section: dsec}, SECTION_SCHEMAS[section])
validate({section: dsec}, section_schemas[section])
self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)
def get_section_filename(self, section):
@@ -335,7 +333,7 @@ def _write_section(self, section):
"""This only validates, as the data is already in memory."""
data = self.get_section(section)
if data is not None:
validate(data, SECTION_SCHEMAS[section])
validate(data, section_schemas[section])
self.sections[section] = _mark_internal(data, self.name)
def __repr__(self):
@@ -432,7 +430,7 @@ def file_scopes(self) -> List[ConfigScope]:
return [
s
for s in self.scopes.values()
if (type(s) is ConfigScope or type(s) is SingleFileScope)
if (type(s) == ConfigScope or type(s) == SingleFileScope)
]
def highest_precedence_scope(self) -> ConfigScope:
@@ -713,11 +711,11 @@ def override(path_or_scope, value=None):
"""
if isinstance(path_or_scope, ConfigScope):
overrides = path_or_scope
CONFIG.push_scope(path_or_scope)
config.push_scope(path_or_scope)
else:
base_name = _OVERRIDES_BASE_NAME
base_name = overrides_base_name
# Ensure the new override gets a unique scope name
current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
num_overrides = len(current_overrides)
while True:
scope_name = "{0}{1}".format(base_name, num_overrides)
@@ -727,19 +725,19 @@ def override(path_or_scope, value=None):
break
overrides = InternalConfigScope(scope_name)
CONFIG.push_scope(overrides)
CONFIG.set(path_or_scope, value, scope=scope_name)
config.push_scope(overrides)
config.set(path_or_scope, value, scope=scope_name)
try:
yield CONFIG
yield config
finally:
scope = CONFIG.remove_scope(overrides.name)
scope = config.remove_scope(overrides.name)
assert scope is overrides
#: configuration scopes added on the command line
#: set by ``spack.main.main()``.
COMMAND_LINE_SCOPES: List[str] = []
command_line_scopes: List[str] = []
def _add_platform_scope(cfg, scope_type, name, path):
@@ -783,14 +781,14 @@ def create():
cfg = Configuration()
# first do the builtin, hardcoded defaults
builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
builtin = InternalConfigScope("_builtin", config_defaults)
cfg.push_scope(builtin)
# Builtin paths to configuration files in Spack
configuration_paths = [
# Default configuration scope is the lowest-level scope. These are
# versioned with Spack and can be overridden by systems, sites or users
CONFIGURATION_DEFAULTS_PATH
configuration_defaults_path
]
disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
@@ -817,7 +815,7 @@ def create():
_add_platform_scope(cfg, ConfigScope, name, path)
# add command-line scopes
_add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
_add_command_line_scopes(cfg, command_line_scopes)
# we make a special scope for spack commands so that they can
# override configuration options.
@@ -827,7 +825,7 @@ def create():
#: This is the singleton configuration instance for Spack.
CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
def add_from_file(filename, scope=None):
@@ -840,7 +838,7 @@ def add_from_file(filename, scope=None):
# update all sections from config dict
# We have to iterate on keys to keep overrides from the file
for section in data.keys():
if section in SECTION_SCHEMAS.keys():
if section in section_schemas.keys():
# Special handling for compiler scope difference
# Has to be handled after we choose a section
if scope is None:
@@ -851,7 +849,7 @@ def add_from_file(filename, scope=None):
new = merge_yaml(existing, value)
# We cannot call config.set directly (set is a type)
CONFIG.set(section, new, scope)
config.set(section, new, scope)
def add(fullpath, scope=None):
@@ -863,7 +861,6 @@ def add(fullpath, scope=None):
has_existing_value = True
path = ""
override = False
value = syaml.load_config(components[-1])
for idx, name in enumerate(components[:-1]):
# First handle double colons in constructing path
colon = "::" if override else ":" if path else ""
@@ -884,14 +881,14 @@ def add(fullpath, scope=None):
existing = get_valid_type(path)
# construct value from this point down
value = syaml.load_config(components[-1])
for component in reversed(components[idx + 1 : -1]):
value = {component: value}
break
if override:
path += "::"
if has_existing_value:
path, _, value = fullpath.rpartition(":")
value = syaml.load_config(value)
existing = get(path, scope=scope)
# append values to lists
@@ -900,12 +897,12 @@ def add(fullpath, scope=None):
# merge value into existing
new = merge_yaml(existing, value)
CONFIG.set(path, new, scope)
config.set(path, new, scope)
def get(path, default=None, scope=None):
"""Module-level wrapper for ``Configuration.get()``."""
return CONFIG.get(path, default, scope)
return config.get(path, default, scope)
def set(path, value, scope=None):
@@ -913,26 +910,26 @@ def set(path, value, scope=None):
Accepts the path syntax described in ``get()``.
"""
return CONFIG.set(path, value, scope)
return config.set(path, value, scope)
def add_default_platform_scope(platform):
plat_name = os.path.join("defaults", platform)
plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
CONFIG.push_scope(ConfigScope(plat_name, plat_path))
plat_path = os.path.join(configuration_defaults_path[1], platform)
config.push_scope(ConfigScope(plat_name, plat_path))
def scopes():
"""Convenience function to get list of configuration scopes."""
return CONFIG.scopes
return config.scopes
def _validate_section_name(section):
"""Exit if the section is not a valid section."""
if section not in SECTION_SCHEMAS:
if section not in section_schemas:
raise ConfigSectionError(
"Invalid config section: '%s'. Options are: %s"
% (section, " ".join(SECTION_SCHEMAS.keys()))
% (section, " ".join(section_schemas.keys()))
)
@@ -993,7 +990,7 @@ def read_config_file(filename, schema=None):
if data:
if not schema:
key = next(iter(data))
schema = _ALL_SCHEMAS[key]
schema = all_schemas[key]
validate(data, schema)
return data
@@ -1092,7 +1089,7 @@ def get_valid_type(path):
test_data = {component: test_data}
try:
validate(test_data, SECTION_SCHEMAS[section])
validate(test_data, section_schemas[section])
except (ConfigFormatError, AttributeError) as e:
jsonschema_error = e.validation_error
if jsonschema_error.validator == "type":
@@ -1232,17 +1229,11 @@ def they_are(t):
return copy.copy(source)
#
# Process a path argument to config.set() that may contain overrides ('::' or
# trailing ':')
#
def process_config_path(path):
"""Process a path argument to config.set() that may contain overrides ('::' or
trailing ':')
Note: quoted value path components will be processed as a single value (escaping colons);
quoted path components outside of the value are considered ill-formed and will
raise.
e.g. `this:is:a:path:'value:with:colon'` will yield:
[this, is, a, path, value:with:colon]
"""
result = []
if path.startswith(":"):
raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
@@ -1269,16 +1260,6 @@ def process_config_path(path):
front = syaml.syaml_str(front)
front.append = True
quote = "['\"]"
not_quote = "[^'\"]"
if re.match(f"^{quote}", path):
m = re.match(rf"^{quote}({not_quote}+){quote}$", path)
if not m:
raise ValueError("Quotes indicate value, but there are additional path entries")
result.append(m.group(1))
break
result.append(front)
return result
@@ -1297,9 +1278,9 @@ def default_modify_scope(section="config"):
If this is not 'compilers', a general (non-platform) scope is used.
"""
if section == "compilers":
return CONFIG.highest_precedence_scope().name
return spack.config.config.highest_precedence_scope().name
else:
return CONFIG.highest_precedence_non_platform_scope().name
return spack.config.config.highest_precedence_non_platform_scope().name
def default_list_scope():
@@ -1356,18 +1337,18 @@ def use_configuration(*scopes_or_paths):
Returns:
Configuration object associated with the scopes passed as arguments
"""
global CONFIG
global config
# Normalize input and construct a Configuration object
configuration = _config_from(scopes_or_paths)
CONFIG.clear_caches(), configuration.clear_caches()
config.clear_caches(), configuration.clear_caches()
saved_config, CONFIG = CONFIG, configuration
saved_config, config = config, configuration
try:
yield configuration
finally:
CONFIG = saved_config
config = saved_config
@llnl.util.lang.memoized
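
The branch deleted at the end of process_config_path is what implemented the quoted-component behavior described in the removed docstring: a quoted trailing component is treated as a single value even if it contains colons. A simplified, hypothetical illustration of that splitting (Spack's real parser also handles '::' overrides and append markers):

import re

def split_config_path(path):
    result = []
    while path:
        if path[0] in "'\"":  # quoted component: everything inside is one value
            m = re.match(r"^(['\"])([^'\"]+)\1$", path)
            if not m:
                raise ValueError("quotes indicate a value, but more path follows")
            result.append(m.group(2))
            break
        front, _, path = path.partition(":")
        result.append(front)
    return result

print(split_config_path("this:is:a:path:'value:with:colon'"))
# ['this', 'is', 'a', 'path', 'value:with:colon']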

View File

@@ -5,8 +5,8 @@
"""Writers for different kind of recipes and related
convenience functions.
"""
import collections
import copy
from collections import namedtuple
from typing import Optional
import spack.environment as ev
@@ -159,13 +159,13 @@ def depfile(self):
@tengine.context_property
def run(self):
"""Information related to the run image."""
Run = namedtuple("Run", ["image"])
Run = collections.namedtuple("Run", ["image"])
return Run(image=self.final_image)
@tengine.context_property
def build(self):
"""Information related to the build image."""
Build = namedtuple("Build", ["image"])
Build = collections.namedtuple("Build", ["image"])
return Build(image=self.build_image)
@tengine.context_property
@@ -176,13 +176,12 @@ def strip(self):
@tengine.context_property
def paths(self):
"""Important paths in the image"""
Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
return Paths(
environment="/opt/spack-environment",
store="/opt/software",
view_parent="/opt/views",
view="/opt/views/view",
former_view="/opt/view", # /opt/view -> /opt/views/view for backward compatibility
hidden_view="/opt/._view",
view="/opt/view",
)
@tengine.context_property
@@ -258,7 +257,7 @@ def _package_info_from(self, package_list):
update, install, clean = commands_for(os_pkg_manager)
Packages = namedtuple("Packages", ["update", "install", "list", "clean"])
Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
return Packages(update=update, install=install, list=package_list, clean=clean)
def _os_pkg_manager(self):
@@ -274,7 +273,7 @@ def _os_pkg_manager(self):
@tengine.context_property
def extra_instructions(self):
Extras = namedtuple("Extra", ["build", "final"])
Extras = collections.namedtuple("Extra", ["build", "final"])
extras = self.container_config.get("extra_instructions", {})
build, final = extras.get("build", None), extras.get("final", None)
return Extras(build=build, final=final)
@@ -296,7 +295,7 @@ def bootstrap(self):
context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
bootstrap_recipe = env.get_template(template_path).render(**context)
Bootstrap = namedtuple("Bootstrap", ["image", "recipe"])
Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)
@tengine.context_property
@@ -304,7 +303,7 @@ def render_phase(self):
render_bootstrap = bool(self.bootstrap_image)
render_build = not (self.last_phase == "bootstrap")
render_final = self.last_phase in (None, "final")
Render = namedtuple("Render", ["bootstrap", "build", "final"])
Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)
def __call__(self):

View File

@@ -42,7 +42,6 @@ class OpenMpi(Package):
import spack.patch
import spack.spec
import spack.url
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency, canonical_deptype, default_deptype
from spack.fetch_strategy import from_kwargs
@@ -408,7 +407,10 @@ def version(
def _execute_version(pkg, ver, **kwargs):
if (
(any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
any(
s in kwargs
for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
)
and hasattr(pkg, "has_code")
and not pkg.has_code
):
@@ -758,7 +760,7 @@ def _execute_variant(pkg):
when_spec = make_when_spec(when)
when_specs = [when_spec]
if not re.match(spack.spec.IDENTIFIER_RE, name):
if not re.match(spack.spec.identifier_re, name):
directive = "variant"
msg = "Invalid variant name in {0}: '{1}'"
raise DirectiveError(directive, msg.format(pkg.name, name))

View File

@@ -11,7 +11,6 @@
import shutil
import sys
from contextlib import contextmanager
from pathlib import Path
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -105,7 +104,7 @@ def relative_path_for_spec(self, spec):
projection = spack.projections.get_projection(self.projections, spec)
path = spec.format(projection)
return str(Path(path))
return path
def write_spec(self, spec, path):
"""Write a spec out to a file."""

View File

@@ -1994,10 +1994,14 @@ def get_one_by_hash(self, dag_hash):
def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
"""Returns all concretized specs in the environment satisfying any of the input specs"""
# Look up abstract hashes ahead of time, to avoid O(n^2) traversal.
specs = [s.lookup_hash() for s in specs]
# Avoid double lookup by directly calling _satisfies.
return [
s
for s in traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
if any(s.satisfies(t) for t in specs)
if any(s._satisfies(t) for t in specs)
]
@spack.repo.autospec
@@ -2058,7 +2062,7 @@ def matching_spec(self, spec):
# If multiple root specs match, it is assumed that the abstract
# spec will most-succinctly summarize the difference between them
# (and the user can enter one of these to disambiguate)
fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
fmt_str = "{hash:7} " + spack.spec.default_format
color = clr.get_color_when()
match_strings = [
f"Root spec {abstract.format(color=color)}\n {concrete.format(fmt_str, color=color)}"
@@ -2366,7 +2370,7 @@ def display_specs(concretized_specs):
def _tree_to_display(spec):
return spec.tree(
recurse_dependencies=True,
format=spack.spec.DISPLAY_FORMAT,
format=spack.spec.display_format,
status_fn=spack.spec.Spec.install_status,
hashlen=7,
hashes=True,
@@ -2444,13 +2448,13 @@ def make_repo_path(root):
def prepare_config_scope(env):
"""Add env's scope to the global configuration search path."""
for scope in env.config_scopes():
spack.config.CONFIG.push_scope(scope)
spack.config.config.push_scope(scope)
def deactivate_config_scope(env):
"""Remove any scopes from env from the global config path."""
for scope in env.config_scopes():
spack.config.CONFIG.remove_scope(scope.name)
spack.config.config.remove_scope(scope.name)
def manifest_file(env_name_or_dir):
@@ -2664,26 +2668,6 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
self.yaml_content = with_defaults_added
self.changed = False
def _all_matches(self, user_spec: str) -> List[str]:
"""Maps the input string to the first equivalent user spec in the manifest,
and returns it.
Args:
user_spec: user spec to be found
Raises:
ValueError: if no equivalent match is found
"""
result = []
for yaml_spec_str in self.pristine_configuration["specs"]:
if Spec(yaml_spec_str) == Spec(user_spec):
result.append(yaml_spec_str)
if not result:
raise ValueError(f"cannot find a spec equivalent to {user_spec}")
return result
def add_user_spec(self, user_spec: str) -> None:
"""Appends the user spec passed as input to the list of root specs.
@@ -2704,9 +2688,8 @@ def remove_user_spec(self, user_spec: str) -> None:
SpackEnvironmentError: when the user spec is not in the list
"""
try:
for key in self._all_matches(user_spec):
self.pristine_configuration["specs"].remove(key)
self.configuration["specs"].remove(key)
self.pristine_configuration["specs"].remove(user_spec)
self.configuration["specs"].remove(user_spec)
except ValueError as e:
msg = f"cannot remove {user_spec} from {self}, no such spec exists"
raise SpackEnvironmentError(msg) from e
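
The deleted _all_matches helper exists because two different strings in spack.yaml can parse to equal Spec objects, so removal must match by equivalence rather than by string identity. A standalone sketch of that idea, where normalize() is a made-up stand-in for parsing both strings into comparable Spec objects:

def normalize(spec_str):
    # hypothetical stand-in: order-insensitive comparison of spec tokens,
    # so "foo@1.0 +bar" and "+bar foo@1.0" compare equal
    return frozenset(spec_str.split())

def remove_user_spec(manifest_specs, user_spec):
    matches = [s for s in manifest_specs if normalize(s) == normalize(user_spec)]
    if not matches:
        raise ValueError(f"cannot find a spec equivalent to {user_spec}")
    for s in matches:
        manifest_specs.remove(s)

specs = ["foo@1.0 +bar", "baz"]
remove_user_spec(specs, "+bar foo@1.0")
print(specs)  # ['baz']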

View File

@@ -43,7 +43,7 @@ def activate_header(env, shell, prompt=None):
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "$Env:SPACK_ENV='%s'\n" % env.path
cmds += "$Env:SPACK_ENV=%s\n" % env.path
else:
if "color" in os.getenv("TERM", "") and prompt:
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
@@ -82,7 +82,7 @@ def deactivate_header(shell):
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "Set-Item -Path Env:SPACK_ENV\n"
cmds += "Remove-Item Env:SPACK_ENV"
else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"

View File

@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
print()
header = "%s{%s} / %s{%s}" % (
spack.spec.ARCHITECTURE_COLOR,
spack.spec.architecture_color,
architecture,
spack.spec.COMPILER_COLOR,
spack.spec.compiler_color,
compiler,
)
tty.hline(colorize(header), char="-")

View File

@@ -592,9 +592,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
if node is spec:
spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
elif source_pkg_dir:
fs.install_tree(
source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
)
fs.install_tree(source_pkg_dir, dest_pkg_dir)
def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
@@ -1318,6 +1316,7 @@ def _prepare_for_install(self, task: BuildTask) -> None:
"""
Check the database and leftover installation directories/files and
prepare for a new install attempt for an uninstalled package.
Preparation includes cleaning up installation and stage directories
and ensuring the database is up-to-date.
@@ -2395,9 +2394,7 @@ def _install_source(self) -> None:
src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
tty.debug("{0} Copying source to {1}".format(self.pre, src_target))
fs.install_tree(
pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
)
fs.install_tree(pkg.stage.source_path, src_target)
def _real_install(self) -> None:
import spack.builder

View File

@@ -51,7 +51,7 @@
stat_names = pstats.Stats.sort_arg_dict_default
#: top-level aliases for Spack commands
aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}
aliases = {"rm": "remove"}
#: help levels in order of detail (i.e., number of commands shown)
levels = ["short", "long"]
@@ -602,10 +602,10 @@ def setup_main_options(args):
key = syaml.syaml_str("repos")
key.override = True
spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
[(key, [spack.paths.mock_packages_path])]
)
spack.repo.PATH = spack.repo.create(spack.config.CONFIG)
spack.repo.PATH = spack.repo.create(spack.config.config)
# If the user asked for it, don't check ssl certs.
if args.insecure:
@@ -930,7 +930,7 @@ def _main(argv=None):
# make spack.config aware of any command line configuration scopes
if args.config_scopes:
spack.config.COMMAND_LINE_SCOPES = args.config_scopes
spack.config.command_line_scopes = args.config_scopes
# ensure options on spack command come before everything
setup_main_options(args)

View File

@@ -178,7 +178,7 @@ def merge_config_rules(configuration, spec):
if spec.satisfies(constraint):
if hasattr(constraint, "override") and constraint.override:
spec_configuration = {}
update_dictionary_extending_lists(spec_configuration, copy.deepcopy(action))
update_dictionary_extending_lists(spec_configuration, action)
# Transform keywords for dependencies or prerequisites into a list of spec
@@ -833,7 +833,7 @@ def ensure_modules_are_enabled_or_warn():
return
# Check if we have custom TCL module sections
for scope in spack.config.CONFIG.file_scopes:
for scope in spack.config.config.file_scopes:
# Skip default configuration
if scope.name.startswith("default"):
continue

View File

@@ -236,7 +236,7 @@ def install(self, prefix):
# Create a multimethod with this name if there is not one already
original_method = MultiMethodMeta._locals.get(method.__name__)
if not isinstance(original_method, SpecMultiMethod):
if not type(original_method) == SpecMultiMethod:
original_method = SpecMultiMethod(original_method)
if self.spec is not None:

View File

@@ -288,6 +288,9 @@ def next_spec(
)
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
root_spec._add_dependency(dependency, deptypes=(), virtuals=())
else:
@@ -303,12 +306,13 @@ def all_specs(self) -> List[spack.spec.Spec]:
class SpecNodeParser:
"""Parse a single spec node from a stream of tokens"""
__slots__ = "ctx", "has_compiler", "has_version"
__slots__ = "ctx", "has_compiler", "has_version", "has_hash"
def __init__(self, ctx):
self.ctx = ctx
self.has_compiler = False
self.has_version = False
self.has_hash = False
def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
"""Parse a single spec node from a stream of tokens
@@ -339,6 +343,7 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
while True:
if self.ctx.accept(TokenType.COMPILER):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
if self.has_compiler:
raise spack.spec.DuplicateCompilerSpecError(
f"{initial_spec} cannot have multiple compilers"
@@ -348,6 +353,7 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
self.has_compiler = True
elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
if self.has_compiler:
raise spack.spec.DuplicateCompilerSpecError(
f"{initial_spec} cannot have multiple compilers"
@@ -361,6 +367,7 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
TokenType.VERSION_HASH_PAIR
):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
if self.has_version:
raise spack.spec.MultipleVersionError(
f"{initial_spec} cannot have multiple versions"
@@ -371,21 +378,25 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
initial_spec.attach_git_version_lookup()
self.has_version = True
elif self.ctx.accept(TokenType.BOOL_VARIANT):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
variant_value = self.ctx.current_token.value[0] == "+"
initial_spec._add_flag(
self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
)
elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
variant_value = self.ctx.current_token.value[0:2] == "++"
initial_spec._add_flag(
self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
)
elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
name, value = self.ctx.current_token.value.split("=", maxsplit=1)
name = name.strip("'\" ")
value = value.strip("'\" ")
initial_spec._add_flag(name, value, propagate=False)
elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
name, value = self.ctx.current_token.value.split("==", maxsplit=1)
name = name.strip("'\" ")
value = value.strip("'\" ")
@@ -400,6 +411,12 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
return initial_spec
def hash_not_parsed_or_raise(self, spec, addition):
if not self.has_hash:
return
raise spack.spec.RedundantSpecError(spec, addition)
class FileParser:
"""Parse a single spec from a JSON or YAML file"""

View File

@@ -10,12 +10,11 @@
dependencies.
"""
import os
from pathlib import PurePath
import llnl.util.filesystem
#: This file lives in $prefix/lib/spack/spack/__file__
prefix = str(PurePath(llnl.util.filesystem.ancestor(__file__, 4)))
prefix = llnl.util.filesystem.ancestor(__file__, 4)
#: synonym for prefix
spack_root = prefix
@@ -89,7 +88,7 @@ def _get_user_cache_path():
return os.path.expanduser(os.getenv("SPACK_USER_CACHE_PATH") or "~%s.spack" % os.sep)
user_cache_path = str(PurePath(_get_user_cache_path()))
user_cache_path = _get_user_cache_path()
#: junit, cdash, etc. reports about builds
reports_path = os.path.join(user_cache_path, "reports")

View File

@@ -64,7 +64,7 @@ def use_platform(new_platform):
host = _PickleableCallable(new_platform)
# Clear configuration and compiler caches
spack.config.CONFIG.clear_caches()
spack.config.config.clear_caches()
spack.compilers._cache_config_files = []
yield new_platform
@@ -73,5 +73,5 @@ def use_platform(new_platform):
host = original_host_fn
# Clear configuration and compiler caches
spack.config.CONFIG.clear_caches()
spack.config.config.clear_caches()
spack.compilers._cache_config_files = []

View File

@@ -387,7 +387,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
# Warn about invalid names that look like packages.
if not nm.valid_module_name(pkg_name):
if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
if not pkg_name.startswith("."):
tty.warn(
'Skipping package at {0}. "{1}" is not '
"a valid Spack module name.".format(pkg_dir, pkg_name)
@@ -647,7 +647,7 @@ class RepoPath:
"""
def __init__(self, *repos, **kwargs):
cache = kwargs.get("cache", spack.caches.MISC_CACHE)
cache = kwargs.get("cache", spack.caches.misc_cache)
self.repos = []
self.by_namespace = nm.NamespaceTrie()
@@ -966,7 +966,7 @@ def check(condition, msg):
# Indexes for this repository, computed lazily
self._repo_index = None
self._cache = cache or spack.caches.MISC_CACHE
self._cache = cache or spack.caches.misc_cache
def real_name(self, import_name):
"""Allow users to import Spack packages using Python identifiers.
@@ -1357,7 +1357,7 @@ def create_or_construct(path, namespace=None):
def _path(configuration=None):
"""Get the singleton RepoPath instance for Spack."""
configuration = configuration or spack.config.CONFIG
configuration = configuration or spack.config.config
return create(configuration=configuration)
@@ -1404,14 +1404,14 @@ def use_repositories(*paths_and_repos, **kwargs):
paths = [getattr(x, "root", x) for x in paths_and_repos]
scope_name = "use-repo-{}".format(uuid.uuid4())
repos_key = "repos:" if override else "repos"
spack.config.CONFIG.push_scope(
spack.config.config.push_scope(
spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
)
PATH, saved = create(configuration=spack.config.CONFIG), PATH
PATH, saved = create(configuration=spack.config.config), PATH
try:
yield PATH
finally:
spack.config.CONFIG.remove_scope(scope_name=scope_name)
spack.config.config.remove_scope(scope_name=scope_name)
PATH = saved
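
use_repositories() follows a push/swap/restore shape: push a uniquely named internal config scope, rebuild the global RepoPath from the updated configuration, and undo both in the finally block so nesting works. A standalone sketch of that context-manager shape, with hypothetical stand-ins for the configuration and create():

import contextlib
import uuid

PATH = "initial-repo-path"  # stands in for the global RepoPath

@contextlib.contextmanager
def use_repositories(*paths):
    global PATH
    # unique scope name, so nested uses push and pop distinct scopes
    scope_name = "use-repo-{}".format(uuid.uuid4())
    PATH, saved = "repos({}) from scope {}".format(",".join(paths), scope_name), PATH
    try:
        yield PATH
    finally:
        # pop the temporary scope and restore the previous global
        PATH = saved

with use_repositories("/tmp/my-repo") as repos:
    print(repos)
print(PATH)  # "initial-repo-path" again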

View File

@@ -28,12 +28,6 @@
"unify": {
"oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
},
"duplicates": {
"type": "object",
"properties": {
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
},
}
}
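
The removed "duplicates" entry constrains a strategy key to none/minimal/full. A hedged sketch of validating such a document with the jsonschema package (assumed available):

    import jsonschema

    schema = {
        "type": "object",
        "properties": {
            "duplicates": {
                "type": "object",
                "properties": {
                    "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
                },
            }
        },
    }

    # Passes; a strategy outside the enum would raise ValidationError.
    jsonschema.validate({"duplicates": {"strategy": "minimal"}}, schema)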

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,159 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set, Tuple
import spack.dependency
import spack.package_base
PossibleDependencies = Set[str]
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
self.specs = specs
self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
if not tests:
self.link_run_types = ("link", "run")
self.all_types = ("link", "run", "build")
self._possible_dependencies: PossibleDependencies = set()
self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)
def possible_dependencies(self) -> PossibleDependencies:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self):
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self):
result = spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
)
self._possible_dependencies = set(result)
def possible_packages_facts(self, gen, fn):
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(self, specs, tests):
super().__init__(specs, tests)
self._link_run: PossibleDependencies = set()
self._direct_build: PossibleDependencies = set()
self._total_build: PossibleDependencies = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self):
self._link_run = set(
spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
)
)
self._link_run_virtuals.update(self._possible_virtuals)
for x in self._link_run:
current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type("build")
self._direct_build.update(current)
self._total_build = set(
spack.package_base.possible_dependencies(
*self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
)
)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with at multiple possible nodes (build-tools)")
for package_name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.max_dupes(package_name, 2))
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
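
The capping logic in FullDuplicatesCounter above boils down to counting how many dependency categories mention a package and clamping the result at 2. A self-contained sketch with made-up package lists:

    import collections

    link_run = ["cmake", "zlib"]
    total_build = ["cmake", "gmake"]
    direct_build = ["cmake"]

    counter = collections.Counter(link_run + total_build + direct_build)
    for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
        # cmake appears three times but is capped at two duplicate nodes
        print(pkg, min(count, 2))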

View File

@@ -1,21 +0,0 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% Avoid cycles in the DAG
%
% Some combinations of conditional dependencies can result in cycles;
% this ensures that we solve around them. Note that these rules are quite
% demanding on both grounding and solving, since they need to compute and
% consider all possible paths between pairs of nodes.
%=============================================================================
#program no_cycle.
path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, A).
#defined depends_on/2.
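
The two ASP rules above compute the transitive closure of depends_on and forbid a node from reaching itself. The same idea in plain Python, as a fixed-point loop over edge pairs:

    def has_cycle(depends_on):
        # path starts as the direct edges, then grows until a fixed point.
        path = set(depends_on)
        changed = True
        while changed:
            changed = False
            for parent, mid in list(path):
                for a, child in depends_on:
                    if a == mid and (parent, child) not in path:
                        path.add((parent, child))
                        changed = True
        return any(a == b for a, b in path)

    print(has_cycle({("a", "b"), ("b", "c")}))  # False
    print(has_cycle({("a", "b"), ("b", "a")}))  # True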

View File

@@ -1,29 +0,0 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% Heuristic to speed-up solves (node with ID 0)
%=============================================================================
%-----------------
% Domain heuristic
%-----------------
#heuristic attr("hash", node(0, Package), Hash) : literal(_, "root", Package). [45, init]
#heuristic attr("root", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "node", Package). [45, true]
% Root node
#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true]
#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]
% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]

View File

@@ -1,24 +0,0 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% Heuristic to speed-up solves (node with ID > 0)
%=============================================================================
% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]

View File

@@ -3,11 +3,9 @@
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% OS compatibility rules for reusing solves.
% os_compatible(RecentOS, OlderOS)
% OlderOS binaries can be used on RecentOS
%=============================================================================
% macOS
os_compatible("monterey", "bigsur").

View File

@@ -1,27 +0,0 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% Minimize the number of literals that are not solved
%
% This minimization is used for the "when_possible" concretization mode,
% otherwise we assume that all literals must be solved.
%=============================================================================
% Give clingo the choice to solve an input spec or not
{ solve_literal(ID) } :- literal(ID).
literal_not_solved(ID) :- not solve_literal(ID), literal(ID).
% Make a problem with "zero literals solved" unsat. This is to trigger
% looking for solutions to the ASP problem with "errors", which results
% in better reporting for users. See #30669 for details.
1 { solve_literal(ID) : literal(ID) }.
opt_criterion(300, "number of input specs not concretized").
#minimize{ 0@300: #true }.
#minimize { 1@300,ID : literal_not_solved(ID) }.
#heuristic literal_solved(ID) : literal(ID). [1, sign]
#heuristic literal_solved(ID) : literal(ID). [50, init]

View File

@@ -112,49 +112,50 @@
"UnsatisfiableDependencySpecError",
"AmbiguousHashError",
"InvalidHashError",
"RedundantSpecError",
"SpecDeprecatedError",
]
#: Valid pattern for an identifier in Spack
IDENTIFIER_RE = r"\w[\w-]*"
identifier_re = r"\w[\w-]*"
COMPILER_COLOR = "@g" #: color for highlighting compilers
VERSION_COLOR = "@c" #: color for highlighting versions
ARCHITECTURE_COLOR = "@m" #: color for highlighting architectures
ENABLED_VARIANT_COLOR = "@B" #: color for highlighting enabled variants
DISABLED_VARIANT_COLOR = "r" #: color for highlighting disabled variants
DEPENDENCY_COLOR = "@." #: color for highlighting dependencies
HASH_COLOR = "@K" #: color for highlighting package hashes
compiler_color = "@g" #: color for highlighting compilers
version_color = "@c" #: color for highlighting versions
architecture_color = "@m" #: color for highlighting architectures
enabled_variant_color = "@B" #: color for highlighting enabled variants
disabled_variant_color = "r" #: color for highlighting disabled variants
dependency_color = "@." #: color for highlighting dependencies
hash_color = "@K" #: color for highlighting package hashes
#: This map determines the coloring of specs when using color output.
#: We make the fields different colors to enhance readability.
#: See llnl.util.tty.color for descriptions of the color codes.
COLOR_FORMATS = {
"%": COMPILER_COLOR,
"@": VERSION_COLOR,
"=": ARCHITECTURE_COLOR,
"+": ENABLED_VARIANT_COLOR,
"~": DISABLED_VARIANT_COLOR,
"^": DEPENDENCY_COLOR,
"#": HASH_COLOR,
color_formats = {
"%": compiler_color,
"@": version_color,
"=": architecture_color,
"+": enabled_variant_color,
"~": disabled_variant_color,
"^": dependency_color,
"#": hash_color,
}
#: Regex used for splitting by spec field separators.
#: These need to be escaped to avoid metacharacters in
#: ``COLOR_FORMATS.keys()``.
_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())
#: ``color_formats.keys()``.
_separators = "[\\%s]" % "\\".join(color_formats.keys())
#: Default format for Spec.format(). This format can be round-tripped, so that:
#: Spec(Spec("string").format()) == Spec("string)"
DEFAULT_FORMAT = (
default_format = (
"{name}{@versions}"
"{%compiler.name}{@compiler.versions}{compiler_flags}"
"{variants}{arch=architecture}{/abstract_hash}"
)
#: Display format, which eliminates extra `@=` in the output, for readability.
DISPLAY_FORMAT = (
display_format = (
"{name}{@version}"
"{%compiler.name}{@compiler.version}{compiler_flags}"
"{variants}{arch=architecture}{/abstract_hash}"
@@ -186,7 +187,7 @@ class InstallStatus(enum.Enum):
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
COLOR_FORMATS."""
color_formats."""
class insert_color:
def __init__(self):
@@ -199,9 +200,9 @@ def __call__(self, match):
return clr.cescape(sep)
self.last = sep
return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))
return "%s%s" % (color_formats[sep], clr.cescape(sep))
return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")
return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + "@.")
@lang.lazy_lexicographic_ordering
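
The separator regex built above escapes each color-format key into a single character class. A hedged, standalone reconstruction (using the uppercase names from one side of the hunk):

    import re

    COLOR_FORMATS = {"%": "@g", "@": "@c", "=": "@m", "+": "@B",
                     "~": "r", "^": "@.", "#": "@K"}
    _SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())

    print(_SEPARATORS)                                     # [\%\@\=\+\~\^\#]
    print(re.findall(_SEPARATORS, "zlib@1.2%gcc+shared"))  # ['@', '%', '+']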
@@ -984,14 +985,16 @@ def __iter__(self):
def __len__(self):
return len(self.edges)
def add(self, edge: DependencySpec):
def add(self, edge):
"""Adds a new edge to this object.
Args:
edge (DependencySpec): edge to be added
"""
key = edge.spec.name if self.store_by_child else edge.parent.name
if key in self.edges:
lst = self.edges[key]
lst.append(edge)
lst.sort(key=_sort_by_dep_types)
else:
self.edges[key] = [edge]
current_list = self.edges.setdefault(key, [])
current_list.append(edge)
current_list.sort(key=_sort_by_dep_types)
def __str__(self):
return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
@@ -1924,15 +1927,19 @@ def _lookup_hash(self):
store, or finally, binary caches."""
import spack.environment
matches = []
active_env = spack.environment.active_environment()
# First env, then store, then binary cache
matches = (
(active_env.all_matching_specs(self) if active_env else [])
or spack.store.STORE.db.query(self, installed=any)
or spack.binary_distribution.BinaryCacheQuery(True)(self)
)
if active_env:
env_matches = active_env.get_by_hash(self.abstract_hash) or []
matches = [m for m in env_matches if m._satisfies(self)]
if not matches:
db_matches = spack.store.STORE.db.get_by_hash(self.abstract_hash) or []
matches = [m for m in db_matches if m._satisfies(self)]
if not matches:
query = spack.binary_distribution.BinaryCacheQuery(True)
remote_matches = query("/" + self.abstract_hash) or []
matches = [m for m in remote_matches if m._satisfies(self)]
if not matches:
raise InvalidHashError(self, self.abstract_hash)
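
One side of this hunk chains the three sources with or, so each is only consulted when the previous one returned nothing. The control flow in isolation, with lambdas standing in for the environment/store/buildcache queries:

    def lookup(sources):
        matches = []
        for source in sources:
            matches = source() or []
            if matches:
                break  # first non-empty result wins
        return matches

    print(lookup([lambda: [], lambda: ["store-hit"], lambda: ["unreached"]]))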
@@ -1953,17 +1960,19 @@ def lookup_hash(self):
spec = self.copy(deps=False)
# root spec is replaced
if spec.abstract_hash:
spec._dup(self._lookup_hash())
new = self._lookup_hash()
spec._dup(new)
return spec
# Get dependencies that need to be replaced
for node in self.traverse(root=False):
if node.abstract_hash:
spec._add_dependency(node._lookup_hash(), deptypes=(), virtuals=())
new = node._lookup_hash()
spec._add_dependency(new, deptypes=(), virtuals=())
# reattach nodes that were not otherwise satisfied by new dependencies
for node in self.traverse(root=False):
if not any(n.satisfies(node) for n in spec.traverse()):
if not any(n._satisfies(node) for n in spec.traverse()):
spec._add_dependency(node.copy(), deptypes=(), virtuals=())
return spec
@@ -1976,7 +1985,9 @@ def replace_hash(self):
if not any(node for node in self.traverse(order="post") if node.abstract_hash):
return
self._dup(self.lookup_hash())
spec_by_hash = self.lookup_hash()
self._dup(spec_by_hash)
def to_node_dict(self, hash=ht.dag_hash):
"""Create a dictionary representing the state of this Spec.
@@ -2972,12 +2983,9 @@ def _new_concretize(self, tests=False):
providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
name = providers[0]
node = spack.solver.asp.SpecBuilder.make_node(pkg=name)
assert (
node in answer
), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
assert name in answer
concretized = answer[node]
concretized = answer[name]
self._dup(concretized)
def concretize(self, tests=False):
@@ -3511,8 +3519,7 @@ def update_variant_validate(self, variant_name, values):
for value in values:
if self.variants.get(variant_name):
msg = (
f"cannot append the new value '{value}' to the single-valued "
f"variant '{self.variants[variant_name]}'"
"Cannot append a value to a single-valued " "variant with an already set value"
)
assert pkg_variant.multi, msg
self.variants[variant_name].append(value)
@@ -3712,19 +3719,15 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
"""
other = self._autospec(other)
lhs = self.lookup_hash() or self
rhs = other.lookup_hash() or other
return lhs._intersects(rhs, deps)
def _intersects(self, other: "Spec", deps: bool = True) -> bool:
if other.concrete and self.concrete:
return self.dag_hash() == other.dag_hash()
self_hash = self.dag_hash() if self.concrete else self.abstract_hash
other_hash = other.dag_hash() if other.concrete else other.abstract_hash
if (
self_hash
and other_hash
and not (self_hash.startswith(other_hash) or other_hash.startswith(self_hash))
):
return False
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
if self.virtual and other.virtual:
@@ -3784,8 +3787,19 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
# If we need to descend into dependencies, do it, otherwise we're done.
if deps:
return self._intersects_dependencies(other)
else:
return True
return True
def satisfies(self, other, deps=True):
"""
This checks constraints on common dependencies against each other.
"""
other = self._autospec(other)
lhs = self.lookup_hash() or self
rhs = other.lookup_hash() or other
return lhs._satisfies(rhs, deps=deps)
def _intersects_dependencies(self, other):
if not other._dependencies or not self._dependencies:
@@ -3822,7 +3836,7 @@ def _intersects_dependencies(self, other):
return True
def satisfies(self, other: "Spec", deps: bool = True) -> bool:
def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
"""Return True if all concrete specs matching self also match other, otherwise False.
Args:
@@ -3837,13 +3851,6 @@ def satisfies(self, other: "Spec", deps: bool = True) -> bool:
# objects.
return self.concrete and self.dag_hash() == other.dag_hash()
# If the right-hand side has an abstract hash, make sure it's a prefix of the
# left-hand side's (abstract) hash.
if other.abstract_hash:
compare_hash = self.dag_hash() if self.concrete else self.abstract_hash
if not compare_hash or not compare_hash.startswith(other.abstract_hash):
return False
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
# A concrete provider can satisfy a virtual dependency.
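
The removed block above implements abstract-hash matching as a simple prefix test. A standalone sketch of that check:

    def hash_satisfies(lhs_hash, rhs_abstract_hash):
        # A short user-supplied hash matches any full hash it prefixes.
        return bool(lhs_hash) and lhs_hash.startswith(rhs_abstract_hash)

    print(hash_satisfies("abcdef123456", "abcdef"))  # True
    print(hash_satisfies("abcdef123456", "abd"))     # False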
@@ -4220,7 +4227,9 @@ def eq_node(self, other):
def _cmp_iter(self):
"""Lazily yield components of self for comparison."""
for item in self._cmp_node():
cmp_spec = self.lookup_hash() or self
for item in cmp_spec._cmp_node():
yield item
# This needs to be in _cmp_iter so that no specs with different process hashes
@@ -4231,10 +4240,10 @@ def _cmp_iter(self):
# TODO: they exist for speed. We should benchmark whether it's really worth
# TODO: having two types of hashing now that we use `json` instead of `yaml` for
# TODO: spec hashing.
yield self.process_hash() if self.concrete else None
yield cmp_spec.process_hash() if cmp_spec.concrete else None
def deps():
for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
for dep in sorted(itertools.chain.from_iterable(cmp_spec._dependencies.values())):
yield dep.spec.name
yield tuple(sorted(dep.deptypes))
yield hash(dep.spec)
@@ -4244,7 +4253,7 @@ def deps():
def colorized(self):
return colorize_spec(self)
def format(self, format_string=DEFAULT_FORMAT, **kwargs):
def format(self, format_string=default_format, **kwargs):
r"""Prints out particular pieces of a spec, depending on what is
in the format string.
@@ -4323,7 +4332,7 @@ def format(self, format_string=DEFAULT_FORMAT, **kwargs):
def write(s, c=None):
f = clr.cescape(s)
if c is not None:
f = COLOR_FORMATS[c] + f + "@."
f = color_formats[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
def write_attribute(spec, attribute, color):
@@ -4522,7 +4531,7 @@ def tree(self, **kwargs):
status_fn = kwargs.pop("status_fn", False)
cover = kwargs.pop("cover", "nodes")
indent = kwargs.pop("indent", 0)
fmt = kwargs.pop("format", DEFAULT_FORMAT)
fmt = kwargs.pop("format", default_format)
prefix = kwargs.pop("prefix", None)
show_types = kwargs.pop("show_types", False)
deptypes = kwargs.pop("deptypes", "all")
@@ -5330,6 +5339,14 @@ class NoSuchSpecFileError(SpecFilenameError):
"""Raised when a spec file doesn't exist."""
class RedundantSpecError(spack.error.SpecError):
def __init__(self, spec, addition):
super().__init__(
"Attempting to add %s to spec %s which is already concrete."
" This is likely the result of adding to a spec specified by hash." % (addition, spec)
)
class SpecFormatStringError(spack.error.SpecError):
"""Called for errors in Spec format strings."""

View File

@@ -97,10 +97,8 @@ def remove(self, spec):
msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
msg += "expanded from a matrix and cannot be removed directly."
raise SpecListError(msg)
# Remove may contain more than one string representation of the same spec
for item in remove:
self.yaml_list.remove(item)
assert len(remove) == 1
self.yaml_list.remove(remove[0])
# invalidate cache variables when we change the list
self._expanded_list = None
@@ -199,9 +197,7 @@ def _expand_matrix_constraints(matrix_config):
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
# Resolve abstract hashes so we can exclude by their concrete properties
flat_combo = [Spec(x).lookup_hash() for x in flat_combo]
flat_combo = [Spec(x) for x in flat_combo]
test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
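
The matrix expansion above is a cartesian product over the expanded rows. A simplified, runnable sketch with plain strings instead of constraint lists:

    import itertools

    rows = [["zlib", "bzip2"], ["%gcc", "%clang"]]
    for combo in itertools.product(*rows):
        # each combo is one candidate spec's list of constraints
        print(list(combo))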

View File

@@ -484,7 +484,7 @@ def fetch(self, mirror_only=False, err_msg=None):
if self.default_fetcher.cachable:
for rel_path in reversed(list(self.mirror_paths)):
cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
cache_fetcher = spack.caches.fetch_cache.fetcher(
rel_path, digest, expand=expand, extension=extension
)
fetchers.insert(0, cache_fetcher)
@@ -577,7 +577,7 @@ def check(self):
self.fetcher.check()
def cache_local(self):
spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)
spack.caches.fetch_cache.store(self.fetcher, self.mirror_paths.storage_path)
def cache_mirror(self, mirror, stats):
"""Perform a fetch if the resource is not already cached

View File

@@ -212,7 +212,7 @@ def create(configuration: ConfigurationType) -> Store:
Args:
configuration: configuration to create a store.
"""
configuration = configuration or spack.config.CONFIG
configuration = configuration or spack.config.config
config_dict = configuration.get("config")
root, unpadded_root, projections = parse_install_tree(config_dict)
hash_length = configuration.get("config:install_hash_length")
@@ -234,7 +234,7 @@ def create(configuration: ConfigurationType) -> Store:
def _create_global() -> Store:
result = create(configuration=spack.config.CONFIG)
result = create(configuration=spack.config.config)
return result
@@ -372,10 +372,10 @@ def use_store(
# Swap the store with the one just constructed and return it
ensure_singleton_created()
spack.config.CONFIG.push_scope(
spack.config.config.push_scope(
spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
)
temporary_store = create(configuration=spack.config.CONFIG)
temporary_store = create(configuration=spack.config.config)
original_store, STORE = STORE, temporary_store
try:
@@ -383,7 +383,7 @@ def use_store(
finally:
# Restore the original store
STORE = original_store
spack.config.CONFIG.remove_scope(scope_name=scope_name)
spack.config.config.remove_scope(scope_name=scope_name)
class MatchError(spack.error.SpackError):

View File

@@ -94,14 +94,14 @@ class TestState:
def __init__(self):
if _SERIALIZE:
self.config = spack.config.CONFIG
self.config = spack.config.config
self.platform = spack.platforms.host
self.test_patches = store_patches()
self.store = spack.store.STORE
def restore(self):
if _SERIALIZE:
spack.config.CONFIG = self.config
spack.config.config = self.config
spack.repo.PATH = spack.repo.create(self.config)
spack.platforms.host = self.platform
spack.store.STORE = self.store

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import sys
import pytest
@@ -200,11 +199,15 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
],
)
@pytest.mark.usefixtures("mock_packages", "config")
@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
# Monkeypatch so that all concretization is done as if the machine is core2
monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")
# use foobar=bar to make the problem simpler for the old concretizer
# the new concretizer should not need that help
if spack.config.get("config:concretizer") == "original":
pytest.skip("Fixing the parser broke this test for the original concretizer.")
spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % (
root_target_range,
dep_target_range,
@@ -224,7 +227,6 @@ def test_concretize_target_ranges(root_target_range, dep_target_range, result, m
(["21.11", "21.9"], None, False),
],
)
@pytest.mark.skipif(sys.platform == "win32", reason="Cray does not use windows")
def test_cray_platform_detection(versions, default, expected, tmpdir, monkeypatch, working_env):
ex_path = str(tmpdir.join("fake_craype_dir"))
fs.mkdirp(ex_path)

View File

@@ -37,7 +37,7 @@
from spack.paths import test_path
from spack.spec import Spec
pytestmark = pytest.mark.not_on_windows("does not run on windows")
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
mirror_cmd = spack.main.SpackCommand("mirror")
install_cmd = spack.main.SpackCommand("install")
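
The hunks here and below trade between a project-level not_on_windows marker and an explicit skipif(sys.platform == "win32"). A hedged conftest.py sketch of how such a marker could be turned into a skip at collection time (an assumption about the mechanism, not Spack's actual implementation):

    import sys

    import pytest

    def pytest_collection_modifyitems(config, items):
        if sys.platform != "win32":
            return
        for item in items:
            for mark in item.iter_markers(name="not_on_windows"):
                reason = mark.args[0] if mark.args else "not supported on Windows"
                item.add_marker(pytest.mark.skip(reason=reason))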
@@ -51,7 +51,7 @@
def cache_directory(tmpdir):
fetch_cache_dir = tmpdir.ensure("fetch_cache", dir=True)
fsc = spack.fetch_strategy.FsCache(str(fetch_cache_dir))
spack.config.caches, old_cache_path = fsc, spack.caches.FETCH_CACHE
spack.config.caches, old_cache_path = fsc, spack.caches.fetch_cache
yield spack.config.caches
@@ -115,8 +115,8 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
]
)
spack.config.CONFIG, old_config = cfg, spack.config.CONFIG
spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
spack.config.config, old_config = cfg, spack.config.config
spack.config.config.set("repos", [spack.paths.mock_packages_path])
njobs = spack.config.get("config:build_jobs")
if not njobs:
spack.config.set("config:build_jobs", 4, scope="user")
@@ -138,9 +138,9 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
if not timeout:
spack.config.set("config:connect_timeout", 10, scope="user")
yield spack.config.CONFIG
yield spack.config.config
spack.config.CONFIG = old_config
spack.config.config = old_config
mutable_dir.remove()

View File

@@ -26,11 +26,11 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
user_path = str(tmpdir.join("store"))
with spack.store.use_store(user_path):
assert spack.store.STORE.root == user_path
assert spack.config.CONFIG.get("config:install_tree:root") == user_path
assert spack.config.config.get("config:install_tree:root") == user_path
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.store.STORE.root == spack.bootstrap.config.store_path()
assert spack.store.STORE.root == user_path
assert spack.config.CONFIG.get("config:install_tree:root") == user_path
assert spack.config.config.get("config:install_tree:root") == user_path
@pytest.mark.regression("38963")
@@ -40,11 +40,11 @@ def test_store_padding_length_is_zero_during_bootstrapping(mutable_config, tmpdi
"""
user_path = str(tmpdir.join("store"))
with spack.store.use_store(user_path, extra_data={"padded_length": 512}):
assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512
assert spack.config.config.get("config:install_tree:padded_length") == 512
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.store.STORE.root == spack.bootstrap.config.store_path()
assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512
assert spack.config.config.get("config:install_tree:padded_length") == 0
assert spack.config.config.get("config:install_tree:padded_length") == 512
@pytest.mark.regression("38963")
@@ -54,15 +54,15 @@ def test_install_tree_customization_is_respected(mutable_config, tmp_path):
"""
spack.store.reinitialize()
store_dir = tmp_path / "store"
spack.config.CONFIG.set("config:install_tree:root", str(store_dir))
spack.config.config.set("config:install_tree:root", str(store_dir))
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.store.STORE.root == spack.bootstrap.config.store_path()
assert (
spack.config.CONFIG.get("config:install_tree:root")
spack.config.config.get("config:install_tree:root")
== spack.bootstrap.config.store_path()
)
assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
assert spack.config.CONFIG.get("config:install_tree:root") == str(store_dir)
assert spack.config.config.get("config:install_tree:padded_length") == 0
assert spack.config.config.get("config:install_tree:root") == str(store_dir)
assert spack.store.STORE.root == str(store_dir)
@@ -185,12 +185,12 @@ def test_bootstrap_custom_store_in_environment(mutable_config, tmpdir):
def test_nested_use_of_context_manager(mutable_config):
"""Test nested use of the context manager"""
user_config = spack.config.CONFIG
user_config = spack.config.config
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.config.CONFIG != user_config
assert spack.config.config != user_config
with spack.bootstrap.ensure_bootstrap_configuration():
assert spack.config.CONFIG != user_config
assert spack.config.CONFIG == user_config
assert spack.config.config != user_config
assert spack.config.config == user_config
@pytest.mark.parametrize("expected_missing", [False, True])

View File

@@ -5,6 +5,7 @@
import os
import os.path
import sys
import pytest
@@ -15,7 +16,7 @@
install = spack.main.SpackCommand("install")
pytestmark = pytest.mark.not_on_windows("does not run on windows")
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):

View File

@@ -6,6 +6,7 @@
import os
import platform
import posixpath
import sys
import pytest
@@ -118,7 +119,7 @@ def __call__(self, *args, **kwargs):
return mock_module_cmd
@pytest.mark.not_on_windows("Static to Shared not supported on Win (yet)")
@pytest.mark.skipif(sys.platform == "win32", reason="Static to Shared not supported on Win (yet)")
def test_static_to_shared_library(build_environment):
os.environ["SPACK_TEST_COMMAND"] = "dump-args"

View File

@@ -5,6 +5,7 @@
import glob
import os
import sys
import py.path
import pytest
@@ -42,7 +43,7 @@ def _func(dir_str):
return _func
@pytest.mark.not_on_windows("make not available on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="make not available on Windows")
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
class TestTargets:
@pytest.mark.parametrize(
@@ -91,7 +92,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
s.package._if_ninja_target_execute("check")
@pytest.mark.not_on_windows("autotools not available on windows")
@pytest.mark.skipif(sys.platform == "win32", reason="autotools not available on windows")
@pytest.mark.usefixtures("config", "mock_packages")
class TestAutotoolsPackage:
def test_with_or_without(self, default_mock_concretization):

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import sys
import pytest
@@ -105,7 +106,10 @@ def test_old_style_compatibility_with_super(spec_str, method_name, expected):
assert value == expected
@pytest.mark.not_on_windows("log_ouput cannot currently be used outside of subprocess on Windows")
@pytest.mark.skipif(
sys.platform == "win32",
reason="log_ouput cannot currently be used outside of subprocess on Windows",
)
@pytest.mark.regression("33928")
@pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
@pytest.mark.disable_clean_stage_check
@@ -149,7 +153,7 @@ def test_monkey_patching_test_log_file():
# Windows context manager's __exit__ fails with ValueError ("I/O operation
# on closed file").
@pytest.mark.not_on_windows("Does not run on windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Does not run on windows")
def test_install_time_test_callback(tmpdir, config, mock_packages, mock_stage):
"""Confirm able to run stand-alone test as a post-install callback."""
s = spack.spec.Spec("py-test-callback").concretized()

View File

@@ -31,16 +31,13 @@ def test_fetch_missing_cache(tmpdir, _fetch_method):
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_fetch(tmpdir, _fetch_method):
"""Ensure a fetch after expanding is effectively a no-op."""
cache_dir = tmpdir.join("cache")
stage_dir = tmpdir.join("stage")
mkdirp(cache_dir)
mkdirp(stage_dir)
cache = os.path.join(cache_dir, "cache.tar.gz")
testpath = str(tmpdir)
cache = os.path.join(testpath, "cache.tar.gz")
touch(cache)
url = url_util.path_to_file_url(cache)
with spack.config.override("config:url_fetch_method", _fetch_method):
fetcher = CacheURLFetchStrategy(url=url)
with Stage(fetcher, path=str(stage_dir)) as stage:
with Stage(fetcher, path=testpath) as stage:
source_path = stage.source_path
mkdirp(source_path)
fetcher.fetch()

View File

@@ -8,6 +8,7 @@
arguments correctly.
"""
import os
import sys
import pytest
@@ -144,7 +145,7 @@
+ test_args_without_paths
)
pytestmark = pytest.mark.not_on_windows("does not run on windows")
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture(scope="function")

View File

@@ -5,6 +5,7 @@
import itertools
import os
import subprocess
import sys
import pytest
@@ -34,7 +35,7 @@ def test_urlencode_string():
assert ci._url_encode_string("Spack Test Project") == "Spack+Test+Project"
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_import_signing_key(mock_gnupghome):
signing_key_dir = spack_paths.mock_gpg_keys_path
signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
@@ -426,14 +427,18 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_pack
assert len(mpileaks_specs) == 2, e.all_specs()
@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
@pytest.mark.skipif(
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_process_command(repro_dir):
result = ci.process_command("help", commands=[], repro_dir=str(repro_dir))
help_sh = repro_dir / "help.sh"
assert help_sh.exists() and not result
@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
@pytest.mark.skipif(
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_process_command_fail(repro_dir, monkeypatch):
msg = "subprocess wait exception"
@@ -484,7 +489,9 @@ def test_ci_run_standalone_tests_missing_requirements(
assert "Reproduction directory is required" in err
@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
@pytest.mark.skipif(
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_run_standalone_tests_not_installed_junit(
tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
):
@@ -502,7 +509,9 @@ def test_ci_run_standalone_tests_not_installed_junit(
assert os.path.getsize(log_file) > 0
@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
@pytest.mark.skipif(
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_run_standalone_tests_not_installed_cdash(
tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
):

View File

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
import pytest
from llnl.util.filesystem import working_dir
@@ -33,7 +35,7 @@ def test_blame_by_percent(mock_packages):
assert "EMAIL" in out
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_blame_file(mock_packages):
"""Sanity check the blame command to make sure it works."""
with working_dir(spack.paths.prefix):
@@ -66,7 +68,7 @@ def test_blame_json(mock_packages):
assert key in loaded["authors"][0]
@pytest.mark.not_on_windows("git hangs")
@pytest.mark.skipif(sys.platform == "win32", reason="git hangs")
def test_blame_by_git(mock_packages, capfd):
"""Sanity check the blame command to make sure it works."""
with capfd.disabled():

View File

@@ -50,7 +50,7 @@ def test_reset_in_file_scopes(mutable_config, scopes):
bootstrap_yaml_files = []
for s in scopes:
_bootstrap("disable", "--scope={0}".format(s))
scope_path = spack.config.CONFIG.scopes[s].path
scope_path = spack.config.config.scopes[s].path
bootstrap_yaml = os.path.join(scope_path, "bootstrap.yaml")
assert os.path.exists(bootstrap_yaml)
bootstrap_yaml_files.append(bootstrap_yaml)
@@ -80,7 +80,7 @@ def test_reset_in_environment(mutable_mock_env_path, mutable_config):
def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
# Create a bootstrap.yaml with some config
_bootstrap("disable", "--scope=site")
scope_path = spack.config.CONFIG.scopes["site"].path
scope_path = spack.config.config.scopes["site"].path
bootstrap_yaml = os.path.join(scope_path, "bootstrap.yaml")
assert os.path.exists(bootstrap_yaml)
@@ -174,7 +174,7 @@ def test_remove_and_add_a_source(mutable_config):
@pytest.mark.maybeslow
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir):
"""Test that `spack bootstrap mirror` creates a folder that can be ingested by
`spack bootstrap add`. Here we don't download data, since that would be an

View File

@@ -54,7 +54,7 @@ def test_pickle(tmpdir):
with tmpdir.as_cwd():
build_env("--pickle", _out_file, "zlib")
environment = pickle.load(open(_out_file, "rb"))
assert isinstance(environment, dict)
assert type(environment) == dict
assert "PATH" in environment

View File

@@ -6,6 +6,7 @@
import errno
import os
import shutil
import sys
import pytest
@@ -25,7 +26,7 @@
mirror = spack.main.SpackCommand("mirror")
uninstall = spack.main.SpackCommand("uninstall")
pytestmark = pytest.mark.not_on_windows("does not run on windows")
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture()

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import sys
import pytest
@@ -35,7 +36,7 @@ def test_checksum_args(arguments, expected):
assert check == expected
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.parametrize(
"arguments,expected",
[
@@ -56,7 +57,7 @@ def test_checksum(arguments, expected, mock_packages, mock_clone_repo, mock_stag
assert "version(" in output
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_checksum_interactive(mock_packages, mock_fetch, mock_stage, monkeypatch):
# TODO: mock_fetch doesn't actually work with stage, working around with ignoring
# fail_on_error for now

View File

@@ -7,6 +7,7 @@
import json
import os
import shutil
import sys
import jsonschema
import pytest
@@ -40,7 +41,10 @@
uninstall_cmd = spack.main.SpackCommand("uninstall")
buildcache_cmd = spack.main.SpackCommand("buildcache")
pytestmark = [pytest.mark.not_on_windows("does not run on windows"), pytest.mark.maybeslow]
pytestmark = [
pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
pytest.mark.maybeslow,
]
@pytest.fixture()

View File

@@ -33,8 +33,8 @@ def __call__(self, *args, **kwargs):
monkeypatch.setattr(spack.package_base.PackageBase, "do_clean", Counter("package"))
monkeypatch.setattr(spack.stage, "purge", Counter("stages"))
monkeypatch.setattr(spack.caches.FETCH_CACHE, "destroy", Counter("downloads"), raising=False)
monkeypatch.setattr(spack.caches.MISC_CACHE, "destroy", Counter("caches"))
monkeypatch.setattr(spack.caches.fetch_cache, "destroy", Counter("downloads"), raising=False)
monkeypatch.setattr(spack.caches.misc_cache, "destroy", Counter("caches"))
monkeypatch.setattr(spack.store.STORE.failure_tracker, "clear_all", Counter("failures"))
monkeypatch.setattr(spack.cmd.clean, "remove_python_cache", Counter("python_cache"))

View File

@@ -7,6 +7,7 @@
import os
import shutil
import subprocess
import sys
import pytest
@@ -253,7 +254,9 @@ def test_update_completion_arg(shell, tmpdir, monkeypatch):
# Note: this test is never expected to be supported on Windows
@pytest.mark.not_on_windows("Shell completion script generator fails on windows")
@pytest.mark.skipif(
sys.platform == "win32", reason="shell completion script generator fails on windows"
)
@pytest.mark.parametrize("shell", ["bash", "fish"])
def test_updated_completion_scripts(shell, tmpdir):
"""Make sure our shell tab completion scripts remain up-to-date."""

View File

@@ -64,7 +64,7 @@ def compilers_dir(mock_executable):
return clang_path.parent
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
"""Tests that 'spack compiler find' looks into PATH by default, if no specific path
@@ -127,7 +127,7 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_add(mutable_config, mock_packages, mock_executable):
"""Tests that we can add a compiler to configuration."""
expected_version = "4.5.3"
@@ -157,7 +157,7 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable):
assert new_compiler.version == spack.version.Version(expected_version)
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
"""Ensure that we'll mix compilers with different suffixes when necessary."""
@@ -189,7 +189,7 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_
}
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
@pytest.mark.regression("17590")
def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
"""Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""
@@ -210,7 +210,7 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
"""Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
new_dir = compilers_dir / "first_in_path"

View File

@@ -24,7 +24,7 @@
def _create_config(scope=None, data={}, section="packages"):
scope = scope or spack.config.default_modify_scope()
cfg_file = spack.config.CONFIG.get_config_filename(scope, section)
cfg_file = spack.config.config.get_config_filename(scope, section)
with open(cfg_file, "w") as f:
syaml.dump(data, stream=f)
return cfg_file
@@ -80,8 +80,8 @@ def test_config_edit(mutable_config, working_env):
"""Ensure `spack config edit` edits the right paths."""
dms = spack.config.default_modify_scope("compilers")
dms_path = spack.config.CONFIG.scopes[dms].path
user_path = spack.config.CONFIG.scopes["user"].path
dms_path = spack.config.config.scopes[dms].path
user_path = spack.config.config.scopes["user"].path
comp_path = os.path.join(dms_path, "compilers.yaml")
repos_path = os.path.join(user_path, "repos.yaml")
@@ -544,7 +544,7 @@ def test_config_update_not_needed(mutable_config):
def test_config_update_can_handle_comments(mutable_config):
# Create an outdated config file with comments
scope = spack.config.default_modify_scope()
cfg_file = spack.config.CONFIG.get_config_filename(scope, "config")
cfg_file = spack.config.config.get_config_filename(scope, "config")
with open(cfg_file, mode="w") as f:
f.write(
"""
@@ -574,7 +574,7 @@ def test_config_update_can_handle_comments(mutable_config):
@pytest.mark.regression("18050")
def test_config_update_works_for_empty_paths(mutable_config):
scope = spack.config.default_modify_scope()
cfg_file = spack.config.CONFIG.get_config_filename(scope, "config")
cfg_file = spack.config.config.get_config_filename(scope, "config")
with open(cfg_file, mode="w") as f:
f.write(
"""
@@ -627,7 +627,7 @@ def test_config_prefer_upstream(
output = config("prefer-upstream")
scope = spack.config.default_modify_scope("packages")
cfg_file = spack.config.CONFIG.get_config_filename(scope, "packages")
cfg_file = spack.config.config.get_config_filename(scope, "packages")
packages = syaml.load(open(cfg_file))["packages"]
# Make sure only the non-default variants are set.

View File

@@ -6,6 +6,7 @@
import os
import os.path
import platform
import sys
import pytest
@@ -16,7 +17,7 @@
debug = SpackCommand("debug")
pytestmark = pytest.mark.not_on_windows("does not run on windows")
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.mark.db

Some files were not shown because too many files have changed in this diff