Compare commits


1 Commit

Todd Gamblin
3cdf4e7ccf packages: eliminate unnecessary implicit string concatenation
Python lets you do things like

```python
"these are " "one string"

'so are' "these"
```

This can be useful for breaking strings over multiple lines, but it also often happens
unintentionally and can indicate subtle errors in the code.

Many variant descriptions contain harmless implicit concatenation left over from
refactors, e.g.:

```python
    variant("myvariant", default=True, description="this used to be" "on two lines")
```

But there are also real bugs, like this, where the author probably omitted a comma and
didn't notice that `black` reformatted the implicit concatenation onto one line:

```python
args = [
     "--with-thing",
     "--with-second-thing" "--with-third-thing",
]
```
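
Presumably the intent was three separate flags, which restoring the missing comma makes explicit:

```python
args = [
    "--with-thing",
    "--with-second-thing",
    "--with-third-thing",
]
```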

And other bugs like this, where the author probably intended to add a space, but didn't:

```python
options = "${CFLAGS}" "${SPECIAL_PIC_OPTION}"
```
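
If a separating space was the intent, it has to be written explicitly, e.g. as a single literal:

```python
options = "${CFLAGS} ${SPECIAL_PIC_OPTION}"
```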

Some things are harmless but confusing:

```python
"first part of string {0} " "second part {1}".format("zero", "one")
```

It's not broken. String concatenation happens *before* the `format()` call, and the
whole string is formatted. But it sure is hard to read.
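
The equivalent single literal is much easier to scan:

```python
"first part of string {0} second part {1}".format("zero", "one")
```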

Unfortunately, you can't detect this stuff with an AST pass, since implicit concatenation
happens during parsing. I had to detect it with grep:

```console
> grep -l '^[^"]*"[^"]*" "' */package.py
> grep -l "^[^']*'[^']*' '" */package.py
```
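
As an aside, a tokenizer-based check is possible even though an AST pass isn't: the tokenizer
still sees each literal as its own `STRING` token before the parser folds adjacent literals into
one constant. A rough sketch (illustration only, not part of this change; the helper name is made up):

```python
import sys
import tokenize


def adjacent_string_literals(path):
    """Yield (line, col) positions where a string literal directly follows another."""
    with open(path, "rb") as f:
        prev = None
        for tok in tokenize.tokenize(f.readline):
            if (
                tok.type == tokenize.STRING
                and prev is not None
                and prev.type == tokenize.STRING
            ):
                yield tok.start  # start of the second literal
            # NL (newlines inside brackets) and comments don't end a logical line,
            # so skip them when remembering the previous significant token.
            if tok.type not in (tokenize.NL, tokenize.COMMENT):
                prev = tok


if __name__ == "__main__":
    for fname in sys.argv[1:]:
        for line, col in adjacent_string_literals(fname):
            print(f"{fname}:{line}:{col}: implicit string concatenation")
```

Linters like `flake8-implicit-str-concat` take roughly this approach (on Python 3.12+, f-strings
tokenize differently and would need separate handling).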

- [x] Get rid of nearly all implicit string concatenation in packages

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-01-31 20:35:21 -08:00
782 changed files with 8243 additions and 13492 deletions

View File

@@ -81,10 +81,6 @@ jobs:
with: with:
with_coverage: ${{ needs.changes.outputs.core }} with_coverage: ${{ needs.changes.outputs.core }}
import-check:
needs: [ changes ]
uses: ./.github/workflows/import-check.yaml
all-prechecks: all-prechecks:
needs: [ prechecks ] needs: [ prechecks ]
if: ${{ always() }} if: ${{ always() }}

View File

@@ -33,4 +33,3 @@ jobs:
with: with:
verbose: true verbose: true
fail_ci_if_error: false fail_ci_if_error: false
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,49 +0,0 @@
name: import-check
on:
workflow_call:
jobs:
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Circular import check
working-directory: circular-import-fighter
run: make -j compare "SPACK_ROOT=../old ../new"

View File

@@ -1,7 +1,7 @@
-black==25.1.0
+black==24.10.0
 clingo==5.7.1
-flake8==7.1.2
+flake8==7.1.1
-isort==6.0.1
+isort==5.13.2
-mypy==1.15.0
+mypy==1.11.2
-types-six==1.17.0.20250304
+types-six==1.17.0.20241205
 vermin==1.6.0

View File

@@ -86,6 +86,66 @@ jobs:
spack -d bootstrap now --dev spack -d bootstrap now --dev
spack -d style -t black spack -d style -t black
spack unit-test -V spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Problematic imports before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old SUFFIX=.old
- name: Problematic imports after
working-directory: circular-import-fighter
run: make SPACK_ROOT=../new SUFFIX=.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(head -n1 solution.old)"
edges_after="$(head -n1 solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi
# Further style checks from pylint # Further style checks from pylint
pylint: pylint:

.gitignore
View File

@@ -201,6 +201,7 @@ tramp
# Org-mode # Org-mode
.org-id-locations .org-id-locations
*_archive
# flymake-mode # flymake-mode
*_flymake.* *_flymake.*

View File

@@ -43,28 +43,6 @@ concretizer:
# (e.g. py-setuptools, cmake etc.) # (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG) # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal strategy: minimal
# Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
# number used if there isn't a more specific alternative
max_dupes:
default: 1
# Virtuals
c: 2
cxx: 2
fortran: 1
# Regular packages
cmake: 2
gmake: 2
python: 2
python-venv: 2
py-cython: 2
py-flit-core: 2
py-pip: 2
py-setuptools: 2
py-wheel: 2
xcb-proto: 2
# Compilers
gcc: 2
llvm: 2
# Option to specify compatibility between operating systems for reuse of compilers and packages # Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
@@ -85,7 +63,3 @@ concretizer:
# Setting this to false yields unreproducible results, so we advise to use that value only # Setting this to false yields unreproducible results, so we advise to use that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster). # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true error_on_timeout: true
# Static analysis may reduce the concretization time by generating smaller ASP problems, in
# cases where there are requirements that prevent part of the search space to be explored.
static_analysis: false

View File

@@ -1,5 +1,5 @@
config: config:
locks: false locks: false
build_stage:: build_stage::
- '$user_cache_path/stage' - '$spack/.staging'
stage_name: '{name}-{version}-{hash:7}' stage_name: '{name}-{version}-{hash:7}'

View File

@@ -1761,24 +1761,19 @@ Verifying installations
The ``spack verify`` command can be used to verify the validity of The ``spack verify`` command can be used to verify the validity of
Spack-installed packages any time after installation. Spack-installed packages any time after installation.
^^^^^^^^^^^^^^^^^^^^^^^^^
``spack verify manifest``
^^^^^^^^^^^^^^^^^^^^^^^^^
At installation time, Spack creates a manifest of every file in the At installation time, Spack creates a manifest of every file in the
installation prefix. For links, Spack tracks the mode, ownership, and installation prefix. For links, Spack tracks the mode, ownership, and
destination. For directories, Spack tracks the mode, and destination. For directories, Spack tracks the mode, and
ownership. For files, Spack tracks the mode, ownership, modification ownership. For files, Spack tracks the mode, ownership, modification
time, hash, and size. The ``spack verify manifest`` command will check, time, hash, and size. The Spack verify command will check, for every
for every file in each package, whether any of those attributes have file in each package, whether any of those attributes have changed. It
changed. It will also check for newly added files or deleted files from will also check for newly added files or deleted files from the
the installation prefix. Spack can either check all installed packages installation prefix. Spack can either check all installed packages
using the `-a,--all` or accept specs listed on the command line to using the `-a,--all` or accept specs listed on the command line to
verify. verify.
The ``spack verify manifest`` command can also verify for individual files The ``spack verify`` command can also verify for individual files that
that they haven't been altered since installation time. If the given file they haven't been altered since installation time. If the given file
is not in a Spack installation prefix, Spack will report that it is is not in a Spack installation prefix, Spack will report that it is
not owned by any package. To check individual files instead of specs, not owned by any package. To check individual files instead of specs,
use the ``-f,--files`` option. use the ``-f,--files`` option.
@@ -1793,22 +1788,6 @@ check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output ``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors. machine-readable json data for any errors.
^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack verify libraries``
^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``spack verify libraries`` command can be used to verify that packages
do not have accidental system dependencies. This command scans the install
prefixes of packages for executables and shared libraries, and resolves
their needed libraries in their RPATHs. When needed libraries cannot be
located, an error is reported. This typically indicates that a package
was linked against a system library, instead of a library provided by
a Spack package.
This verification can also be enabled as a post-install hook by setting
``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
in :ref:`config.yaml <config-yaml>`.
----------------------- -----------------------
Filesystem requirements Filesystem requirements
----------------------- -----------------------

View File

@@ -223,10 +223,6 @@ def setup(sphinx):
("py:class", "spack.compiler.CompilerCache"), ("py:class", "spack.compiler.CompilerCache"),
# TypeVar that is not handled correctly # TypeVar that is not handled correctly
("py:class", "llnl.util.lang.T"), ("py:class", "llnl.util.lang.T"),
("py:class", "llnl.util.lang.KT"),
("py:class", "llnl.util.lang.VT"),
("py:obj", "llnl.util.lang.KT"),
("py:obj", "llnl.util.lang.VT"),
] ]
# The reST default role (used for this markup: `text`) to use for all documents. # The reST default role (used for this markup: `text`) to use for all documents.

View File

@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
by default. Can be purged with :ref:`spack clean --downloads by default. Can be purged with :ref:`spack clean --downloads
<cmd-spack-clean>`. <cmd-spack-clean>`.
.. _Misc Cache:
-------------------- --------------------
``misc_cache`` ``misc_cache``
-------------------- --------------------
@@ -336,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:
aliases: aliases:
inst: install -v inst: install -v
-------------------------------
``concretization_cache:enable``
-------------------------------
When set to ``true``, Spack will utilize a cache of solver outputs from
successful concretization runs. When enabled, Spack will check the concretization
cache prior to running the solver. If a previous request to solve a given
problem is present in the cache, Spack will load the concrete specs and other
solver data from the cache rather than running the solver. Specs not previously
concretized will be added to the cache on a successful solve. The cache additionally
holds solver statistics, so commands like ``spack solve`` will still return information
about the run that produced a given solver result.
This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
Cache is cleaned.
When ``false`` or ommitted, all concretization requests will be performed from scatch
----------------------------
``concretization_cache:url``
----------------------------
Path to the location where Spack will root the concretization cache. Currently this only supports
paths on the local filesystem.
Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
------------------------------------
``concretization_cache:entry_limit``
------------------------------------
Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
after each concretization run; if Spack has stored more results than the limit allows, the
oldest concretization results are pruned until 10% of the limit has been removed.
Setting this value to 0 disables the automatic pruning. It is expected users will be
responsible for maintaining this cache.
-----------------------------------
``concretization_cache:size_limit``
-----------------------------------
Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
after each concretization run; if Spack has stored more results than the limit allows, the
oldest concretization results are pruned until 10% of the limit has been removed.
Setting this value to 0 disables the automatic pruning. It is expected users will be
responsible for maintaining this cache.

View File

@@ -14,7 +14,6 @@ case you want to skip directly to specific docs:
* :ref:`compilers.yaml <compiler-config>` * :ref:`compilers.yaml <compiler-config>`
* :ref:`concretizer.yaml <concretizer-options>` * :ref:`concretizer.yaml <concretizer-options>`
* :ref:`config.yaml <config-yaml>` * :ref:`config.yaml <config-yaml>`
* :ref:`include.yaml <include-yaml>`
* :ref:`mirrors.yaml <mirrors>` * :ref:`mirrors.yaml <mirrors>`
* :ref:`modules.yaml <modules>` * :ref:`modules.yaml <modules>`
* :ref:`packages.yaml <packages-config>` * :ref:`packages.yaml <packages-config>`

View File

@@ -670,45 +670,24 @@ This configuration sets the default compiler for all packages to
Included configurations Included configurations
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
Spack environments allow an ``include`` heading in their yaml schema. Spack environments allow an ``include`` heading in their yaml
This heading pulls in external configuration files and applies them to schema. This heading pulls in external configuration files and applies
the environment. them to the environment.
.. code-block:: yaml .. code-block:: yaml
spack: spack:
include: include:
- environment/relative/path/to/config.yaml - relative/path/to/config.yaml
- https://github.com/path/to/raw/config/compilers.yaml - https://github.com/path/to/raw/config/compilers.yaml
- /absolute/path/to/packages.yaml - /absolute/path/to/packages.yaml
- path: /path/to/$os/$target/environment
optional: true
- path: /path/to/os-specific/config-dir
when: os == "ventura"
Included configuration files are required *unless* they are explicitly optional
or the entry's condition evaluates to ``false``. Optional includes are specified
with the ``optional`` clause and conditional with the ``when`` clause. (See
:ref:`include-yaml` for more information on optional and conditional entries.)
Files are listed using paths to individual files or directories containing them.
Path entries may be absolute or relative to the environment or specified as
URLs. URLs to individual files need link to the **raw** form of the file's
contents (e.g., `GitHub
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
or `GitLab
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
supported. Spack-specific, environment and user path variables can be used.
(See :ref:`config-file-variables` for more information.)
.. warning::
Recursive includes are not currently processed in a breadth-first manner
so the value of a configuration option that is altered by multiple included
files may not be what you expect. This will be addressed in a future
update.
Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
schemes). Spack-specific, environment and user path variables may be
used in these paths. See :ref:`config-file-variables` for more information.
^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence Configuration precedence

View File

@@ -1,51 +0,0 @@
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _include-yaml:
===============================
Include Settings (include.yaml)
===============================
Spack allows you to include configuration files through ``include.yaml``.
Using the ``include:`` heading results in pulling in external configuration
information to be used by any Spack command.
Included configuration files are required *unless* they are explicitly optional
or the entry's condition evaluates to ``false``. Optional includes are specified
with the ``optional`` clause and conditional with the ``when`` clause. For
example,
.. code-block:: yaml
include:
- /path/to/a/required/config.yaml
- path: /path/to/$os/$target/config
optional: true
- path: /path/to/os-specific/config-dir
when: os == "ventura"
shows all three. The first entry, ``/path/to/a/required/config.yaml``,
indicates that included ``config.yaml`` file is required (so must exist).
Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
the path is only included if it exists. The condition ``os == "ventura"``
in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
path is only included when the operating system (``os``) is ``ventura``.
The same conditions and variables in `Spec List References
<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
can be used for conditional activation in the ``when`` clauses.
Included files can be specified by path or by their parent directory.
Paths may be absolute, relative (to the configuration file including the path),
or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
schemes) are supported. Spack-specific, environment and user path variables
can be used. (See :ref:`config-file-variables` for more information.)
.. warning::
Recursive includes are not currently processed in a breadth-first manner
so the value of a configuration option that is altered by multiple included
files may not be what you expect. This will be addressed in a future
update.

View File

@@ -71,7 +71,6 @@ or refer to the full manual below.
configuration configuration
config_yaml config_yaml
include_yaml
packages_yaml packages_yaml
build_settings build_settings
environments environments

View File

@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
build group on CDash called "Release Testing" (that group will be created if build group on CDash called "Release Testing" (that group will be created if
it didn't already exist). it didn't already exist).
.. _ci_artifacts:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CI Artifacts Directory Layout
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When running the CI build using the command ``spack ci rebuild`` a number of directories are created for
storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
The top level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories nor does
it require user specified files be written to any specific directory.
------------------------
``concrete_environment``
------------------------
The directory ``concrete_environment`` is used to communicate the ci generate processed ``spack.yaml`` and
the concrete ``spack.lock`` for the CI environment.
--------
``logs``
--------
The directory ``logs`` contains the spack build log, ``spack-build-out.txt``, and the spack build environment
modification file, ``spack-build-mod-env.txt``. Additionally all files specified by the packages ``Builder``
property ``archive_files`` are also copied here (ie. ``CMakeCache.txt`` in ``CMakeBuilder``).
----------------
``reproduction``
----------------
The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
JSON file for the current spec being built, and all of the files written in the artifacts root directory.
The ``repro.json`` file is not versioned and is only designed to work with the version of spack CI was run with.
An example of what a ``repro.json`` may look like is here.
.. code:: json
{
"job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
"job_spec_json": "adios2.json",
"ci_project_dir": "/builds/spack/spack"
}
---------
``tests``
---------
The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
data in it depending on the package that was built and the availability of tests.
-------------
``user_data``
-------------
The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` direcotory.
Users may use this to store additional logs or metrics or other types of files generated by the build job.
------------------------------------- -------------------------------------
Using a custom spack in your pipeline Using a custom spack in your pipeline
------------------------------------- -------------------------------------

View File

@@ -1,13 +1,13 @@
-sphinx==8.2.3
+sphinx==8.1.3
 sphinxcontrib-programoutput==0.18
 sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
-python-levenshtein==0.27.1
+python-levenshtein==0.26.1
 docutils==0.21.2
 pygments==2.19.1
 urllib3==2.3.0
-pytest==8.3.5
+pytest==8.3.4
-isort==6.0.1
+isort==5.13.2
-black==25.1.0
+black==24.10.0
-flake8==7.1.2
+flake8==7.1.1
 mypy==1.11.1

View File

@@ -7,7 +7,6 @@
import fnmatch import fnmatch
import glob import glob
import hashlib import hashlib
import io
import itertools import itertools
import numbers import numbers
import os import os
@@ -21,7 +20,6 @@
from contextlib import contextmanager from contextlib import contextmanager
from itertools import accumulate from itertools import accumulate
from typing import ( from typing import (
IO,
Callable, Callable,
Deque, Deque,
Dict, Dict,
@@ -2456,69 +2454,26 @@ class WindowsSimulatedRPath:
and vis versa. and vis versa.
""" """
def __init__( def __init__(self, package, link_install_prefix=True):
self,
package,
base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
link_install_prefix: bool = True,
):
""" """
Args: Args:
package (spack.package_base.PackageBase): Package requiring links package (spack.package_base.PackageBase): Package requiring links
base_modification_prefix (str|pathlib.Path): Path representation indicating
the root directory in which to establish the simulated rpath, ie where the
symlinks that comprise the "rpath" behavior will be installed.
Note: This is a mutually exclusive option with `link_install_prefix` using
both is an error.
Default: None
link_install_prefix (bool): Link against package's own install or stage root. link_install_prefix (bool): Link against package's own install or stage root.
Packages that run their own executables during build and require rpaths to Packages that run their own executables during build and require rpaths to
the build directory during build time require this option. the build directory during build time require this option. Default: install
Default: install
root root
Note: This is a mutually exclusive option with `base_modification_prefix`, using
both is an error.
""" """
self.pkg = package self.pkg = package
self._addl_rpaths: set[str] = set() self._addl_rpaths = set()
if link_install_prefix and base_modification_prefix:
raise RuntimeError(
"Invalid combination of arguments given to WindowsSimulated RPath.\n"
"Select either `link_install_prefix` to create an install prefix rpath"
" or specify a `base_modification_prefix` for any other link type. "
"Specifying both arguments is invalid."
)
if not (link_install_prefix or base_modification_prefix):
raise RuntimeError(
"Insufficient arguments given to WindowsSimulatedRpath.\n"
"WindowsSimulatedRPath requires one of link_install_prefix"
" or base_modification_prefix to be specified."
" Neither was provided."
)
self.link_install_prefix = link_install_prefix self.link_install_prefix = link_install_prefix
if base_modification_prefix: self._additional_library_dependents = set()
self.base_modification_prefix = pathlib.Path(base_modification_prefix)
else:
self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
self._additional_library_dependents: set[pathlib.Path] = set()
if not self.link_install_prefix:
tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
@property @property
def library_dependents(self): def library_dependents(self):
""" """
Set of directories where package binaries/libraries are located. Set of directories where package binaries/libraries are located.
""" """
base_pths = set() return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
if self.link_install_prefix:
base_pths.add(pathlib.Path(self.pkg.prefix.bin))
base_pths |= self._additional_library_dependents
return base_pths
def add_library_dependent(self, *dest): def add_library_dependent(self, *dest):
""" """
@@ -2534,12 +2489,6 @@ def add_library_dependent(self, *dest):
new_pth = pathlib.Path(pth).parent new_pth = pathlib.Path(pth).parent
else: else:
new_pth = pathlib.Path(pth) new_pth = pathlib.Path(pth)
path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
if not path_is_in_prefix:
raise RuntimeError(
f"Attempting to generate rpath symlink out of rpath context:\
{str(self.base_modification_prefix)}"
)
self._additional_library_dependents.add(new_pth) self._additional_library_dependents.add(new_pth)
@property @property
@@ -2628,33 +2577,6 @@ def establish_link(self):
self._link(library, lib_dir) self._link(library, lib_dir)
def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
"""Establishes a temp Windows simulated rpath for the pkg in the testing directory
so an executable can test the libraries/executables with proper access
to dependent dlls
Note: this is a no-op on all other platforms besides Windows
Args:
pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
test_dir: the testing directory in which we should construct an rpath
"""
# link_install_prefix as false ensures we're not linking into the install prefix
mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
# add the testing directory as a location to install rpath symlinks
mini_rpath.add_library_dependent(test_dir)
# check for whether build_directory is available, if not
# assume the stage root is the build dir
build_dir_attr = getattr(pkg, "build_directory", None)
build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
# add the build dir & build dir bin
mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
mini_rpath.add_rpath(os.path.join(build_directory))
# construct rpath
mini_rpath.establish_link()
@system_path_filter @system_path_filter
@memoized @memoized
def can_access_dir(path): def can_access_dir(path):
@@ -2883,20 +2805,6 @@ def keep_modification_time(*filenames):
os.utime(f, (os.path.getatime(f), mtime)) os.utime(f, (os.path.getatime(f), mtime))
@contextmanager
def temporary_file_position(stream):
orig_pos = stream.tell()
yield
stream.seek(orig_pos)
@contextmanager
def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
with temporary_file_position(stream):
stream.seek(loc, relative_to)
yield
@contextmanager @contextmanager
def temporary_dir( def temporary_dir(
suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None

View File

@@ -11,11 +11,10 @@
import re import re
import sys import sys
import traceback import traceback
import types
import typing import typing
import warnings import warnings
from datetime import datetime, timedelta from datetime import datetime, timedelta
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
# Ignore emacs backups when listing modules # Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$" ignore_modules = r"^\.#|~$"
@@ -708,24 +707,14 @@ def __init__(self, wrapped_object):
class Singleton: class Singleton:
"""Wrapper for lazily initialized singleton objects.""" """Simple wrapper for lazily initialized singleton objects."""
def __init__(self, factory: Callable[[], object]): def __init__(self, factory):
"""Create a new singleton to be inited with the factory function. """Create a new singleton to be inited with the factory function.
Most factories will simply create the object to be initialized and
return it.
In some cases, e.g. when bootstrapping some global state, the singleton
may need to be initialized incrementally. If the factory returns a generator
instead of a regular object, the singleton will assign each result yielded by
the generator to the singleton instance. This allows methods called by
the factory in later stages to refer back to the singleton.
Args: Args:
factory (function): function taking no arguments that creates the factory (function): function taking no arguments that
singleton instance. creates the singleton instance.
""" """
self.factory = factory self.factory = factory
self._instance = None self._instance = None
@@ -733,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
@property @property
def instance(self): def instance(self):
if self._instance is None: if self._instance is None:
instance = self.factory() self._instance = self.factory()
if isinstance(instance, types.GeneratorType):
# if it's a generator, assign every value
for value in instance:
self._instance = value
else:
# if not, just assign the result like a normal singleton
self._instance = instance
return self._instance return self._instance
def __getattr__(self, name): def __getattr__(self, name):
@@ -1100,88 +1080,3 @@ def __set__(self, instance, value):
def factory(self, instance, owner): def factory(self, instance, owner):
raise NotImplementedError("must be implemented by derived classes") raise NotImplementedError("must be implemented by derived classes")
KT = TypeVar("KT")
VT = TypeVar("VT")
class PriorityOrderedMapping(Mapping[KT, VT]):
"""Mapping that iterates over key according to an integer priority. If the priority is
the same for two keys, insertion order is what matters.
The priority is set when the key/value pair is added. If not set, the highest current priority
is used.
"""
_data: Dict[KT, VT]
_priorities: List[Tuple[int, KT]]
def __init__(self) -> None:
self._data = {}
# Tuple of (priority, key)
self._priorities = []
def __getitem__(self, key: KT) -> VT:
return self._data[key]
def __len__(self) -> int:
return len(self._data)
def __iter__(self):
yield from (key for _, key in self._priorities)
def __reversed__(self):
yield from (key for _, key in reversed(self._priorities))
def reversed_keys(self):
"""Iterates over keys from the highest priority, to the lowest."""
return reversed(self)
def reversed_values(self):
"""Iterates over values from the highest priority, to the lowest."""
yield from (self._data[key] for _, key in reversed(self._priorities))
def _highest_priority(self) -> int:
if not self._priorities:
return 0
result, _ = self._priorities[-1]
return result
def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
"""Adds a key/value pair to the mapping, with a specific priority.
If the priority is None, then it is assumed to be the highest priority value currently
in the container.
Raises:
ValueError: when the same priority is already in the mapping
"""
if priority is None:
priority = self._highest_priority()
if key in self._data:
self.remove(key)
self._priorities.append((priority, key))
# We rely on sort being stable
self._priorities.sort(key=lambda x: x[0])
self._data[key] = value
assert len(self._data) == len(self._priorities)
def remove(self, key: KT) -> VT:
"""Removes a key from the mapping.
Returns:
The value associated with the key being removed
Raises:
KeyError: if the key is not in the mapping
"""
if key not in self._data:
raise KeyError(f"cannot find {key}")
popped_item = self._data.pop(key)
self._priorities = [(p, k) for p, k in self._priorities if k != key]
assert len(self._data) == len(self._priorities)
return popped_item

View File

@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
self.src_a = src_a self.src_a = src_a
self.src_b = src_b self.src_b = src_b
def __repr__(self) -> str:
return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
def _samefile(a: str, b: str):
try:
return os.path.samefile(a, b)
except OSError:
return False
class SourceMergeVisitor(BaseDirectoryVisitor): class SourceMergeVisitor(BaseDirectoryVisitor):
""" """
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
- A list of merge conflicts in dst/ - A list of merge conflicts in dst/
""" """
def __init__( def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
):
self.ignore = ignore if ignore is not None else lambda f: False self.ignore = ignore if ignore is not None else lambda f: False
# On case-insensitive filesystems, normalize paths to detect duplications
self.normalize_paths = normalize_paths
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection> # When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
# bit to the relative path in the destination dir. # bit to the relative path in the destination dir.
self.projection: str = "" self.projection: str = ""
@@ -86,88 +71,10 @@ def __init__(
# and can run mkdir in order. # and can run mkdir in order.
self.directories: Dict[str, Tuple[str, str]] = {} self.directories: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files # Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
# are guaranteed to be grouped by src_root in the order they were visited. # are guaranteed to be grouped by src_root in the order they were visited.
self.files: Dict[str, Tuple[str, str]] = {} self.files: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
def _in_directories(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the directory list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._directories_normalized
else:
return proj_rel_path in self.directories
def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the directory that is mapped to a path
"""
if self.normalize_paths:
return self._directories_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.directories[proj_rel_path])
def _del_directory(self, proj_rel_path: str):
"""
Remove a directory from the list of directories
"""
del self.directories[proj_rel_path]
if self.normalize_paths:
del self._directories_normalized[proj_rel_path.lower()]
def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a directory to the list of directories.
Also stores the normalized version for later lookups
"""
self.directories[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
def _in_files(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the files list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._files_normalized
else:
return proj_rel_path in self.files
def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the file that is mapped to a path
"""
if self.normalize_paths:
return self._files_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.files[proj_rel_path])
def _del_file(self, proj_rel_path: str):
"""
Remove a file from the list of files
"""
del self.files[proj_rel_path]
if self.normalize_paths:
del self._files_normalized[proj_rel_path.lower()]
def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a file to the list of files
Also stores the normalized version for later lookups
"""
self.files[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool: def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
""" """
Register a directory if dst / rel_path is not blocked by a file or ignored. Register a directory if dst / rel_path is not blocked by a file or ignored.
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
if self.ignore(rel_path): if self.ignore(rel_path):
# Don't recurse when dir is ignored. # Don't recurse when dir is ignored.
return False return False
elif self._in_files(proj_rel_path): elif proj_rel_path in self.files:
# A file-dir conflict is fatal except if they're the same file (symlinked dir). # Can't create a dir where a file is.
src_a = os.path.join(*self._file(proj_rel_path)) src_a_root, src_a_relpath = self.files[proj_rel_path]
src_b = os.path.join(root, rel_path) self.fatal_conflicts.append(
MergeConflict(
if not _samefile(src_a, src_b): dst=proj_rel_path,
self.fatal_conflicts.append( src_a=os.path.join(src_a_root, src_a_relpath),
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b) src_b=os.path.join(root, rel_path),
) )
return False )
return False
# Remove the link in favor of the dir. elif proj_rel_path in self.directories:
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_directory(proj_rel_path, root, rel_path)
return True
elif self._in_directories(proj_rel_path):
# No new directory, carry on. # No new directory, carry on.
return True return True
else: else:
# Register new directory. # Register new directory.
self._add_directory(proj_rel_path, root, rel_path) self.directories[proj_rel_path] = (root, rel_path)
return True return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool: def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
if handle_as_dir: if handle_as_dir:
return self.before_visit_dir(root, rel_path, depth) return self.before_visit_dir(root, rel_path, depth)
self.visit_file(root, rel_path, depth, symlink=True) self.visit_file(root, rel_path, depth)
return False return False
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None: def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if self.ignore(rel_path): if self.ignore(rel_path):
pass pass
elif self._in_directories(proj_rel_path): elif proj_rel_path in self.directories:
# Can't create a file where a dir is, unless they are the same file (symlinked dir), # Can't create a file where a dir is; fatal error
# in which case we simply drop the symlink in favor of the actual dir. self.fatal_conflicts.append(
src_a = os.path.join(*self._directory(proj_rel_path)) MergeConflict(
src_b = os.path.join(root, rel_path) dst=proj_rel_path,
if not symlink or not _samefile(src_a, src_b): src_a=os.path.join(*self.directories[proj_rel_path]),
self.fatal_conflicts.append( src_b=os.path.join(root, rel_path),
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
) )
elif self._in_files(proj_rel_path): )
elif proj_rel_path in self.files:
# When two files project to the same path, they conflict iff they are distinct. # When two files project to the same path, they conflict iff they are distinct.
# If they are the same (i.e. one links to the other), register regular files rather # If they are the same (i.e. one links to the other), register regular files rather
# than symlinks. The reason is that in copy-type views, we need a copy of the actual # than symlinks. The reason is that in copy-type views, we need a copy of the actual
# file, not the symlink. # file, not the symlink.
src_a = os.path.join(*self._file(proj_rel_path))
src_a = os.path.join(*self.files[proj_rel_path])
src_b = os.path.join(root, rel_path) src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
try:
samefile = os.path.samefile(src_a, src_b)
except OSError:
samefile = False
if not samefile:
# Distinct files produce a conflict. # Distinct files produce a conflict.
self.file_conflicts.append( self.file_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b) MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if not symlink: if not symlink:
# Remove the link in favor of the actual file. The del is necessary to maintain the # Remove the link in favor of the actual file. The del is necessary to maintain the
# order of the files dict, which is grouped by root. # order of the files dict, which is grouped by root.
existing_proj_rel_path, _, _ = self._file(proj_rel_path) del self.files[proj_rel_path]
self._del_file(existing_proj_rel_path) self.files[proj_rel_path] = (root, rel_path)
self._add_file(proj_rel_path, root, rel_path)
else: else:
# Otherwise register this file to be linked. # Otherwise register this file to be linked.
self._add_file(proj_rel_path, root, rel_path) self.files[proj_rel_path] = (root, rel_path)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None: def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
# Treat symlinked files as ordinary files (without "dereferencing") # Treat symlinked files as ordinary files (without "dereferencing")
@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
path = "" path = ""
for part in self.projection.split(os.sep): for part in self.projection.split(os.sep):
path = os.path.join(path, part) path = os.path.join(path, part)
if not self._in_files(path): if path not in self.files:
self._add_directory(path, "<projection>", path) self.directories[path] = ("<projection>", path)
else: else:
# Can't create a dir where a file is. # Can't create a dir where a file is.
_, src_a_root, src_a_relpath = self._file(path) src_a_root, src_a_relpath = self.files[path]
self.fatal_conflicts.append( self.fatal_conflicts.append(
MergeConflict( MergeConflict(
dst=path, dst=path,
@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool: def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir is a file in a src dir, add a conflict, # If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper # and don't traverse deeper
if self.src._in_files(rel_path): if rel_path in self.src.files:
_, src_a_root, src_a_relpath = self.src._file(rel_path) src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append( self.src.fatal_conflicts.append(
MergeConflict( MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path) rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir was also a src dir, remove the mkdir # If destination dir was also a src dir, remove the mkdir
# action, and traverse deeper. # action, and traverse deeper.
if self.src._in_directories(rel_path): if rel_path in self.src.directories:
existing_proj_rel_path, _, _ = self.src._directory(rel_path) del self.src.directories[rel_path]
self.src._del_directory(existing_proj_rel_path)
return True return True
# If the destination dir does not appear in the src dir, # If the destination dir does not appear in the src dir,
@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
be seen as files; we should not accidentally merge be seen as files; we should not accidentally merge
source dir with a symlinked dest dir. source dir with a symlinked dest dir.
""" """
# Always conflict
self.visit_file(root, rel_path, depth) if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if self.src._in_directories(rel_path):
_, src_a_root, src_a_relpath = self.src._directory(rel_path)
self.src.fatal_conflicts.append( self.src.fatal_conflicts.append(
MergeConflict( MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path) rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
) )
) )
elif self.src._in_files(rel_path): if rel_path in self.src.files:
_, src_a_root, src_a_relpath = self.src._file(rel_path) src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append( self.src.fatal_conflicts.append(
MergeConflict( MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path) rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)

View File

@@ -269,7 +269,7 @@ def __init__(
@staticmethod @staticmethod
def _poll_interval_generator( def _poll_interval_generator(
_wait_times: Optional[Tuple[float, float, float]] = None, _wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]: ) -> Generator[float, None, None]:
"""This implements a backoff scheme for polling a contended resource """This implements a backoff scheme for polling a contended resource
by suggesting a succession of wait times between polls. by suggesting a succession of wait times between polls.

View File

@@ -2,7 +2,8 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Utility classes for logging the output of blocks of code.""" """Utility classes for logging the output of blocks of code.
"""
import atexit import atexit
import ctypes import ctypes
import errno import errno
@@ -343,6 +344,26 @@ def close(self):
self.file.close() self.file.close()
@contextmanager
def replace_environment(env):
"""Replace the current environment (`os.environ`) with `env`.
If `env` is empty (or None), this unsets all current environment
variables.
"""
env = env or {}
old_env = os.environ.copy()
try:
os.environ.clear()
for name, val in env.items():
os.environ[name] = val
yield
finally:
os.environ.clear()
for name, val in old_env.items():
os.environ[name] = val
def log_output(*args, **kwargs): def log_output(*args, **kwargs):
"""Context manager that logs its output to a file. """Context manager that logs its output to a file.
@@ -426,6 +447,7 @@ def __init__(
self.echo = echo self.echo = echo
self.debug = debug self.debug = debug
self.buffer = buffer self.buffer = buffer
self.env = env # the environment to use for _writer_daemon
self.filter_fn = filter_fn self.filter_fn = filter_fn
self._active = False # used to prevent re-entry self._active = False # used to prevent re-entry
@@ -497,20 +519,21 @@ def __enter__(self):
# just don't forward input if this fails # just don't forward input if this fails
pass pass
self.process = multiprocessing.Process( with replace_environment(self.env):
target=_writer_daemon, self.process = multiprocessing.Process(
args=( target=_writer_daemon,
input_fd, args=(
read_fd, input_fd,
self.write_fd, read_fd,
self.echo, self.write_fd,
self.log_file, self.echo,
child_pipe, self.log_file,
self.filter_fn, child_pipe,
), self.filter_fn,
) ),
self.process.daemon = True # must set before start() )
self.process.start() self.process.daemon = True # must set before start()
self.process.start()
finally: finally:
if input_fd: if input_fd:
@@ -706,7 +729,10 @@ class winlog:
Does not support the use of 'v' toggling as nixlog does. Does not support the use of 'v' toggling as nixlog does.
""" """
def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None): def __init__(
self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
):
self.env = env
self.debug = debug self.debug = debug
self.echo = echo self.echo = echo
self.logfile = file_like self.logfile = file_like
@@ -763,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
reader.close() reader.close()
self._active = True self._active = True
self._thread = Thread( with replace_environment(self.env):
target=background_reader, args=(self.reader, self.echo_writer, self._kill) self._thread = Thread(
) target=background_reader, args=(self.reader, self.echo_writer, self._kill)
self._thread.start() )
self._thread.start()
return self return self
def __exit__(self, exc_type, exc_val, exc_tb): def __exit__(self, exc_type, exc_val, exc_tb):

View File

@@ -10,21 +10,9 @@
import spack.util.git import spack.util.git
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0.dev0" __version__ = "0.24.0.dev0"
spack_version = __version__ spack_version = __version__
#: The current Package API version implemented by this version of Spack. The Package API defines
#: the Python interface for packages as well as the layout of package repositories. The minor
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (1, 0)
#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
#: compatibility with vX.0.
min_package_api_version = (1, 0)
def __try_int(v): def __try_int(v):
try: try:
@@ -91,6 +79,4 @@ def get_short_version() -> str:
"get_version", "get_version",
"get_spack_commit", "get_spack_commit",
"get_short_version", "get_short_version",
"package_api_version",
"min_package_api_version",
] ]

View File

@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
for dep_name, dep in deps_by_name.items(): for dep_name, dep in deps_by_name.items():
def check_virtual_with_variants(spec, msg): def check_virtual_with_variants(spec, msg):
if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants: if not spec.virtual or not spec.variants:
return return
error = error_cls( error = error_cls(
f"{pkg_name}: {msg}", f"{pkg_name}: {msg}",

View File

@@ -923,7 +923,7 @@ class FileTypes:
UNKNOWN = 2 UNKNOWN = 2
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]") NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
def file_type(f: IO[bytes]) -> int: def file_type(f: IO[bytes]) -> int:
@@ -2529,10 +2529,10 @@ def install_root_node(
allow_missing: when true, allows installing a node with missing dependencies allow_missing: when true, allows installing a node with missing dependencies
""" """
# Early termination # Early termination
if spec.external or not spec.concrete: if spec.external or spec.virtual:
warnings.warn("Skipping external or abstract spec {0}".format(spec.format())) warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
return return
elif spec.installed and not force: elif spec.concrete and spec.installed and not force:
warnings.warn("Package for spec {0} already installed.".format(spec.format())) warnings.warn("Package for spec {0} already installed.".format(spec.format()))
return return

View File

@@ -292,12 +292,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Install the spec that should make the module importable # Install the spec that should make the module importable
with spack.config.override(self.mirror_scope): with spack.config.override(self.mirror_scope):
PackageInstaller( PackageInstaller([concrete_spec.package], fail_fast=True).install()
[concrete_spec.package],
fail_fast=True,
package_use_cache=False,
dependencies_use_cache=False,
).install()
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info): if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
self.last_search = info self.last_search = info
@@ -367,7 +362,6 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
for current_config in bootstrapping_sources(): for current_config in bootstrapping_sources():
if not source_is_enabled(current_config): if not source_is_enabled(current_config):
continue continue
with exception_handler.forward(current_config["name"], Exception): with exception_handler.forward(current_config["name"], Exception):
if create_bootstrapper(current_config).try_import(module, abstract_spec): if create_bootstrapper(current_config).try_import(module, abstract_spec):
return return

View File

@@ -12,7 +12,6 @@
import spack.phase_callbacks import spack.phase_callbacks
import spack.spec import spack.spec
import spack.util.prefix import spack.util.prefix
from spack.directives import depends_on
from .cmake import CMakeBuilder, CMakePackage from .cmake import CMakeBuilder, CMakePackage
@@ -372,10 +371,6 @@ class CachedCMakePackage(CMakePackage):
CMakeBuilder = CachedCMakeBuilder CMakeBuilder = CachedCMakeBuilder
# These dependencies are assumed in the builder
depends_on("c", type="build")
depends_on("cxx", type="build")
def flag_handler(self, name, flags): def flag_handler(self, name, flags):
if name in ("cflags", "cxxflags", "cppflags", "fflags"): if name in ("cflags", "cxxflags", "cppflags", "fflags"):
return None, None, None # handled in the cmake cache return None, None, None # handled in the cmake cache

View File

@@ -70,16 +70,10 @@ def build_directory(self):
"""Return the directory containing the main Cargo.toml.""" """Return the directory containing the main Cargo.toml."""
return self.pkg.stage.source_path return self.pkg.stage.source_path
@property
def std_build_args(self):
"""Standard arguments for ``cargo build`` provided as a property for
convenience of package writers."""
return ["-j", str(self.pkg.module.make_jobs)]
@property @property
def build_args(self): def build_args(self):
"""Arguments for ``cargo build``.""" """Arguments for ``cargo build``."""
return [] return ["-j", str(self.pkg.module.make_jobs)]
@property @property
def check_args(self): def check_args(self):
@@ -94,9 +88,7 @@ def build(
) -> None: ) -> None:
"""Runs ``cargo install`` in the source directory""" """Runs ``cargo install`` in the source directory"""
with fs.working_dir(self.build_directory): with fs.working_dir(self.build_directory):
pkg.module.cargo( pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
"install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
)
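With the split above, `-j <jobs>` now comes from `std_build_args`, while `build_args` is left empty for packages to extend; `build()` passes both to `cargo install`. A rough sketch of a hypothetical package overriding only `build_args` (name, URL, and checksum are placeholders):

```python
from spack.package import *


class MyRustTool(CargoPackage):
    """Hypothetical package: adds its own cargo flags without losing the parallel-jobs default."""

    homepage = "https://example.com"
    url = "https://example.com/my-rust-tool-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    @property
    def build_args(self):
        # Appended after std_build_args, so the effective command is roughly:
        #   cargo install --root out --path . -j <jobs> --locked --features cli
        return ["--locked", "--features", "cli"]
```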
def install( def install(
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix

View File

@@ -11,7 +11,6 @@
from typing import Any, List, Optional, Tuple from typing import Any, List, Optional, Tuple
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
from llnl.util import tty
from llnl.util.lang import stable_partition from llnl.util.lang import stable_partition
import spack.builder import spack.builder
@@ -459,23 +458,19 @@ def cmake(
) -> None: ) -> None:
"""Runs ``cmake`` in the build directory""" """Runs ``cmake`` in the build directory"""
if spec.is_develop: # skip cmake phase if it is an incremental develop build
# skip cmake phase if it is an incremental develop build # These are the files that will re-run CMake that are generated from a successful
# configure step
# Determine the files that will re-run CMake that are generated from a successful primary_generator = _extract_primary_generator(self.generator)
# configure step based on state if primary_generator == "Unix Makefiles":
primary_generator = _extract_primary_generator(self.generator)
configure_artifact = "Makefile" configure_artifact = "Makefile"
if primary_generator == "Ninja": elif primary_generator == "Ninja":
configure_artifact = "ninja.build" configure_artifact = "ninja.build"
if os.path.isfile(os.path.join(self.build_directory, configure_artifact)): if spec.is_develop and os.path.isfile(
tty.msg( os.path.join(self.build_directory, configure_artifact)
"Incremental build criteria satisfied." ):
"Skipping CMake configure step. To force configuration run" return
f" `spack clean {pkg.name}`"
)
return
options = self.std_cmake_args options = self.std_cmake_args
options += self.cmake_args() options += self.cmake_args()
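The new skip only applies to develop builds: if an earlier configure left a `Makefile` (Unix Makefiles) or `ninja.build` (Ninja) in the build directory, the CMake configure step is skipped and the log points at `spack clean`. A hedged sketch of the workflow inside an environment with a develop spec (the package name is made up):

```console
$ spack develop mypkg@main
$ spack install            # first build: CMake configure runs and leaves the configure artifact
$ spack install            # rebuild: artifact found, the CMake configure step is skipped
$ spack clean mypkg        # force the next install to run the configure step again
```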

View File

@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts """Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage. and is meant to unify and facilitate its usage.
Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix Maintainers: ax3l, Rombur, davidbeckingsale
""" """
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,12 +47,6 @@ class CudaPackage(PackageBase):
"89", "89",
"90", "90",
"90a", "90a",
"100",
"100a",
"101",
"101a",
"120",
"120a",
) )
# FIXME: keep cuda and cuda_arch separate to make usage easier until # FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -105,56 +99,39 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
# CUDA version vs Architecture # CUDA version vs Architecture
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
# Tesla support:
depends_on("cuda@:6.0", when="cuda_arch=10") depends_on("cuda@:6.0", when="cuda_arch=10")
depends_on("cuda@:6.5", when="cuda_arch=11") depends_on("cuda@:6.5", when="cuda_arch=11")
depends_on("cuda@2.1:6.5", when="cuda_arch=12") depends_on("cuda@2.1:6.5", when="cuda_arch=12")
depends_on("cuda@2.1:6.5", when="cuda_arch=13") depends_on("cuda@2.1:6.5", when="cuda_arch=13")
# Fermi support:
depends_on("cuda@3.0:8.0", when="cuda_arch=20") depends_on("cuda@3.0:8.0", when="cuda_arch=20")
depends_on("cuda@3.2:8.0", when="cuda_arch=21") depends_on("cuda@3.2:8.0", when="cuda_arch=21")
# Kepler support:
depends_on("cuda@5.0:10.2", when="cuda_arch=30") depends_on("cuda@5.0:10.2", when="cuda_arch=30")
depends_on("cuda@5.0:10.2", when="cuda_arch=32") depends_on("cuda@5.0:10.2", when="cuda_arch=32")
depends_on("cuda@5.0:11.8", when="cuda_arch=35") depends_on("cuda@5.0:11.8", when="cuda_arch=35")
depends_on("cuda@6.5:11.8", when="cuda_arch=37") depends_on("cuda@6.5:11.8", when="cuda_arch=37")
# Maxwell support:
depends_on("cuda@6.0:", when="cuda_arch=50") depends_on("cuda@6.0:", when="cuda_arch=50")
depends_on("cuda@6.5:", when="cuda_arch=52") depends_on("cuda@6.5:", when="cuda_arch=52")
depends_on("cuda@6.5:", when="cuda_arch=53") depends_on("cuda@6.5:", when="cuda_arch=53")
# Pascal support:
depends_on("cuda@8.0:", when="cuda_arch=60") depends_on("cuda@8.0:", when="cuda_arch=60")
depends_on("cuda@8.0:", when="cuda_arch=61") depends_on("cuda@8.0:", when="cuda_arch=61")
depends_on("cuda@8.0:", when="cuda_arch=62") depends_on("cuda@8.0:", when="cuda_arch=62")
# Volta support:
depends_on("cuda@9.0:", when="cuda_arch=70") depends_on("cuda@9.0:", when="cuda_arch=70")
# Turing support:
depends_on("cuda@9.0:", when="cuda_arch=72") depends_on("cuda@9.0:", when="cuda_arch=72")
depends_on("cuda@10.0:", when="cuda_arch=75") depends_on("cuda@10.0:", when="cuda_arch=75")
# Ampere support:
depends_on("cuda@11.0:", when="cuda_arch=80") depends_on("cuda@11.0:", when="cuda_arch=80")
depends_on("cuda@11.1:", when="cuda_arch=86") depends_on("cuda@11.1:", when="cuda_arch=86")
depends_on("cuda@11.4:", when="cuda_arch=87") depends_on("cuda@11.4:", when="cuda_arch=87")
# Ada support:
depends_on("cuda@11.8:", when="cuda_arch=89") depends_on("cuda@11.8:", when="cuda_arch=89")
# Hopper support:
depends_on("cuda@12.0:", when="cuda_arch=90") depends_on("cuda@12.0:", when="cuda_arch=90")
depends_on("cuda@12.0:", when="cuda_arch=90a") depends_on("cuda@12.0:", when="cuda_arch=90a")
# Blackwell support:
depends_on("cuda@12.8:", when="cuda_arch=100")
depends_on("cuda@12.8:", when="cuda_arch=100a")
depends_on("cuda@12.8:", when="cuda_arch=101")
depends_on("cuda@12.8:", when="cuda_arch=101a")
depends_on("cuda@12.8:", when="cuda_arch=120")
depends_on("cuda@12.8:", when="cuda_arch=120a")
# From the NVIDIA install guide we know of conflicts for particular # From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers # platforms (linux, darwin), architectures (x86, powerpc) and compilers
# (gcc, clang). We don't restrict %gcc and %clang conflicts to # (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -186,7 +163,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8") conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3") conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6") conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0") conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5") conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7") conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -195,7 +171,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%clang@17:", when="+cuda ^cuda@:12.3") conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5") conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
conflicts("%clang@19:", when="+cuda ^cuda@:12.6") conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114 # https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0") conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -48,9 +48,6 @@ class MesonPackage(spack.package_base.PackageBase):
variant("strip", default=False, description="Strip targets on install") variant("strip", default=False, description="Strip targets on install")
depends_on("meson", type="build") depends_on("meson", type="build")
depends_on("ninja", type="build") depends_on("ninja", type="build")
# Meson uses pkg-config for dependency detection, and this dependency is
# often overlooked by packages that use meson as a build system.
depends_on("pkgconfig", type="build")
# Python detection in meson requires distutils to be importable, but distutils no longer # Python detection in meson requires distutils to be importable, but distutils no longer
# exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement, # exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
# because the distutils-precedence.pth startup file that setuptools ships with is not run # because the distutils-precedence.pth startup file that setuptools ships with is not run

View File

@@ -142,7 +142,7 @@ def setup_run_environment(self, env):
$ source {prefix}/{component}/{version}/env/vars.sh $ source {prefix}/{component}/{version}/env/vars.sh
""" """
# Only if environment modifications are desired (default is +envmods) # Only if environment modifications are desired (default is +envmods)
if "+envmods" in self.spec: if "~envmods" not in self.spec:
env.extend( env.extend(
EnvironmentModifications.from_sourcing_file( EnvironmentModifications.from_sourcing_file(
self.component_prefix.env.join("vars.sh"), *self.env_script_args self.component_prefix.env.join("vars.sh"), *self.env_script_args

View File

@@ -14,9 +14,8 @@
import zipfile import zipfile
from collections import namedtuple from collections import namedtuple
from typing import Callable, Dict, List, Set from typing import Callable, Dict, List, Set
from urllib.request import Request from urllib.request import HTTPHandler, Request, build_opener
import llnl.path
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize from llnl.util.tty.color import cescape, colorize
@@ -63,8 +62,6 @@
PushResult = namedtuple("PushResult", "success url") PushResult = namedtuple("PushResult", "success url")
urlopen = web_util.urlopen # alias for mocking in tests
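The module-level alias above exists so tests can replace the network call in one place instead of patching `spack.util.web` globally. A sketch of how a pytest might lean on it (test body and canned response are invented):

```python
import io

import spack.ci as ci


def test_artifact_download_is_mockable(monkeypatch):
    """Hypothetical test: swap spack.ci.urlopen for a canned, file-like response."""

    def fake_urlopen(request, timeout=None):
        # Behaves like the real urlopen: returns a readable, binary file-like object.
        return io.BytesIO(b"fake zip bytes")

    monkeypatch.setattr(ci, "urlopen", fake_urlopen)
    # Any code under test that goes through ci.urlopen(...) now reads the canned bytes
    # without touching the network.
```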
def get_change_revisions(): def get_change_revisions():
"""If this is a git repo get the revisions to use when checking """If this is a git repo get the revisions to use when checking
@@ -84,9 +81,6 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
whether or not the stack was changed. Returns True if the environment whether or not the stack was changed. Returns True if the environment
manifest changed between the provided revisions (or additionally if the manifest changed between the provided revisions (or additionally if the
`.gitlab-ci.yml` file itself changed). Returns False otherwise.""" `.gitlab-ci.yml` file itself changed). Returns False otherwise."""
# git returns posix paths always, normalize input to be compatible
# with that
env_path = llnl.path.convert_to_posix_path(env_path)
git = spack.util.git.git() git = spack.util.git.git()
if git: if git:
with fs.working_dir(spack.paths.prefix): with fs.working_dir(spack.paths.prefix):
@@ -616,7 +610,7 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir) copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
def download_and_extract_artifacts(url, work_dir) -> str: def download_and_extract_artifacts(url, work_dir):
"""Look for gitlab artifacts.zip at the given url, and attempt to download """Look for gitlab artifacts.zip at the given url, and attempt to download
and extract the contents into the given work_dir and extract the contents into the given work_dir
@@ -624,10 +618,6 @@ def download_and_extract_artifacts(url, work_dir) -> str:
url (str): Complete url to artifacts.zip file url (str): Complete url to artifacts.zip file
work_dir (str): Path to destination where artifacts should be extracted work_dir (str): Path to destination where artifacts should be extracted
Output:
Artifacts root path relative to the archive root
""" """
tty.msg(f"Fetching artifacts from: {url}") tty.msg(f"Fetching artifacts from: {url}")
@@ -637,33 +627,31 @@ def download_and_extract_artifacts(url, work_dir) -> str:
if token: if token:
headers["PRIVATE-TOKEN"] = token headers["PRIVATE-TOKEN"] = token
request = Request(url, headers=headers, method="GET") opener = build_opener(HTTPHandler)
request = Request(url, headers=headers)
request.get_method = lambda: "GET"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code != 200:
msg = f"Error response code ({response_code}) in reproduce_ci_job"
raise SpackError(msg)
artifacts_zip_path = os.path.join(work_dir, "artifacts.zip") artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
os.makedirs(work_dir, exist_ok=True)
try: if not os.path.exists(work_dir):
response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT) os.makedirs(work_dir)
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
with zipfile.ZipFile(artifacts_zip_path) as zip_file: with open(artifacts_zip_path, "wb") as out_file:
zip_file.extractall(work_dir) shutil.copyfileobj(response, out_file)
# Get the artifact root
artifact_root = ""
for f in zip_file.filelist:
if "spack.lock" in f.filename:
artifact_root = os.path.dirname(os.path.dirname(f.filename))
break
except OSError as e:
raise SpackError(f"Error fetching artifacts: {e}")
finally:
try:
os.remove(artifacts_zip_path)
except FileNotFoundError:
# If the file doesn't exist we are already raising
pass
return artifact_root zip_file = zipfile.ZipFile(artifacts_zip_path)
zip_file.extractall(work_dir)
zip_file.close()
os.remove(artifacts_zip_path)
def get_spack_info(): def get_spack_info():
@@ -777,7 +765,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
return True return True
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head): def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
"""Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job, """Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
attempt to setup an environment in which the failure can be reproduced attempt to setup an environment in which the failure can be reproduced
locally. This entails the following: locally. This entails the following:
@@ -791,11 +779,8 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
commands to run to reproduce the build once inside the container. commands to run to reproduce the build once inside the container.
""" """
work_dir = os.path.realpath(work_dir) work_dir = os.path.realpath(work_dir)
if os.path.exists(work_dir) and os.listdir(work_dir):
raise SpackError(f"Cannot run reproducer in non-emptry working dir:\n {work_dir}")
platform_script_ext = "ps1" if IS_WINDOWS else "sh" platform_script_ext = "ps1" if IS_WINDOWS else "sh"
artifact_root = download_and_extract_artifacts(url, work_dir) download_and_extract_artifacts(url, work_dir)
gpg_path = None gpg_path = None
if gpg_url: if gpg_url:
@@ -857,9 +842,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
with open(repro_file, encoding="utf-8") as fd: with open(repro_file, encoding="utf-8") as fd:
repro_details = json.load(fd) repro_details = json.load(fd)
spec_file = fs.find(work_dir, repro_details["job_spec_json"])[0]
reproducer_spec = spack.spec.Spec.from_specfile(spec_file)
repro_dir = os.path.dirname(repro_file) repro_dir = os.path.dirname(repro_file)
rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep) rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)
@@ -920,20 +902,17 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
commit_regex = re.compile(r"commit\s+([^\s]+)") commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)") merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
if use_local_head: # Try the more specific merge commit regex first
commit_1 = "HEAD" m = merge_commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)
else: else:
# Try the more specific merge commit regex first # Not a merge commit, just get the commit sha
m = merge_commit_regex.search(spack_info) m = commit_regex.search(spack_info)
if m: if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1) commit_1 = m.group(1)
commit_2 = m.group(2)
else:
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
commit_1 = m.group(1)
setup_result = False setup_result = False
if commit_1: if commit_1:
@@ -1008,8 +987,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False "entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
) )
# Attempt to create a unique name for the reproducer container
container_suffix = "_" + reproducer_spec.dag_hash() if reproducer_spec else ""
docker_command = [ docker_command = [
runtime, runtime,
"run", "run",
@@ -1017,14 +994,14 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
"-t", "-t",
"--rm", "--rm",
"--name", "--name",
f"spack_reproducer{container_suffix}", "spack_reproducer",
"-v", "-v",
":".join([work_dir, mounted_workdir, "Z"]), ":".join([work_dir, mounted_workdir, "Z"]),
"-v", "-v",
":".join( ":".join(
[ [
os.path.join(work_dir, artifact_root), os.path.join(work_dir, "jobs_scratch_dir"),
os.path.join(mount_as_dir, artifact_root), os.path.join(mount_as_dir, "jobs_scratch_dir"),
"Z", "Z",
] ]
), ),

View File

@@ -4,7 +4,7 @@
import re import re
import sys import sys
from typing import Dict, Optional, Tuple from typing import Dict, Optional
import llnl.string import llnl.string
import llnl.util.lang import llnl.util.lang
@@ -181,11 +181,7 @@ def checksum(parser, args):
print() print()
if args.add_to_package: if args.add_to_package:
path = spack.repo.PATH.filename_for_package_name(pkg.name) add_versions_to_package(pkg, version_lines, args.batch)
num_versions_added = add_versions_to_pkg(path, version_lines)
tty.msg(f"Added {num_versions_added} new versions to {pkg.name} in {path}")
if not args.batch and sys.stdin.isatty():
editor(path)
def print_checksum_status(pkg: PackageBase, version_hashes: dict): def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -231,9 +227,20 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
tty.die("Invalid checksums found.") tty.die("Invalid checksums found.")
def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]: def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
"""Returns a tuple of number of versions added and the package's modified contents.""" """
Add checksummed versions to a package's instructions and open a user's
editor so they may double check the work of the function.
Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_lines (str): A string of rendered version lines.
"""
# Get filename and path for package
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = 0 num_versions_added = 0
version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))") version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)') version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
@@ -245,34 +252,33 @@ def _update_version_statements(package_src: str, version_lines: str) -> Tuple[in
if match: if match:
new_versions.append((Version(match.group(1)), ver_line)) new_versions.append((Version(match.group(1)), ver_line))
split_contents = version_statement_re.split(package_src) with open(filename, "r+", encoding="utf-8") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)
for i, subsection in enumerate(split_contents): for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit # If there are no more versions to add we should exit
if len(new_versions) <= 0: if len(new_versions) <= 0:
break break
# Check if the section contains a version # Check if the section contains a version
contents_version = version_re.match(subsection) contents_version = version_re.match(subsection)
if contents_version is not None: if contents_version is not None:
parsed_version = Version(contents_version.group(1)) parsed_version = Version(contents_version.group(1))
if parsed_version < new_versions[0][0]: if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"] split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
num_versions_added += 1 num_versions_added += 1
elif parsed_version == new_versions[0][0]: elif parsed_version == new_versions[0][0]:
new_versions.pop(0) new_versions.pop(0)
return num_versions_added, "".join(split_contents) # Seek back to the start of the file so we can rewrite the file contents.
f.seek(0)
f.writelines("".join(split_contents))
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")
def add_versions_to_pkg(path: str, version_lines: str) -> int: if sys.stdout.isatty() and not is_batch:
"""Add new versions to a package.py file. Returns the number of versions added.""" editor(filename)
with open(path, "r", encoding="utf-8") as f:
package_src = f.read()
num_versions_added, package_src = _update_version_statements(package_src, version_lines)
if num_versions_added > 0:
with open(path, "w", encoding="utf-8") as f:
f.write(package_src)
return num_versions_added
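Because the version splicing is now a pure function over the file contents, it can be exercised without touching disk. A rough illustration (package text and checksums are placeholders, and the exact `# FIXME` markup follows the implementation above):

```python
from spack.cmd.checksum import _update_version_statements

package_src = (
    "class Example(Package):\n"
    '    version("1.1.0", sha256="b" * 64)\n'
    '    version("1.0.0", sha256="a" * 64)\n'
)
version_lines = '    version("1.2.0", sha256="c" * 64)'

added, new_src = _update_version_statements(package_src, version_lines)
print(added)    # 1 -> the 1.2.0 statement is spliced in just above the first older version
print(new_src)  # the same source with the new version() call plus a "# FIXME" marker
```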

View File

@@ -176,11 +176,6 @@ def setup_parser(subparser):
reproduce.add_argument( reproduce.add_argument(
"-s", "--autostart", help="Run docker reproducer automatically", action="store_true" "-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
) )
reproduce.add_argument(
"--use-local-head",
help="Use the HEAD of the local Spack instead of reproducing a commit",
action="store_true",
)
gpg_group = reproduce.add_mutually_exclusive_group(required=False) gpg_group = reproduce.add_mutually_exclusive_group(required=False)
gpg_group.add_argument( gpg_group.add_argument(
"--gpg-file", help="Path to public GPG key for validating binary cache installs" "--gpg-file", help="Path to public GPG key for validating binary cache installs"
@@ -613,12 +608,7 @@ def ci_reproduce(args):
gpg_key_url = None gpg_key_url = None
return spack_ci.reproduce_ci_job( return spack_ci.reproduce_ci_job(
args.job_url, args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
args.working_dir,
args.autostart,
gpg_key_url,
args.runtime,
args.use_local_head,
) )

View File

@@ -528,6 +528,7 @@ def __call__(self, parser, namespace, values, option_string):
# the const from the constructor or a value from the CLI. # the const from the constructor or a value from the CLI.
# Note that this is only called if the argument is actually # Note that this is only called if the argument is actually
# specified on the command line. # specified on the command line.
spack.config.CONFIG.ensure_scope_ordering()
spack.config.set(self.config_path, self.const, scope="command_line") spack.config.set(self.config_path, self.const, scope="command_line")

View File

@@ -350,12 +350,9 @@ def _config_change(config_path, match_spec_str=None):
if spack.config.get(key_path, scope=scope): if spack.config.get(key_path, scope=scope):
ideal_scope_to_modify = scope ideal_scope_to_modify = scope
break break
# If we find our key in a specific scope, that's the one we want
# to modify. Otherwise we use the default write scope.
write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()
update_path = f"{key_path}:[{str(spec)}]" update_path = f"{key_path}:[{str(spec)}]"
spack.config.add(update_path, scope=write_scope) spack.config.add(update_path, scope=ideal_scope_to_modify)
else: else:
raise ValueError("'config change' can currently only change 'require' sections") raise ValueError("'config change' can currently only change 'require' sections")

View File

@@ -2,11 +2,23 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform import platform
import re
import sys
from datetime import datetime
from glob import glob
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack import spack
import spack.paths
import spack.platforms import spack.platforms
import spack.spec import spack.spec
import spack.store
import spack.util.git
from spack.util.executable import which
description = "debugging commands for troubleshooting Spack" description = "debugging commands for troubleshooting Spack"
section = "developer" section = "developer"
@@ -15,9 +27,63 @@
def setup_parser(subparser): def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command") sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
sp.add_parser("report", help="print information useful for bug reports") sp.add_parser("report", help="print information useful for bug reports")
def _debug_tarball_suffix():
now = datetime.now()
suffix = now.strftime("%Y-%m-%d-%H%M%S")
git = spack.util.git.git()
if not git:
return "nobranch-nogit-%s" % suffix
with working_dir(spack.paths.prefix):
if not os.path.isdir(".git"):
return "nobranch.nogit.%s" % suffix
# Get symbolic branch name and strip any special chars (mainly '/')
symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
symbolic = re.sub(r"[^\w.-]", "-", symbolic)
# Get the commit hash too.
commit = git("rev-parse", "--short", "HEAD", output=str).strip()
if symbolic == commit:
return "nobranch.%s.%s" % (commit, suffix)
else:
return "%s.%s.%s" % (symbolic, commit, suffix)
def create_db_tarball(args):
tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(str(spack.store.STORE.root))
transform_args = []
# Currently --transform and -s are not supported by Windows native tar
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
elif sys.platform != "win32":
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(str(spack.store.STORE.root))
with working_dir(wd):
files = [spack.store.STORE.db._index_path]
files += glob("%s/*/*/*/.spack/spec.json" % base)
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]
args = ["-czf", tarball_path]
args += transform_args
args += files
tar(*args)
tty.msg("Created %s" % tarball_name)
def report(args): def report(args):
host_platform = spack.platforms.host() host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system() host_os = host_platform.default_operating_system()
@@ -29,5 +95,5 @@ def report(args):
def debug(parser, args): def debug(parser, args):
if args.debug_command == "report": action = {"create-db-tarball": create_db_tarball, "report": report}
report(args) action[args.debug_command](args)

View File

@@ -9,9 +9,9 @@
import spack.cmd import spack.cmd
import spack.environment as ev import spack.environment as ev
import spack.package_base
import spack.store import spack.store
from spack.cmd.common import arguments from spack.cmd.common import arguments
from spack.solver.input_analysis import create_graph_analyzer
description = "show dependencies of a package" description = "show dependencies of a package"
section = "basic" section = "basic"
@@ -68,17 +68,15 @@ def dependencies(parser, args):
else: else:
spec = specs[0] spec = specs[0]
dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies( dependencies = spack.package_base.possible_dependencies(
spec, spec,
transitive=args.transitive, transitive=args.transitive,
expand_virtuals=args.expand_virtuals, expand_virtuals=args.expand_virtuals,
allowed_deps=args.deptype, depflag=args.deptype,
) )
if not args.expand_virtuals:
dependencies.update(virtuals)
if spec.name in dependencies: if spec.name in dependencies:
dependencies.remove(spec.name) del dependencies[spec.name]
if dependencies: if dependencies:
colify(sorted(dependencies)) colify(sorted(dependencies))

View File

@@ -125,7 +125,7 @@ def develop(parser, args):
version = spec.versions.concrete_range_as_version version = spec.versions.concrete_range_as_version
if not version: if not version:
# look up the maximum version so infinity versions are preferred for develop # look up the maximum version so infinity versions are preferred for develop
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys()) version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}") tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version]) spec.versions = spack.version.VersionList([version])

View File

@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
package does not explicitly forbid redistributing source.""" package does not explicitly forbid redistributing source."""
if self.private: if self.private:
return True return True
elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x): elif x.package_class.redistribute_source(x):
return True return True
else: else:
tty.debug( tty.debug(

View File

@@ -41,11 +41,7 @@ def providers(parser, args):
specs = spack.cmd.parse_specs(args.virtual_package) specs = spack.cmd.parse_specs(args.virtual_package)
# Check prerequisites # Check prerequisites
non_virtual = [ non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
str(s)
for s in specs
if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
]
if non_virtual: if non_virtual:
msg = "non-virtual specs cannot be part of the query " msg = "non-virtual specs cannot be part of the query "
msg += "[{0}]\n".format(", ".join(non_virtual)) msg += "[{0}]\n".format(", ".join(non_virtual))

View File

@@ -6,7 +6,7 @@
import os import os
import re import re
import sys import sys
from itertools import islice, zip_longest from itertools import zip_longest
from typing import Dict, List, Optional from typing import Dict, List, Optional
import llnl.util.tty as tty import llnl.util.tty as tty
@@ -423,8 +423,7 @@ def _run_import_check(
continue continue
for m in is_abs_import.finditer(contents): for m in is_abs_import.finditer(contents):
# Find at most two occurrences: the first is the import itself, the second is its usage. if contents.count(m.group(1)) == 1:
if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
to_remove.append(m.group(0)) to_remove.append(m.group(0))
exit_code = 1 exit_code = 1
print(f"{pretty_path}: redundant import: {m.group(1)}", file=out) print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
@@ -439,7 +438,7 @@ def _run_import_check(
module = _module_part(root, m.group(0)) module = _module_part(root, m.group(0))
if not module or module in to_add: if not module or module in to_add:
continue continue
if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents): if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
continue continue
to_add.add(module) to_add.add(module)
exit_code = 1 exit_code = 1
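The move from `contents.count(...)` to a bounded regex scan matters because a plain substring count treats `spack.repository` as a use of `spack.repo`, hiding a redundant import. A small self-contained illustration of the new check (the scanned snippet is invented):

```python
import re
from itertools import islice

contents = "import spack.repo\n\nthing = spack.repository.something()\n"
module = "spack.repo"

# Find at most two occurrences of the module name not followed by a word character:
# the first is the import statement itself, a second one would be an actual usage.
matches = list(islice(re.finditer(rf"{re.escape(module)}(?!\w)", contents), 2))
print(len(matches))            # 1 -> only the import itself, so it is flagged as redundant
print(contents.count(module))  # 2 -> the old substring count is fooled by "spack.repository"
```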

View File

@@ -252,9 +252,7 @@ def has_test_and_tags(pkg_class):
hashes = env.all_hashes() if env else None hashes = env.all_hashes() if env else None
specs = spack.store.STORE.db.query(hashes=hashes) specs = spack.store.STORE.db.query(hashes=hashes)
specs = list( specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
)
spack.cmd.display_specs(specs, long=True) spack.cmd.display_specs(specs, long=True)

View File

@@ -216,7 +216,7 @@ def unit_test(parser, args, unknown_args):
# Ensure clingo is available before switching to the # Ensure clingo is available before switching to the
# mock configuration used by unit tests # mock configuration used by unit tests
with spack.bootstrap.ensure_bootstrap_configuration(): with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise() spack.bootstrap.ensure_core_dependencies()
if pytest is None: if pytest is None:
spack.bootstrap.ensure_environment_dependencies() spack.bootstrap.ensure_environment_dependencies()
import pytest import pytest

View File

@@ -2,48 +2,35 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse import argparse
import io
from typing import List, Optional
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import visit_directory_tree
import spack.cmd import spack.cmd
import spack.environment as ev import spack.environment as ev
import spack.spec
import spack.store import spack.store
import spack.verify import spack.verify
import spack.verify_libraries
from spack.cmd.common import arguments
description = "verify spack installations on disk" description = "check that all spack packages are on disk as installed"
section = "admin" section = "admin"
level = "long" level = "long"
MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None
def setup_parser(subparser):
setup_parser.parser = subparser
def setup_parser(subparser: argparse.ArgumentParser): subparser.add_argument(
global MANIFEST_SUBPARSER
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")
MANIFEST_SUBPARSER = sp.add_parser(
"manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__
)
MANIFEST_SUBPARSER.add_argument(
"-l", "--local", action="store_true", help="verify only locally installed packages" "-l", "--local", action="store_true", help="verify only locally installed packages"
) )
MANIFEST_SUBPARSER.add_argument( subparser.add_argument(
"-j", "--json", action="store_true", help="ouptut json-formatted errors" "-j", "--json", action="store_true", help="ouptut json-formatted errors"
) )
MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages") subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
MANIFEST_SUBPARSER.add_argument( subparser.add_argument(
"specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify" "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
) )
manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group() type = subparser.add_mutually_exclusive_group()
manifest_sp_type.add_argument( type.add_argument(
"-s", "-s",
"--specs", "--specs",
action="store_const", action="store_const",
@@ -52,7 +39,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
default="specs", default="specs",
help="treat entries as specs (default)", help="treat entries as specs (default)",
) )
manifest_sp_type.add_argument( type.add_argument(
"-f", "-f",
"--files", "--files",
action="store_const", action="store_const",
@@ -62,67 +49,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
help="treat entries as absolute filenames\n\ncannot be used with '-a'", help="treat entries as absolute filenames\n\ncannot be used with '-a'",
) )
libraries_subparser = sp.add_parser(
"libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
)
arguments.add_common_arguments(libraries_subparser, ["constraint"])
def verify(parser, args): def verify(parser, args):
cmd = args.verify_command
if cmd == "libraries":
return verify_libraries(args)
elif cmd == "manifest":
return verify_manifest(args)
parser.error("invalid verify subcommand")
def verify_libraries(args):
"""verify that shared libraries of install packages can be located in rpaths (Linux only)"""
specs_from_db = [s for s in args.specs(installed=True) if not s.external]
tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")
errors = 0
for spec in specs_from_db:
try:
pkg = spec.package
except Exception:
tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
if error_msg is not None:
errors += 1
tty.error(error_msg)
if errors:
tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
return 1
def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
"""Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
[*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
)
visit_directory_tree(spec.prefix, visitor)
if not visitor.problems:
return None
output = io.StringIO()
visitor.write(output, indent=4, brief=True)
message = output.getvalue().rstrip()
return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}"
def verify_manifest(args):
"""verify that install directories have not been modified since installation"""
local = args.local local = args.local
if args.type == "files": if args.type == "files":
if args.all: if args.all:
MANIFEST_SUBPARSER.error("cannot use --all with --files") setup_parser.parser.print_help()
return 1
for file in args.specs_or_files: for file in args.specs_or_files:
results = spack.verify.check_file_manifest(file) results = spack.verify.check_file_manifest(file)
@@ -153,7 +87,8 @@ def verify_manifest(args):
env = ev.active_environment() env = ev.active_environment()
specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args)) specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
else: else:
MANIFEST_SUBPARSER.error("use --all or specify specs to verify") setup_parser.parser.print_help()
return 1
for spec in specs: for spec in specs:
tty.debug("Verifying package %s") tty.debug("Verifying package %s")

View File

@@ -220,7 +220,7 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
opt, i, answer = min(result.answers) opt, i, answer = min(result.answers)
name = spec.name name = spec.name
# TODO: Consolidate this code with similar code in solve.py # TODO: Consolidate this code with similar code in solve.py
if spack.repo.PATH.is_virtual(spec.name): if spec.virtual:
providers = [s.name for s in answer.values() if s.package.provides(name)] providers = [s.name for s in answer.values() if s.package.provides(name)]
name = providers[0] name = providers[0]

View File

@@ -32,10 +32,9 @@
import copy import copy
import functools import functools
import os import os
import os.path
import re import re
import sys import sys
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Tuple, Union from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
import jsonschema import jsonschema
@@ -43,6 +42,7 @@
import spack.error import spack.error
import spack.paths import spack.paths
import spack.platforms
import spack.schema import spack.schema
import spack.schema.bootstrap import spack.schema.bootstrap
import spack.schema.cdash import spack.schema.cdash
@@ -54,19 +54,17 @@
import spack.schema.develop import spack.schema.develop
import spack.schema.env import spack.schema.env
import spack.schema.env_vars import spack.schema.env_vars
import spack.schema.include
import spack.schema.merged
import spack.schema.mirrors import spack.schema.mirrors
import spack.schema.modules import spack.schema.modules
import spack.schema.packages import spack.schema.packages
import spack.schema.repos import spack.schema.repos
import spack.schema.upstreams import spack.schema.upstreams
import spack.schema.view import spack.schema.view
import spack.util.remote_file_cache as rfc_util
import spack.util.spack_yaml as syaml
from spack.util.cpus import cpus_available
from .enums import ConfigScopePriority # Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
import spack.util.web as web_util
from spack.util.cpus import cpus_available
#: Dict from section names -> schema for that section #: Dict from section names -> schema for that section
SECTION_SCHEMAS: Dict[str, Any] = { SECTION_SCHEMAS: Dict[str, Any] = {
@@ -74,7 +72,6 @@
"concretizer": spack.schema.concretizer.schema, "concretizer": spack.schema.concretizer.schema,
"definitions": spack.schema.definitions.schema, "definitions": spack.schema.definitions.schema,
"env_vars": spack.schema.env_vars.schema, "env_vars": spack.schema.env_vars.schema,
"include": spack.schema.include.schema,
"view": spack.schema.view.schema, "view": spack.schema.view.schema,
"develop": spack.schema.develop.schema, "develop": spack.schema.develop.schema,
"mirrors": spack.schema.mirrors.schema, "mirrors": spack.schema.mirrors.schema,
@@ -122,17 +119,6 @@
#: Type used for raw YAML configuration #: Type used for raw YAML configuration
YamlConfigDict = Dict[str, Any] YamlConfigDict = Dict[str, Any]
#: prefix for name of included configuration scopes
INCLUDE_SCOPE_PREFIX = "include"
#: safeguard for recursive includes -- maximum include depth
MAX_RECURSIVE_INCLUDES = 100
def _include_cache_location():
"""Location to cache included configuration files."""
return os.path.join(spack.paths.user_cache_path, "includes")
class ConfigScope: class ConfigScope:
def __init__(self, name: str) -> None: def __init__(self, name: str) -> None:
@@ -140,9 +126,6 @@ def __init__(self, name: str) -> None:
self.writable = False self.writable = False
self.sections = syaml.syaml_dict() self.sections = syaml.syaml_dict()
#: names of any included scopes
self.included_scopes: List[str] = []
def get_section_filename(self, section: str) -> str: def get_section_filename(self, section: str) -> str:
raise NotImplementedError raise NotImplementedError
@@ -425,18 +408,26 @@ def _method(self, *args, **kwargs):
return _method return _method
ScopeWithOptionalPriority = Union[ConfigScope, Tuple[int, ConfigScope]]
ScopeWithPriority = Tuple[int, ConfigScope]
class Configuration: class Configuration:
"""A hierarchical configuration, merging a number of scopes at different priorities.""" """A full Spack configuration, from a hierarchy of config files.
This class makes it easy to add a new scope on top of an existing one.
"""
# convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9 # convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
scopes: lang.PriorityOrderedMapping[str, ConfigScope] scopes: Dict[str, ConfigScope]
def __init__(self) -> None: def __init__(self, *scopes: ConfigScope) -> None:
self.scopes = lang.PriorityOrderedMapping() """Initialize a configuration with an initial list of scopes.
Args:
scopes: list of scopes to add to this
Configuration, ordered from lowest to highest precedence
"""
self.scopes = collections.OrderedDict()
for scope in scopes:
self.push_scope(scope)
self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list) self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
def ensure_unwrapped(self) -> "Configuration": def ensure_unwrapped(self) -> "Configuration":
@@ -444,64 +435,36 @@ def ensure_unwrapped(self) -> "Configuration":
return self return self
def highest(self) -> ConfigScope: def highest(self) -> ConfigScope:
"""Scope with the highest precedence""" """Scope with highest precedence"""
return next(self.scopes.reversed_values()) # type: ignore return next(reversed(self.scopes.values())) # type: ignore
@_config_mutator @_config_mutator
def push_scope( def ensure_scope_ordering(self):
self, scope: ConfigScope, priority: Optional[int] = None, _depth: int = 0 """Ensure that scope order matches documented precedent"""
) -> None: # FIXME: We also need to consider that custom configurations and other orderings
"""Adds a scope to the Configuration, at a given priority. # may not be preserved correctly
if "command_line" in self.scopes:
# TODO (when dropping python 3.6): self.scopes.move_to_end
self.scopes["command_line"] = self.remove_scope("command_line")
If a priority is not given, it is assumed to be the current highest priority. @_config_mutator
def push_scope(self, scope: ConfigScope) -> None:
"""Add a higher precedence scope to the Configuration."""
tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
self.scopes[scope.name] = scope
Args: @_config_mutator
scope: scope to be added def pop_scope(self) -> ConfigScope:
priority: priority of the scope """Remove the highest precedence scope and return it."""
""" name, scope = self.scopes.popitem(last=True) # type: ignore[call-arg]
tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2) tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
return scope
# TODO: As a follow on to #48784, change this to create a graph of the
# TODO: includes AND ensure properly sorted such that the order included
# TODO: at the highest level is reflected in the value of an option that
# TODO: is set in multiple included files.
# before pushing the scope itself, push any included scopes recursively, at same priority
includes = scope.get_section("include")
if includes:
include_paths = [included_path(data) for data in includes["include"]]
for path in reversed(include_paths):
included_scope = include_path_scope(path)
if not included_scope:
continue
if _depth + 1 > MAX_RECURSIVE_INCLUDES: # make sure we're not recursing endlessly
mark = path.path._start_mark if syaml.marked(path.path) else "" # type: ignore
raise RecursiveIncludeError(
f"Maximum include recursion exceeded in {path.path}", str(mark)
)
# record this inclusion so that remove_scope() can use it
scope.included_scopes.append(included_scope.name)
self.push_scope(included_scope, priority=priority, _depth=_depth + 1)
self.scopes.add(scope.name, value=scope, priority=priority)
@_config_mutator @_config_mutator
def remove_scope(self, scope_name: str) -> Optional[ConfigScope]: def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
"""Removes a scope by name, and returns it. If the scope does not exist, returns None.""" """Remove scope by name; has no effect when ``scope_name`` does not exist"""
try: scope = self.scopes.pop(scope_name, None)
scope = self.scopes.remove(scope_name) tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {str(scope)}", level=2)
except KeyError as e:
tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {e}", level=2)
return None
# transitively remove included scopes
for inc in scope.included_scopes:
assert inc in self.scopes, f"Included scope '{inc}' was never added to configuration!"
self.remove_scope(inc)
scope.included_scopes.clear() # clean up includes for bookkeeping
return scope return scope
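Under the new model a scope's position comes from an explicit priority rather than pure insertion order, and removing a scope also drops anything it pulled in through an `include:` section. A rough sketch of the intended call pattern; the `spack.enums` module path is inferred from the relative import above, and only the enum members visible in this diff are assumed:

```python
import spack.config as cfg
from spack.enums import ConfigScopePriority  # path assumed from "from .enums import ..."

config = cfg.Configuration()
config.push_scope(
    cfg.InternalConfigScope("_builtin", cfg.CONFIG_DEFAULTS), priority=ConfigScopePriority.BUILTIN
)
config.push_scope(
    cfg.DirectoryConfigScope("site", "/etc/spack"),  # example path
    priority=ConfigScopePriority.CONFIG_FILES,
)

# Scopes live in a PriorityOrderedMapping; removing "site" also removes any scopes it
# transitively included via an `include:` section (tracked in scope.included_scopes).
config.remove_scope("site")
```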
@property @property
@@ -510,13 +473,15 @@ def writable_scopes(self) -> Generator[ConfigScope, None, None]:
return (s for s in self.scopes.values() if s.writable) return (s for s in self.scopes.values() if s.writable)
def highest_precedence_scope(self) -> ConfigScope: def highest_precedence_scope(self) -> ConfigScope:
"""Writable scope with the highest precedence.""" """Writable scope with highest precedence."""
return next(s for s in self.scopes.reversed_values() if s.writable) return next(s for s in reversed(self.scopes.values()) if s.writable) # type: ignore
def highest_precedence_non_platform_scope(self) -> ConfigScope: def highest_precedence_non_platform_scope(self) -> ConfigScope:
"""Writable non-platform scope with the highest precedence""" """Writable non-platform scope with highest precedence"""
return next( return next(
s for s in self.scopes.reversed_values() if s.writable and not s.is_platform_dependent s
for s in reversed(self.scopes.values()) # type: ignore
if s.writable and not s.is_platform_dependent
) )
def matching_scopes(self, reg_expr) -> List[ConfigScope]: def matching_scopes(self, reg_expr) -> List[ConfigScope]:
@@ -783,7 +748,7 @@ def override(
""" """
if isinstance(path_or_scope, ConfigScope): if isinstance(path_or_scope, ConfigScope):
overrides = path_or_scope overrides = path_or_scope
CONFIG.push_scope(path_or_scope, priority=None) CONFIG.push_scope(path_or_scope)
else: else:
base_name = _OVERRIDES_BASE_NAME base_name = _OVERRIDES_BASE_NAME
# Ensure the new override gets a unique scope name # Ensure the new override gets a unique scope name
@@ -797,7 +762,7 @@ def override(
break break
overrides = InternalConfigScope(scope_name) overrides = InternalConfigScope(scope_name)
CONFIG.push_scope(overrides, priority=None) CONFIG.push_scope(overrides)
CONFIG.set(path_or_scope, value, scope=scope_name) CONFIG.set(path_or_scope, value, scope=scope_name)
try: try:
@@ -807,86 +772,13 @@ def override(
assert scope is overrides assert scope is overrides
def _add_platform_scope( def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
) -> None:
"""Add a platform-specific subdirectory for the current platform.""" """Add a platform-specific subdirectory for the current platform."""
import spack.platforms # circular dependency
platform = spack.platforms.host().name platform = spack.platforms.host().name
scope = DirectoryConfigScope( scope = DirectoryConfigScope(
f"{name}/{platform}", os.path.join(path, platform), writable=writable f"{name}/{platform}", os.path.join(path, platform), writable=writable
) )
cfg.push_scope(scope, priority=priority) cfg.push_scope(scope)
#: Class for the relevance of an optional path conditioned on a limited
#: python code that evaluates to a boolean and or explicit specification
#: as optional.
class IncludePath(NamedTuple):
path: str
when: str
sha256: str
optional: bool
def included_path(entry: Union[str, dict]) -> IncludePath:
"""Convert the included path entry into an IncludePath.
Args:
entry: include configuration entry
Returns: converted entry, where an empty ``when`` means the path is
not conditionally included
"""
if isinstance(entry, str):
return IncludePath(path=entry, sha256="", when="", optional=False)
path = entry["path"]
sha256 = entry.get("sha256", "")
when = entry.get("when", "")
optional = entry.get("optional", False)
return IncludePath(path=path, sha256=sha256, when=when, optional=optional)
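`included_path` normalizes both spellings of an include entry (bare string or mapping) into the same tuple. A small illustration with placeholder paths and digest:

```python
from spack.config import included_path

entry_as_string = "/etc/spack/extra-config"
entry_as_dict = {
    "path": "https://example.com/packages.yaml",
    "sha256": "0" * 64,            # placeholder digest
    "when": 'platform == "linux"',
    "optional": True,
}

print(included_path(entry_as_string))
# IncludePath(path='/etc/spack/extra-config', when='', sha256='', optional=False)
print(included_path(entry_as_dict))
# IncludePath(path='https://example.com/packages.yaml', when='platform == "linux"',
#             sha256='000...0', optional=True)
```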
def include_path_scope(include: IncludePath) -> Optional[ConfigScope]:
"""Instantiate an appropriate configuration scope for the given path.
Args:
include: optional include path
Returns: configuration scope
Raises:
ValueError: included path has an unsupported URL scheme, is required
but does not exist; configuration stage directory argument is missing
ConfigFileError: unable to access remote configuration file(s)
"""
# circular dependencies
import spack.spec
if (not include.when) or spack.spec.eval_conditional(include.when):
config_path = rfc_util.local_path(include.path, include.sha256, _include_cache_location)
if not config_path:
raise ConfigFileError(f"Unable to fetch remote configuration from {include.path}")
if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
config_name = f"{INCLUDE_SCOPE_PREFIX}:{os.path.basename(config_path)}"
tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
return DirectoryConfigScope(config_name, config_path)
if os.path.exists(config_path):
# files are assumed to be SingleFileScopes
config_name = f"{INCLUDE_SCOPE_PREFIX}:{config_path}"
tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
return SingleFileScope(config_name, config_path, spack.schema.merged.schema)
if not include.optional:
path = f" at ({config_path})" if config_path != include.path else ""
raise ValueError(f"Required path ({include.path}) does not exist{path}")
return None
def config_paths_from_entry_points() -> List[Tuple[str, str]]: def config_paths_from_entry_points() -> List[Tuple[str, str]]:
@@ -914,17 +806,18 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
return config_paths return config_paths
def create_incremental() -> Generator[Configuration, None, None]: def create() -> Configuration:
"""Singleton Configuration instance. """Singleton Configuration instance.
This constructs one instance associated with this module and returns This constructs one instance associated with this module and returns
it. It is bundled inside a function so that configuration can be it. It is bundled inside a function so that configuration can be
initialized lazily. initialized lazily.
""" """
cfg = Configuration()
# first do the builtin, hardcoded defaults # first do the builtin, hardcoded defaults
cfg = create_from( builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
(ConfigScopePriority.BUILTIN, InternalConfigScope("_builtin", CONFIG_DEFAULTS)) cfg.push_scope(builtin)
)
# Builtin paths to configuration files in Spack # Builtin paths to configuration files in Spack
configuration_paths = [ configuration_paths = [
@@ -954,29 +847,16 @@ def create_incremental() -> Generator[Configuration, None, None]:
# add each scope and its platform-specific directory # add each scope and its platform-specific directory
for name, path in configuration_paths: for name, path in configuration_paths:
cfg.push_scope(DirectoryConfigScope(name, path), priority=ConfigScopePriority.CONFIG_FILES) cfg.push_scope(DirectoryConfigScope(name, path))
# Each scope can have per-platform overrides in subdirectories
_add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)
# yield the config incrementally so that each config level's init code can get # Each scope can have per-platfom overrides in subdirectories
# data from the one below. This can be tricky, but it enables us to have a _add_platform_scope(cfg, name, path)
# single unified config system.
#
# TODO: think about whether we want to restrict what types of config can be used
# at each level. e.g., we may want to just more forcibly disallow remote
# config (which uses ssl and other config options) for some of the scopes,
# to make the bootstrap issues more explicit, even if allowing config scope
# init to reference lower scopes is more flexible.
yield cfg
return cfg
def create() -> Configuration:
"""Create a configuration using create_incremental(), return the last yielded result."""
return list(create_incremental())[-1]
#: This is the singleton configuration instance for Spack. #: This is the singleton configuration instance for Spack.
CONFIG: Configuration = lang.Singleton(create_incremental) # type: ignore CONFIG: Configuration = lang.Singleton(create) # type: ignore
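The comment in the hunk above explains why `create_incremental()` is a generator: each `yield` hands back a partially built configuration so that the init code of later scopes can already read the scopes below. A self-contained sketch of that pattern (class, scope names, and priorities here are illustrative, not Spack's actual ones):

```python
from typing import Generator, List, Tuple

class Config:
    """Toy stand-in for Spack's Configuration object."""

    def __init__(self) -> None:
        self.scopes: List[Tuple[int, str]] = []

    def push_scope(self, name: str, priority: int = 0) -> None:
        self.scopes.append((priority, name))

def create_incremental() -> Generator[Config, None, None]:
    cfg = Config()
    cfg.push_scope("_builtin", priority=0)
    yield cfg  # defaults are already usable by init code of later scopes
    cfg.push_scope("system", priority=1)
    cfg.push_scope("user", priority=2)
    yield cfg  # fully initialized configuration

def create() -> Config:
    # the last yielded object is the fully built configuration
    return list(create_incremental())[-1]

print(create().scopes)
```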
def add_from_file(filename: str, scope: Optional[str] = None) -> None: def add_from_file(filename: str, scope: Optional[str] = None) -> None:
@@ -1072,11 +952,10 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
Accepts the path syntax described in ``get()``. Accepts the path syntax described in ``get()``.
""" """
result = CONFIG.set(path, value, scope) return CONFIG.set(path, value, scope)
return result
def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]: def scopes() -> Dict[str, ConfigScope]:
"""Convenience function to get list of configuration scopes.""" """Convenience function to get list of configuration scopes."""
return CONFIG.scopes return CONFIG.scopes
@@ -1530,7 +1409,7 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
@contextlib.contextmanager @contextlib.contextmanager
def use_configuration( def use_configuration(
*scopes_or_paths: Union[ScopeWithOptionalPriority, str] *scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]: ) -> Generator[Configuration, None, None]:
"""Use the configuration scopes passed as arguments within the context manager. """Use the configuration scopes passed as arguments within the context manager.
@@ -1545,7 +1424,7 @@ def use_configuration(
global CONFIG global CONFIG
# Normalize input and construct a Configuration object # Normalize input and construct a Configuration object
configuration = create_from(*scopes_or_paths) configuration = _config_from(scopes_or_paths)
CONFIG.clear_caches(), configuration.clear_caches() CONFIG.clear_caches(), configuration.clear_caches()
saved_config, CONFIG = CONFIG, configuration saved_config, CONFIG = CONFIG, configuration
@@ -1556,44 +1435,115 @@ def use_configuration(
CONFIG = saved_config CONFIG = saved_config
def _normalize_input(entry: Union[ScopeWithOptionalPriority, str]) -> ScopeWithPriority:
if isinstance(entry, tuple):
return entry
default_priority = ConfigScopePriority.CONFIG_FILES
if isinstance(entry, ConfigScope):
return default_priority, entry
# Otherwise we need to construct it
path = os.path.normpath(entry)
assert os.path.isdir(path), f'"{path}" must be a directory'
name = os.path.basename(path)
return default_priority, DirectoryConfigScope(name, path)
@lang.memoized @lang.memoized
def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Configuration: def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
"""Creates a configuration object from the scopes passed in input. scopes = []
for scope_or_path in scopes_or_paths:
# If we have a config scope we are already done
if isinstance(scope_or_path, ConfigScope):
scopes.append(scope_or_path)
continue
# Otherwise we need to construct it
path = os.path.normpath(scope_or_path)
assert os.path.isdir(path), f'"{path}" must be a directory'
name = os.path.basename(path)
scopes.append(DirectoryConfigScope(name, path))
configuration = Configuration(*scopes)
return configuration
def raw_github_gitlab_url(url: str) -> str:
"""Transform a github URL to the raw form to avoid undesirable html.
Args: Args:
*scopes_or_paths: either a tuple of (priority, ConfigScope), or a ConfigScope, or a string url: url to be converted to raw form
If priority is not given, it is assumed to be ConfigScopePriority.CONFIG_FILES. If a
string is given, a DirectoryConfigScope is created from it.
Examples: Returns:
Raw github/gitlab url or the original url
>>> builtin_scope = InternalConfigScope("_builtin", {"config": {"build_jobs": 1}})
>>> cl_scope = InternalConfigScope("command_line", {"config": {"build_jobs": 10}})
>>> cfg = create_from(
... (ConfigScopePriority.COMMAND_LINE, cl_scope),
... (ConfigScopePriority.BUILTIN, builtin_scope)
... )
""" """
scopes_with_priority = [_normalize_input(x) for x in scopes_or_paths] # Note we rely on GitHub to redirect the 'raw' URL returned here to the
result = Configuration() # actual URL under https://raw.githubusercontent.com/ with '/blob'
for priority, scope in scopes_with_priority: # removed and or, '/blame' if needed.
result.push_scope(scope, priority=priority) if "github" in url or "gitlab" in url:
return result return url.replace("/blob/", "/raw/")
return url
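A quick illustration of the transformation implemented by `raw_github_gitlab_url` above (URLs are made up):

```python
# assumes raw_github_gitlab_url from the hunk above is in scope
print(raw_github_gitlab_url("https://github.com/org/repo/blob/main/packages.yaml"))
# -> https://github.com/org/repo/raw/main/packages.yaml

# URLs that do not mention github/gitlab are returned unchanged
print(raw_github_gitlab_url("https://example.com/blob/of/data.yaml"))
# -> https://example.com/blob/of/data.yaml
```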
def collect_urls(base_url: str) -> list:
"""Return a list of configuration URLs.
Arguments:
base_url: URL for a configuration (yaml) file or a directory
containing yaml file(s)
Returns:
List of configuration file(s) or empty list if none
"""
if not base_url:
return []
extension = ".yaml"
if base_url.endswith(extension):
return [base_url]
# Collect configuration URLs if the base_url is a "directory".
_, links = web_util.spider(base_url, 0)
return [link for link in links if link.endswith(extension)]
def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
"""Retrieve configuration file(s) at the specified URL.
Arguments:
url: URL for a configuration (yaml) file or a directory containing
yaml file(s)
dest_dir: destination directory
skip_existing: Skip files that already exist in dest_dir if
``True``; otherwise, replace those files
Returns:
Path to the corresponding file if URL is or contains a
single file and it is the only file in the destination directory or
the root (dest_dir) directory if multiple configuration files exist
or are retrieved.
"""
def _fetch_file(url):
raw = raw_github_gitlab_url(url)
tty.debug(f"Reading config from url {raw}")
return web_util.fetch_url_text(raw, dest_dir=dest_dir)
if not url:
raise ConfigFileError("Cannot retrieve configuration without a URL")
# Return the local path to the cached configuration file OR to the
# directory containing the cached configuration files.
config_links = collect_urls(url)
existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []
paths = []
for config_url in config_links:
basename = os.path.basename(config_url)
if skip_existing and basename in existing_files:
tty.warn(
f"Will not fetch configuration from {config_url} since a "
f"version already exists in {dest_dir}"
)
path = os.path.join(dest_dir, basename)
else:
path = _fetch_file(config_url)
if path:
paths.append(path)
if paths:
return dest_dir if len(paths) > 1 else paths[0]
raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")
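A hedged usage sketch for `fetch_remote_configs` (the URL and staging directory are hypothetical; the function is the one defined above):

```python
# assumes fetch_remote_configs from above is in scope
staged = fetch_remote_configs(
    "https://example.com/spack/configs",  # directory-like URL spidered for *.yaml files
    "/tmp/spack-config-stage",
    skip_existing=True,
)
# `staged` is the single file path if exactly one yaml was retrieved,
# otherwise the staging directory holding all retrieved files
```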
def get_mark_from_yaml_data(obj): def get_mark_from_yaml_data(obj):
@@ -1722,7 +1672,3 @@ def get_path(path, data):
# give up and return None if nothing worked # give up and return None if nothing worked
return None return None
class RecursiveIncludeError(spack.error.SpackError):
"""Too many levels of recursive includes."""
View File
@@ -57,7 +57,7 @@ def validate(configuration_file):
# Set the default value of the concretization strategy to unify and # Set the default value of the concretization strategy to unify and
# warn if the user explicitly set another value # warn if the user explicitly set another value
env_dict.setdefault("concretizer", {"unify": True}) env_dict.setdefault("concretizer", {"unify": True})
if env_dict["concretizer"]["unify"] is not True: if not env_dict["concretizer"]["unify"] is True:
warnings.warn( warnings.warn(
'"concretizer:unify" is not set to "true", which means the ' '"concretizer:unify" is not set to "true", which means the '
"generated image may contain different variants of the same " "generated image may contain different variants of the same "
View File
@@ -41,8 +41,6 @@
Union, Union,
) )
import spack.repo
try: try:
import uuid import uuid
@@ -1558,12 +1556,7 @@ def _query(
# If we did fine something, the query spec can't be virtual b/c we matched an actual # If we did fine something, the query spec can't be virtual b/c we matched an actual
# package installation, so skip the virtual check entirely. If we *didn't* find anything, # package installation, so skip the virtual check entirely. If we *didn't* find anything,
# check all the deferred specs *if* the query is virtual. # check all the deferred specs *if* the query is virtual.
if ( if not results and query_spec is not None and deferred and query_spec.virtual:
not results
and query_spec is not None
and deferred
and spack.repo.PATH.is_virtual(query_spec.name)
):
results = [spec for spec in deferred if spec.satisfies(query_spec)] results = [spec for spec in deferred if spec.satisfies(query_spec)]
return results return results
View File
@@ -310,7 +310,7 @@ def find_windows_kit_roots() -> List[str]:
@staticmethod @staticmethod
def find_windows_kit_bin_paths( def find_windows_kit_bin_paths(
kit_base: Union[Optional[str], Optional[list]] = None, kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]: ) -> List[str]:
"""Returns Windows kit bin directory per version""" """Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
@@ -325,7 +325,7 @@ def find_windows_kit_bin_paths(
@staticmethod @staticmethod
def find_windows_kit_lib_paths( def find_windows_kit_lib_paths(
kit_base: Union[Optional[str], Optional[list]] = None, kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]: ) -> List[str]:
"""Returns Windows kit lib directory per version""" """Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
View File
@@ -243,7 +243,7 @@ def prefix_from_path(self, *, path: str) -> str:
raise NotImplementedError("must be implemented by derived classes") raise NotImplementedError("must be implemented by derived classes")
def detect_specs( def detect_specs(
self, *, pkg: Type["spack.package_base.PackageBase"], paths: Iterable[str] self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
) -> List["spack.spec.Spec"]: ) -> List["spack.spec.Spec"]:
"""Given a list of files matching the search patterns, returns a list of detected specs. """Given a list of files matching the search patterns, returns a list of detected specs.
@@ -259,8 +259,6 @@ def detect_specs(
) )
return [] return []
from spack.repo import PATH as repo_path
result = [] result = []
for candidate_path, items_in_prefix in _group_by_prefix( for candidate_path, items_in_prefix in _group_by_prefix(
llnl.util.lang.dedupe(paths) llnl.util.lang.dedupe(paths)
@@ -307,10 +305,7 @@ def detect_specs(
resolved_specs[spec] = candidate_path resolved_specs[spec] = candidate_path
try: try:
# Validate the spec calling a package specific method spec.validate_detection()
pkg_cls = repo_path.get_pkg_class(spec.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(spec, spec.extra_attributes)
except Exception as e: except Exception as e:
msg = ( msg = (
f'"{spec}" has been detected on the system but will ' f'"{spec}" has been detected on the system but will '
View File
@@ -568,7 +568,7 @@ def patch(
""" """
def _execute_patch( def _execute_patch(
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency], pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency]
) -> None: ) -> None:
pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep
View File
@@ -25,7 +25,7 @@
} }
def _check_concrete(spec: "spack.spec.Spec") -> None: def _check_concrete(spec):
"""If the spec is not concrete, raise a ValueError""" """If the spec is not concrete, raise a ValueError"""
if not spec.concrete: if not spec.concrete:
raise ValueError("Specs passed to a DirectoryLayout must be concrete!") raise ValueError("Specs passed to a DirectoryLayout must be concrete!")
@@ -51,7 +51,7 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
spec = _get_spec(prefix) spec = _get_spec(prefix)
if spec: if spec:
spec.set_prefix(prefix) spec.prefix = prefix
specs.append(spec) specs.append(spec)
continue continue
@@ -84,7 +84,7 @@ class DirectoryLayout:
def __init__( def __init__(
self, self,
root: str, root,
*, *,
projections: Optional[Dict[str, str]] = None, projections: Optional[Dict[str, str]] = None,
hash_length: Optional[int] = None, hash_length: Optional[int] = None,
@@ -120,17 +120,17 @@ def __init__(
self.manifest_file_name = "install_manifest.json" self.manifest_file_name = "install_manifest.json"
@property @property
def hidden_file_regexes(self) -> Tuple[str]: def hidden_file_regexes(self):
return ("^{0}$".format(re.escape(self.metadata_dir)),) return ("^{0}$".format(re.escape(self.metadata_dir)),)
def relative_path_for_spec(self, spec: "spack.spec.Spec") -> str: def relative_path_for_spec(self, spec):
_check_concrete(spec) _check_concrete(spec)
projection = spack.projections.get_projection(self.projections, spec) projection = spack.projections.get_projection(self.projections, spec)
path = spec.format_path(projection) path = spec.format_path(projection)
return str(Path(path)) return str(Path(path))
def write_spec(self, spec: "spack.spec.Spec", path: str) -> None: def write_spec(self, spec, path):
"""Write a spec out to a file.""" """Write a spec out to a file."""
_check_concrete(spec) _check_concrete(spec)
with open(path, "w", encoding="utf-8") as f: with open(path, "w", encoding="utf-8") as f:
@@ -138,7 +138,7 @@ def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
# the full provenance, so it's availabe if we want it later # the full provenance, so it's availabe if we want it later
spec.to_json(f, hash=ht.dag_hash) spec.to_json(f, hash=ht.dag_hash)
def write_host_environment(self, spec: "spack.spec.Spec") -> None: def write_host_environment(self, spec):
"""The host environment is a json file with os, kernel, and spack """The host environment is a json file with os, kernel, and spack
versioning. We use it in the case that an analysis later needs to versioning. We use it in the case that an analysis later needs to
easily access this information. easily access this information.
@@ -148,7 +148,7 @@ def write_host_environment(self, spec: "spack.spec.Spec") -> None:
with open(env_file, "w", encoding="utf-8") as fd: with open(env_file, "w", encoding="utf-8") as fd:
sjson.dump(environ, fd) sjson.dump(environ, fd)
def read_spec(self, path: str) -> "spack.spec.Spec": def read_spec(self, path):
"""Read the contents of a file and parse them as a spec""" """Read the contents of a file and parse them as a spec"""
try: try:
with open(path, encoding="utf-8") as f: with open(path, encoding="utf-8") as f:
@@ -159,28 +159,26 @@ def read_spec(self, path: str) -> "spack.spec.Spec":
# Too late for conversion; spec_file_path() already called. # Too late for conversion; spec_file_path() already called.
spec = spack.spec.Spec.from_yaml(f) spec = spack.spec.Spec.from_yaml(f)
else: else:
raise SpecReadError(f"Did not recognize spec file extension: {extension}") raise SpecReadError(
"Did not recognize spec file extension:" " {0}".format(extension)
)
except Exception as e: except Exception as e:
if spack.config.get("config:debug"): if spack.config.get("config:debug"):
raise raise
raise SpecReadError(f"Unable to read file: {path}", f"Cause: {e}") raise SpecReadError("Unable to read file: %s" % path, "Cause: " + str(e))
# Specs read from actual installations are always concrete # Specs read from actual installations are always concrete
spec._mark_concrete() spec._mark_concrete()
return spec return spec
def spec_file_path(self, spec: "spack.spec.Spec") -> str: def spec_file_path(self, spec):
"""Gets full path to spec file""" """Gets full path to spec file"""
_check_concrete(spec) _check_concrete(spec)
yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml) yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name) json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
return yaml_path if os.path.exists(yaml_path) else json_path return yaml_path if os.path.exists(yaml_path) else json_path
def deprecated_file_path( def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
self,
deprecated_spec: "spack.spec.Spec",
deprecator_spec: Optional["spack.spec.Spec"] = None,
) -> str:
"""Gets full path to spec file for deprecated spec """Gets full path to spec file for deprecated spec
If the deprecator_spec is provided, use that. Otherwise, assume If the deprecator_spec is provided, use that. Otherwise, assume
@@ -214,16 +212,16 @@ def deprecated_file_path(
return yaml_path if os.path.exists(yaml_path) else json_path return yaml_path if os.path.exists(yaml_path) else json_path
def metadata_path(self, spec: "spack.spec.Spec") -> str: def metadata_path(self, spec):
return os.path.join(spec.prefix, self.metadata_dir) return os.path.join(spec.prefix, self.metadata_dir)
def env_metadata_path(self, spec: "spack.spec.Spec") -> str: def env_metadata_path(self, spec):
return os.path.join(self.metadata_path(spec), "install_environment.json") return os.path.join(self.metadata_path(spec), "install_environment.json")
def build_packages_path(self, spec: "spack.spec.Spec") -> str: def build_packages_path(self, spec):
return os.path.join(self.metadata_path(spec), self.packages_dir) return os.path.join(self.metadata_path(spec), self.packages_dir)
def create_install_directory(self, spec: "spack.spec.Spec") -> None: def create_install_directory(self, spec):
_check_concrete(spec) _check_concrete(spec)
# Create install directory with properly configured permissions # Create install directory with properly configured permissions
@@ -241,7 +239,7 @@ def create_install_directory(self, spec: "spack.spec.Spec") -> None:
self.write_spec(spec, self.spec_file_path(spec)) self.write_spec(spec, self.spec_file_path(spec))
def ensure_installed(self, spec: "spack.spec.Spec") -> None: def ensure_installed(self, spec):
""" """
Throws InconsistentInstallDirectoryError if: Throws InconsistentInstallDirectoryError if:
1. spec prefix does not exist 1. spec prefix does not exist
@@ -268,7 +266,7 @@ def ensure_installed(self, spec: "spack.spec.Spec") -> None:
"Spec file in %s does not match hash!" % spec_file_path "Spec file in %s does not match hash!" % spec_file_path
) )
def path_for_spec(self, spec: "spack.spec.Spec") -> str: def path_for_spec(self, spec):
"""Return absolute path from the root to a directory for the spec.""" """Return absolute path from the root to a directory for the spec."""
_check_concrete(spec) _check_concrete(spec)
@@ -279,13 +277,23 @@ def path_for_spec(self, spec: "spack.spec.Spec") -> str:
assert not path.startswith(self.root) assert not path.startswith(self.root)
return os.path.join(self.root, path) return os.path.join(self.root, path)
def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = False) -> None: def remove_install_directory(self, spec, deprecated=False):
"""Removes a prefix and any empty parent directories from the root. """Removes a prefix and any empty parent directories from the root.
Raised RemoveFailedError if something goes wrong. Raised RemoveFailedError if something goes wrong.
""" """
path = self.path_for_spec(spec) path = self.path_for_spec(spec)
assert path.startswith(self.root) assert path.startswith(self.root)
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
if sys.platform == "win32":
kwargs = {
"ignore_errors": False,
"onerror": fs.readonly_file_handler(ignore_errors=False),
}
else:
kwargs = {} # the default value for ignore_errors is false
if deprecated: if deprecated:
if os.path.exists(path): if os.path.exists(path):
try: try:
@@ -296,16 +304,7 @@ def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = F
raise RemoveFailedError(spec, path, e) from e raise RemoveFailedError(spec, path, e) from e
elif os.path.exists(path): elif os.path.exists(path):
try: try:
if sys.platform == "win32": shutil.rmtree(path, **kwargs)
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
shutil.rmtree(
path,
ignore_errors=False,
onerror=fs.readonly_file_handler(ignore_errors=False),
)
else:
shutil.rmtree(path)
except OSError as e: except OSError as e:
raise RemoveFailedError(spec, path, e) from e raise RemoveFailedError(spec, path, e) from e
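The Windows branch above cannot delete read-only files with a plain `shutil.rmtree`, so it installs an error handler (`fs.readonly_file_handler`) that fixes permissions and retries. The standard-library idiom such a handler presumably builds on looks like this (generic sketch, not Spack's implementation; the path in the commented call is hypothetical):

```python
import os
import shutil
import stat

def _clear_readonly_and_retry(func, path, exc_info):
    # rmtree on Windows fails on read-only files: make the path writable and retry
    os.chmod(path, stat.S_IWRITE)
    func(path)

# shutil.rmtree(prefix_path, ignore_errors=False, onerror=_clear_readonly_and_retry)
```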
View File
@@ -12,13 +12,3 @@ class InstallRecordStatus(enum.Flag):
DEPRECATED = enum.auto() DEPRECATED = enum.auto()
MISSING = enum.auto() MISSING = enum.auto()
ANY = INSTALLED | DEPRECATED | MISSING ANY = INSTALLED | DEPRECATED | MISSING
class ConfigScopePriority(enum.IntEnum):
"""Priorities of the different kind of config scopes used by Spack"""
BUILTIN = 0
CONFIG_FILES = 1
CUSTOM = 2
ENVIRONMENT = 3
COMMAND_LINE = 4
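Because `ConfigScopePriority` is an `IntEnum`, its members compare and sort like integers, which is what keeps scope stacks ordered by priority. A tiny illustration (assuming the enum above):

```python
assert ConfigScopePriority.COMMAND_LINE > ConfigScopePriority.ENVIRONMENT > ConfigScopePriority.BUILTIN
assert sorted([ConfigScopePriority.CUSTOM, ConfigScopePriority.BUILTIN])[0] is ConfigScopePriority.BUILTIN
```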
View File
@@ -10,6 +10,8 @@
import re import re
import shutil import shutil
import stat import stat
import urllib.parse
import urllib.request
import warnings import warnings
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union
@@ -30,6 +32,7 @@
import spack.paths import spack.paths
import spack.repo import spack.repo
import spack.schema.env import spack.schema.env
import spack.schema.merged
import spack.spec import spack.spec
import spack.spec_list import spack.spec_list
import spack.store import spack.store
@@ -40,6 +43,7 @@
import spack.util.path import spack.util.path
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
import spack.util.url
from spack import traverse from spack import traverse
from spack.installer import PackageInstaller from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY from spack.schema.env import TOP_LEVEL_KEY
@@ -47,8 +51,6 @@
from spack.spec_list import SpecList from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables from spack.util.path import substitute_path_variables
from ..enums import ConfigScopePriority
SpecPair = spack.concretize.SpecPair SpecPair = spack.concretize.SpecPair
#: environment variable used to indicate the active environment #: environment variable used to indicate the active environment
@@ -385,7 +387,6 @@ def create_in_dir(
# dev paths in this environment to refer to their original # dev paths in this environment to refer to their original
# locations. # locations.
_rewrite_relative_dev_paths_on_relocation(env, init_file_dir) _rewrite_relative_dev_paths_on_relocation(env, init_file_dir)
_rewrite_relative_repos_paths_on_relocation(env, init_file_dir)
return env return env
@@ -402,8 +403,8 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
dev_path = substitute_path_variables(entry["path"]) dev_path = substitute_path_variables(entry["path"])
expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir) expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)
# Skip if the substituted and expanded path is the same (e.g. when absolute) # Skip if the expanded path is the same (e.g. when absolute)
if entry["path"] == expanded_path: if dev_path == expanded_path:
continue continue
tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path)) tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))
@@ -418,34 +419,6 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
env._re_read() env._re_read()
def _rewrite_relative_repos_paths_on_relocation(env, init_file_dir):
"""When initializing the environment from a manifest file and we plan
to store the environment in a different directory, we have to rewrite
relative repo paths to absolute ones and expand environment variables."""
with env:
repos_specs = spack.config.get("repos", default={}, scope=env.scope_name)
if not repos_specs:
return
for i, entry in enumerate(repos_specs):
repo_path = substitute_path_variables(entry)
expanded_path = spack.util.path.canonicalize_path(repo_path, default_wd=init_file_dir)
# Skip if the substituted and expanded path is the same (e.g. when absolute)
if entry == expanded_path:
continue
tty.debug("Expanding repo path for {0} to {1}".format(entry, expanded_path))
repos_specs[i] = expanded_path
spack.config.set("repos", repos_specs, scope=env.scope_name)
env.repos_specs = None
# If we changed the environment's spack.yaml scope, that will not be reflected
# in the manifest that we read
env._re_read()
def environment_dir_from_name(name: str, exists_ok: bool = True) -> str: def environment_dir_from_name(name: str, exists_ok: bool = True) -> str:
"""Returns the directory associated with a named environment. """Returns the directory associated with a named environment.
@@ -573,6 +546,13 @@ def _write_yaml(data, str_or_file):
syaml.dump_config(data, str_or_file, default_flow_style=False) syaml.dump_config(data, str_or_file, default_flow_style=False)
def _eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
valid_variables = spack.spec.get_host_environment()
valid_variables.update({"re": re, "env": os.environ})
return eval(string, valid_variables)
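`_eval_conditional` evaluates `when:` strings against a restricted namespace: host facts from `get_host_environment()` plus `re` and the process environment. A self-contained sketch of that pattern (the `platform` value is illustrative; only `re` and `env` are taken from the code above):

```python
import os
import re

def eval_conditional(expr: str) -> bool:
    # restricted namespace: host facts (illustrative here) plus `re` and the process environment
    namespace = {"platform": "linux", "re": re, "env": os.environ}
    return bool(eval(expr, namespace))

print(eval_conditional('platform == "linux" and env.get("SPACK_STACK", "none") != "none"'))
```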
def _is_dev_spec_and_has_changed(spec): def _is_dev_spec_and_has_changed(spec):
"""Check if the passed spec is a dev build and whether it has changed since the """Check if the passed spec is a dev build and whether it has changed since the
last installation""" last installation"""
@@ -1005,7 +985,7 @@ def _process_definition(self, entry):
"""Process a single spec definition item.""" """Process a single spec definition item."""
when_string = entry.get("when") when_string = entry.get("when")
if when_string is not None: if when_string is not None:
when = spack.spec.eval_conditional(when_string) when = _eval_conditional(when_string)
assert len([x for x in entry if x != "when"]) == 1 assert len([x for x in entry if x != "when"]) == 1
else: else:
when = True when = True
@@ -1550,6 +1530,9 @@ def _get_specs_to_concretize(
return new_user_specs, kept_user_specs, specs_to_concretize return new_user_specs, kept_user_specs, specs_to_concretize
def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]: def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
# Avoid cyclic dependency
import spack.solver.asp
# Exit early if the set of concretized specs is the set of user specs # Exit early if the set of concretized specs is the set of user specs
new_user_specs, _, specs_to_concretize = self._get_specs_to_concretize() new_user_specs, _, specs_to_concretize = self._get_specs_to_concretize()
if not new_user_specs: if not new_user_specs:
@@ -2409,8 +2392,6 @@ def invalidate_repository_cache(self):
def __enter__(self): def __enter__(self):
self._previous_active = _active_environment self._previous_active = _active_environment
if self._previous_active:
deactivate()
activate(self) activate(self)
return self return self
@@ -2660,23 +2641,20 @@ def _ensure_env_dir():
# error handling for bad manifests is handled on other code paths # error handling for bad manifests is handled on other code paths
return return
# TODO: make this recursive
includes = manifest[TOP_LEVEL_KEY].get("include", []) includes = manifest[TOP_LEVEL_KEY].get("include", [])
for include in includes: for include in includes:
included_path = spack.config.included_path(include) if os.path.isabs(include):
path = included_path.path
if os.path.isabs(path):
continue continue
abspath = pathlib.Path(os.path.normpath(environment_dir / path)) abspath = pathlib.Path(os.path.normpath(environment_dir / include))
common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath])) common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
if common_path != environment_dir: if common_path != environment_dir:
tty.debug(f"Will not copy relative include file from outside environment: {path}") tty.debug(f"Will not copy relative include from outside environment: {include}")
continue continue
orig_abspath = os.path.normpath(envfile.parent / path) orig_abspath = os.path.normpath(envfile.parent / include)
if not os.path.exists(orig_abspath): if not os.path.exists(orig_abspath):
tty.warn(f"Included file does not exist; will not copy: '{path}'") tty.warn(f"Included file does not exist; will not copy: '{include}'")
continue continue
fs.touchp(abspath) fs.touchp(abspath)
@@ -2899,7 +2877,7 @@ def extract_name(_item):
continue continue
condition_str = item.get("when", "True") condition_str = item.get("when", "True")
if not spack.spec.eval_conditional(condition_str): if not _eval_conditional(condition_str):
continue continue
yield idx, item yield idx, item
@@ -2960,20 +2938,127 @@ def __iter__(self):
def __str__(self): def __str__(self):
return str(self.manifest_file) return str(self.manifest_file)
@property
def included_config_scopes(self) -> List[spack.config.ConfigScope]:
"""List of included configuration scopes from the manifest.
Scopes are listed in the YAML file in order from highest to
lowest precedence, so configuration from earlier scope will take
precedence over later ones.
This routine returns them in the order they should be pushed onto
the internal scope stack (so, in reverse, from lowest to highest).
Returns: Configuration scopes associated with the environment manifest
Raises:
SpackEnvironmentError: if the manifest includes a remote file but
no configuration stage directory has been identified
"""
scopes: List[spack.config.ConfigScope] = []
# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = self[TOP_LEVEL_KEY].get("include", [])
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
config_path = substitute_path_variables(config_path)
include_url = urllib.parse.urlparse(config_path)
# If scheme is not valid, config_path is not a url
# of a type Spack is generally aware
if spack.util.url.validate_scheme(include_url.scheme):
# Transform file:// URLs to direct includes.
if include_url.scheme == "file":
config_path = urllib.request.url2pathname(include_url.path)
# Any other URL should be fetched.
elif include_url.scheme in ("http", "https", "ftp"):
# Stage any remote configuration file(s)
staged_configs = (
os.listdir(self.config_stage_dir)
if os.path.exists(self.config_stage_dir)
else []
)
remote_path = urllib.request.url2pathname(include_url.path)
basename = os.path.basename(remote_path)
if basename in staged_configs:
# Do NOT re-stage configuration files over existing
# ones with the same name since there is a risk of
# losing changes (e.g., from 'spack config update').
tty.warn(
"Will not re-stage configuration from {0} to avoid "
"losing changes to the already staged file of the "
"same name.".format(remote_path)
)
# Recognize the configuration stage directory
# is flattened to ensure a single copy of each
# configuration file.
config_path = self.config_stage_dir
if basename.endswith(".yaml"):
config_path = os.path.join(config_path, basename)
else:
staged_path = spack.config.fetch_remote_configs(
config_path, str(self.config_stage_dir), skip_existing=True
)
if not staged_path:
raise SpackEnvironmentError(
"Unable to fetch remote configuration {0}".format(config_path)
)
config_path = staged_path
elif include_url.scheme:
raise ValueError(
f"Unsupported URL scheme ({include_url.scheme}) for "
f"environment include: {config_path}"
)
# treat relative paths as relative to the environment
if not os.path.isabs(config_path):
config_path = os.path.join(self.manifest_dir, config_path)
config_path = os.path.normpath(os.path.realpath(config_path))
if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
config_name = f"env:{self.name}:{os.path.basename(config_path)}"
tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
elif os.path.exists(config_path):
# files are assumed to be SingleFileScopes
config_name = f"env:{self.name}:{config_path}"
tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
scopes.append(
spack.config.SingleFileScope(
config_name, config_path, spack.schema.merged.schema
)
)
else:
missing.append(config_path)
continue
if missing:
msg = "Detected {0} missing include path(s):".format(len(missing))
msg += "\n {0}".format("\n ".join(missing))
raise spack.config.ConfigFileError(msg)
return scopes
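The removed property above dispatches on the URL scheme of each include: `file://` URLs become direct local includes, `http(s)`/`ftp` URLs are staged, and bare paths are resolved relative to the environment. A minimal sketch of just the `file://` branch (the helper name is made up):

```python
import urllib.parse
import urllib.request

def file_url_to_path(config_path: str) -> str:
    # hypothetical helper mirroring only the file:// branch above
    url = urllib.parse.urlparse(config_path)
    if url.scheme == "file":
        return urllib.request.url2pathname(url.path)
    return config_path

print(file_url_to_path("file:///etc/spack/packages.yaml"))  # /etc/spack/packages.yaml
```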
@property @property
def env_config_scopes(self) -> List[spack.config.ConfigScope]: def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""A list of all configuration scopes for the environment manifest. On the first call this """A list of all configuration scopes for the environment manifest. On the first call this
instantiates all the scopes, on subsequent calls it returns the cached list.""" instantiates all the scopes, on subsequent calls it returns the cached list."""
if self._config_scopes is not None: if self._config_scopes is not None:
return self._config_scopes return self._config_scopes
scopes: List[spack.config.ConfigScope] = [ scopes: List[spack.config.ConfigScope] = [
*self.included_config_scopes,
spack.config.SingleFileScope( spack.config.SingleFileScope(
self.scope_name, self.scope_name,
str(self.manifest_file), str(self.manifest_file),
spack.schema.env.schema, spack.schema.env.schema,
yaml_path=[TOP_LEVEL_KEY], yaml_path=[TOP_LEVEL_KEY],
) ),
] ]
ensure_no_disallowed_env_config_mods(scopes) ensure_no_disallowed_env_config_mods(scopes)
self._config_scopes = scopes self._config_scopes = scopes
@@ -2982,12 +3067,14 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
def prepare_config_scope(self) -> None: def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path.""" """Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes: for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT) spack.config.CONFIG.push_scope(scope)
spack.config.CONFIG.ensure_scope_ordering()
def deactivate_config_scope(self) -> None: def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path.""" """Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes: for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name) spack.config.CONFIG.remove_scope(scope.name)
spack.config.CONFIG.ensure_scope_ordering()
@contextlib.contextmanager @contextlib.contextmanager
def use_config(self): def use_config(self):
View File
@@ -9,8 +9,7 @@
import shutil import shutil
import stat import stat
import sys import sys
import tempfile from typing import Callable, Dict, Optional
from typing import Callable, Dict, List, Optional
from typing_extensions import Literal from typing_extensions import Literal
@@ -78,7 +77,7 @@ def view_copy(
# Order of this dict is somewhat irrelevant # Order of this dict is somewhat irrelevant
prefix_to_projection = { prefix_to_projection = {
str(s.prefix): view.get_projection_for_spec(s) s.prefix: view.get_projection_for_spec(s)
for s in spec.traverse(root=True, order="breadth") for s in spec.traverse(root=True, order="breadth")
if not s.external if not s.external
} }
@@ -185,7 +184,7 @@ def __init__(
def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None: def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
self._link(src, dst, self, spec) self._link(src, dst, self, spec)
def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None: def add_specs(self, *specs, **kwargs):
""" """
Add given specs to view. Add given specs to view.
@@ -200,19 +199,19 @@ def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
""" """
raise NotImplementedError raise NotImplementedError
def add_standalone(self, spec: spack.spec.Spec) -> bool: def add_standalone(self, spec):
""" """
Add (link) a standalone package into this view. Add (link) a standalone package into this view.
""" """
raise NotImplementedError raise NotImplementedError
def check_added(self, spec: spack.spec.Spec) -> bool: def check_added(self, spec):
""" """
Check if the given concrete spec is active in this view. Check if the given concrete spec is active in this view.
""" """
raise NotImplementedError raise NotImplementedError
def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None: def remove_specs(self, *specs, **kwargs):
""" """
Removes given specs from view. Removes given specs from view.
@@ -231,25 +230,25 @@ def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
""" """
raise NotImplementedError raise NotImplementedError
def remove_standalone(self, spec: spack.spec.Spec) -> None: def remove_standalone(self, spec):
""" """
Remove (unlink) a standalone package from this view. Remove (unlink) a standalone package from this view.
""" """
raise NotImplementedError raise NotImplementedError
def get_projection_for_spec(self, spec: spack.spec.Spec) -> str: def get_projection_for_spec(self, spec):
""" """
Get the projection in this view for a spec. Get the projection in this view for a spec.
""" """
raise NotImplementedError raise NotImplementedError
def get_all_specs(self) -> List[spack.spec.Spec]: def get_all_specs(self):
""" """
Get all specs currently active in this view. Get all specs currently active in this view.
""" """
raise NotImplementedError raise NotImplementedError
def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]: def get_spec(self, spec):
""" """
Return the actual spec linked in this view (i.e. do not look it up Return the actual spec linked in this view (i.e. do not look it up
in the database by name). in the database by name).
@@ -263,7 +262,7 @@ def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
""" """
raise NotImplementedError raise NotImplementedError
def print_status(self, *specs: spack.spec.Spec, **kwargs) -> None: def print_status(self, *specs, **kwargs):
""" """
Print a short summary about the given specs, detailing whether.. Print a short summary about the given specs, detailing whether..
* ..they are active in the view. * ..they are active in the view.
@@ -694,7 +693,7 @@ def _sanity_check_view_projection(self, specs):
raise ConflictingSpecsError(current_spec, conflicting_spec) raise ConflictingSpecsError(current_spec, conflicting_spec)
seen[metadata_dir] = current_spec seen[metadata_dir] = current_spec
def add_specs(self, *specs, **kwargs) -> None: def add_specs(self, *specs: spack.spec.Spec) -> None:
"""Link a root-to-leaf topologically ordered list of specs into the view.""" """Link a root-to-leaf topologically ordered list of specs into the view."""
assert all((s.concrete for s in specs)) assert all((s.concrete for s in specs))
if len(specs) == 0: if len(specs) == 0:
@@ -709,10 +708,7 @@ def add_specs(self, *specs, **kwargs) -> None:
def skip_list(file): def skip_list(file):
return os.path.basename(file) == spack.store.STORE.layout.metadata_dir return os.path.basename(file) == spack.store.STORE.layout.metadata_dir
# Determine if the root is on a case-insensitive filesystem visitor = SourceMergeVisitor(ignore=skip_list)
normalize_paths = is_folder_on_case_insensitive_filesystem(self._root)
visitor = SourceMergeVisitor(ignore=skip_list, normalize_paths=normalize_paths)
# Gather all the directories to be made and files to be linked # Gather all the directories to be made and files to be linked
for spec in specs: for spec in specs:
@@ -831,7 +827,7 @@ def get_projection_for_spec(self, spec):
##################### #####################
# utility functions # # utility functions #
##################### #####################
def get_spec_from_file(filename) -> Optional[spack.spec.Spec]: def get_spec_from_file(filename):
try: try:
with open(filename, "r", encoding="utf-8") as f: with open(filename, "r", encoding="utf-8") as f:
return spack.spec.Spec.from_yaml(f) return spack.spec.Spec.from_yaml(f)
@@ -888,8 +884,3 @@ def get_dependencies(specs):
class ConflictingProjectionsError(SpackError): class ConflictingProjectionsError(SpackError):
"""Raised when a view has a projections file and is given one manually.""" """Raised when a view has a projections file and is given one manually."""
def is_folder_on_case_insensitive_filesystem(path: str) -> bool:
with tempfile.NamedTemporaryFile(dir=path, prefix=".sentinel") as sentinel:
View File

@@ -42,10 +42,10 @@
import llnl.util.tty.color import llnl.util.tty.color
import spack.deptypes as dt import spack.deptypes as dt
import spack.repo
import spack.spec import spack.spec
import spack.tengine import spack.tengine
import spack.traverse import spack.traverse
from spack.solver.input_analysis import create_graph_analyzer
def find(seq, predicate): def find(seq, predicate):
@@ -537,11 +537,10 @@ def edge_entry(self, edge):
def _static_edges(specs, depflag): def _static_edges(specs, depflag):
for spec in specs: for spec in specs:
*_, edges = create_graph_analyzer().possible_dependencies( pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
spec.name, expand_virtuals=True, allowed_deps=depflag possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
)
for parent_name, dependencies in edges.items(): for parent_name, dependencies in possible.items():
for dependency_name in dependencies: for dependency_name in dependencies:
yield spack.spec.DependencySpec( yield spack.spec.DependencySpec(
spack.spec.Spec(parent_name), spack.spec.Spec(parent_name),
View File
@@ -2,14 +2,198 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import fnmatch
import io import io
import os
import re
from typing import Dict, List, Union
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import visit_directory_tree from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
from llnl.util.lang import stable_partition
import spack.config import spack.config
import spack.error import spack.error
import spack.verify_libraries import spack.util.elf as elf
#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
#: default search paths of the dynamic linker.
ALLOW_UNRESOLVED = [
# kernel
"linux-vdso.so.*",
"libselinux.so.*",
# musl libc
"ld-musl-*.so.*",
# glibc
"ld-linux*.so.*",
"ld64.so.*",
"libanl.so.*",
"libc.so.*",
"libdl.so.*",
"libm.so.*",
"libmemusage.so.*",
"libmvec.so.*",
"libnsl.so.*",
"libnss_compat.so.*",
"libnss_db.so.*",
"libnss_dns.so.*",
"libnss_files.so.*",
"libnss_hesiod.so.*",
"libpcprofile.so.*",
"libpthread.so.*",
"libresolv.so.*",
"librt.so.*",
"libSegFault.so.*",
"libthread_db.so.*",
"libutil.so.*",
# gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
# but the binaries we copy from the compiler don't have an $ORIGIN rpath.
"libasan.so.*",
"libatomic.so.*",
"libcc1.so.*",
"libgcc_s.so.*",
"libgfortran.so.*",
"libgomp.so.*",
"libitm.so.*",
"liblsan.so.*",
"libquadmath.so.*",
"libssp.so.*",
"libstdc++.so.*",
"libtsan.so.*",
"libubsan.so.*",
# systemd
"libudev.so.*",
# cuda driver
"libcuda.so.*",
]
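The allow-list above uses shell-style patterns; the visitor further down joins them into a single regex via `fnmatch.translate`. A quick illustration of how that matching behaves (library names are examples):

```python
import fnmatch
import re

allow = re.compile("|".join(fnmatch.translate(p) for p in ["libc.so.*", "ld-linux*.so.*"]))
assert allow.match("libc.so.6")
assert allow.match("ld-linux-x86-64.so.2")
assert not allow.match("libfoo.so.1")
```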
def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
return (
child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
and parent.is_little_endian == child.is_little_endian
and parent.is_64_bit == child.is_64_bit
and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
)
def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
try:
with open(candidate_path, "rb") as g:
return is_compatible(current_elf, elf.parse_elf(g))
except (OSError, elf.ElfParsingError):
return False
class Problem:
def __init__(
self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
) -> None:
self.resolved = resolved
self.unresolved = unresolved
self.relative_rpaths = relative_rpaths
class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
def __init__(self, allow_unresolved_patterns: List[str]) -> None:
self.problems: Dict[str, Problem] = {}
self._allow_unresolved_regex = re.compile(
"|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
)
def allow_unresolved(self, needed: bytes) -> bool:
try:
name = needed.decode("utf-8")
except UnicodeDecodeError:
return False
return bool(self._allow_unresolved_regex.match(name))
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# We work with byte strings for paths.
path = os.path.join(root, rel_path).encode("utf-8")
# For $ORIGIN interpolation: should not have trailing dir seperator.
origin = os.path.dirname(path)
# Retrieve the needed libs + rpaths.
try:
with open(path, "rb") as f:
parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
except (OSError, elf.ElfParsingError):
# Not dealing with an invalid ELF file.
return
# If there's no needed libs all is good
if not parsed_elf.has_needed:
return
# Get the needed libs and rpaths (notice: byte strings)
# Don't force an encoding cause paths are just a bag of bytes.
needed_libs = parsed_elf.dt_needed_strs
rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []
# We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really
# supported in general. Also remove empty paths.
rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]
# Do not allow relative rpaths (they are relative to the current working directory)
rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)
# If there's a / in the needed lib, it's opened directly, otherwise it needs
# a search.
direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)
# Do not allow relative paths in direct libs (they are relative to the current working
# directory)
direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)
resolved: Dict[bytes, bytes] = {}
for lib in search_libs:
if self.allow_unresolved(lib):
continue
for rpath in rpaths:
candidate = os.path.join(rpath, lib)
if candidate_matches(parsed_elf, candidate):
resolved[lib] = candidate
break
else:
unresolved.append(lib)
# Check if directly opened libs are compatible
for lib in direct_libs:
if candidate_matches(parsed_elf, lib):
resolved[lib] = lib
else:
unresolved.append(lib)
if unresolved or relative_rpaths:
self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)
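The `visit_file` logic above resolves each needed library by substituting `$ORIGIN` into the RPATH entries and probing the resulting candidates. A toy illustration of that search (paths are made up, and the real check also parses the candidate's ELF header for compatibility):

```python
import os

origin = b"/opt/software/mypkg/lib"              # directory containing the binary
raw_rpaths = [b"$ORIGIN/../lib64", b"/usr/lib", b""]
rpaths = [p.replace(b"$ORIGIN", origin) for p in raw_rpaths if p]

needed = b"libfoo.so.1"
candidates = [os.path.join(rpath, needed) for rpath in rpaths]
print(candidates)
# [b'/opt/software/mypkg/lib/../lib64/libfoo.so.1', b'/usr/lib/libfoo.so.1']
# the real visitor accepts the first candidate whose ELF header is compatible
```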
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
pass
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# There can be binaries in .spack/test which shouldn't be checked.
if rel_path == ".spack":
return False
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
return False
class CannotLocateSharedLibraries(spack.error.SpackError):
pass
def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
try:
return byte_str.decode("utf-8")
except UnicodeDecodeError:
return byte_str
def post_install(spec, explicit): def post_install(spec, explicit):
@@ -20,23 +204,36 @@ def post_install(spec, explicit):
if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"): if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
return return
visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor( visitor = ResolveSharedElfLibDepsVisitor(
[*spack.verify_libraries.ALLOW_UNRESOLVED, *spec.package.unresolved_libraries] [*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
) )
visit_directory_tree(spec.prefix, visitor) visit_directory_tree(spec.prefix, visitor)
# All good?
if not visitor.problems: if not visitor.problems:
return return
output = io.StringIO("not all executables and libraries can resolve their dependencies:\n") # For now just list the issues (print it in ldd style, except we don't recurse)
visitor.write(output) output = io.StringIO()
output.write("not all executables and libraries can resolve their dependencies:\n")
for path, problem in visitor.problems.items():
output.write(path)
output.write("\n")
for needed, full_path in problem.resolved.items():
output.write(" ")
if needed == full_path:
output.write(maybe_decode(needed))
else:
output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
output.write("\n")
for not_found in problem.unresolved:
output.write(f" {maybe_decode(not_found)} => not found\n")
for relative_rpath in problem.relative_rpaths:
output.write(f" {maybe_decode(relative_rpath)} => relative rpath\n")
message = output.getvalue().strip() message = output.getvalue().strip()
if policy == "error": if policy == "error":
raise CannotLocateSharedLibraries(message) raise CannotLocateSharedLibraries(message)
tty.warn(message) tty.warn(message)
class CannotLocateSharedLibraries(spack.error.SpackError):
pass
View File
@@ -21,6 +21,7 @@
from llnl.util.lang import nullcontext from llnl.util.lang import nullcontext
from llnl.util.tty.color import colorize from llnl.util.tty.color import colorize
import spack.build_environment
import spack.config import spack.config
import spack.error import spack.error
import spack.package_base import spack.package_base
@@ -397,7 +398,7 @@ def stand_alone_tests(self, kwargs):
Args: Args:
kwargs (dict): arguments to be used by the test process kwargs (dict): arguments to be used by the test process
""" """
import spack.build_environment # avoid circular dependency import spack.build_environment
spack.build_environment.start_build_process(self.pkg, test_process, kwargs) spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
@@ -462,8 +463,6 @@ def write_tested_status(self):
@contextlib.contextmanager @contextlib.contextmanager
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False): def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
import spack.build_environment # avoid circular dependency
wdir = "." if work_dir is None else work_dir wdir = "." if work_dir is None else work_dir
tester = pkg.tester tester = pkg.tester
assert test_name and test_name.startswith( assert test_name and test_name.startswith(
@@ -567,7 +566,7 @@ def copy_test_files(pkg: Pb, test_spec: spack.spec.Spec):
# copy test data into test stage data dir # copy test data into test stage data dir
try: try:
pkg_cls = spack.repo.PATH.get_pkg_class(test_spec.fullname) pkg_cls = test_spec.package_class
except spack.repo.UnknownPackageError: except spack.repo.UnknownPackageError:
tty.debug(f"{test_spec.name}: skipping test data copy since no package class found") tty.debug(f"{test_spec.name}: skipping test data copy since no package class found")
return return
@@ -624,7 +623,7 @@ def test_functions(
vpkgs = virtuals(pkg) vpkgs = virtuals(pkg)
for vname in vpkgs: for vname in vpkgs:
try: try:
classes.append(spack.repo.PATH.get_pkg_class(vname)) classes.append((Spec(vname)).package_class)
except spack.repo.UnknownPackageError: except spack.repo.UnknownPackageError:
tty.debug(f"{vname}: virtual does not appear to have a package file") tty.debug(f"{vname}: virtual does not appear to have a package file")
@@ -669,7 +668,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
# grab test functions associated with the spec, which may be virtual # grab test functions associated with the spec, which may be virtual
try: try:
tests = test_functions(spack.repo.PATH.get_pkg_class(spec.fullname)) tests = test_functions(spec.package_class)
except spack.repo.UnknownPackageError: except spack.repo.UnknownPackageError:
# Some virtuals don't have a package so we don't want to report # Some virtuals don't have a package so we don't want to report
# them as not having tests when that isn't appropriate. # them as not having tests when that isn't appropriate.
View File
@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
# Include build dependencies if pkg is going to be built from sources, or # Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested. # if build deps are explicitly requested.
if include_build_deps or not ( if include_build_deps or not (
cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
): ):
depflag |= dt.BUILD depflag |= dt.BUILD
if self.run_tests(pkg): if self.run_tests(pkg):
@@ -2436,7 +2436,11 @@ def _real_install(self) -> None:
# DEBUGGING TIP - to debug this section, insert an IPython # DEBUGGING TIP - to debug this section, insert an IPython
# embed here, and run the sections below without log capture # embed here, and run the sections below without log capture
log_contextmanager = log_output( log_contextmanager = log_output(
log_file, self.echo, True, filter_fn=self.filter_fn log_file,
self.echo,
True,
env=self.unmodified_env,
filter_fn=self.filter_fn,
) )
with log_contextmanager as logger: with log_contextmanager as logger:
View File
@@ -47,8 +47,6 @@
import spack.util.environment import spack.util.environment
import spack.util.lock import spack.util.lock
from .enums import ConfigScopePriority
#: names of profile statistics #: names of profile statistics
stat_names = pstats.Stats.sort_arg_dict_default stat_names = pstats.Stats.sort_arg_dict_default
@@ -874,19 +872,14 @@ def add_command_line_scopes(
scopes = ev.environment_path_scopes(name, path) scopes = ev.environment_path_scopes(name, path)
if scopes is None: if scopes is None:
if os.path.isdir(path): # directory with config files if os.path.isdir(path): # directory with config files
cfg.push_scope( cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
spack.config.DirectoryConfigScope(name, path, writable=False), spack.config._add_platform_scope(cfg, name, path, writable=False)
priority=ConfigScopePriority.CUSTOM,
)
spack.config._add_platform_scope(
cfg, name, path, priority=ConfigScopePriority.CUSTOM, writable=False
)
continue continue
else: else:
raise spack.error.ConfigError(f"Invalid configuration scope: {path}") raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
for scope in scopes: for scope in scopes:
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM) cfg.push_scope(scope)
def _main(argv=None): def _main(argv=None):
@@ -959,9 +952,7 @@ def _main(argv=None):
# Push scopes from the command line last # Push scopes from the command line last
if args.config_scopes: if args.config_scopes:
add_command_line_scopes(spack.config.CONFIG, args.config_scopes) add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
spack.config.CONFIG.push_scope( spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
spack.config.InternalConfigScope("command_line"), priority=ConfigScopePriority.COMMAND_LINE
)
setup_main_options(args) setup_main_options(args)
# ------------------------------------------------------------------------ # ------------------------------------------------------------------------
@@ -1007,7 +998,6 @@ def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
args, unknown = parser.parse_known_args(main_args.command) args, unknown = parser.parse_known_args(main_args.command)
# we need to inherit verbose since the install command checks for it # we need to inherit verbose since the install command checks for it
args.verbose = main_args.verbose args.verbose = main_args.verbose
args.lines = main_args.lines
# Now that we know what command this is and what its args are, determine # Now that we know what command this is and what its args are, determine
# whether we can continue with a bad environment and raise if not. # whether we can continue with a bad environment and raise if not.
View File
@@ -64,7 +64,7 @@ def from_local_path(path: str):
@staticmethod @staticmethod
def from_url(url: str): def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL.""" """Create an anonymous mirror by URL. This method validates the URL."""
if urllib.parse.urlparse(url).scheme not in supported_url_schemes: if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError( raise ValueError(
f'"{url}" is not a valid mirror URL. ' f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}." f"Scheme must be one of {supported_url_schemes}."

View File

@@ -330,17 +330,18 @@ class BaseConfiguration:
default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"} default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None: def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None:
# Module where type(self) is defined
m = inspect.getmodule(self)
assert m is not None # make mypy happy
self.module = m
# Spec for which we want to generate a module file # Spec for which we want to generate a module file
self.spec = spec self.spec = spec
self.name = module_set_name self.name = module_set_name
self.explicit = explicit self.explicit = explicit
# Dictionary of configuration options that should be applied to the spec # Dictionary of configuration options that should be applied
# to the spec
self.conf = merge_config_rules(self.module.configuration(self.name), self.spec) self.conf = merge_config_rules(self.module.configuration(self.name), self.spec)
@property
def module(self):
return inspect.getmodule(self)
@property @property
def projections(self): def projections(self):
"""Projection from specs to module names""" """Projection from specs to module names"""
@@ -774,6 +775,10 @@ def __init__(
) -> None: ) -> None:
self.spec = spec self.spec = spec
# This class is meant to be derived. Get the module of the
# actual writer.
self.module = inspect.getmodule(self)
assert self.module is not None # make mypy happy
m = self.module m = self.module
# Create the triplet of configuration/layout/context # Create the triplet of configuration/layout/context
@@ -811,10 +816,6 @@ def __init__(
name = type(self).__name__ name = type(self).__name__
raise ModulercHeaderNotDefined(msg.format(name)) raise ModulercHeaderNotDefined(msg.format(name))
@property
def module(self):
return inspect.getmodule(self)
def _get_template(self): def _get_template(self):
"""Gets the template that will be rendered for this spec.""" """Gets the template that will be rendered for this spec."""
# Get templates and put them in the order of importance: # Get templates and put them in the order of importance:
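The hunks above move between caching the writer's module in `__init__` and resolving it through a property; either way the value comes from `inspect.getmodule(self)`. A minimal sketch of what that call returns, with an invented class name:

```python
# Minimal sketch; BaseWriter is an invented stand-in for the module-file writer classes.
import inspect

class BaseWriter:
    @property
    def module(self):
        # inspect.getmodule(self) follows self.__module__, i.e. the module where
        # type(self) was defined, so a subclass defined in tcl.py resolves to that
        # module rather than to the module containing this base class.
        return inspect.getmodule(self)

writer = BaseWriter()
print(writer.module.__name__)  # "__main__" when this sketch is run directly
```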

View File

@@ -209,7 +209,7 @@ def provides(self):
# All the other tokens in the hierarchy must be virtual dependencies # All the other tokens in the hierarchy must be virtual dependencies
for x in self.hierarchy_tokens: for x in self.hierarchy_tokens:
if self.spec.package.provides(x): if self.spec.package.provides(x):
provides[x] = self.spec provides[x] = self.spec[x]
return provides return provides
@property @property

View File

@@ -383,7 +383,6 @@ def create_opener():
"""Create an opener that can handle OCI authentication.""" """Create an opener that can handle OCI authentication."""
opener = urllib.request.OpenerDirector() opener = urllib.request.OpenerDirector()
for handler in [ for handler in [
urllib.request.ProxyHandler(),
urllib.request.UnknownHandler(), urllib.request.UnknownHandler(),
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()), urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
spack.util.web.SpackHTTPDefaultErrorHandler(), spack.util.web.SpackHTTPDefaultErrorHandler(),

View File

@@ -2,7 +2,7 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
# flake8: noqa: F401, E402 # flake8: noqa: F401
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from """spack.package defines the public API for Spack packages, by re-exporting useful symbols from
other modules. Packages should import this module, instead of importing from spack.* directly other modules. Packages should import this module, instead of importing from spack.* directly
to ensure forward compatibility with future versions of Spack.""" to ensure forward compatibility with future versions of Spack."""

View File

@@ -22,6 +22,7 @@
import textwrap import textwrap
import time import time
import traceback import traceback
import typing
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
from typing_extensions import Literal from typing_extensions import Literal
@@ -125,10 +126,9 @@ def windows_establish_runtime_linkage(self):
# Spack should in general not modify things it has not installed # Spack should in general not modify things it has not installed
# we can reasonably expect externals to have their link interface properly established # we can reasonably expect externals to have their link interface properly established
if sys.platform == "win32" and not self.spec.external: if sys.platform == "win32" and not self.spec.external:
win_rpath = fsys.WindowsSimulatedRPath(self) self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
win_rpath.add_library_dependent(*self.win_add_library_dependent()) self.win_rpath.add_rpath(*self.win_add_rpath())
win_rpath.add_rpath(*self.win_add_rpath()) self.win_rpath.establish_link()
win_rpath.establish_link()
#: Registers which are the detectable packages, by repo and package name #: Registers which are the detectable packages, by repo and package name
@@ -697,6 +697,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Verbosity level, preserved across installs. #: Verbosity level, preserved across installs.
_verbose = None _verbose = None
#: index of patches by sha256 sum, built lazily
_patches_by_hash = None
#: Package homepage where users can find more information about the package #: Package homepage where users can find more information about the package
homepage: Optional[str] = None homepage: Optional[str] = None
@@ -710,6 +713,19 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Do not include @ here in order not to unnecessarily ping the users. #: Do not include @ here in order not to unnecessarily ping the users.
maintainers: List[str] = [] maintainers: List[str] = []
#: List of attributes to be excluded from a package's hash.
metadata_attrs = [
"homepage",
"url",
"urls",
"list_url",
"extendable",
"parallel",
"make_jobs",
"maintainers",
"tags",
]
#: Set to ``True`` to indicate the stand-alone test requires a compiler. #: Set to ``True`` to indicate the stand-alone test requires a compiler.
#: It is used to ensure a compiler and build dependencies like 'cmake' #: It is used to ensure a compiler and build dependencies like 'cmake'
#: are available to build a custom test code. #: are available to build a custom test code.
@@ -743,6 +759,7 @@ def __init__(self, spec):
# Set up timing variables # Set up timing variables
self._fetch_time = 0.0 self._fetch_time = 0.0
self.win_rpath = fsys.WindowsSimulatedRPath(self)
super().__init__() super().__init__()
def __getitem__(self, key: str) -> "PackageBase": def __getitem__(self, key: str) -> "PackageBase":
@@ -808,6 +825,104 @@ def get_variant(self, name: str) -> spack.variant.Variant:
except StopIteration: except StopIteration:
raise ValueError(f"No variant '{name}' on spec: {self.spec}") raise ValueError(f"No variant '{name}' on spec: {self.spec}")
@classmethod
def possible_dependencies(
cls,
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
visited: Optional[dict] = None,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Return dict of possible dependencies of this package.
Args:
transitive (bool or None): return all transitive dependencies if
True, only direct dependencies if False (default True).
expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True)
depflag: dependency types to consider
visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict or None): dict to populate with packages and their
*missing* dependencies.
virtuals (set): if provided, populate with virtuals seen so far.
Returns:
(dict): dictionary mapping dependency names to *their*
immediate dependencies
Each item in the returned dictionary maps a (potentially
transitive) dependency of this package to its possible
*immediate* dependencies. If ``expand_virtuals`` is ``False``,
virtual package names will be inserted as keys mapped to empty
sets of dependencies. Virtuals, if not expanded, are treated as
though they have no immediate dependencies.
Missing dependencies by default are ignored, but if a
missing dict is provided, it will be populated with package names
mapped to any dependencies they have that are in no
repositories. This is only populated if transitive is True.
Note: the returned dict *includes* the package itself.
"""
visited = {} if visited is None else visited
missing = {} if missing is None else missing
visited.setdefault(cls.name, set())
for name, conditions in cls.dependencies_by_name(when=True).items():
# check whether this dependency could be of the type asked for
depflag_union = 0
for deplist in conditions.values():
for dep in deplist:
depflag_union |= dep.depflag
if not (depflag & depflag_union):
continue
# expand virtuals if enabled, otherwise just stop at virtuals
if spack.repo.PATH.is_virtual(name):
if virtuals is not None:
virtuals.add(name)
if expand_virtuals:
providers = spack.repo.PATH.providers_for(name)
dep_names = [spec.name for spec in providers]
else:
visited.setdefault(cls.name, set()).add(name)
visited.setdefault(name, set())
continue
else:
dep_names = [name]
# add the dependency names to the visited dict
visited.setdefault(cls.name, set()).update(set(dep_names))
# recursively traverse dependencies
for dep_name in dep_names:
if dep_name in visited:
continue
visited.setdefault(dep_name, set())
# skip the rest if not transitive
if not transitive:
continue
try:
dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
except spack.repo.UnknownPackageError:
# log unknown packages
missing.setdefault(cls.name, set()).add(dep_name)
continue
dep_cls.possible_dependencies(
transitive, expand_virtuals, depflag, visited, missing, virtuals
)
return visited
@classproperty @classproperty
def package_dir(cls): def package_dir(cls):
"""Directory where the package.py file lives.""" """Directory where the package.py file lives."""
@@ -2172,6 +2287,55 @@ def rpath_args(self):
build_system_flags = PackageBase.build_system_flags build_system_flags = PackageBase.build_system_flags
def use_cray_compiler_names():
"""Compiler names for builds that rely on cray compiler names."""
os.environ["CC"] = "cc"
os.environ["CXX"] = "CC"
os.environ["FC"] = "ftn"
os.environ["F77"] = "ftn"
def possible_dependencies(
*pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Get the possible dependencies of a number of packages.
See ``PackageBase.possible_dependencies`` for details.
"""
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
packages.append(pos)
continue
if not isinstance(pos, spack.spec.Spec):
pos = spack.spec.Spec(pos)
if spack.repo.PATH.is_virtual(pos.name):
packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
continue
else:
packages.append(pos.package_class)
visited: Dict[str, Set[str]] = {}
for pkg in packages:
pkg.possible_dependencies(
visited=visited,
transitive=transitive,
expand_virtuals=expand_virtuals,
depflag=depflag,
missing=missing,
virtuals=virtuals,
)
return visited
def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool: def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
"""Return True iff the version is deprecated. """Return True iff the version is deprecated.

View File

@@ -83,7 +83,6 @@ def __init__(
level: int, level: int,
working_dir: str, working_dir: str,
reverse: bool = False, reverse: bool = False,
ordering_key: Optional[Tuple[str, int]] = None,
) -> None: ) -> None:
"""Initialize a new Patch instance. """Initialize a new Patch instance.
@@ -93,7 +92,6 @@ def __init__(
level: patch level level: patch level
working_dir: relative path *within* the stage to change to working_dir: relative path *within* the stage to change to
reverse: reverse the patch reverse: reverse the patch
ordering_key: key used to ensure patches are applied in a consistent order
""" """
# validate level (must be an integer >= 0) # validate level (must be an integer >= 0)
if not isinstance(level, int) or not level >= 0: if not isinstance(level, int) or not level >= 0:
@@ -107,13 +105,6 @@ def __init__(
self.working_dir = working_dir self.working_dir = working_dir
self.reverse = reverse self.reverse = reverse
# The ordering key is passed when executing package.py directives, and is only relevant
# after a solve to build concrete specs with consistently ordered patches. For concrete
# specs read from a file, we add patches in the order of its patches variants and the
# ordering_key is irrelevant. In that case, use a default value so we don't need to branch
# on whether ordering_key is None where it's used, just to make static analysis happy.
self.ordering_key: Tuple[str, int] = ordering_key or ("", 0)
def apply(self, stage: "spack.stage.Stage") -> None: def apply(self, stage: "spack.stage.Stage") -> None:
"""Apply a patch to source in a stage. """Apply a patch to source in a stage.
@@ -211,8 +202,9 @@ def __init__(
msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name) msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
raise ValueError(msg) raise ValueError(msg)
super().__init__(pkg, abs_path, level, working_dir, reverse, ordering_key) super().__init__(pkg, abs_path, level, working_dir, reverse)
self.path = abs_path self.path = abs_path
self.ordering_key = ordering_key
@property @property
def sha256(self) -> str: def sha256(self) -> str:
@@ -274,11 +266,13 @@ def __init__(
archive_sha256: sha256 sum of the *archive*, if the patch is compressed archive_sha256: sha256 sum of the *archive*, if the patch is compressed
(only required for compressed URL patches) (only required for compressed URL patches)
""" """
super().__init__(pkg, url, level, working_dir, reverse, ordering_key) super().__init__(pkg, url, level, working_dir, reverse)
self.url = url self.url = url
self._stage: Optional["spack.stage.Stage"] = None self._stage: Optional["spack.stage.Stage"] = None
self.ordering_key = ordering_key
if allowed_archive(self.url) and not archive_sha256: if allowed_archive(self.url) and not archive_sha256:
raise spack.error.PatchDirectiveError( raise spack.error.PatchDirectiveError(
"Compressed patches require 'archive_sha256' " "Compressed patches require 'archive_sha256' "

View File

@@ -108,8 +108,6 @@ def _get_user_cache_path():
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.) #: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
default_misc_cache_path = os.path.join(user_cache_path, "cache") default_misc_cache_path = os.path.join(user_cache_path, "cache")
#: concretization cache for Spack concretizations
default_conc_cache_path = os.path.join(default_misc_cache_path, "concretization")
# Below paths pull configuration from the host environment. # Below paths pull configuration from the host environment.
# #

View File

@@ -236,15 +236,22 @@ def relocate_elf_binaries(binaries: Iterable[str], prefix_to_prefix: Dict[str, s
_set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter) _set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
def _warn_if_link_cant_be_relocated(link: str, target: str):
if not os.path.isabs(target):
return
tty.warn(f'Symbolic link at "{link}" to "{target}" cannot be relocated')
def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None: def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
"""Relocate links to a new install prefix.""" """Relocate links to a new install prefix."""
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys())) regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
for link in links: for link in links:
old_target = readlink(link) old_target = readlink(link)
if not os.path.isabs(old_target):
continue
match = regex.match(old_target) match = regex.match(old_target)
# No match.
if match is None: if match is None:
_warn_if_link_cant_be_relocated(link, old_target)
continue continue
new_target = prefix_to_prefix[match.group()] + old_target[match.end() :] new_target = prefix_to_prefix[match.group()] + old_target[match.end() :]
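The loop above rewrites absolute symlink targets whose old install prefix appears in `prefix_to_prefix`. A self-contained sketch of just that rewrite step, with invented paths:

```python
# Standalone sketch of the prefix-rewrite logic; the paths are invented.
import os
import re

prefix_to_prefix = {"/old/install/root": "/new/install/root", "/old/deps": "/new/deps"}
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix))

def rewrite_target(target: str) -> str:
    # Relative targets and targets outside the known prefixes are left alone.
    match = regex.match(target)
    if not os.path.isabs(target) or match is None:
        return target
    return prefix_to_prefix[match.group()] + target[match.end():]

print(rewrite_target("/old/install/root/lib/libfoo.so"))  # /new/install/root/lib/libfoo.so
print(rewrite_target("../sibling/libbar.so"))              # unchanged
```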
@@ -283,21 +290,21 @@ def relocate_text_bin(binaries: Iterable[str], prefix_to_prefix: PrefixToPrefix)
def is_macho_magic(magic: bytes) -> bool: def is_macho_magic(magic: bytes) -> bool:
return ( return (
# In order of popularity: 64-bit mach-o le/be, 32-bit mach-o le/be. # In order of popularity: 64-bit mach-o le/be, 32-bit mach-o le/be.
magic.startswith(b"\xcf\xfa\xed\xfe") magic.startswith(b"\xCF\xFA\xED\xFE")
or magic.startswith(b"\xfe\xed\xfa\xcf") or magic.startswith(b"\xFE\xED\xFA\xCF")
or magic.startswith(b"\xce\xfa\xed\xfe") or magic.startswith(b"\xCE\xFA\xED\xFE")
or magic.startswith(b"\xfe\xed\xfa\xce") or magic.startswith(b"\xFE\xED\xFA\xCE")
# universal binaries: 0xcafebabe be (most common?) or 0xbebafeca le (not sure if exists). # universal binaries: 0xcafebabe be (most common?) or 0xbebafeca le (not sure if exists).
# Here we need to disambiguate mach-o and JVM class files. In mach-o the next 4 bytes are # Here we need to disambiguate mach-o and JVM class files. In mach-o the next 4 bytes are
# the number of binaries; in JVM class files it's the java version number. We assume there # the number of binaries; in JVM class files it's the java version number. We assume there
# are less than 10 binaries in a universal binary. # are less than 10 binaries in a universal binary.
or (magic.startswith(b"\xca\xfe\xba\xbe") and int.from_bytes(magic[4:8], "big") < 10) or (magic.startswith(b"\xCA\xFE\xBA\xBE") and int.from_bytes(magic[4:8], "big") < 10)
or (magic.startswith(b"\xbe\xba\xfe\xca") and int.from_bytes(magic[4:8], "little") < 10) or (magic.startswith(b"\xBE\xBA\xFE\xCA") and int.from_bytes(magic[4:8], "little") < 10)
) )
def is_elf_magic(magic: bytes) -> bool: def is_elf_magic(magic: bytes) -> bool:
return magic.startswith(b"\x7fELF") return magic.startswith(b"\x7FELF")
def is_binary(filename: str) -> bool: def is_binary(filename: str) -> bool:
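The predicates above classify files purely by their leading magic bytes, with the extra byte-count check to tell fat Mach-O binaries apart from JVM class files, which share the 0xcafebabe prefix. A small sketch of the same idea; the sample path is an assumption and only the ELF/Mach-O prefixes shown in the hunk are used:

```python
# Sketch only: reads the first bytes of a local binary and reports what they look like.
def looks_like_elf(magic: bytes) -> bool:
    return magic.startswith(b"\x7fELF")

def looks_like_macho(magic: bytes) -> bool:
    # 64-bit and 32-bit Mach-O, little- and big-endian.
    return magic[:4] in (b"\xcf\xfa\xed\xfe", b"\xfe\xed\xfa\xcf",
                         b"\xce\xfa\xed\xfe", b"\xfe\xed\xfa\xce")

with open("/bin/ls", "rb") as f:  # assumed to exist on a Unix-like host
    head = f.read(8)

print("elf:", looks_like_elf(head), "mach-o:", looks_like_macho(head))
```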
@@ -406,8 +413,8 @@ def fixup_macos_rpaths(spec):
entries which makes it harder to adjust with ``install_name_tool entries which makes it harder to adjust with ``install_name_tool
-delete_rpath``. -delete_rpath``.
""" """
if spec.external or not spec.concrete: if spec.external or spec.virtual:
tty.warn("external/abstract spec cannot be fixed up: {0!s}".format(spec)) tty.warn("external or virtual package cannot be fixed up: {0!s}".format(spec))
return False return False
if "platform=darwin" not in spec: if "platform=darwin" not in spec:

View File

@@ -32,7 +32,6 @@
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import working_dir from llnl.util.filesystem import working_dir
import spack
import spack.caches import spack.caches
import spack.config import spack.config
import spack.error import spack.error
@@ -50,8 +49,6 @@
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name> #: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
ROOT_PYTHON_NAMESPACE = "spack.pkg" ROOT_PYTHON_NAMESPACE = "spack.pkg"
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
def python_package_for_repo(namespace): def python_package_for_repo(namespace):
"""Returns the full namespace of a repository, given its relative one """Returns the full namespace of a repository, given its relative one
@@ -912,52 +909,19 @@ def __reduce__(self):
return RepoPath.unmarshal, self.marshal() return RepoPath.unmarshal, self.marshal()
def _parse_package_api_version(
config: Dict[str, Any],
min_api: Tuple[int, int] = spack.min_package_api_version,
max_api: Tuple[int, int] = spack.package_api_version,
) -> Tuple[int, int]:
api = config.get("api")
if api is None:
package_api = (1, 0)
else:
if not isinstance(api, str):
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
api_match = _API_REGEX.match(api)
if api_match is None:
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
package_api = (int(api_match.group(1)), int(api_match.group(2)))
if min_api <= package_api <= max_api:
return package_api
min_str = ".".join(str(i) for i in min_api)
max_str = ".".join(str(i) for i in max_api)
curr_str = ".".join(str(i) for i in package_api)
raise BadRepoError(
f"Package API v{curr_str} is not supported by this version of Spack ("
f"must be between v{min_str} and v{max_str})"
)
class Repo: class Repo:
"""Class representing a package repository in the filesystem. """Class representing a package repository in the filesystem.
Each package repository must have a top-level configuration file called `repo.yaml`. Each package repository must have a top-level configuration file
called `repo.yaml`.
It contains the following keys: Currently, `repo.yaml` must define:
`namespace`: `namespace`:
A Python namespace where the repository's packages should live. A Python namespace where the repository's packages should live.
`subdirectory`: `subdirectory`:
An optional subdirectory name where packages are placed An optional subdirectory name where packages are placed
`api`:
A string of the form vX.Y that indicates the Package API version. The default is "v1.0".
For the repo to be compatible with the current version of Spack, the version must be
greater than or equal to :py:data:`spack.min_package_api_version` and less than or equal to
:py:data:`spack.package_api_version`.
""" """
def __init__( def __init__(
@@ -994,7 +958,7 @@ def check(condition, msg):
f"{os.path.join(root, repo_config_name)} must define a namespace.", f"{os.path.join(root, repo_config_name)} must define a namespace.",
) )
self.namespace: str = config["namespace"] self.namespace = config["namespace"]
check( check(
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace), re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. " f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
@@ -1007,14 +971,12 @@ def check(condition, msg):
# Keep name components around for checking prefixes. # Keep name components around for checking prefixes.
self._names = self.full_namespace.split(".") self._names = self.full_namespace.split(".")
packages_dir: str = config.get("subdirectory", packages_dir_name) packages_dir = config.get("subdirectory", packages_dir_name)
self.packages_path = os.path.join(self.root, packages_dir) self.packages_path = os.path.join(self.root, packages_dir)
check( check(
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'" os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
) )
self.package_api = _parse_package_api_version(config)
# Class attribute overrides by package name # Class attribute overrides by package name
self.overrides = overrides or {} self.overrides = overrides or {}
@@ -1064,7 +1026,7 @@ def is_prefix(self, fullname: str) -> bool:
parts = fullname.split(".") parts = fullname.split(".")
return self._names[: len(parts)] == parts return self._names[: len(parts)] == parts
def _read_config(self) -> Dict[str, Any]: def _read_config(self) -> Dict[str, str]:
"""Check for a YAML config file in this db's root directory.""" """Check for a YAML config file in this db's root directory."""
try: try:
with open(self.config_file, encoding="utf-8") as reponame_file: with open(self.config_file, encoding="utf-8") as reponame_file:
@@ -1406,8 +1368,6 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
config.write(f" namespace: '{namespace}'\n") config.write(f" namespace: '{namespace}'\n")
if subdir != packages_dir_name: if subdir != packages_dir_name:
config.write(f" subdirectory: '{subdir}'\n") config.write(f" subdirectory: '{subdir}'\n")
x, y = spack.package_api_version
config.write(f" api: v{x}.{y}\n")
except OSError as e: except OSError as e:
# try to clean up. # try to clean up.

View File

@@ -1,7 +1,6 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details. # Copyright Spack Project Developers. See COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import collections import collections
import hashlib import hashlib
import os import os
@@ -14,7 +13,7 @@
import xml.sax.saxutils import xml.sax.saxutils
from typing import Dict, Optional from typing import Dict, Optional
from urllib.parse import urlencode from urllib.parse import urlencode
from urllib.request import Request from urllib.request import HTTPSHandler, Request, build_opener
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import working_dir from llnl.util.filesystem import working_dir
@@ -25,10 +24,10 @@
import spack.spec import spack.spec
import spack.tengine import spack.tengine
import spack.util.git import spack.util.git
import spack.util.web as web_util
from spack.error import SpackError from spack.error import SpackError
from spack.util.crypto import checksum from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events from spack.util.log_parse import parse_log_events
from spack.util.web import ssl_create_default_context
from .base import Reporter from .base import Reporter
from .extract import extract_test_parts from .extract import extract_test_parts
@@ -177,7 +176,7 @@ def build_report_for_package(self, report_dir, package, duration):
# something went wrong pre-cdash "configure" phase b/c we have an exception and only # something went wrong pre-cdash "configure" phase b/c we have an exception and only
# "update" was encounterd. # "update" was encounterd.
# dump the report in the configure line so teams can see what the issue is # dump the report in the configure line so teams can see what the issue is
if len(phases_encountered) == 1 and package.get("exception"): if len(phases_encountered) == 1 and package["exception"]:
# TODO this mapping is not ideal since these are pre-configure errors # TODO this mapping is not ideal since these are pre-configure errors
# we need to determine if a more appropriate cdash phase can be utilized # we need to determine if a more appropriate cdash phase can be utilized
# for now we will add a message to the log explaining this # for now we will add a message to the log explaining this
@@ -434,6 +433,7 @@ def upload(self, filename):
# Compute md5 checksum for the contents of this file. # Compute md5 checksum for the contents of this file.
md5sum = checksum(hashlib.md5, filename, block_size=8192) md5sum = checksum(hashlib.md5, filename, block_size=8192)
opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
with open(filename, "rb") as f: with open(filename, "rb") as f:
params_dict = { params_dict = {
"build": self.buildname, "build": self.buildname,
@@ -443,21 +443,26 @@ def upload(self, filename):
} }
encoded_params = urlencode(params_dict) encoded_params = urlencode(params_dict)
url = "{0}&{1}".format(self.cdash_upload_url, encoded_params) url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
request = Request(url, data=f, method="PUT") request = Request(url, data=f)
request.add_header("Content-Type", "text/xml") request.add_header("Content-Type", "text/xml")
request.add_header("Content-Length", os.path.getsize(filename)) request.add_header("Content-Length", os.path.getsize(filename))
if self.authtoken: if self.authtoken:
request.add_header("Authorization", "Bearer {0}".format(self.authtoken)) request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
try: try:
response = web_util.urlopen(request, timeout=SPACK_CDASH_TIMEOUT) # By default, urllib2 only support GET and POST.
# CDash expects this file to be uploaded via PUT.
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
if self.current_package_name not in self.buildIds: if self.current_package_name not in self.buildIds:
resp_value = codecs.getreader("utf-8")(response).read() resp_value = response.read()
if isinstance(resp_value, bytes):
resp_value = resp_value.decode("utf-8")
match = self.buildid_regexp.search(resp_value) match = self.buildid_regexp.search(resp_value)
if match: if match:
buildid = match.group(1) buildid = match.group(1)
self.buildIds[self.current_package_name] = buildid self.buildIds[self.current_package_name] = buildid
except Exception as e: except Exception as e:
print(f"Upload to CDash failed: {e}") print("Upload to CDash failed: {0}".format(e))
def finalize_report(self): def finalize_report(self):
if self.buildIds: if self.buildIds:
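The upload change above relies on `urllib.request.Request` accepting the HTTP method directly, instead of building an opener and overriding `get_method`. A minimal sketch of a PUT request built that way; the URL and payload are placeholders and the final `urlopen` call is left commented out:

```python
# Placeholder endpoint and payload; Request(..., method=...) has been available since Python 3.3.
import urllib.request

payload = b"<xml>example report</xml>"
request = urllib.request.Request(
    "https://cdash.example.org/submit.php?project=demo", data=payload, method="PUT"
)
request.add_header("Content-Type", "text/xml")
request.add_header("Content-Length", str(len(payload)))
# urllib.request.urlopen(request, timeout=10)  # would perform the actual upload
```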

View File

@@ -7,7 +7,8 @@
import warnings import warnings
import jsonschema import jsonschema
import jsonschema.validators
import llnl.util.lang
from spack.error import SpecSyntaxError from spack.error import SpecSyntaxError
@@ -17,59 +18,59 @@ class DeprecationMessage(typing.NamedTuple):
error: bool error: bool
def _validate_spec(validator, is_spec, instance, schema): # jsonschema is imported lazily as it is heavy to import
"""Check if all additional keys are valid specs.""" # and increases the start-up time
import spack.spec_parser def _make_validator():
def _validate_spec(validator, is_spec, instance, schema):
"""Check if the attributes on instance are valid specs."""
import spack.spec_parser
if not validator.is_type(instance, "object"): if not validator.is_type(instance, "object"):
return return
properties = schema.get("properties") or {} for spec_str in instance:
try:
spack.spec_parser.parse(spec_str)
except SpecSyntaxError:
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
for spec_str in instance: def _deprecated_properties(validator, deprecated, instance, schema):
if spec_str in properties: if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
continue return
try:
spack.spec_parser.parse(spec_str) if not deprecated:
except SpecSyntaxError: return
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
deprecations = {
name: DeprecationMessage(message=x["message"], error=x["error"])
for x in deprecated
for name in x["names"]
}
# Get a list of the deprecated properties, return if there is none
issues = [entry for entry in instance if entry in deprecations]
if not issues:
return
# Process issues
errors = []
for name in issues:
msg = deprecations[name].message.format(name=name)
if deprecations[name].error:
errors.append(msg)
else:
warnings.warn(msg)
if errors:
yield jsonschema.ValidationError("\n".join(errors))
return jsonschema.validators.extend(
jsonschema.Draft7Validator,
{"validate_spec": _validate_spec, "deprecatedProperties": _deprecated_properties},
)
def _deprecated_properties(validator, deprecated, instance, schema): Validator = llnl.util.lang.Singleton(_make_validator)
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
return
if not deprecated:
return
deprecations = {
name: DeprecationMessage(message=x["message"], error=x["error"])
for x in deprecated
for name in x["names"]
}
# Get a list of the deprecated properties, return if there is none
issues = [entry for entry in instance if entry in deprecations]
if not issues:
return
# Process issues
errors = []
for name in issues:
msg = deprecations[name].message.format(name=name)
if deprecations[name].error:
errors.append(msg)
else:
warnings.warn(msg)
if errors:
yield jsonschema.ValidationError("\n".join(errors))
Validator = jsonschema.validators.extend(
jsonschema.Draft7Validator,
{"additionalKeysAreSpecs": _validate_spec, "deprecatedProperties": _deprecated_properties},
)
def _append(string: str) -> bool: def _append(string: str) -> bool:
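Both sides of the hunk above build a validator by extending `jsonschema.Draft7Validator` with custom keywords; they differ mainly in whether that happens eagerly at import time or lazily behind a singleton. A toy version of the extension mechanism, with an invented keyword:

```python
# Toy example: "keysAreUppercase" is an invented keyword, not one Spack defines.
import jsonschema
import jsonschema.validators

def _keys_are_uppercase(validator, enabled, instance, schema):
    if not enabled or not validator.is_type(instance, "object"):
        return
    for key in instance:
        if not key.isupper():
            yield jsonschema.ValidationError(f"the key '{key}' is not uppercase")

Validator = jsonschema.validators.extend(
    jsonschema.Draft7Validator, {"keysAreUppercase": _keys_are_uppercase}
)

checker = Validator({"keysAreUppercase": True})
print([err.message for err in checker.iter_errors({"GOOD": 1, "bad": 2})])
# ["the key 'bad' is not uppercase"]
```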

View File

@@ -84,14 +84,9 @@
"duplicates": { "duplicates": {
"type": "object", "type": "object",
"properties": { "properties": {
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}, "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
"max_dupes": {
"type": "object",
"additional_properties": {"type": "integer", "minimum": 1},
},
}, },
}, },
"static_analysis": {"type": "boolean"},
"timeout": {"type": "integer", "minimum": 0}, "timeout": {"type": "integer", "minimum": 0},
"error_on_timeout": {"type": "boolean"}, "error_on_timeout": {"type": "boolean"},
"os_compatible": {"type": "object", "additionalProperties": {"type": "array"}}, "os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},

View File

@@ -58,15 +58,6 @@
{"type": "string"}, # deprecated {"type": "string"}, # deprecated
] ]
}, },
"concretization_cache": {
"type": "object",
"properties": {
"enable": {"type": "boolean"},
"url": {"type": "string"},
"entry_limit": {"type": "integer", "minimum": 0},
"size_limit": {"type": "integer", "minimum": 0},
},
},
"install_hash_length": {"type": "integer", "minimum": 1}, "install_hash_length": {"type": "integer", "minimum": 1},
"install_path_scheme": {"type": "string"}, # deprecated "install_path_scheme": {"type": "string"}, # deprecated
"build_stage": { "build_stage": {

View File

@@ -3,12 +3,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for Cray descriptive manifest: this describes a set of """Schema for Cray descriptive manifest: this describes a set of
installed packages on the system and also specifies dependency installed packages on the system and also specifies dependency
relationships between them (so this provides more information than relationships between them (so this provides more information than
external entries in packages configuration). external entries in packages configuration).
This does not specify a configuration - it is an input format This does not specify a configuration - it is an input format
that is consumed and transformed into Spack DB records. that is consumed and transformed into Spack DB records.
""" """
from typing import Any, Dict from typing import Any, Dict

View File

@@ -29,7 +29,11 @@
# merged configuration scope schemas # merged configuration scope schemas
spack.schema.merged.properties, spack.schema.merged.properties,
# extra environment schema properties # extra environment schema properties
{"specs": spec_list_schema, "include_concrete": include_concrete}, {
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spec_list_schema,
"include_concrete": include_concrete,
},
), ),
} }
} }

View File

@@ -1,41 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for include.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/include.py
:lines: 12-
"""
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"include": {
"type": "array",
"default": [],
"additionalProperties": False,
"items": {
"anyOf": [
{
"type": "object",
"properties": {
"when": {"type": "string"},
"path": {"type": "string"},
"sha256": {"type": "string"},
"optional": {"type": "boolean"},
},
"required": ["path"],
"additionalProperties": False,
},
{"type": "string"},
]
},
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack include configuration file schema",
"properties": properties,
}

View File

@@ -21,7 +21,6 @@
import spack.schema.definitions import spack.schema.definitions
import spack.schema.develop import spack.schema.develop
import spack.schema.env_vars import spack.schema.env_vars
import spack.schema.include
import spack.schema.mirrors import spack.schema.mirrors
import spack.schema.modules import spack.schema.modules
import spack.schema.packages import spack.schema.packages
@@ -41,7 +40,6 @@
spack.schema.definitions.properties, spack.schema.definitions.properties,
spack.schema.develop.properties, spack.schema.develop.properties,
spack.schema.env_vars.properties, spack.schema.env_vars.properties,
spack.schema.include.properties,
spack.schema.mirrors.properties, spack.schema.mirrors.properties,
spack.schema.modules.properties, spack.schema.modules.properties,
spack.schema.packages.properties, spack.schema.packages.properties,
@@ -50,6 +48,7 @@
spack.schema.view.properties, spack.schema.view.properties,
) )
#: Full schema with metadata #: Full schema with metadata
schema = { schema = {
"$schema": "http://json-schema.org/draft-07/schema#", "$schema": "http://json-schema.org/draft-07/schema#",

View File

@@ -39,7 +39,7 @@
"load": array_of_strings, "load": array_of_strings,
"suffixes": { "suffixes": {
"type": "object", "type": "object",
"additionalKeysAreSpecs": True, "validate_spec": True,
"additionalProperties": {"type": "string"}, # key "additionalProperties": {"type": "string"}, # key
}, },
"environment": spack.schema.environment.definition, "environment": spack.schema.environment.definition,
@@ -48,44 +48,40 @@
projections_scheme = spack.schema.projections.properties["projections"] projections_scheme = spack.schema.projections.properties["projections"]
common_props = { module_type_configuration: Dict = {
"verbose": {"type": "boolean", "default": False},
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
"include": array_of_strings,
"exclude": array_of_strings,
"exclude_implicits": {"type": "boolean", "default": False},
"defaults": array_of_strings,
"hide_implicits": {"type": "boolean", "default": False},
"naming_scheme": {"type": "string"},
"projections": projections_scheme,
"all": module_file_configuration,
}
tcl_configuration = {
"type": "object", "type": "object",
"default": {}, "default": {},
"additionalKeysAreSpecs": True, "validate_spec": True,
"properties": {**common_props}, "properties": {
"verbose": {"type": "boolean", "default": False},
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
"include": array_of_strings,
"exclude": array_of_strings,
"exclude_implicits": {"type": "boolean", "default": False},
"defaults": array_of_strings,
"hide_implicits": {"type": "boolean", "default": False},
"naming_scheme": {"type": "string"},
"projections": projections_scheme,
"all": module_file_configuration,
},
"additionalProperties": module_file_configuration, "additionalProperties": module_file_configuration,
} }
lmod_configuration = { tcl_configuration = module_type_configuration.copy()
"type": "object",
"default": {}, lmod_configuration = module_type_configuration.copy()
"additionalKeysAreSpecs": True, lmod_configuration["properties"].update(
"properties": { {
**common_props,
"core_compilers": array_of_strings, "core_compilers": array_of_strings,
"hierarchy": array_of_strings, "hierarchy": array_of_strings,
"core_specs": array_of_strings, "core_specs": array_of_strings,
"filter_hierarchy_specs": { "filter_hierarchy_specs": {
"type": "object", "type": "object",
"additionalKeysAreSpecs": True, "validate_spec": True,
"additionalProperties": array_of_strings, "additionalProperties": array_of_strings,
}, },
}, }
"additionalProperties": module_file_configuration, )
}
module_config_properties = { module_config_properties = {
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]}, "use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},

File diff suppressed because it is too large

View File

@@ -265,7 +265,6 @@ error(100, "Cannot select a single version for virtual '{0}'", Virtual)
% If we select a deprecated version, mark the package as deprecated % If we select a deprecated version, mark the package as deprecated
attr("deprecated", node(ID, Package), Version) :- attr("deprecated", node(ID, Package), Version) :-
attr("version", node(ID, Package), Version), attr("version", node(ID, Package), Version),
not external(node(ID, Package)),
pkg_fact(Package, deprecated_version(Version)). pkg_fact(Package, deprecated_version(Version)).
error(100, "Package '{0}' needs the deprecated version '{1}', and this is not allowed", Package, Version) error(100, "Package '{0}' needs the deprecated version '{1}', and this is not allowed", Package, Version)
@@ -524,16 +523,6 @@ error(10, "'{0}' is not a valid dependency for any package in the DAG", Package)
:- attr("node", node(ID, Package)), :- attr("node", node(ID, Package)),
not needed(node(ID, Package)). not needed(node(ID, Package)).
% Extensions depending on each other must all extend the same node (e.g. all Python packages
% depending on each other must depend on the same Python interpreter)
error(100, "{0} and {1} must depend on the same {2}", ExtensionParent, ExtensionChild, ExtendeePackage)
:- depends_on(ExtensionParent, ExtensionChild),
attr("extends", ExtensionParent, ExtendeePackage),
depends_on(ExtensionParent, node(X, ExtendeePackage)),
depends_on(ExtensionChild, node(Y, ExtendeePackage)),
X != Y.
#defined dependency_type/2. #defined dependency_type/2.
%----------------------------------------------------------------------------- %-----------------------------------------------------------------------------

View File

@@ -0,0 +1,179 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set
from llnl.util import lang
import spack.deptypes as dt
import spack.package_base
import spack.repo
import spack.spec
PossibleDependencies = Set[str]
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
runtime_virtuals = set()
for x in runtime_pkgs:
pkg_class = spack.repo.PATH.get_pkg_class(x)
runtime_virtuals.update(pkg_class.provided_virtual_names())
self.specs = specs + [spack.spec.Spec(x) for x in runtime_pkgs]
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: PossibleDependencies = set()
self._possible_virtuals: Set[str] = (
set(x.name for x in specs if x.virtual) | runtime_virtuals
)
def possible_dependencies(self) -> PossibleDependencies:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self):
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self):
result = spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.all_types
)
self._possible_dependencies = set(result)
def possible_packages_facts(self, gen, fn):
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(self, specs, tests):
super().__init__(specs, tests)
self._link_run: PossibleDependencies = set()
self._direct_build: PossibleDependencies = set()
self._total_build: PossibleDependencies = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self):
self._link_run = set(
spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.link_run_types
)
)
self._link_run_virtuals.update(self._possible_virtuals)
for x in self._link_run:
build_dependencies = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD)
virtuals, reals = lang.stable_partition(
build_dependencies, spack.repo.PATH.is_virtual_safe
)
self._possible_virtuals.update(virtuals)
for virtual_dep in virtuals:
providers = spack.repo.PATH.providers_for(virtual_dep)
self._direct_build.update(str(x) for x in providers)
self._direct_build.update(reals)
self._total_build = set(
spack.package_base.possible_dependencies(
*self._direct_build, virtuals=self._possible_virtuals, depflag=self.all_types
)
)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with at multiple possible nodes (build-tools)")
for package_name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.max_dupes(package_name, 2))
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
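The `FullDuplicatesCounter` facts above cap every package's duplicate count at two via `collections.Counter`. A standalone sketch of that counting step, with invented package names:

```python
# Invented package names; mirrors the "count occurrences, cap at 2" step above.
import collections

link_run = ["python", "zlib", "cmake"]
total_build = ["cmake", "ninja", "zlib"]
direct_build = ["cmake"]

counter = collections.Counter(link_run + total_build + direct_build)
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
    print(f"max_dupes({pkg}, {min(count, 2)})")
# max_dupes(ninja, 1), max_dupes(python, 1), max_dupes(zlib, 2), max_dupes(cmake, 2)
```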

View File

@@ -1,539 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes to analyze the input of a solve, and provide information to set up the ASP problem"""
import collections
from typing import Dict, List, NamedTuple, Set, Tuple, Union
import archspec.cpu
from llnl.util import lang, tty
import spack.binary_distribution
import spack.config
import spack.deptypes as dt
import spack.platforms
import spack.repo
import spack.spec
import spack.store
from spack.error import SpackError
RUNTIME_TAG = "runtime"
class PossibleGraph(NamedTuple):
real_pkgs: Set[str]
virtuals: Set[str]
edges: Dict[str, Set[str]]
class PossibleDependencyGraph:
"""Returns information needed to set up an ASP problem"""
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
raise NotImplementedError
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
raise NotImplementedError
def possible_dependencies(
self,
*specs: Union[spack.spec.Spec, str],
allowed_deps: dt.DepFlag,
transitive: bool = True,
strict_depflag: bool = False,
expand_virtuals: bool = True,
) -> PossibleGraph:
"""Returns the set of possible dependencies, and the set of possible virtuals.
Both sets always include runtime packages, which may be injected by compilers.
Args:
transitive: return transitive dependencies if True, only direct dependencies if False
allowed_deps: dependency types to consider
strict_depflag: if True, only the specific dep type is considered, if False any
deptype that intersects with allowed deptype is considered
expand_virtuals: expand virtual dependencies into all possible implementations
"""
raise NotImplementedError
class NoStaticAnalysis(PossibleDependencyGraph):
"""Implementation that tries to minimize the setup time (i.e. defaults to give fast
answers), rather than trying to reduce the ASP problem size with more complex analysis.
"""
def __init__(self, *, configuration: spack.config.Configuration, repo: spack.repo.RepoPath):
self.configuration = configuration
self.repo = repo
self.runtime_pkgs = set(self.repo.packages_with_tags(RUNTIME_TAG))
self.runtime_virtuals = set()
self._platform_condition = spack.spec.Spec(
f"platform={spack.platforms.host()} target={archspec.cpu.host().family}:"
)
for x in self.runtime_pkgs:
pkg_class = self.repo.get_pkg_class(x)
self.runtime_virtuals.update(pkg_class.provided_virtual_names())
try:
self.libc_pkgs = [x.name for x in self.providers_for("libc")]
except spack.repo.UnknownPackageError:
self.libc_pkgs = []
def is_virtual(self, name: str) -> bool:
return self.repo.is_virtual(name)
@lang.memoized
def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
"""Returns true if a package is allowed on the current host"""
pkg_cls = self.repo.get_pkg_class(pkg_name)
for when_spec, conditions in pkg_cls.requirements.items():
if not when_spec.intersects(self._platform_condition):
continue
for requirements, _, _ in conditions:
if not any(x.intersects(self._platform_condition) for x in requirements):
tty.debug(f"[{__name__}] {pkg_name} is not for this platform")
return False
return True
def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
"""Returns a list of possible providers for the virtual string in input."""
return self.repo.providers_for(virtual_str)
def can_be_installed(self, *, pkg_name) -> bool:
"""Returns True if a package can be installed, False otherwise."""
return True
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
return False
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
platform = spack.platforms.host()
default_target = archspec.cpu.TARGETS[platform.default]
# Construct the list of targets which are compatible with the host
candidate_targets = [default_target] + default_target.ancestors
granularity = self.configuration.get("concretizer:targets:granularity")
host_compatible = self.configuration.get("concretizer:targets:host_compatible")
# Add targets which are not compatible with the current host
if not host_compatible:
additional_targets_in_family = sorted(
[
t
for t in archspec.cpu.TARGETS.values()
if (t.family.name == default_target.family.name and t not in candidate_targets)
],
key=lambda x: len(x.ancestors),
reverse=True,
)
candidate_targets += additional_targets_in_family
# Check if we want only generic architecture
if granularity == "generic":
candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]
return candidate_targets
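`candidate_targets` above starts from the host's default microarchitecture and walks its ancestors, optionally restricting the list to generic targets. A short sketch of the same archspec calls on the local machine; the output depends entirely on the host it runs on:

```python
# Uses archspec, which Spack depends on; output varies by machine.
import archspec.cpu

host = archspec.cpu.host()
candidates = [host] + host.ancestors
generic_only = [t for t in candidates if t.vendor == "generic"]
print(str(host), "->", [str(t) for t in generic_only])
```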
def possible_dependencies(
self,
*specs: Union[spack.spec.Spec, str],
allowed_deps: dt.DepFlag,
transitive: bool = True,
strict_depflag: bool = False,
expand_virtuals: bool = True,
) -> PossibleGraph:
stack = [x for x in self._package_list(specs)]
virtuals: Set[str] = set()
edges: Dict[str, Set[str]] = {}
while stack:
pkg_name = stack.pop()
if pkg_name in edges:
continue
edges[pkg_name] = set()
# Since libc is not buildable, there is no need to extend the
# search space with libc dependencies.
if pkg_name in self.libc_pkgs:
continue
pkg_cls = self.repo.get_pkg_class(pkg_name=pkg_name)
for name, conditions in pkg_cls.dependencies_by_name(when=True).items():
if all(self.unreachable(pkg_name=pkg_name, when_spec=x) for x in conditions):
tty.debug(
f"[{__name__}] Not adding {name} as a dep of {pkg_name}, because "
f"conditions cannot be met"
)
continue
if not self._has_deptypes(
conditions, allowed_deps=allowed_deps, strict=strict_depflag
):
continue
if name in virtuals:
continue
dep_names = set()
if self.is_virtual(name):
virtuals.add(name)
if expand_virtuals:
providers = self.providers_for(name)
dep_names = {spec.name for spec in providers}
else:
dep_names = {name}
edges[pkg_name].update(dep_names)
if not transitive:
continue
for dep_name in dep_names:
if dep_name in edges:
continue
if not self._is_possible(pkg_name=dep_name):
continue
stack.append(dep_name)
real_packages = set(edges)
if not transitive:
# We exit early, so add children from the edges information
for root, children in edges.items():
real_packages.update(x for x in children if self._is_possible(pkg_name=x))
virtuals.update(self.runtime_virtuals)
real_packages = real_packages | self.runtime_pkgs
return PossibleGraph(real_pkgs=real_packages, virtuals=virtuals, edges=edges)
def _package_list(self, specs: Tuple[Union[spack.spec.Spec, str], ...]) -> List[str]:
stack = []
for current_spec in specs:
if isinstance(current_spec, str):
current_spec = spack.spec.Spec(current_spec)
if self.repo.is_virtual(current_spec.name):
stack.extend([p.name for p in self.providers_for(current_spec.name)])
continue
stack.append(current_spec.name)
return sorted(set(stack))
def _has_deptypes(self, dependencies, *, allowed_deps: dt.DepFlag, strict: bool) -> bool:
if strict is True:
return any(
dep.depflag == allowed_deps for deplist in dependencies.values() for dep in deplist
)
return any(
dep.depflag & allowed_deps for deplist in dependencies.values() for dep in deplist
)
def _is_possible(self, *, pkg_name):
try:
return self.is_allowed_on_this_platform(pkg_name=pkg_name) and self.can_be_installed(
pkg_name=pkg_name
)
except spack.repo.UnknownPackageError:
return False
class StaticAnalysis(NoStaticAnalysis):
"""Performs some static analysis of the configuration, store, etc. to provide more precise
answers on whether some packages can be installed, or used as a provider.
It increases the setup time, but might decrease the grounding and solve time considerably,
especially when requirements restrict the possible choices for providers.
"""
def __init__(
self,
*,
configuration: spack.config.Configuration,
repo: spack.repo.RepoPath,
store: spack.store.Store,
binary_index: spack.binary_distribution.BinaryCacheIndex,
):
super().__init__(configuration=configuration, repo=repo)
self.store = store
self.binary_index = binary_index
@lang.memoized
def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
candidates = super().providers_for(virtual_str)
result = []
for spec in candidates:
if not self._is_provider_candidate(pkg_name=spec.name, virtual=virtual_str):
continue
result.append(spec)
return result
@lang.memoized
def buildcache_specs(self) -> List[spack.spec.Spec]:
self.binary_index.update()
return self.binary_index.get_all_built_specs()
@lang.memoized
def can_be_installed(self, *, pkg_name) -> bool:
if self.configuration.get(f"packages:{pkg_name}:buildable", True):
return True
if self.configuration.get(f"packages:{pkg_name}:externals", []):
return True
reuse = self.configuration.get("concretizer:reuse")
if reuse is not False and self.store.db.query(pkg_name):
return True
if reuse is not False and any(x.name == pkg_name for x in self.buildcache_specs()):
return True
tty.debug(f"[{__name__}] {pkg_name} cannot be installed")
return False
@lang.memoized
def _is_provider_candidate(self, *, pkg_name: str, virtual: str) -> bool:
if not self.is_allowed_on_this_platform(pkg_name=pkg_name):
return False
if not self.can_be_installed(pkg_name=pkg_name):
return False
virtual_spec = spack.spec.Spec(virtual)
if self.unreachable(pkg_name=virtual_spec.name, when_spec=pkg_name):
tty.debug(f"[{__name__}] {pkg_name} cannot be a provider for {virtual}")
return False
return True
@lang.memoized
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
candidates = self.configuration.get(f"packages:{pkg_name}:require", [])
if not candidates and pkg_name != "all":
return self.unreachable(pkg_name="all", when_spec=when_spec)
if not candidates:
return False
if isinstance(candidates, str):
candidates = [candidates]
union_requirement = spack.spec.Spec()
for c in candidates:
if not isinstance(c, str):
continue
try:
union_requirement.constrain(c)
except SpackError:
# Less optimized, but shouldn't fail
pass
if not union_requirement.intersects(when_spec):
return True
return False
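As a rough illustration of the pruning described in the class docstring above: when `packages.yaml` carries a requirement for a virtual such as `mpi`, `unreachable()` intersects that requirement with a candidate provider and rules the provider out if the two cannot overlap. The snippet below is a sketch only; the package names are illustrative and the global objects mirror the ones passed in by `create_graph_analyzer()` below.

```python
# Illustrative sketch: ask whether "openmpi" can ever satisfy the configured
# requirements on the "mpi" virtual (e.g. `packages: mpi: require: "mpich"`).
import spack.binary_distribution
import spack.config
import spack.repo
import spack.spec
import spack.store

analysis = StaticAnalysis(
    configuration=spack.config.CONFIG,
    repo=spack.repo.PATH,
    store=spack.store.STORE,
    binary_index=spack.binary_distribution.BINARY_INDEX,
)
# True when the configured requirements make the condition impossible to meet
print(analysis.unreachable(pkg_name="mpi", when_spec=spack.spec.Spec("openmpi")))
```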
def create_graph_analyzer() -> PossibleDependencyGraph:
static_analysis = spack.config.CONFIG.get("concretizer:static_analysis", False)
if static_analysis:
return StaticAnalysis(
configuration=spack.config.CONFIG,
repo=spack.repo.PATH,
store=spack.store.STORE,
binary_index=spack.binary_distribution.BINARY_INDEX,
)
return NoStaticAnalysis(configuration=spack.config.CONFIG, repo=spack.repo.PATH)
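A small usage sketch, assuming an initialized Spack configuration: flipping the `concretizer:static_analysis` key changes which analyzer `create_graph_analyzer()` returns.

```python
# Hypothetical usage: toggle the config key read by create_graph_analyzer().
import spack.config

with spack.config.override("concretizer:static_analysis", True):
    assert isinstance(create_graph_analyzer(), StaticAnalysis)

# outside the override, assuming the key is not enabled elsewhere in the config
assert not isinstance(create_graph_analyzer(), StaticAnalysis)
```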
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(
self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
) -> None:
self.possible_graph = possible_graph
self.specs = specs
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: Set[str] = set()
self._possible_virtuals: Set[str] = {
x.name for x in specs if spack.repo.PATH.is_virtual(x.name)
}
def possible_dependencies(self) -> Set[str]:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self) -> None:
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self) -> None:
self._possible_dependencies, virtuals, _ = self.possible_graph.possible_dependencies(
*self.specs, allowed_deps=self.all_types
)
self._possible_virtuals.update(virtuals)
def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(
self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
) -> None:
super().__init__(specs, tests, possible_graph)
self._link_run: Set[str] = set()
self._direct_build: Set[str] = set()
self._total_build: Set[str] = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self) -> None:
self._link_run, virtuals, _ = self.possible_graph.possible_dependencies(
*self.specs, allowed_deps=self.link_run_types
)
self._possible_virtuals.update(virtuals)
self._link_run_virtuals.update(virtuals)
for x in self._link_run:
reals, virtuals, _ = self.possible_graph.possible_dependencies(
x, allowed_deps=dt.BUILD, transitive=False, strict_depflag=True
)
self._possible_virtuals.update(virtuals)
self._direct_build.update(reals)
self._total_build, virtuals, _ = self.possible_graph.possible_dependencies(
*self._direct_build, allowed_deps=self.all_types
)
self._possible_virtuals.update(virtuals)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with at multiple possible nodes (build-tools)")
default = spack.config.CONFIG.get("concretizer:duplicates:max_dupes:default", 2)
for package_name in sorted(self.possible_dependencies() & build_tools):
max_dupes = spack.config.CONFIG.get(
f"concretizer:duplicates:max_dupes:{package_name}", default
)
gen.fact(fn.max_dupes(package_name, max_dupes))
if max_dupes > 1:
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (link-run virtuals)")
for package_name in sorted(self._link_run_virtuals):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (other virtuals)")
for package_name in sorted(self.possible_virtuals() - self._link_run_virtuals):
max_dupes = spack.config.CONFIG.get(
f"concretizer:duplicates:max_dupes:{package_name}", default
)
gen.fact(fn.max_dupes(package_name, max_dupes))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
def create_counter(
specs: List[spack.spec.Spec], tests: bool, possible_graph: PossibleDependencyGraph
) -> Counter:
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
if strategy == "full":
return FullDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
if strategy == "minimal":
return MinimalDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
return NoDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
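To tie the counters together, here is a hedged end-to-end sketch: the `concretizer:duplicates:strategy` setting picks the counter class, and the counter then reports the possible packages and virtuals for a set of abstract specs. The `hdf5+mpi` spec is only an example and assumes that package is present in the active repository.

```python
# Sketch only: count possible packages for one abstract spec under the
# "minimal" duplicates strategy.
import spack.config
import spack.spec

specs = [spack.spec.Spec("hdf5+mpi")]
graph = create_graph_analyzer()

with spack.config.override("concretizer:duplicates:strategy", "minimal"):
    counter = create_counter(specs, tests=False, possible_graph=graph)

print(len(counter.possible_dependencies()), "possible packages")
print(sorted(counter.possible_virtuals()))
```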

View File

@@ -52,7 +52,6 @@
import enum import enum
import io import io
import itertools import itertools
import json
import os import os
import pathlib import pathlib
import platform import platform
@@ -99,6 +98,7 @@
import spack.traverse import spack.traverse
import spack.util.executable import spack.util.executable
import spack.util.hash import spack.util.hash
import spack.util.module_cmd as md
import spack.util.prefix import spack.util.prefix
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
@@ -798,7 +798,7 @@ def update_deptypes(self, depflag: dt.DepFlag) -> bool:
self.depflag = new self.depflag = new
return True return True
def update_virtuals(self, virtuals: Iterable[str]) -> bool: def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
"""Update the list of provided virtuals""" """Update the list of provided virtuals"""
old = self.virtuals old = self.virtuals
self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals))) self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals)))
@@ -1337,20 +1337,14 @@ class SpecBuildInterface(lang.ObjectWrapper):
"command", default_handler=_command_default_handler, _indirect=True "command", default_handler=_command_default_handler, _indirect=True
) )
def __init__( def __init__(self, spec: "Spec", name: str, query_parameters: List[str], _parent: "Spec"):
self,
spec: "Spec",
name: str,
query_parameters: List[str],
_parent: "Spec",
is_virtual: bool,
):
super().__init__(spec) super().__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper # Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object # resets __dict__ to behave like the passed object
original_spec = getattr(spec, "wrapped_obj", spec) original_spec = getattr(spec, "wrapped_obj", spec)
self.wrapped_obj = original_spec self.wrapped_obj = original_spec
self.token = original_spec, name, query_parameters, _parent, is_virtual self.token = original_spec, name, query_parameters, _parent
is_virtual = spack.repo.PATH.is_virtual(name)
self.last_query = QueryState( self.last_query = QueryState(
name=name, extra_parameters=query_parameters, isvirtual=is_virtual name=name, extra_parameters=query_parameters, isvirtual=is_virtual
) )
@@ -1533,8 +1527,9 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
self._external_path = external_path self._external_path = external_path
self.external_modules = Spec._format_module_list(external_modules) self.external_modules = Spec._format_module_list(external_modules)
# This attribute is used to store custom information for external specs. # This attribute is used to store custom information for
self.extra_attributes: dict = {} # external specs. None signal that it was not set yet.
self.extra_attributes = None
# This attribute holds the original build copy of the spec if it is # This attribute holds the original build copy of the spec if it is
# deployed differently than it was built. None signals that the spec # deployed differently than it was built. None signals that the spec
@@ -1911,22 +1906,10 @@ def package_class(self):
"""Internal package call gets only the class object for a package. """Internal package call gets only the class object for a package.
Use this to just get package metadata. Use this to just get package metadata.
""" """
warnings.warn(
"`Spec.package_class` is deprecated and will be removed in version 1.0.0. Use "
"`spack.repo.PATH.get_pkg_class(spec.fullname) instead.",
category=spack.error.SpackAPIWarning,
stacklevel=2,
)
return spack.repo.PATH.get_pkg_class(self.fullname) return spack.repo.PATH.get_pkg_class(self.fullname)
@property @property
def virtual(self): def virtual(self):
warnings.warn(
"`Spec.virtual` is deprecated and will be removed in version 1.0.0. Use "
"`spack.repo.PATH.is_virtual(spec.name)` instead.",
category=spack.error.SpackAPIWarning,
stacklevel=2,
)
return spack.repo.PATH.is_virtual(self.name) return spack.repo.PATH.is_virtual(self.name)
@property @property
@@ -2118,20 +2101,20 @@ def cshort_spec(self):
return self.cformat(spec_format) return self.cformat(spec_format)
@property @property
def prefix(self) -> spack.util.prefix.Prefix: def prefix(self):
if not self._concrete: if not self._concrete:
raise spack.error.SpecError(f"Spec is not concrete: {self}") raise spack.error.SpecError("Spec is not concrete: " + str(self))
if self._prefix is None: if self._prefix is None:
_, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash()) upstream, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
if record and record.path: if record and record.path:
self.set_prefix(record.path) self.prefix = record.path
else: else:
self.set_prefix(spack.store.STORE.layout.path_for_spec(self)) self.prefix = spack.store.STORE.layout.path_for_spec(self)
assert self._prefix is not None
return self._prefix return self._prefix
def set_prefix(self, value: str) -> None: @prefix.setter
def prefix(self, value):
self._prefix = spack.util.prefix.Prefix(llnl.path.convert_to_platform_path(value)) self._prefix = spack.util.prefix.Prefix(llnl.path.convert_to_platform_path(value))
def spec_hash(self, hash): def spec_hash(self, hash):
@@ -2145,9 +2128,7 @@ def spec_hash(self, hash):
if hash.override is not None: if hash.override is not None:
return hash.override(self) return hash.override(self)
node_dict = self.to_node_dict(hash=hash) node_dict = self.to_node_dict(hash=hash)
json_text = json.dumps( json_text = sjson.dump(node_dict)
node_dict, ensure_ascii=True, indent=None, separators=(",", ":"), sort_keys=False
)
# This implements "frankenhashes", preserving the last 7 characters of the # This implements "frankenhashes", preserving the last 7 characters of the
# original hash when splicing so that we can avoid relocation issues # original hash when splicing so that we can avoid relocation issues
out = spack.util.hash.b32_hash(json_text) out = spack.util.hash.b32_hash(json_text)
@@ -2370,10 +2351,15 @@ def to_node_dict(self, hash=ht.dag_hash):
) )
if self.external: if self.external:
if self.extra_attributes:
extra_attributes = syaml.sorted_dict(self.extra_attributes)
else:
extra_attributes = None
d["external"] = { d["external"] = {
"path": self.external_path, "path": self.external_path,
"module": self.external_modules or None, "module": self.external_modules,
"extra_attributes": syaml.sorted_dict(self.extra_attributes), "extra_attributes": extra_attributes,
} }
if not self._concrete: if not self._concrete:
@@ -2708,7 +2694,7 @@ def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
return name, depflag return name, depflag
def spec_and_dependency_types( def spec_and_dependency_types(
s: Union[Spec, Tuple[Spec, str]], s: Union[Spec, Tuple[Spec, str]]
) -> Tuple[Spec, dt.DepFlag]: ) -> Tuple[Spec, dt.DepFlag]:
"""Given a non-string key in the literal, extracts the spec """Given a non-string key in the literal, extracts the spec
and its dependency types. and its dependency types.
@@ -2737,7 +2723,7 @@ def spec_and_dependency_types(
return spec_builder(spec_dict) return spec_builder(spec_dict)
@staticmethod @staticmethod
def from_dict(data) -> "Spec": def from_dict(data):
"""Construct a spec from JSON/YAML. """Construct a spec from JSON/YAML.
Args: Args:
@@ -2760,7 +2746,7 @@ def from_dict(data) -> "Spec":
return spec return spec
@staticmethod @staticmethod
def from_yaml(stream) -> "Spec": def from_yaml(stream):
"""Construct a spec from YAML. """Construct a spec from YAML.
Args: Args:
@@ -2770,7 +2756,7 @@ def from_yaml(stream) -> "Spec":
return Spec.from_dict(data) return Spec.from_dict(data)
@staticmethod @staticmethod
def from_json(stream) -> "Spec": def from_json(stream):
"""Construct a spec from JSON. """Construct a spec from JSON.
Args: Args:
@@ -2780,7 +2766,7 @@ def from_json(stream) -> "Spec":
data = sjson.load(stream) data = sjson.load(stream)
return Spec.from_dict(data) return Spec.from_dict(data)
except Exception as e: except Exception as e:
raise sjson.SpackJSONError("error parsing JSON spec:", e) from e raise sjson.SpackJSONError("error parsing JSON spec:", str(e)) from e
@staticmethod @staticmethod
def extract_json_from_clearsig(data): def extract_json_from_clearsig(data):
@@ -2828,6 +2814,24 @@ def from_detection(
s.extra_attributes = extra_attributes s.extra_attributes = extra_attributes
return s return s
def validate_detection(self):
"""Validate the detection of an external spec.
This method is used as part of Spack's detection protocol, and is
not meant for client code use.
"""
# Assert that _extra_attributes is a Mapping and not None,
# which likely means the spec was created with Spec.from_detection
msg = 'cannot validate "{0}" since it was not created ' "using Spec.from_detection".format(
self
)
assert isinstance(self.extra_attributes, collections.abc.Mapping), msg
# Validate the spec calling a package specific method
pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(self, self.extra_attributes)
def _patches_assigned(self): def _patches_assigned(self):
"""Whether patches have been assigned to this spec by the concretizer.""" """Whether patches have been assigned to this spec by the concretizer."""
# FIXME: _patches_in_order_of_appearance is attached after concretization # FIXME: _patches_in_order_of_appearance is attached after concretization
@@ -2844,6 +2848,94 @@ def _patches_assigned(self):
return True return True
@staticmethod
def inject_patches_variant(root):
# This dictionary will store object IDs rather than Specs as keys
# since the Spec __hash__ will change as patches are added to them
spec_to_patches = {}
for s in root.traverse():
# After concretizing, assign namespaces to anything left.
# Note that this doesn't count as a "change". The repository
# configuration is constant throughout a spack run, and
# normalize and concretize evaluate Packages using Repo.get(),
# which respects precedence. So, a namespace assignment isn't
# changing how a package name would have been interpreted and
# we can do it as late as possible to allow as much
# compatibility across repositories as possible.
if s.namespace is None:
s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace
if s.concrete:
continue
# Add any patches from the package to the spec.
patches = set()
for cond, patch_list in s.package_class.patches.items():
if s.satisfies(cond):
for patch in patch_list:
patches.add(patch)
if patches:
spec_to_patches[id(s)] = patches
# Also record all patches required on dependencies by
# depends_on(..., patch=...)
for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
if dspec.spec.concrete:
continue
pkg_deps = dspec.parent.package_class.dependencies
patches = []
for cond, deps_by_name in pkg_deps.items():
if not dspec.parent.satisfies(cond):
continue
dependency = deps_by_name.get(dspec.spec.name)
if not dependency:
continue
for pcond, patch_list in dependency.patches.items():
if dspec.spec.satisfies(pcond):
patches.extend(patch_list)
if patches:
all_patches = spec_to_patches.setdefault(id(dspec.spec), set())
for patch in patches:
all_patches.add(patch)
for spec in root.traverse():
if id(spec) not in spec_to_patches:
continue
patches = list(lang.dedupe(spec_to_patches[id(spec)]))
mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.value = tuple(p.sha256 for p in patches)
# FIXME: Monkey patches mvar to store patches order
full_order_keys = list(tuple(p.ordering_key) + (p.sha256,) for p in patches)
ordered_hashes = sorted(full_order_keys)
tty.debug(
"Ordered hashes [{0}]: ".format(spec.name)
+ ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
)
mvar._patches_in_order_of_appearance = list(t[-1] for t in ordered_hashes)
@staticmethod
def ensure_external_path_if_external(external_spec):
if external_spec.external_modules and not external_spec.external_path:
compiler = spack.compilers.compiler_for_spec(
external_spec.compiler, external_spec.architecture
)
for mod in compiler.modules:
md.load_module(mod)
# Get the path from the module the package can override the default
# (this is mostly needed for Cray)
pkg_cls = spack.repo.PATH.get_pkg_class(external_spec.name)
package = pkg_cls(external_spec)
external_spec.external_path = getattr(
package, "external_prefix", md.path_from_modules(external_spec.external_modules)
)
@staticmethod @staticmethod
def ensure_no_deprecated(root): def ensure_no_deprecated(root):
"""Raise if a deprecated spec is in the dag. """Raise if a deprecated spec is in the dag.
@@ -2998,7 +3090,7 @@ def validate_or_raise(self):
# FIXME: raise just the first one encountered # FIXME: raise just the first one encountered
for spec in self.traverse(): for spec in self.traverse():
# raise an UnknownPackageError if the spec's package isn't real. # raise an UnknownPackageError if the spec's package isn't real.
if spec.name and not spack.repo.PATH.is_virtual(spec.name): if (not spec.virtual) and spec.name:
spack.repo.PATH.get_pkg_class(spec.fullname) spack.repo.PATH.get_pkg_class(spec.fullname)
# validate compiler in addition to the package name. # validate compiler in addition to the package name.
@@ -3007,7 +3099,7 @@ def validate_or_raise(self):
raise UnsupportedCompilerError(spec.compiler.name) raise UnsupportedCompilerError(spec.compiler.name)
# Ensure correctness of variants (if the spec is not virtual) # Ensure correctness of variants (if the spec is not virtual)
if not spack.repo.PATH.is_virtual(spec.name): if not spec.virtual:
Spec.ensure_valid_variants(spec) Spec.ensure_valid_variants(spec)
substitute_abstract_variants(spec) substitute_abstract_variants(spec)
@@ -3025,7 +3117,7 @@ def ensure_valid_variants(spec):
if spec.concrete: if spec.concrete:
return return
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname) pkg_cls = spec.package_class
pkg_variants = pkg_cls.variant_names() pkg_variants = pkg_cls.variant_names()
# reserved names are variants that may be set on any package # reserved names are variants that may be set on any package
# but are not necessarily recorded by the package's class # but are not necessarily recorded by the package's class
@@ -3242,9 +3334,7 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# If the names are different, we need to consider virtuals # If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name: if self.name != other.name and self.name and other.name:
self_virtual = spack.repo.PATH.is_virtual(self.name) if self.virtual and other.virtual:
other_virtual = spack.repo.PATH.is_virtual(other.name)
if self_virtual and other_virtual:
# Two virtual specs intersect only if there are providers for both # Two virtual specs intersect only if there are providers for both
lhs = spack.repo.PATH.providers_for(str(self)) lhs = spack.repo.PATH.providers_for(str(self))
rhs = spack.repo.PATH.providers_for(str(other)) rhs = spack.repo.PATH.providers_for(str(other))
@@ -3252,8 +3342,8 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
return bool(intersection) return bool(intersection)
# A provider can satisfy a virtual dependency. # A provider can satisfy a virtual dependency.
elif self_virtual or other_virtual: elif self.virtual or other.virtual:
virtual_spec, non_virtual_spec = (self, other) if self_virtual else (other, self) virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
try: try:
# Here we might get an abstract spec # Here we might get an abstract spec
pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname) pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname)
@@ -3323,20 +3413,12 @@ def _intersects_dependencies(self, other):
# These two loops handle cases where there is an overly restrictive # These two loops handle cases where there is an overly restrictive
# vpkg in one spec for a provider in the other (e.g., mpi@3: is not # vpkg in one spec for a provider in the other (e.g., mpi@3: is not
# compatible with mpich2) # compatible with mpich2)
for spec in self.traverse(): for spec in self.virtual_dependencies():
if ( if spec.name in other_index and not other_index.providers_for(spec):
spack.repo.PATH.is_virtual(spec.name)
and spec.name in other_index
and not other_index.providers_for(spec)
):
return False return False
for spec in other.traverse(): for spec in other.virtual_dependencies():
if ( if spec.name in self_index and not self_index.providers_for(spec):
spack.repo.PATH.is_virtual(spec.name)
and spec.name in self_index
and not self_index.providers_for(spec)
):
return False return False
return True return True
@@ -3366,9 +3448,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# If the names are different, we need to consider virtuals # If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name: if self.name != other.name and self.name and other.name:
# A concrete provider can satisfy a virtual dependency. # A concrete provider can satisfy a virtual dependency.
if not spack.repo.PATH.is_virtual(self.name) and spack.repo.PATH.is_virtual( if not self.virtual and other.virtual:
other.name
):
try: try:
# Here we might get an abstract spec # Here we might get an abstract spec
pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname) pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname)
@@ -3436,7 +3516,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set) lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
for rhs_edge in other.traverse_edges(root=False, cover="edges"): for rhs_edge in other.traverse_edges(root=False, cover="edges"):
# If we are checking for ^mpi we need to verify if there is any edge # If we are checking for ^mpi we need to verify if there is any edge
if spack.repo.PATH.is_virtual(rhs_edge.spec.name): if rhs_edge.spec.virtual:
rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,)) rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
if not rhs_edge.virtuals: if not rhs_edge.virtuals:
@@ -3480,6 +3560,10 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
for rhs in other.traverse(root=False) for rhs in other.traverse(root=False)
) )
def virtual_dependencies(self):
"""Return list of any virtual deps in this spec."""
return [spec for spec in self.traverse() if spec.virtual]
@property # type: ignore[misc] # decorated prop not supported in mypy @property # type: ignore[misc] # decorated prop not supported in mypy
def patches(self): def patches(self):
"""Return patch objects for any patch sha256 sums on this Spec. """Return patch objects for any patch sha256 sums on this Spec.
@@ -3669,23 +3753,30 @@ def __getitem__(self, name: str):
csv = query_parameters.pop().strip() csv = query_parameters.pop().strip()
query_parameters = re.split(r"\s*,\s*", csv) query_parameters = re.split(r"\s*,\s*", csv)
# Consider all direct dependencies and transitive runtime dependencies order = lambda: itertools.chain(
order = itertools.chain( self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
self.edges_to_dependencies(depflag=dt.ALL), self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"), self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
) )
# Consider runtime dependencies and direct build/test deps before transitive dependencies,
# and prefer matches closest to the root.
try: try:
edge = next((e for e in order if e.spec.name == name or name in e.virtuals)) child: Spec = next(
except StopIteration as e: e.spec
raise KeyError(f"No spec with name {name} in {self}") from e for e in itertools.chain(
(e for e in order() if e.spec.name == name or name in e.virtuals),
# for historical reasons
(e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
)
)
except StopIteration:
raise KeyError(f"No spec with name {name} in {self}")
if self._concrete: if self._concrete:
return SpecBuildInterface( return SpecBuildInterface(child, name, query_parameters, _parent=self)
edge.spec, name, query_parameters, _parent=self, is_virtual=name in edge.virtuals
)
return edge.spec return child
def __contains__(self, spec): def __contains__(self, spec):
"""True if this spec or some dependency satisfies the spec. """True if this spec or some dependency satisfies the spec.
@@ -3701,11 +3792,8 @@ def __contains__(self, spec):
# if anonymous or same name, we only have to look at the root # if anonymous or same name, we only have to look at the root
if not spec.name or spec.name == self.name: if not spec.name or spec.name == self.name:
return self.satisfies(spec) return self.satisfies(spec)
try: else:
dep = self[spec.name] return any(s.satisfies(spec) for s in self.traverse(root=False))
except KeyError:
return False
return dep.satisfies(spec)
def eq_dag(self, other, deptypes=True, vs=None, vo=None): def eq_dag(self, other, deptypes=True, vs=None, vo=None):
"""True if the full dependency DAGs of specs are equal.""" """True if the full dependency DAGs of specs are equal."""
@@ -3773,13 +3861,6 @@ def _cmp_iter(self):
for item in self._cmp_node(): for item in self._cmp_node():
yield item yield item
# If there is ever a breaking change to hash computation, whether accidental or purposeful,
# two specs can be identical modulo DAG hash, depending on what time they were concretized
# From the perspective of many operation in Spack (database, build cache, etc) a different
# DAG hash means a different spec. Here we ensure that two otherwise identical specs, one
# serialized before the hash change and one after, are considered different.
yield self.dag_hash() if self.concrete else None
# This needs to be in _cmp_iter so that no specs with different process hashes # This needs to be in _cmp_iter so that no specs with different process hashes
# are considered the same by `__hash__` or `__eq__`. # are considered the same by `__hash__` or `__eq__`.
# #
@@ -4615,6 +4696,17 @@ def constrain(self, other: "VariantMap") -> bool:
return changed return changed
@property
def concrete(self):
"""Returns True if the spec is concrete in terms of variants.
Returns:
bool: True or False
"""
return self.spec._concrete or all(
v in self for v in self.spec.package_class.variant_names()
)
def copy(self) -> "VariantMap": def copy(self) -> "VariantMap":
clone = VariantMap(self.spec) clone = VariantMap(self.spec)
for name, variant in self.items(): for name, variant in self.items():
@@ -4672,14 +4764,14 @@ def substitute_abstract_variants(spec: Spec):
elif name in vt.reserved_names: elif name in vt.reserved_names:
continue continue
variant_defs = spack.repo.PATH.get_pkg_class(spec.fullname).variant_definitions(name) variant_defs = spec.package_class.variant_definitions(name)
valid_defs = [] valid_defs = []
for when, vdef in variant_defs: for when, vdef in variant_defs:
if when.intersects(spec): if when.intersects(spec):
valid_defs.append(vdef) valid_defs.append(vdef)
if not valid_defs: if not valid_defs:
if name not in spack.repo.PATH.get_pkg_class(spec.fullname).variant_names(): if name not in spec.package_class.variant_names():
unknown.append(name) unknown.append(name)
else: else:
whens = [str(when) for when, _ in variant_defs] whens = [str(when) for when, _ in variant_defs]
@@ -4741,51 +4833,31 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
return merged_spec return merged_spec
def reconstruct_virtuals_on_edges(spec: Spec) -> None: def reconstruct_virtuals_on_edges(spec):
"""Reconstruct virtuals on edges. Used to read from old DB and reindex.""" """Reconstruct virtuals on edges. Used to read from old DB and reindex.
virtuals_needed: Dict[str, Set[str]] = {}
virtuals_provided: Dict[str, Set[str]] = {}
for edge in spec.traverse_edges(cover="edges", root=False):
parent_key = edge.parent.dag_hash()
if parent_key not in virtuals_needed:
# Construct which virtuals are needed by parent
virtuals_needed[parent_key] = set()
try:
parent_pkg = edge.parent.package
except Exception as e:
warnings.warn(
f"cannot reconstruct virtual dependencies on {edge.parent.name}: {e}"
)
continue
virtuals_needed[parent_key].update( Args:
name spec: spec on which we want to reconstruct virtuals
for name, when_deps in parent_pkg.dependencies_by_name(when=True).items() """
if spack.repo.PATH.is_virtual(name) # Collect all possible virtuals
and any(edge.parent.satisfies(x) for x in when_deps) possible_virtuals = set()
) for node in spec.traverse():
try:
if not virtuals_needed[parent_key]: possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
except Exception as e:
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
continue continue
child_key = edge.spec.dag_hash() # Assume all incoming edges to provider are marked with virtuals=
if child_key not in virtuals_provided: for vspec in possible_virtuals:
virtuals_provided[child_key] = set() try:
try: provider = spec[vspec]
child_pkg = edge.spec.package except KeyError:
except Exception as e: # Virtual not in the DAG
warnings.warn(
f"cannot reconstruct virtual dependencies on {edge.parent.name}: {e}"
)
continue
virtuals_provided[child_key].update(x.name for x in child_pkg.virtuals_provided)
if not virtuals_provided[child_key]:
continue continue
virtuals_to_add = virtuals_needed[parent_key] & virtuals_provided[child_key] for edge in provider.edges_from_dependents():
if virtuals_to_add: edge.update_virtuals([vspec])
edge.update_virtuals(virtuals_to_add)
class SpecfileReaderBase: class SpecfileReaderBase:
@@ -4836,7 +4908,7 @@ def from_node_dict(cls, node):
spec.external_modules = node["external"]["module"] spec.external_modules = node["external"]["module"]
if spec.external_modules is False: if spec.external_modules is False:
spec.external_modules = None spec.external_modules = None
spec.extra_attributes = node["external"].get("extra_attributes") or {} spec.extra_attributes = node["external"].get("extra_attributes", {})
# specs read in are concrete unless marked abstract # specs read in are concrete unless marked abstract
if node.get("concrete", True): if node.get("concrete", True):
@@ -5130,13 +5202,6 @@ def get_host_environment() -> Dict[str, Any]:
} }
def eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
valid_variables = get_host_environment()
valid_variables.update({"re": re, "env": os.environ})
return eval(string, valid_variables)
class SpecParseError(spack.error.SpecError): class SpecParseError(spack.error.SpecError):
"""Wrapper for ParseError for when we're parsing specs.""" """Wrapper for ParseError for when we're parsing specs."""

View File

@@ -1,7 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details. # Copyright Spack Project Developers. See COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test ABI-based splicing of dependencies""" """ Test ABI-based splicing of dependencies """
from typing import List from typing import List

View File

@@ -200,11 +200,7 @@ def dummy_prefix(tmpdir):
@pytest.mark.requires_executables(*required_executables) @pytest.mark.requires_executables(*required_executables)
@pytest.mark.maybeslow @pytest.mark.maybeslow
@pytest.mark.usefixtures( @pytest.mark.usefixtures(
"default_config", "default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
"cache_directory",
"install_dir_default_layout",
"temporary_mirror",
"mutable_mock_env_path",
) )
def test_default_rpaths_create_install_default_layout(temporary_mirror_dir): def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
""" """
@@ -276,11 +272,7 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
@pytest.mark.maybeslow @pytest.mark.maybeslow
@pytest.mark.nomockstage @pytest.mark.nomockstage
@pytest.mark.usefixtures( @pytest.mark.usefixtures(
"default_config", "default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
"cache_directory",
"install_dir_default_layout",
"temporary_mirror",
"mutable_mock_env_path",
) )
def test_relative_rpaths_install_default_layout(temporary_mirror_dir): def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
""" """
@@ -577,6 +569,7 @@ def test_FetchCacheError_only_accepts_lists_of_errors():
def test_FetchCacheError_pretty_printing_multiple(): def test_FetchCacheError_pretty_printing_multiple():
e = bindist.FetchCacheError([RuntimeError("Oops!"), TypeError("Trouble!")]) e = bindist.FetchCacheError([RuntimeError("Oops!"), TypeError("Trouble!")])
str_e = str(e) str_e = str(e)
print("'" + str_e + "'")
assert "Multiple errors" in str_e assert "Multiple errors" in str_e
assert "Error 1: RuntimeError: Oops!" in str_e assert "Error 1: RuntimeError: Oops!" in str_e
assert "Error 2: TypeError: Trouble!" in str_e assert "Error 2: TypeError: Trouble!" in str_e

View File

@@ -388,7 +388,7 @@ def test_wrapper_variables(
root = spack.concretize.concretize_one("dt-diamond") root = spack.concretize.concretize_one("dt-diamond")
for s in root.traverse(): for s in root.traverse():
s.set_prefix(f"/{s.name}-prefix/") s.prefix = "/{0}-prefix/".format(s.name)
dep_pkg = root["dt-diamond-left"].package dep_pkg = root["dt-diamond-left"].package
dep_lib_paths = ["/test/path/to/ex1.so", "/test/path/to/subdir/ex2.so"] dep_lib_paths = ["/test/path/to/ex1.so", "/test/path/to/subdir/ex2.so"]
@@ -396,7 +396,7 @@ def test_wrapper_variables(
dep_libs = LibraryList(dep_lib_paths) dep_libs = LibraryList(dep_lib_paths)
dep2_pkg = root["dt-diamond-right"].package dep2_pkg = root["dt-diamond-right"].package
dep2_pkg.spec.set_prefix(str(installation_dir_with_headers)) dep2_pkg.spec.prefix = str(installation_dir_with_headers)
setattr(dep_pkg, "libs", dep_libs) setattr(dep_pkg, "libs", dep_libs)
try: try:
@@ -542,7 +542,7 @@ def test_build_jobs_sequential_is_sequential():
spack.config.determine_number_of_jobs( spack.config.determine_number_of_jobs(
parallel=False, parallel=False,
max_cpus=8, max_cpus=8,
config=spack.config.create_from( config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}), spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}), spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
), ),
@@ -556,7 +556,7 @@ def test_build_jobs_command_line_overrides():
spack.config.determine_number_of_jobs( spack.config.determine_number_of_jobs(
parallel=True, parallel=True,
max_cpus=1, max_cpus=1,
config=spack.config.create_from( config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}), spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}), spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
), ),
@@ -567,7 +567,7 @@ def test_build_jobs_command_line_overrides():
spack.config.determine_number_of_jobs( spack.config.determine_number_of_jobs(
parallel=True, parallel=True,
max_cpus=100, max_cpus=100,
config=spack.config.create_from( config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}), spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}), spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
), ),
@@ -581,7 +581,7 @@ def test_build_jobs_defaults():
spack.config.determine_number_of_jobs( spack.config.determine_number_of_jobs(
parallel=True, parallel=True,
max_cpus=10, max_cpus=10,
config=spack.config.create_from( config=spack.config.Configuration(
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}) spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
), ),
) )
@@ -591,7 +591,7 @@ def test_build_jobs_defaults():
spack.config.determine_number_of_jobs( spack.config.determine_number_of_jobs(
parallel=True, parallel=True,
max_cpus=10, max_cpus=10,
config=spack.config.create_from( config=spack.config.Configuration(
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}) spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
), ),
) )

View File

@@ -403,8 +403,8 @@ def test_autoreconf_search_path_args_multiple(default_mock_concretization, tmpdi
aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal")) aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal"))
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal")) aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
build_dep_one, build_dep_two = spec.dependencies(deptype="build") build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.set_prefix(str(tmpdir.join("fst"))) build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.set_prefix(str(tmpdir.join("snd"))) build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [ assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
"-I", "-I",
aclocal_fst, aclocal_fst,
@@ -422,8 +422,8 @@ def test_autoreconf_search_path_args_skip_automake(default_mock_concretization,
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal")) aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
build_dep_one, build_dep_two = spec.dependencies(deptype="build") build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.name = "automake" build_dep_one.name = "automake"
build_dep_one.set_prefix(str(tmpdir.join("fst"))) build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.set_prefix(str(tmpdir.join("snd"))) build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal_snd] assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal_snd]
@@ -434,7 +434,7 @@ def test_autoreconf_search_path_args_external_order(default_mock_concretization,
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal")) aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
build_dep_one, build_dep_two = spec.dependencies(deptype="build") build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.external_path = str(tmpdir.join("fst")) build_dep_one.external_path = str(tmpdir.join("fst"))
build_dep_two.set_prefix(str(tmpdir.join("snd"))) build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [ assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
"-I", "-I",
aclocal_snd, aclocal_snd,
@@ -447,8 +447,8 @@ def test_autoreconf_search_path_skip_nonexisting(default_mock_concretization, tm
"""Skip -I flags for non-existing directories""" """Skip -I flags for non-existing directories"""
spec = default_mock_concretization("dttop") spec = default_mock_concretization("dttop")
build_dep_one, build_dep_two = spec.dependencies(deptype="build") build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.set_prefix(str(tmpdir.join("fst"))) build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.set_prefix(str(tmpdir.join("snd"))) build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [] assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == []

View File

@@ -210,6 +210,7 @@ def check_args_contents(cc, args, must_contain, must_not_contain):
""" """
with set_env(SPACK_TEST_COMMAND="dump-args"): with set_env(SPACK_TEST_COMMAND="dump-args"):
cc_modified_args = cc(*args, output=str).strip().split("\n") cc_modified_args = cc(*args, output=str).strip().split("\n")
print(cc_modified_args)
for a in must_contain: for a in must_contain:
assert a in cc_modified_args assert a in cc_modified_args
for a in must_not_contain: for a in must_not_contain:

View File

@@ -1,10 +1,8 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details. # Copyright Spack Project Developers. See COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os import os
import subprocess import subprocess
from urllib.error import HTTPError
import pytest import pytest
@@ -17,7 +15,6 @@
import spack.paths as spack_paths import spack.paths as spack_paths
import spack.repo as repo import spack.repo as repo
import spack.util.git import spack.util.git
from spack.test.conftest import MockHTTPResponse
pytestmark = [pytest.mark.usefixtures("mock_packages")] pytestmark = [pytest.mark.usefixtures("mock_packages")]
@@ -165,8 +162,38 @@ def test_import_signing_key(mock_gnupghome):
ci.import_signing_key(signing_key) ci.import_signing_key(signing_key)
def test_download_and_extract_artifacts(tmpdir, monkeypatch): class FakeWebResponder:
monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", "faketoken") def __init__(self, response_code=200, content_to_read=[]):
self._resp_code = response_code
self._content = content_to_read
self._read = [False for c in content_to_read]
def open(self, request, data=None, timeout=object()):
return self
def getcode(self):
return self._resp_code
def read(self, length=None):
if len(self._content) <= 0:
return None
if not self._read[-1]:
return_content = self._content[-1]
if length:
self._read[-1] = True
else:
self._read.pop()
self._content.pop()
return return_content
self._read.pop()
self._content.pop()
return None
def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})
url = "https://www.nosuchurlexists.itsfake/artifacts.zip" url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
working_dir = os.path.join(tmpdir.strpath, "repro") working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -174,13 +201,10 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch):
spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip" spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
) )
def _urlopen_OK(*args, **kwargs): with open(test_artifacts_path, "rb") as fd:
with open(test_artifacts_path, "rb") as f: fake_responder = FakeWebResponder(content_to_read=[fd.read()])
return MockHTTPResponse(
"200", "OK", {"Content-Type": "application/zip"}, io.BytesIO(f.read())
)
monkeypatch.setattr(ci, "urlopen", _urlopen_OK) monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
ci.download_and_extract_artifacts(url, working_dir) ci.download_and_extract_artifacts(url, working_dir)
@@ -190,11 +214,7 @@ def _urlopen_OK(*args, **kwargs):
found_install = fs.find(working_dir, "install.sh") found_install = fs.find(working_dir, "install.sh")
assert len(found_install) == 1 assert len(found_install) == 1
def _urlopen_500(*args, **kwargs): fake_responder._resp_code = 400
raise HTTPError(url, 500, "Internal Server Error", {}, None)
monkeypatch.setattr(ci, "urlopen", _urlopen_500)
with pytest.raises(spack.error.SpackError): with pytest.raises(spack.error.SpackError):
ci.download_and_extract_artifacts(url, working_dir) ci.download_and_extract_artifacts(url, working_dir)
@@ -347,6 +367,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
for key, val in expectations.items(): for key, val in expectations.items():
affected_specs = ci.get_spec_filter_list(e1, touched, dependent_traverse_depth=key) affected_specs = ci.get_spec_filter_list(e1, touched, dependent_traverse_depth=key)
affected_pkg_names = set([s.name for s in affected_specs]) affected_pkg_names = set([s.name for s in affected_specs])
print(f"{key}: {affected_pkg_names}")
assert affected_pkg_names == val assert affected_pkg_names == val

View File

@@ -12,7 +12,7 @@
build_env = SpackCommand("build-env") build_env = SpackCommand("build-env")
@pytest.mark.parametrize("pkg", [("pkg-c",), ("pkg-c", "--")]) @pytest.mark.parametrize("pkg", [("zlib",), ("zlib", "--")])
@pytest.mark.usefixtures("config", "mock_packages", "working_env") @pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_it_just_runs(pkg): def test_it_just_runs(pkg):
build_env(*pkg) build_env(*pkg)
@@ -38,7 +38,7 @@ def test_build_env_requires_a_spec(args):
@pytest.mark.usefixtures("config", "mock_packages", "working_env") @pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_dump(shell_as, shell, tmpdir): def test_dump(shell_as, shell, tmpdir):
with tmpdir.as_cwd(): with tmpdir.as_cwd():
build_env("--dump", _out_file, "pkg-c") build_env("--dump", _out_file, "zlib")
with open(_out_file, encoding="utf-8") as f: with open(_out_file, encoding="utf-8") as f:
if shell == "pwsh": if shell == "pwsh":
assert any(line.startswith("$Env:PATH") for line in f.readlines()) assert any(line.startswith("$Env:PATH") for line in f.readlines())
@@ -51,7 +51,7 @@ def test_dump(shell_as, shell, tmpdir):
@pytest.mark.usefixtures("config", "mock_packages", "working_env") @pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_pickle(tmpdir): def test_pickle(tmpdir):
with tmpdir.as_cwd(): with tmpdir.as_cwd():
build_env("--pickle", _out_file, "pkg-c") build_env("--pickle", _out_file, "zlib")
environment = pickle.load(open(_out_file, "rb")) environment = pickle.load(open(_out_file, "rb"))
assert isinstance(environment, dict) assert isinstance(environment, dict)
assert "PATH" in environment assert "PATH" in environment

View File

@@ -148,7 +148,7 @@ def test_update_key_index(
s = spack.concretize.concretize_one("libdwarf") s = spack.concretize.concretize_one("libdwarf")
# Install a package # Install a package
install("--fake", s.name) install(s.name)
# Put installed package in the buildcache, which, because we're signing # Put installed package in the buildcache, which, because we're signing
# it, should result in the public key getting pushed to the buildcache # it, should result in the public key getting pushed to the buildcache
@@ -178,7 +178,7 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
s = spack.concretize.concretize_one("libdwarf") s = spack.concretize.concretize_one("libdwarf")
# Install and generate build cache index # Install and generate build cache index
PackageInstaller([s.package], fake=True, explicit=True).install() PackageInstaller([s.package], explicit=True).install()
metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json") metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")
@@ -214,11 +214,13 @@ def verify_mirror_contents():
if in_env_pkg in p: if in_env_pkg in p:
found_pkg = True found_pkg = True
assert found_pkg, f"Expected to find {in_env_pkg} in {dest_mirror_dir}" if not found_pkg:
print("Expected to find {0} in {1}".format(in_env_pkg, dest_mirror_dir))
assert False
# Install a package and put it in the buildcache # Install a package and put it in the buildcache
s = spack.concretize.concretize_one(out_env_pkg) s = spack.concretize.concretize_one(out_env_pkg)
install("--fake", s.name) install(s.name)
buildcache("push", "-u", "-f", src_mirror_url, s.name) buildcache("push", "-u", "-f", src_mirror_url, s.name)
env("create", "test") env("create", "test")

View File

@@ -3,7 +3,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse import argparse
import pathlib
import pytest import pytest
@@ -23,15 +22,7 @@
@pytest.fixture @pytest.fixture
def no_add(monkeypatch): def can_fetch_versions(monkeypatch):
def add_versions_to_pkg(pkg, version_lines, open_in_editor):
raise AssertionError("Should not be called")
monkeypatch.setattr(spack.cmd.checksum, "add_versions_to_pkg", add_versions_to_pkg)
@pytest.fixture
def can_fetch_versions(monkeypatch, no_add):
"""Fake successful version detection.""" """Fake successful version detection."""
def fetch_remote_versions(pkg, concurrency): def fetch_remote_versions(pkg, concurrency):
@@ -54,7 +45,7 @@ def url_exists(url, curl=None):
@pytest.fixture @pytest.fixture
def cannot_fetch_versions(monkeypatch, no_add): def cannot_fetch_versions(monkeypatch):
"""Fake unsuccessful version detection.""" """Fake unsuccessful version detection."""
def fetch_remote_versions(pkg, concurrency): def fetch_remote_versions(pkg, concurrency):
@@ -97,6 +88,7 @@ def test_checksum_args(arguments, expected):
(["--batch", "preferred-test"], "version of preferred-test"), (["--batch", "preferred-test"], "version of preferred-test"),
(["--latest", "preferred-test"], "Found 1 version"), (["--latest", "preferred-test"], "Found 1 version"),
(["--preferred", "preferred-test"], "Found 1 version"), (["--preferred", "preferred-test"], "Found 1 version"),
(["--add-to-package", "preferred-test"], "Added 0 new versions to"),
(["--verify", "preferred-test"], "Verified 1 of 1"), (["--verify", "preferred-test"], "Verified 1 of 1"),
(["--verify", "zlib", "1.2.13"], "1.2.13 [-] No previous checksum"), (["--verify", "zlib", "1.2.13"], "1.2.13 [-] No previous checksum"),
], ],
@@ -279,12 +271,15 @@ def test_checksum_interactive_unrecognized_command():
assert interactive_version_filter(v.copy(), input=input) == v assert interactive_version_filter(v.copy(), input=input) == v
def test_checksum_versions(mock_packages, can_fetch_versions, monkeypatch): def test_checksum_versions(mock_packages, can_fetch_versions):
pkg_cls = spack.repo.PATH.get_pkg_class("zlib") pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
versions = [str(v) for v in pkg_cls.versions] versions = [str(v) for v in pkg_cls.versions]
output = spack_checksum("zlib", *versions) output = spack_checksum("zlib", *versions)
assert "Found 3 versions" in output assert "Found 3 versions" in output
assert "version(" in output assert "version(" in output
output = spack_checksum("--add-to-package", "zlib", *versions)
assert "Found 3 versions" in output
assert "Added 0 new versions to" in output
def test_checksum_missing_version(mock_packages, cannot_fetch_versions): def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
@@ -292,6 +287,7 @@ def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
assert "Could not find any remote versions" in output assert "Could not find any remote versions" in output
output = spack_checksum("--add-to-package", "preferred-test", "99.99.99", fail_on_error=False) output = spack_checksum("--add-to-package", "preferred-test", "99.99.99", fail_on_error=False)
assert "Could not find any remote versions" in output assert "Could not find any remote versions" in output
assert "Added 1 new versions to" not in output
def test_checksum_deprecated_version(mock_packages, can_fetch_versions): def test_checksum_deprecated_version(mock_packages, can_fetch_versions):
@@ -301,6 +297,8 @@ def test_checksum_deprecated_version(mock_packages, can_fetch_versions):
"--add-to-package", "deprecated-versions", "1.1.0", fail_on_error=False "--add-to-package", "deprecated-versions", "1.1.0", fail_on_error=False
) )
assert "Version 1.1.0 is deprecated" in output assert "Version 1.1.0 is deprecated" in output
# TODO alecbcs: broken assertion.
# assert "Added 0 new versions to" not in output
def test_checksum_url(mock_packages, config): def test_checksum_url(mock_packages, config):
@@ -339,52 +337,3 @@ def test_checksum_manual_download_fails(mock_packages, monkeypatch):
     monkeypatch.setattr(spack.package_base.PackageBase, "download_instr", error)
     with pytest.raises(ManualDownloadRequiredError, match=error):
         spack_checksum(name, *versions)
-
-
-def test_upate_package_contents(tmp_path: pathlib.Path):
-    """Test that the package.py file is updated with the new versions."""
-    pkg_path = tmp_path / "package.py"
-    pkg_path.write_text(
-        """\
-from spack.package import *
-
-class Zlib(Package):
-    homepage = "http://zlib.net"
-    url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"
-    version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1")
-    version("1.2.8", sha256="36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d")
-    version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
-
-    variant("pic", default=True, description="test")
-
-    def install(self, spec, prefix):
-        make("install")
-"""
-    )
-    version_lines = """\
-    version("1.2.13", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")
-    version("1.2.5", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")
-    version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
-"""
-    # two new versions are added
-    assert spack.cmd.checksum.add_versions_to_pkg(str(pkg_path), version_lines) == 2
-    assert (
-        pkg_path.read_text()
-        == """\
-from spack.package import *
-
-class Zlib(Package):
-    homepage = "http://zlib.net"
-    url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"
-    version("1.2.13", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")  # FIXME
-    version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1")
-    version("1.2.8", sha256="36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d")
-    version("1.2.5", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")  # FIXME
-    version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
-
-    variant("pic", default=True, description="test")
-
-    def install(self, spec, prefix):
-        make("install")
-"""
-    )
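The deleted test above drives `spack.cmd.checksum.add_versions_to_pkg` against a scratch `package.py` built with pytest's `tmp_path` fixture. A minimal sketch of that fixture pattern, independent of Spack (the `add_greeting` helper is hypothetical; only the pytest and pathlib APIs are real):

```python
import pathlib


def add_greeting(path: str) -> int:
    """Hypothetical helper: append a line to a file and report how many lines were added."""
    p = pathlib.Path(path)
    p.write_text(p.read_text() + "hello\n")
    return 1


def test_add_greeting(tmp_path: pathlib.Path):
    # tmp_path is a per-test temporary directory provided by pytest
    target = tmp_path / "package.py"
    target.write_text("# original contents\n")

    assert add_greeting(str(target)) == 1
    assert target.read_text().endswith("hello\n")
```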

View File

@@ -28,7 +28,6 @@
 from spack.ci.generator_registry import generator
 from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
 from spack.database import INDEX_JSON_FILE
-from spack.error import SpackError
 from spack.schema.buildcache_spec import schema as specfile_schema
 from spack.schema.database_index import schema as db_idx_schema
 from spack.spec import Spec
@@ -171,9 +170,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
     url: https://my.fake.cdash
     project: Not used
     site: Nothing
-""",
-        "--artifacts-root",
-        str(tmp_path / "my_artifacts_root"),
+"""
     )

     yaml_contents = syaml.load(outputfile.read_text())
@@ -195,7 +192,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
     assert "variables" in yaml_contents
     assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"]
-    assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "my_artifacts_root"
+    assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "jobs_scratch_dir"


 def test_ci_generate_with_env_missing_section(ci_generate_test, tmp_path, mock_binary_index):
@@ -1065,7 +1062,7 @@ def test_ci_rebuild_index(
     with open(tmp_path / "spec.json", "w", encoding="utf-8") as f:
         f.write(concrete_spec.to_json(hash=ht.dag_hash))
-    install_cmd("--fake", "--add", "-f", str(tmp_path / "spec.json"))
+    install_cmd("--add", "-f", str(tmp_path / "spec.json"))
     buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")

     ci_cmd("rebuild-index")
@@ -1325,50 +1322,44 @@ def test_ci_reproduce(
     env.concretize()
     env.write()

-    def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
-        with working_dir(tmp_path), ev.Environment(".") as env:
-            if not os.path.exists(repro_dir):
-                repro_dir.mkdir()
-
-            job_spec = env.concrete_roots()[0]
-            with open(repro_dir / "archivefiles.json", "w", encoding="utf-8") as f:
-                f.write(job_spec.to_json(hash=ht.dag_hash))
-            artifacts_root = repro_dir / "jobs_scratch_dir"
-            pipeline_path = artifacts_root / "pipeline.yml"
-
-            ci_cmd(
-                "generate",
-                "--output-file",
-                str(pipeline_path),
-                "--artifacts-root",
-                str(artifacts_root),
-            )
-
-            job_name = gitlab_generator.get_job_name(job_spec)
-
-            with open(repro_dir / "repro.json", "w", encoding="utf-8") as f:
-                f.write(
-                    json.dumps(
-                        {
-                            "job_name": job_name,
-                            "job_spec_json": "archivefiles.json",
-                            "ci_project_dir": str(repro_dir),
-                        }
-                    )
-                )
-
-            with open(repro_dir / "install.sh", "w", encoding="utf-8") as f:
-                f.write("#!/bin/sh\n\n#fake install\nspack install blah\n")
-
-            with open(repro_dir / "spack_info.txt", "w", encoding="utf-8") as f:
-                if merge_commit_test:
-                    f.write(
-                        f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n"
-                    )
-                else:
-                    f.write(f"\ncommit {last_two_git_commits[1]}\n\n")
-
-        return "jobs_scratch_dir"
+    repro_dir.mkdir()
+
+    job_spec = env.concrete_roots()[0]
+    with open(repro_dir / "archivefiles.json", "w", encoding="utf-8") as f:
+        f.write(job_spec.to_json(hash=ht.dag_hash))
+
+    artifacts_root = repro_dir / "scratch_dir"
+    pipeline_path = artifacts_root / "pipeline.yml"
+
+    ci_cmd(
+        "generate",
+        "--output-file",
+        str(pipeline_path),
+        "--artifacts-root",
+        str(artifacts_root),
+    )
+
+    job_name = gitlab_generator.get_job_name(job_spec)
+
+    with open(repro_dir / "repro.json", "w", encoding="utf-8") as f:
+        f.write(
+            json.dumps(
+                {
+                    "job_name": job_name,
+                    "job_spec_json": "archivefiles.json",
+                    "ci_project_dir": str(repro_dir),
+                }
+            )
+        )
+
+    with open(repro_dir / "install.sh", "w", encoding="utf-8") as f:
+        f.write("#!/bin/sh\n\n#fake install\nspack install blah\n")
+
+    with open(repro_dir / "spack_info.txt", "w", encoding="utf-8") as f:
+        f.write(f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n")
+
+    def fake_download_and_extract_artifacts(url, work_dir):
+        pass

     monkeypatch.setattr(ci, "download_and_extract_artifacts", fake_download_and_extract_artifacts)
     rep_out = ci_cmd(
@@ -1384,64 +1375,6 @@ def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
     # Make sure we tell the user where it is when not in interactive mode
     assert f"$ {repro_dir}/start.sh" in rep_out
-
-    # Ensure the correct commits are used
-    assert f"checkout_commit: {last_two_git_commits[0]}" in rep_out
-    assert f"merge_commit: {last_two_git_commits[1]}" in rep_out
-
-    # Test re-running in dirty working dir
-    with pytest.raises(SpackError, match=f"{repro_dir}"):
-        rep_out = ci_cmd(
-            "reproduce-build",
-            "https://example.com/api/v1/projects/1/jobs/2/artifacts",
-            "--working-dir",
-            str(repro_dir),
-            output=str,
-        )
-
-    # Cleanup between tests
-    shutil.rmtree(repro_dir)
-
-    # Test --use-local-head
-    rep_out = ci_cmd(
-        "reproduce-build",
-        "https://example.com/api/v1/projects/1/jobs/2/artifacts",
-        "--use-local-head",
-        "--working-dir",
-        str(repro_dir),
-        output=str,
-    )
-
-    # Make sure we are checkout out the HEAD commit without a merge commit
-    assert "checkout_commit: HEAD" in rep_out
-    assert "merge_commit: None" in rep_out
-
-    # Test the case where the spack_info.txt is not a merge commit
-    monkeypatch.setattr(
-        ci,
-        "download_and_extract_artifacts",
-        lambda url, wd: fake_download_and_extract_artifacts(url, wd, False),
-    )
-
-    # Cleanup between tests
-    shutil.rmtree(repro_dir)
-
-    rep_out = ci_cmd(
-        "reproduce-build",
-        "https://example.com/api/v1/projects/1/jobs/2/artifacts",
-        "--working-dir",
-        str(repro_dir),
-        output=str,
-    )
-
-    # Make sure the script was generated
-    assert (repro_dir / "start.sh").exists()
-
-    # Make sure we tell the user where it is when not in interactive mode
-    assert f"$ {repro_dir}/start.sh" in rep_out
-
-    # Ensure the correct commit is used (different than HEAD)
-    assert f"checkout_commit: {last_two_git_commits[1]}" in rep_out
-    assert "merge_commit: None" in rep_out


 @pytest.mark.parametrize(
     "url_in,url_out",

View File

@@ -5,7 +5,6 @@
 import filecmp
 import os
 import shutil
-import textwrap

 import pytest
@@ -260,25 +259,15 @@ def test_update_completion_arg(shell, tmpdir, monkeypatch):
 def test_updated_completion_scripts(shell, tmpdir):
     """Make sure our shell tab completion scripts remain up-to-date."""
-    width = 72
-    lines = textwrap.wrap(
+    msg = (
         "It looks like Spack's command-line interface has been modified. "
-        "If differences are more than your global 'include:' scopes, please "
-        "update Spack's shell tab completion scripts by running:",
-        width,
+        "Please update Spack's shell tab completion scripts by running:\n\n"
+        "    spack commands --update-completion\n\n"
+        "and adding the changed files to your pull request."
     )
-    lines.append("\n  spack commands --update-completion\n")
-    lines.extend(
-        textwrap.wrap(
-            "and adding the changed files (minus your global 'include:' scopes) "
-            "to your pull request.",
-            width,
-        )
-    )
-    msg = "\n".join(lines)

     header = os.path.join(spack.paths.share_path, shell, f"spack-completion.{shell}")
-    script = f"spack-completion.{shell}"
+    script = "spack-completion.{0}".format(shell)
     old_script = os.path.join(spack.paths.share_path, script)
     new_script = str(tmpdir.join(script))

View File

@@ -213,7 +213,7 @@ def test_config_add_update_dict(mutable_empty_config):
 def test_config_with_c_argument(mutable_empty_config):
     # I don't know how to add a spack argument to a Spack Command, so we test this way
-    config_file = "config:install_tree:root:/path/to/config.yaml"
+    config_file = "config:install_root:root:/path/to/config.yaml"
     parser = spack.main.make_argument_parser()
     args = parser.parse_args(["-c", config_file])
     assert config_file in args.config_vars
@@ -221,7 +221,7 @@ def test_config_with_c_argument(mutable_empty_config):
# Add the path to the config # Add the path to the config
config("add", args.config_vars[0], scope="command_line") config("add", args.config_vars[0], scope="command_line")
output = config("get", "config") output = config("get", "config")
assert "config:\n install_tree:\n root: /path/to/config.yaml" in output assert "config:\n install_root:\n root: /path/to/config.yaml" in output
def test_config_add_ordered_dict(mutable_empty_config): def test_config_add_ordered_dict(mutable_empty_config):
@@ -335,7 +335,7 @@ def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):
 def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
-    config("add", "packages:all:require:['%gcc']")
+    config("add", "packages:all:compiler:[gcc]")

     # contents to add to file
     contents = """spack:
@@ -357,7 +357,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
     expected = """packages:
   all:
     target: [x86_64]
-    require: ['%gcc']
+    compiler: [gcc]
 """

     assert expected == output
@@ -606,6 +606,7 @@ def test_config_prefer_upstream(
     packages = syaml.load(open(cfg_file, encoding="utf-8"))["packages"]

     # Make sure only the non-default variants are set.
+    assert packages["all"] == {"compiler": ["gcc@=10.2.1"]}
     assert packages["boost"] == {"variants": "+debug +graph", "version": ["1.63.0"]}
     assert packages["dependency-install"] == {"version": ["2.0"]}
     # Ensure that neither variant gets listed for hdf5, since they conflict

View File

@@ -2,16 +2,52 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os
 import platform

+import pytest
+
 import spack
 import spack.platforms
 import spack.spec
+from spack.database import INDEX_JSON_FILE
 from spack.main import SpackCommand
+from spack.util.executable import which

 debug = SpackCommand("debug")


+@pytest.mark.db
+def test_create_db_tarball(tmpdir, database):
+    with tmpdir.as_cwd():
+        debug("create-db-tarball")
+
+        # get the first non-dotfile to avoid coverage files in the directory
+        files = os.listdir(os.getcwd())
+        tarball_name = next(
+            f for f in files if not f.startswith(".") and not f.startswith("tests")
+        )
+
+        # debug command made an archive
+        assert os.path.exists(tarball_name)
+
+        # print contents of archive
+        tar = which("tar")
+        contents = tar("tzf", tarball_name, output=str)
+
+        # DB file is included
+        assert INDEX_JSON_FILE in contents
+
+        # specfiles from all installs are included
+        for spec in database.query():
+            # externals won't have a specfile
+            if spec.external:
+                continue
+            spec_suffix = "%s/.spack/spec.json" % spec.dag_hash()
+            assert spec_suffix in contents
+
+
 def test_report():
     out = debug("report")
     host_platform = spack.platforms.host()

View File

@@ -24,24 +24,32 @@
 mpi_deps = ["fake"]


-@pytest.mark.parametrize(
-    "cli_args,expected",
-    [
-        (["mpileaks"], set(["callpath"] + mpis)),
-        (
-            ["--transitive", "mpileaks"],
-            set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps),
-        ),
-        (["--transitive", "--deptype=link,run", "dtbuild1"], {"dtlink2", "dtrun2"}),
-        (["--transitive", "--deptype=build", "dtbuild1"], {"dtbuild2", "dtlink2"}),
-        (["--transitive", "--deptype=link", "dtbuild1"], {"dtlink2"}),
-    ],
-)
-def test_direct_dependencies(cli_args, expected, mock_runtimes):
-    out = dependencies(*cli_args)
-    result = set(re.split(r"\s+", out.strip()))
-    expected.update(mock_runtimes)
-    assert expected == result
+def test_direct_dependencies(mock_packages):
+    out = dependencies("mpileaks")
+    actual = set(re.split(r"\s+", out.strip()))
+    expected = set(["callpath"] + mpis)
+    assert expected == actual
+
+
+def test_transitive_dependencies(mock_packages):
+    out = dependencies("--transitive", "mpileaks")
+    actual = set(re.split(r"\s+", out.strip()))
+    expected = set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps)
+    assert expected == actual
+
+
+def test_transitive_dependencies_with_deptypes(mock_packages):
+    out = dependencies("--transitive", "--deptype=link,run", "dtbuild1")
+    deps = set(re.split(r"\s+", out.strip()))
+    assert set(["dtlink2", "dtrun2"]) == deps
+
+    out = dependencies("--transitive", "--deptype=build", "dtbuild1")
+    deps = set(re.split(r"\s+", out.strip()))
+    assert set(["dtbuild2", "dtlink2"]) == deps
+
+    out = dependencies("--transitive", "--deptype=link", "dtbuild1")
+    deps = set(re.split(r"\s+", out.strip()))
+    assert set(["dtlink2"]) == deps


 @pytest.mark.db
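The removed (`-`) side of the hunk above folds three separate dependency tests into one table driven by `@pytest.mark.parametrize`. A minimal sketch of that consolidation pattern (the `classify` function and its cases are hypothetical; the decorator is the real pytest API):

```python
import pytest


def classify(n: int) -> str:
    """Hypothetical function under test."""
    return "even" if n % 2 == 0 else "odd"


@pytest.mark.parametrize(
    "value,expected",
    [
        (2, "even"),
        (3, "odd"),
        (10, "even"),
    ],
)
def test_classify(value, expected):
    # each tuple above becomes its own test case in the report
    assert classify(value) == expected
```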

Some files were not shown because too many files have changed in this diff.