Compare commits


1 Commit

Author SHA1 Message Date
Wouter Deconinck
1e38c62748 celeritas: prevent 0.5.0 with geant4@11.3 2025-02-12 22:14:18 -06:00
782 changed files with 7946 additions and 15464 deletions


@@ -9,7 +9,6 @@ on:
branches:
- develop
- releases/**
merge_group:
concurrency:
group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -26,17 +25,13 @@ jobs:
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
# For pull requests it's not necessary to checkout the code
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
id: filter
with:
# For merge group events, compare against the target branch (main)
base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
# For merge group events, use the merge group head ref
ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
# Don't run if we only modified packages in the
# built-in repository or documentation
@@ -81,11 +76,10 @@ jobs:
prechecks:
needs: [ changes ]
uses: ./.github/workflows/prechecks.yml
uses: ./.github/workflows/valid-style.yml
secrets: inherit
with:
with_coverage: ${{ needs.changes.outputs.core }}
with_packages: ${{ needs.changes.outputs.packages }}
import-check:
needs: [ changes ]
@@ -99,7 +93,7 @@ jobs:
- name: Success
run: |
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
echo "Unit tests failed."
echo "Unit tests failed."
exit 1
else
exit 0
@@ -107,7 +101,6 @@ jobs:
coverage:
needs: [ unit-tests, prechecks ]
if: ${{ needs.changes.outputs.core }}
uses: ./.github/workflows/coverage.yml
secrets: inherit
@@ -120,10 +113,10 @@ jobs:
- name: Status summary
run: |
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
echo "Unit tests failed."
echo "Unit tests failed."
exit 1
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
echo "Bootstrap tests failed."
echo "Bootstrap tests failed."
exit 1
else
exit 0


@@ -1,7 +1,7 @@
black==25.1.0
black==24.10.0
clingo==5.7.1
flake8==7.1.2
isort==6.0.1
mypy==1.15.0
types-six==1.17.0.20250304
flake8==7.1.1
isort==5.13.2
mypy==1.11.2
types-six==1.17.0.20241205
vermin==1.6.0


@@ -1,4 +1,4 @@
name: prechecks
name: style
on:
workflow_call:
@@ -6,9 +6,6 @@ on:
with_coverage:
required: true
type: string
with_packages:
required: true
type: string
concurrency:
group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
@@ -33,7 +30,6 @@ jobs:
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
# Run style checks on the files that have been changed
style:
runs-on: ubuntu-latest
@@ -57,25 +53,12 @@ jobs:
- name: Run style tests
run: |
share/spack/qa/run-style-tests
audit:
uses: ./.github/workflows/audit.yaml
secrets: inherit
with:
with_coverage: ${{ inputs.with_coverage }}
python_version: '3.13'
verify-checksums:
if: ${{ inputs.with_packages == 'true' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 2
- name: Verify Added Checksums
run: |
bin/spack ci verify-versions HEAD^1 HEAD
# Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
bootstrap-dev-rhel8:
runs-on: ubuntu-latest

.gitignore

@@ -201,6 +201,7 @@ tramp
# Org-mode
.org-id-locations
*_archive
# flymake-mode
*_flymake.*


@@ -43,28 +43,6 @@ concretizer:
# (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
# Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
# number used if there isn't a more specific alternative
max_dupes:
default: 1
# Virtuals
c: 2
cxx: 2
fortran: 1
# Regular packages
cmake: 2
gmake: 2
python: 2
python-venv: 2
py-cython: 2
py-flit-core: 2
py-pip: 2
py-setuptools: 2
py-wheel: 2
xcb-proto: 2
# Compilers
gcc: 2
llvm: 2
# Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's


@@ -1761,24 +1761,19 @@ Verifying installations
The ``spack verify`` command can be used to verify the validity of
Spack-installed packages any time after installation.
^^^^^^^^^^^^^^^^^^^^^^^^^
``spack verify manifest``
^^^^^^^^^^^^^^^^^^^^^^^^^
At installation time, Spack creates a manifest of every file in the
installation prefix. For links, Spack tracks the mode, ownership, and
destination. For directories, Spack tracks the mode, and
ownership. For files, Spack tracks the mode, ownership, modification
time, hash, and size. The ``spack verify manifest`` command will check,
for every file in each package, whether any of those attributes have
changed. It will also check for newly added files or deleted files from
the installation prefix. Spack can either check all installed packages
time, hash, and size. The Spack verify command will check, for every
file in each package, whether any of those attributes have changed. It
will also check for newly added files or deleted files from the
installation prefix. Spack can either check all installed packages
using the `-a,--all` option or accept specs listed on the command line to
verify.
The ``spack verify manifest`` command can also verify for individual files
that they haven't been altered since installation time. If the given file
The ``spack verify`` command can also verify for individual files that
they haven't been altered since installation time. If the given file
is not in a Spack installation prefix, Spack will report that it is
not owned by any package. To check individual files instead of specs,
use the ``-f,--files`` option.
@@ -1793,22 +1788,6 @@ check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors.
^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack verify libraries``
^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``spack verify libraries`` command can be used to verify that packages
do not have accidental system dependencies. This command scans the install
prefixes of packages for executables and shared libraries, and resolves
their needed libraries in their RPATHs. When needed libraries cannot be
located, an error is reported. This typically indicates that a package
was linked against a system library, instead of a library provided by
a Spack package.
This verification can also be enabled as a post-install hook by setting
``config:shared_linking:missing_library_policy`` to ``error`` or ``warn``
in :ref:`config.yaml <config-yaml>`.
-----------------------
Filesystem requirements
-----------------------


@@ -223,10 +223,6 @@ def setup(sphinx):
("py:class", "spack.compiler.CompilerCache"),
# TypeVar that is not handled correctly
("py:class", "llnl.util.lang.T"),
("py:class", "llnl.util.lang.KT"),
("py:class", "llnl.util.lang.VT"),
("py:obj", "llnl.util.lang.KT"),
("py:obj", "llnl.util.lang.VT"),
]
# The reST default role (used for this markup: `text`) to use for all documents.


@@ -125,8 +125,6 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
by default. Can be purged with :ref:`spack clean --downloads
<cmd-spack-clean>`.
.. _Misc Cache:
--------------------
``misc_cache``
--------------------
@@ -336,52 +334,3 @@ create a new alias called ``inst`` that will always call ``install -v``:
aliases:
inst: install -v
-------------------------------
``concretization_cache:enable``
-------------------------------
When set to ``true``, Spack will utilize a cache of solver outputs from
successful concretization runs. When enabled, Spack will check the concretization
cache prior to running the solver. If a previous request to solve a given
problem is present in the cache, Spack will load the concrete specs and other
solver data from the cache rather than running the solver. Specs not previously
concretized will be added to the cache on a successful solve. The cache additionally
holds solver statistics, so commands like ``spack solve`` will still return information
about the run that produced a given solver result.
This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
Cache is cleaned.
When ``false`` or omitted, all concretization requests will be performed from scratch.
----------------------------
``concretization_cache:url``
----------------------------
Path to the location where Spack will root the concretization cache. Currently this only supports
paths on the local filesystem.
Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
------------------------------------
``concretization_cache:entry_limit``
------------------------------------
Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
after each concretization run; if Spack has stored more results than the limit allows, the
oldest concretization results are pruned until 10% of the limit has been removed.
Setting this value to 0 disables the automatic pruning; in that case, users are expected
to maintain the cache themselves.
-----------------------------------
``concretization_cache:size_limit``
-----------------------------------
Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
after each concretization run; if the cache exceeds this size, the
oldest concretization results are pruned until 10% of the limit has been removed.
Setting this value to 0 disables the automatic pruning; in that case, users are expected
to maintain the cache themselves.


@@ -14,7 +14,6 @@ case you want to skip directly to specific docs:
* :ref:`compilers.yaml <compiler-config>`
* :ref:`concretizer.yaml <concretizer-options>`
* :ref:`config.yaml <config-yaml>`
* :ref:`include.yaml <include-yaml>`
* :ref:`mirrors.yaml <mirrors>`
* :ref:`modules.yaml <modules>`
* :ref:`packages.yaml <packages-config>`


@@ -457,13 +457,6 @@ developed package in the environment are concretized to match the
version (and other constraints) passed as the spec argument to the
``spack develop`` command.
When working deep in the graph it is often desirable to have multiple specs marked
as ``develop`` so you don't have to restage and/or do full rebuilds each time you
call ``spack install``. The ``--recursive`` flag can be used in these scenarios
to ensure that all the dependents of the initial spec you provide are also marked
as develop specs. The ``--recursive`` flag requires a pre-concretized environment
so the graph can be traversed from the supplied spec all the way to the root specs.
For packages with ``git`` attributes, git branches, tags, and commits can
also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
@@ -677,45 +670,24 @@ This configuration sets the default compiler for all packages to
Included configurations
^^^^^^^^^^^^^^^^^^^^^^^
Spack environments allow an ``include`` heading in their yaml schema.
This heading pulls in external configuration files and applies them to
the environment.
Spack environments allow an ``include`` heading in their yaml
schema. This heading pulls in external configuration files and applies
them to the environment.
.. code-block:: yaml
spack:
include:
- environment/relative/path/to/config.yaml
- relative/path/to/config.yaml
- https://github.com/path/to/raw/config/compilers.yaml
- /absolute/path/to/packages.yaml
- path: /path/to/$os/$target/environment
optional: true
- path: /path/to/os-specific/config-dir
when: os == "ventura"
Included configuration files are required *unless* they are explicitly optional
or the entry's condition evaluates to ``false``. Optional includes are specified
with the ``optional`` clause and conditional with the ``when`` clause. (See
:ref:`include-yaml` for more information on optional and conditional entries.)
Files are listed using paths to individual files or directories containing them.
Path entries may be absolute or relative to the environment or specified as
URLs. URLs to individual files need link to the **raw** form of the file's
contents (e.g., `GitHub
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
or `GitLab
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
supported. Spack-specific, environment and user path variables can be used.
(See :ref:`config-file-variables` for more information.)
.. warning::
Recursive includes are not currently processed in a breadth-first manner
so the value of a configuration option that is altered by multiple included
files may not be what you expect. This will be addressed in a future
update.
Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
schemes). Spack-specific, environment and user path variables may be
used in these paths. See :ref:`config-file-variables` for more information.
^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence


@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
.. note::
As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
The following set of criteria (from lowest to highest precedence) explain
common cases where concretization output may seem surprising at first.
@@ -56,19 +56,7 @@ common cases where concretization output may seem surprising at first.
concretizer:
reuse: dependencies # other options are 'true' and 'false'
3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
are higher priority than reuse, and can be used to strongly prefer a specific version
or variant, without erroring out if it's not possible. Strong preferences are specified
as follows:
.. code-block:: yaml
packages:
foo:
prefer:
- "@1.1: ~mpi"
4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
and constraints from the command line as well as ``package.py`` files override all
of the above. Requirements are specified as follows:
@@ -78,8 +66,6 @@ common cases where concretization output may seem surprising at first.
foo:
require:
- "@1.2: +mpi"
conflicts:
- "@1.4"
Requirements and constraints restrict the set of possible solutions, while reuse
behavior and preferences influence what an optimal solution looks like.


@@ -1,51 +0,0 @@
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _include-yaml:
===============================
Include Settings (include.yaml)
===============================
Spack allows you to include configuration files through ``include.yaml``.
Using the ``include:`` heading results in pulling in external configuration
information to be used by any Spack command.
Included configuration files are required *unless* they are explicitly optional
or the entry's condition evaluates to ``false``. Optional includes are specified
with the ``optional`` clause and conditional with the ``when`` clause. For
example,
.. code-block:: yaml
include:
- /path/to/a/required/config.yaml
- path: /path/to/$os/$target/config
optional: true
- path: /path/to/os-specific/config-dir
when: os == "ventura"
shows all three. The first entry, ``/path/to/a/required/config.yaml``,
indicates that the included ``config.yaml`` file is required (so must exist).
Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
the path is only included if it exists. The condition ``os == "ventura"``
in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
path is only included when the operating system (``os``) is ``ventura``.
The same conditions and variables in `Spec List References
<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
can be used for conditional activation in the ``when`` clauses.
Included files can be specified by path or by their parent directory.
Paths may be absolute, relative (to the configuration file including the path),
or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
schemes) are supported. Spack-specific, environment and user path variables
can be used. (See :ref:`config-file-variables` for more information.)
.. warning::
Recursive includes are not currently processed in a breadth-first manner
so the value of a configuration option that is altered by multiple included
files may not be what you expect. This will be addressed in a future
update.


@@ -71,7 +71,6 @@ or refer to the full manual below.
configuration
config_yaml
include_yaml
packages_yaml
build_settings
environments


@@ -486,8 +486,6 @@ present. For instance with a configuration like:
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
.. _package-strong-preferences:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Conflicts and strong preferences
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^


@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).
.. _ci_artifacts:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CI Artifacts Directory Layout
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When running the CI build using the command ``spack ci rebuild``, a number of directories are created for
storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
The top level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories, nor does
it require user-specified files to be written to any specific directory.
------------------------
``concrete_environment``
------------------------
The directory ``concrete_environment`` is used to communicate the ``spack.yaml`` processed by ``spack ci generate`` and
the concrete ``spack.lock`` for the CI environment.
--------
``logs``
--------
The directory ``logs`` contains the spack build log, ``spack-build-out.txt``, and the spack build environment
modification file, ``spack-build-mod-env.txt``. Additionally, all files specified by the package's ``Builder``
property ``archive_files`` are also copied here (e.g., ``CMakeCache.txt`` in ``CMakeBuilder``).
----------------
``reproduction``
----------------
The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
JSON file for the current spec being built, and all of the files written in the artifacts root directory.
The ``repro.json`` file is not versioned and is only designed to work with the version of Spack that CI was run with.
An example of what a ``repro.json`` may look like follows.
.. code:: json
{
"job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
"job_spec_json": "adios2.json",
"ci_project_dir": "/builds/spack/spack"
}
---------
``tests``
---------
The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
data in it depending on the package that was built and the availability of tests.
-------------
``user_data``
-------------
The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` directory.
Users may use this to store additional logs or metrics or other types of files generated by the build job.
-------------------------------------
Using a custom spack in your pipeline
-------------------------------------


@@ -1,13 +1,13 @@
sphinx==8.2.3
sphinx==8.1.3
sphinxcontrib-programoutput==0.18
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
python-levenshtein==0.27.1
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.19.1
urllib3==2.3.0
pytest==8.3.5
isort==6.0.1
black==25.1.0
flake8==7.1.2
pytest==8.3.4
isort==5.13.2
black==24.10.0
flake8==7.1.1
mypy==1.11.1


@@ -7,7 +7,6 @@
import fnmatch
import glob
import hashlib
import io
import itertools
import numbers
import os
@@ -21,7 +20,6 @@
from contextlib import contextmanager
from itertools import accumulate
from typing import (
IO,
Callable,
Deque,
Dict,
@@ -2456,69 +2454,26 @@ class WindowsSimulatedRPath:
and vice versa.
"""
def __init__(
self,
package,
base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
link_install_prefix: bool = True,
):
def __init__(self, package, link_install_prefix=True):
"""
Args:
package (spack.package_base.PackageBase): Package requiring links
base_modification_prefix (str|pathlib.Path): Path representation indicating
the root directory in which to establish the simulated rpath, i.e. where the
symlinks that comprise the "rpath" behavior will be installed.
Note: This option is mutually exclusive with `link_install_prefix`; using
both is an error.
Default: None
link_install_prefix (bool): Link against package's own install or stage root.
Packages that run their own executables during build and require rpaths to
the build directory during build time require this option.
Default: install
the build directory during build time require this option. Default: install
root
Note: This option is mutually exclusive with `base_modification_prefix`; using
both is an error.
"""
self.pkg = package
self._addl_rpaths: set[str] = set()
if link_install_prefix and base_modification_prefix:
raise RuntimeError(
"Invalid combination of arguments given to WindowsSimulated RPath.\n"
"Select either `link_install_prefix` to create an install prefix rpath"
" or specify a `base_modification_prefix` for any other link type. "
"Specifying both arguments is invalid."
)
if not (link_install_prefix or base_modification_prefix):
raise RuntimeError(
"Insufficient arguments given to WindowsSimulatedRpath.\n"
"WindowsSimulatedRPath requires one of link_install_prefix"
" or base_modification_prefix to be specified."
" Neither was provided."
)
self._addl_rpaths = set()
self.link_install_prefix = link_install_prefix
if base_modification_prefix:
self.base_modification_prefix = pathlib.Path(base_modification_prefix)
else:
self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
self._additional_library_dependents: set[pathlib.Path] = set()
if not self.link_install_prefix:
tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
self._additional_library_dependents = set()
@property
def library_dependents(self):
"""
Set of directories where package binaries/libraries are located.
"""
base_pths = set()
if self.link_install_prefix:
base_pths.add(pathlib.Path(self.pkg.prefix.bin))
base_pths |= self._additional_library_dependents
return base_pths
return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
def add_library_dependent(self, *dest):
"""
@@ -2534,12 +2489,6 @@ def add_library_dependent(self, *dest):
new_pth = pathlib.Path(pth).parent
else:
new_pth = pathlib.Path(pth)
path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
if not path_is_in_prefix:
raise RuntimeError(
f"Attempting to generate rpath symlink out of rpath context:\
{str(self.base_modification_prefix)}"
)
self._additional_library_dependents.add(new_pth)
@property
@@ -2628,33 +2577,6 @@ def establish_link(self):
self._link(library, lib_dir)
def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
"""Establishes a temp Windows simulated rpath for the pkg in the testing directory
so an executable can test the libraries/executables with proper access
to dependent DLLs.
Note: this is a no-op on all other platforms besides Windows
Args:
pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
test_dir: the testing directory in which we should construct an rpath
"""
# link_install_prefix as false ensures we're not linking into the install prefix
mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
# add the testing directory as a location to install rpath symlinks
mini_rpath.add_library_dependent(test_dir)
# check for whether build_directory is available, if not
# assume the stage root is the build dir
build_dir_attr = getattr(pkg, "build_directory", None)
build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
# add the build dir & build dir bin
mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
mini_rpath.add_rpath(os.path.join(build_directory))
# construct rpath
mini_rpath.establish_link()
@system_path_filter
@memoized
def can_access_dir(path):
@@ -2883,20 +2805,6 @@ def keep_modification_time(*filenames):
os.utime(f, (os.path.getatime(f), mtime))
@contextmanager
def temporary_file_position(stream):
orig_pos = stream.tell()
yield
stream.seek(orig_pos)
@contextmanager
def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
with temporary_file_position(stream):
stream.seek(loc, relative_to)
yield
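A brief usage sketch for the two context managers above (the file name is illustrative, not from Spack): seek to an absolute offset, read, and return to the original position afterwards.

.. code-block:: python

   import io

   # hypothetical usage of current_file_position from above
   with open("build.log", encoding="utf-8") as stream:
       stream.readline()  # scanning has advanced past the first line
       with current_file_position(stream, 0, io.SEEK_SET):
           header = stream.readline()  # peek back at the header line
       # the original position is restored here, so scanning resumes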
@contextmanager
def temporary_dir(
suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None


@@ -11,11 +11,10 @@
import re
import sys
import traceback
import types
import typing
import warnings
from datetime import datetime, timedelta
from typing import Callable, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
@@ -708,24 +707,14 @@ def __init__(self, wrapped_object):
class Singleton:
"""Wrapper for lazily initialized singleton objects."""
"""Simple wrapper for lazily initialized singleton objects."""
def __init__(self, factory: Callable[[], object]):
def __init__(self, factory):
"""Create a new singleton to be inited with the factory function.
Most factories will simply create the object to be initialized and
return it.
In some cases, e.g. when bootstrapping some global state, the singleton
may need to be initialized incrementally. If the factory returns a generator
instead of a regular object, the singleton will assign each result yielded by
the generator to the singleton instance. This allows methods called by
the factory in later stages to refer back to the singleton.
Args:
factory (function): function taking no arguments that creates the
singleton instance.
factory (function): function taking no arguments that
creates the singleton instance.
"""
self.factory = factory
self._instance = None
@@ -733,16 +722,7 @@ def __init__(self, factory: Callable[[], object]):
@property
def instance(self):
if self._instance is None:
instance = self.factory()
if isinstance(instance, types.GeneratorType):
# if it's a generator, assign every value
for value in instance:
self._instance = value
else:
# if not, just assign the result like a normal singleton
self._instance = instance
self._instance = self.factory()
return self._instance
def __getattr__(self, name):
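The generator-returning factory described in that docstring is easiest to see in a minimal sketch; the factory below is illustrative, not from Spack, and relies on the ``Singleton`` class shown above.

.. code-block:: python

   def make_config():
       cfg = {"stage": "bootstrap"}
       yield cfg                   # CONFIG.instance becomes usable here,
       cfg["stage"] = "complete"   # so later stages may refer back to it
       yield cfg

   CONFIG = Singleton(make_config)
   assert CONFIG.instance["stage"] == "complete"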
@@ -1100,88 +1080,3 @@ def __set__(self, instance, value):
def factory(self, instance, owner):
raise NotImplementedError("must be implemented by derived classes")
KT = TypeVar("KT")
VT = TypeVar("VT")
class PriorityOrderedMapping(Mapping[KT, VT]):
"""Mapping that iterates over key according to an integer priority. If the priority is
the same for two keys, insertion order is what matters.
The priority is set when the key/value pair is added. If not set, the highest current priority
is used.
"""
_data: Dict[KT, VT]
_priorities: List[Tuple[int, KT]]
def __init__(self) -> None:
self._data = {}
# Tuple of (priority, key)
self._priorities = []
def __getitem__(self, key: KT) -> VT:
return self._data[key]
def __len__(self) -> int:
return len(self._data)
def __iter__(self):
yield from (key for _, key in self._priorities)
def __reversed__(self):
yield from (key for _, key in reversed(self._priorities))
def reversed_keys(self):
"""Iterates over keys from the highest priority, to the lowest."""
return reversed(self)
def reversed_values(self):
"""Iterates over values from the highest priority, to the lowest."""
yield from (self._data[key] for _, key in reversed(self._priorities))
def _highest_priority(self) -> int:
if not self._priorities:
return 0
result, _ = self._priorities[-1]
return result
def add(self, key: KT, *, value: VT, priority: Optional[int] = None) -> None:
"""Adds a key/value pair to the mapping, with a specific priority.
If the priority is None, then it is assumed to be the highest priority value currently
in the container.
If the key is already in the mapping, the existing entry is replaced.
"""
if priority is None:
priority = self._highest_priority()
if key in self._data:
self.remove(key)
self._priorities.append((priority, key))
# We rely on sort being stable
self._priorities.sort(key=lambda x: x[0])
self._data[key] = value
assert len(self._data) == len(self._priorities)
def remove(self, key: KT) -> VT:
"""Removes a key from the mapping.
Returns:
The value associated with the key being removed
Raises:
KeyError: if the key is not in the mapping
"""
if key not in self._data:
raise KeyError(f"cannot find {key}")
popped_item = self._data.pop(key)
self._priorities = [(p, k) for p, k in self._priorities if k != key]
assert len(self._data) == len(self._priorities)
return popped_item
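As a usage sketch for the mapping above (keys, values, and priorities are illustrative):

.. code-block:: python

   m = PriorityOrderedMapping()
   m.add("pager", value="less", priority=0)
   m.add("editor", value="vim", priority=10)
   m.add("shell", value="bash")  # no priority: reuses the current highest (10)

   # iteration is by ascending priority; ties keep insertion order
   assert list(m) == ["pager", "editor", "shell"]
   assert list(m.reversed_keys()) == ["shell", "editor", "pager"]
   assert m.remove("pager") == "less"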


@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
self.src_a = src_a
self.src_b = src_b
def __repr__(self) -> str:
return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
def _samefile(a: str, b: str):
try:
return os.path.samefile(a, b)
except OSError:
return False
class SourceMergeVisitor(BaseDirectoryVisitor):
"""
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
- A list of merge conflicts in dst/
"""
def __init__(
self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
):
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
self.ignore = ignore if ignore is not None else lambda f: False
# On case-insensitive filesystems, normalize paths to detect duplications
self.normalize_paths = normalize_paths
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
# bit to the relative path in the destination dir.
self.projection: str = ""
@@ -86,88 +71,10 @@ def __init__(
# and can run mkdir in order.
self.directories: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
# are guaranteed to be grouped by src_root in the order they were visited.
self.files: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
def _in_directories(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the directory list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._directories_normalized
else:
return proj_rel_path in self.directories
def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the directory that is mapped to a path
"""
if self.normalize_paths:
return self._directories_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.directories[proj_rel_path])
def _del_directory(self, proj_rel_path: str):
"""
Remove a directory from the list of directories
"""
del self.directories[proj_rel_path]
if self.normalize_paths:
del self._directories_normalized[proj_rel_path.lower()]
def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a directory to the list of directories.
Also stores the normalized version for later lookups
"""
self.directories[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
def _in_files(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the files list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._files_normalized
else:
return proj_rel_path in self.files
def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the file that is mapped to a path
"""
if self.normalize_paths:
return self._files_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.files[proj_rel_path])
def _del_file(self, proj_rel_path: str):
"""
Remove a file from the list of files
"""
del self.files[proj_rel_path]
if self.normalize_paths:
del self._files_normalized[proj_rel_path.lower()]
def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a file to the list of files
Also stores the normalized version for later lookups
"""
self.files[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
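Distilled to its core, the normalized-lookup pattern above keeps a parallel lowercased index so membership tests are case-insensitive while the original spelling is preserved. A standalone sketch (the class name is illustrative):

.. code-block:: python

   class CaseAwareIndex:
       def __init__(self, normalize_paths: bool = False):
           self.normalize = normalize_paths
           self.entries = {}      # original path -> payload
           self._normalized = {}  # lowercased path -> (original path, payload)

       def add(self, path: str, payload) -> None:
           self.entries[path] = payload
           if self.normalize:
               self._normalized[path.lower()] = (path, payload)

       def __contains__(self, path: str) -> bool:
           if self.normalize:
               return path.lower() in self._normalized
           return path in self.entries

   idx = CaseAwareIndex(normalize_paths=True)
   idx.add("Bin/Tool", "file")
   assert "bin/tool" in idx  # would collide on a case-insensitive filesystem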
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
"""
Register a directory if dst / rel_path is not blocked by a file or ignored.
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
if self.ignore(rel_path):
# Don't recurse when dir is ignored.
return False
elif self._in_files(proj_rel_path):
# A file-dir conflict is fatal except if they're the same file (symlinked dir).
src_a = os.path.join(*self._file(proj_rel_path))
src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
self.fatal_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
elif proj_rel_path in self.files:
# Can't create a dir where a file is.
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
return False
# Remove the link in favor of the dir.
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_directory(proj_rel_path, root, rel_path)
return True
elif self._in_directories(proj_rel_path):
)
return False
elif proj_rel_path in self.directories:
# No new directory, carry on.
return True
else:
# Register new directory.
self._add_directory(proj_rel_path, root, rel_path)
self.directories[proj_rel_path] = (root, rel_path)
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
if handle_as_dir:
return self.before_visit_dir(root, rel_path, depth)
self.visit_file(root, rel_path, depth, symlink=True)
self.visit_file(root, rel_path, depth)
return False
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if self.ignore(rel_path):
pass
elif self._in_directories(proj_rel_path):
# Can't create a file where a dir is, unless they are the same file (symlinked dir),
# in which case we simply drop the symlink in favor of the actual dir.
src_a = os.path.join(*self._directory(proj_rel_path))
src_b = os.path.join(root, rel_path)
if not symlink or not _samefile(src_a, src_b):
self.fatal_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(*self.directories[proj_rel_path]),
src_b=os.path.join(root, rel_path),
)
elif self._in_files(proj_rel_path):
)
elif proj_rel_path in self.files:
# When two files project to the same path, they conflict iff they are distinct.
# If they are the same (i.e. one links to the other), register regular files rather
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
# file, not the symlink.
src_a = os.path.join(*self._file(proj_rel_path))
src_a = os.path.join(*self.files[proj_rel_path])
src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
try:
samefile = os.path.samefile(src_a, src_b)
except OSError:
samefile = False
if not samefile:
# Distinct files produce a conflict.
self.file_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if not symlink:
# Remove the link in favor of the actual file. The del is necessary to maintain the
# order of the files dict, which is grouped by root.
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_file(proj_rel_path, root, rel_path)
del self.files[proj_rel_path]
self.files[proj_rel_path] = (root, rel_path)
else:
# Otherwise register this file to be linked.
self._add_file(proj_rel_path, root, rel_path)
self.files[proj_rel_path] = (root, rel_path)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
# Treat symlinked files as ordinary files (without "dereferencing")
@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
path = ""
for part in self.projection.split(os.sep):
path = os.path.join(path, part)
if not self._in_files(path):
self._add_directory(path, "<projection>", path)
if path not in self.files:
self.directories[path] = ("<projection>", path)
else:
# Can't create a dir where a file is.
_, src_a_root, src_a_relpath = self._file(path)
src_a_root, src_a_relpath = self.files[path]
self.fatal_conflicts.append(
MergeConflict(
dst=path,
@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper
if self.src._in_files(rel_path):
_, src_a_root, src_a_relpath = self.src._file(rel_path)
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir was also a src dir, remove the mkdir
# action, and traverse deeper.
if self.src._in_directories(rel_path):
existing_proj_rel_path, _, _ = self.src._directory(rel_path)
self.src._del_directory(existing_proj_rel_path)
if rel_path in self.src.directories:
del self.src.directories[rel_path]
return True
# If the destination dir does not appear in the src dir,
@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
be seen as files; we should not accidentally merge
source dir with a symlinked dest dir.
"""
self.visit_file(root, rel_path, depth)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if self.src._in_directories(rel_path):
_, src_a_root, src_a_relpath = self.src._directory(rel_path)
# Always conflict
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif self.src._in_files(rel_path):
_, src_a_root, src_a_relpath = self.src._file(rel_path)
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)


@@ -269,7 +269,7 @@ def __init__(
@staticmethod
def _poll_interval_generator(
_wait_times: Optional[Tuple[float, float, float]] = None,
_wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]:
"""This implements a backoff scheme for polling a contended resource
by suggesting a succession of wait times between polls.
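The body of the generator is not shown in this hunk; a plausible sketch, assuming the three wait times mean (short, medium, long) delays applied in phases:

.. code-block:: python

   import itertools
   from typing import Generator, Optional, Tuple

   def poll_intervals(
       wait_times: Optional[Tuple[float, float, float]] = None
   ) -> Generator[float, None, None]:
       # assumption: poll quickly at first, then back off to longer waits
       short, medium, long_wait = wait_times or (0.1, 1.0, 5.0)
       yield from itertools.repeat(short, 10)
       yield from itertools.repeat(medium, 10)
       yield from itertools.repeat(long_wait)  # poll slowly from then on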


@@ -2,7 +2,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Utility classes for logging the output of blocks of code."""
"""Utility classes for logging the output of blocks of code.
"""
import atexit
import ctypes
import errno


@@ -13,18 +13,6 @@
__version__ = "1.0.0.dev0"
spack_version = __version__
#: The current Package API version implemented by this version of Spack. The Package API defines
#: the Python interface for packages as well as the layout of package repositories. The minor
#: version is incremented when the package API is extended in a backwards-compatible way. The major
#: version is incremented upon breaking changes. This version is changed independently from the
#: Spack version.
package_api_version = (1, 0)
#: The minimum Package API version that this version of Spack is compatible with. This should
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
#: compatibility with vX.0.
min_package_api_version = (1, 0)
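A hedged sketch of how these tuples might be compared; the helper below is hypothetical, not the actual check Spack performs elsewhere:

.. code-block:: python

   def package_api_supported(required, implemented=(1, 0), minimum=(1, 0)):
       # A repository requiring Package API ``required`` is usable when it
       # falls in the closed range [minimum, implemented]; tuple comparison
       # gives the major version precedence over the minor one.
       return minimum <= required <= implemented

   assert package_api_supported((1, 0))
   assert not package_api_supported((2, 0))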
def __try_int(v):
try:
@@ -91,6 +79,4 @@ def get_short_version() -> str:
"get_version",
"get_spack_commit",
"get_short_version",
"package_api_version",
"min_package_api_version",
]


@@ -923,7 +923,7 @@ class FileTypes:
UNKNOWN = 2
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]")
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
def file_type(f: IO[bytes]) -> int:
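For context, a short sketch of what this pattern detects (the helper name is illustrative): NUL bytes and the 0x7F-0x9F control range never occur in ISO-8859-1 text, so their presence suggests a binary file.

.. code-block:: python

   import re

   NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")

   def looks_like_iso8859_1_text(data: bytes) -> bool:
       return NOT_ISO8859_1_TEXT.search(data) is None

   assert looks_like_iso8859_1_text("héllo".encode("latin-1"))
   assert not looks_like_iso8859_1_text(b"\x00binary")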


@@ -292,12 +292,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Install the spec that should make the module importable
with spack.config.override(self.mirror_scope):
PackageInstaller(
[concrete_spec.package],
fail_fast=True,
package_use_cache=False,
dependencies_use_cache=False,
).install()
PackageInstaller([concrete_spec.package], fail_fast=True).install()
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
self.last_search = info
@@ -367,7 +362,6 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
for current_config in bootstrapping_sources():
if not source_is_enabled(current_config):
continue
with exception_handler.forward(current_config["name"], Exception):
if create_bootstrapper(current_config).try_import(module, abstract_spec):
return


@@ -881,6 +881,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
def load_external_modules(pkg):
"""Traverse a package's spec DAG and load any external modules.
Traverse a package's dependencies and load any external modules
associated with them.
Args:
pkg (spack.package_base.PackageBase): package to load deps for
"""
for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or []
for external_module in external_modules:
load_module(external_module)
def setup_package(pkg, dirty, context: Context = Context.BUILD):
"""Execute all environment setup routines."""
if context not in (Context.BUILD, Context.TEST):
@@ -931,7 +946,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
for mod in pkg.compiler.modules:
load_module(mod)
load_external_modules(setup_context)
load_external_modules(pkg)
# Make sure nothing's strange about the Spack environment.
validate(env_mods, tty.warn)
@@ -1220,21 +1235,6 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
env.prepend_path("PATH", bin_dir)
def load_external_modules(context: SetupContext) -> None:
"""Traverse a package's spec DAG and load any external modules.
Traverse a package's dependencies and load any external modules
associated with them.
Args:
context: A populated SetupContext object
"""
for spec, _ in context.external:
external_modules = spec.external_modules or []
for external_module in external_modules:
load_module(external_module)
def _setup_pkg_and_run(
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
function: Callable,


@@ -12,7 +12,6 @@
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import depends_on
from .cmake import CMakeBuilder, CMakePackage
@@ -278,24 +277,17 @@ def initconfig_hardware_entries(self):
entries.append("# ROCm")
entries.append("#------------------{0}\n".format("-" * 30))
if spec.satisfies("^blt@0.7:"):
rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
else:
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
llvm_bin = spec["llvm-amdgpu"].prefix.bin
llvm_prefix = spec["llvm-amdgpu"].prefix
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
# others point to /<path>/rocm-<ver>/llvm
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
entries.append(
cmake_cache_filepath(
"CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
)
)
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
llvm_bin = spec["llvm-amdgpu"].prefix.bin
llvm_prefix = spec["llvm-amdgpu"].prefix
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
# others point to /<path>/rocm-<ver>/llvm
if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
entries.append(
cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
)
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
arch_str = ";".join(archs)
@@ -379,10 +371,6 @@ class CachedCMakePackage(CMakePackage):
CMakeBuilder = CachedCMakeBuilder
# These dependencies are assumed in the builder
depends_on("c", type="build")
depends_on("cxx", type="build")
def flag_handler(self, name, flags):
if name in ("cflags", "cxxflags", "cppflags", "fflags"):
return None, None, None # handled in the cmake cache


@@ -70,16 +70,10 @@ def build_directory(self):
"""Return the directory containing the main Cargo.toml."""
return self.pkg.stage.source_path
@property
def std_build_args(self):
"""Standard arguments for ``cargo build`` provided as a property for
convenience of package writers."""
return ["-j", str(self.pkg.module.make_jobs)]
@property
def build_args(self):
"""Arguments for ``cargo build``."""
return []
return ["-j", str(self.pkg.module.make_jobs)]
@property
def check_args(self):
@@ -94,9 +88,7 @@ def build(
) -> None:
"""Runs ``cargo install`` in the source directory"""
with fs.working_dir(self.build_directory):
pkg.module.cargo(
"install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
)
pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
def install(
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix


@@ -48,9 +48,6 @@ class MesonPackage(spack.package_base.PackageBase):
variant("strip", default=False, description="Strip targets on install")
depends_on("meson", type="build")
depends_on("ninja", type="build")
# Meson uses pkg-config for dependency detection, and this dependency is
# often overlooked by packages that use meson as a build system.
depends_on("pkgconfig", type="build")
# Python detection in meson requires distutils to be importable, but distutils no longer
# exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
# because the distutils-precedence.pth startup file that setuptools ships with is not run


@@ -142,7 +142,7 @@ def setup_run_environment(self, env):
$ source {prefix}/{component}/{version}/env/vars.sh
"""
# Only if environment modifications are desired (default is +envmods)
if "+envmods" in self.spec:
if "~envmods" not in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
self.component_prefix.env.join("vars.sh"), *self.env_script_args


@@ -6,7 +6,6 @@
import codecs
import json
import os
import pathlib
import re
import shutil
import stat
@@ -14,7 +13,7 @@
import tempfile
import zipfile
from collections import namedtuple
from typing import Callable, Dict, List, Set, Union
from typing import Callable, Dict, List, Set
from urllib.request import Request
import llnl.path
@@ -24,6 +23,7 @@
import spack
import spack.binary_distribution as bindist
import spack.builder
import spack.concretize
import spack.config as cfg
import spack.environment as ev
@@ -33,7 +33,6 @@
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
@@ -42,7 +41,6 @@
from spack import traverse
from spack.error import SpackError
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.version import GitVersion, StandardVersion
from .common import (
IS_WINDOWS,
@@ -81,45 +79,6 @@ def get_change_revisions():
return None, None
def get_added_versions(
checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
path: str,
from_ref: str = "HEAD~1",
to_ref: str = "HEAD",
) -> List[Union[StandardVersion, GitVersion]]:
"""Get a list of the versions added between `from_ref` and `to_ref`.
Args:
checksums_version_dict (Dict): all package versions keyed by known checksums.
path (str): path to the package.py
from_ref (str): oldest git ref, defaults to `HEAD~1`
to_ref (str): newer git ref, defaults to `HEAD`
Returns: list of versions added between refs
"""
git_exe = spack.util.git.git(required=True)
# Gather git diff
diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
# Store added and removed versions
# Removed versions are tracked here to determine when versions are moved in a file
# and show up as both added and removed in a git diff.
added_checksums = set()
removed_checksums = set()
# Scrape diff for modified versions and prune added versions if they show up
# as also removed (which means they've actually just moved in the file and
# we shouldn't need to rechecksum them)
for checksum in checksums_version_dict.keys():
for line in diff_lines:
if checksum in line:
if line.startswith("+"):
added_checksums.add(checksum)
if line.startswith("-"):
removed_checksums.add(checksum)
return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
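A hedged usage sketch for the function above; the checksum and package path are placeholders:

.. code-block:: python

   from spack.version import Version

   # hypothetical checksum-to-version map for a package.py under review;
   # "0123abcd" stands in for a full sha256 digest
   checksums = {"0123abcd": Version("2.9.2")}
   added = get_added_versions(
       checksums, "var/spack/repos/builtin/packages/adios2/package.py"
   )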
def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
"""Given an environment manifest path and two revisions to compare, return
whether or not the stack was changed. Returns True if the environment
@@ -265,7 +224,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
reason_msg = ", ".join(reasons)
spec_fmt = "{name}{@version}{/hash:7}{%compiler}"
spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
if not prune:
status = colorize("@*g{[x]} ")
@@ -622,25 +581,22 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
tty.debug(f"job spec: {job_spec}")
try:
package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
except spack.error.SpackError as e:
tty.error(f"Cannot copy logs: {str(e)}")
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
job_pkg = pkg_cls(job_spec)
tty.debug(f"job package: {job_pkg}")
except AssertionError:
msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
tty.error(msg)
return
# Get the package's archived files
archive_files = []
archive_root = package_metadata_root / "archived-files"
if archive_root.is_dir():
archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
else:
msg = "Cannot copy package archived files: archived-files must be a directory"
tty.warn(msg)
build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
build_env_mods = package_metadata_root / "spack-build-env.txt"
for f in [build_log_zipped, build_env_mods, *archive_files]:
copy_files_to_artifacts(str(f), job_log_dir)
stage_dir = job_pkg.stage.path
tty.debug(f"stage dir: {stage_dir}")
for file in [
job_pkg.log_path,
job_pkg.env_mods_path,
*spack.builder.create(job_pkg).archive_files,
]:
copy_files_to_artifacts(file, job_log_dir)
def copy_test_logs_to_artifacts(test_stage, job_test_dir):
@@ -660,7 +616,7 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
def download_and_extract_artifacts(url, work_dir) -> str:
def download_and_extract_artifacts(url, work_dir):
"""Look for gitlab artifacts.zip at the given url, and attempt to download
and extract the contents into the given work_dir
@@ -668,10 +624,6 @@ def download_and_extract_artifacts(url, work_dir) -> str:
url (str): Complete url to artifacts.zip file
work_dir (str): Path to destination where artifacts should be extracted
Output:
Artifacts root path relative to the archive root
"""
tty.msg(f"Fetching artifacts from: {url}")
@@ -689,25 +641,13 @@ def download_and_extract_artifacts(url, work_dir) -> str:
response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
with zipfile.ZipFile(artifacts_zip_path) as zip_file:
zip_file.extractall(work_dir)
# Get the artifact root
artifact_root = ""
for f in zip_file.filelist:
if "spack.lock" in f.filename:
artifact_root = os.path.dirname(os.path.dirname(f.filename))
break
except OSError as e:
raise SpackError(f"Error fetching artifacts: {e}")
finally:
try:
os.remove(artifacts_zip_path)
except FileNotFoundError:
# If the file doesn't exist we are already raising
pass
return artifact_root
with zipfile.ZipFile(artifacts_zip_path) as zip_file:
zip_file.extractall(work_dir)
os.remove(artifacts_zip_path)
def get_spack_info():
@@ -821,7 +761,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
return True
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
"""Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
attempt to setup an environment in which the failure can be reproduced
locally. This entails the following:
@@ -835,11 +775,8 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
commands to run to reproduce the build once inside the container.
"""
work_dir = os.path.realpath(work_dir)
if os.path.exists(work_dir) and os.listdir(work_dir):
raise SpackError(f"Cannot run reproducer in non-emptry working dir:\n {work_dir}")
platform_script_ext = "ps1" if IS_WINDOWS else "sh"
artifact_root = download_and_extract_artifacts(url, work_dir)
download_and_extract_artifacts(url, work_dir)
gpg_path = None
if gpg_url:
@@ -901,9 +838,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
with open(repro_file, encoding="utf-8") as fd:
repro_details = json.load(fd)
spec_file = fs.find(work_dir, repro_details["job_spec_json"])[0]
reproducer_spec = spack.spec.Spec.from_specfile(spec_file)
repro_dir = os.path.dirname(repro_file)
rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)
@@ -964,20 +898,17 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
if use_local_head:
commit_1 = "HEAD"
# Try the more specific merge commit regex first
m = merge_commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)
else:
# Try the more specific merge commit regex first
m = merge_commit_regex.search(spack_info)
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)
else:
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
commit_1 = m.group(1)
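Pulled out on its own, the commit-extraction logic above behaves like this (the spack_info text is a hypothetical stand-in for the archived spack info output):

import re

commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

spack_info = "Merge 1a2b3c4 into 9f8e7d6"  # hypothetical input
m = merge_commit_regex.search(spack_info)
if m:
    # merge commit: capture both parents
    commit_1, commit_2 = m.group(1), m.group(2)
else:
    # not a merge commit: fall back to the plain commit sha
    m = commit_regex.search(spack_info)
    commit_1 = m.group(1) if m else None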
setup_result = False
if commit_1:
@@ -1052,8 +983,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
)
# Attempt to create a unique name for the reproducer container
container_suffix = "_" + reproducer_spec.dag_hash() if reproducer_spec else ""
docker_command = [
runtime,
"run",
@@ -1061,14 +990,14 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
"-t",
"--rm",
"--name",
f"spack_reproducer{container_suffix}",
"spack_reproducer",
"-v",
":".join([work_dir, mounted_workdir, "Z"]),
"-v",
":".join(
[
os.path.join(work_dir, artifact_root),
os.path.join(mount_as_dir, artifact_root),
os.path.join(work_dir, "jobs_scratch_dir"),
os.path.join(mount_as_dir, "jobs_scratch_dir"),
"Z",
]
),

View File

@@ -330,7 +330,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1:
return
format_string = "{name}{@version}{ arch=architecture} {%compiler.name}{@compiler.version}"
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -471,11 +471,12 @@ def get_arg(name, default=None):
nfmt = "{fullname}" if namespaces else "{name}"
ffmt = ""
if full_compiler or flags:
ffmt += "{compiler_flags} {%compiler.name}"
ffmt += "{%compiler.name}"
if full_compiler:
ffmt += "{@compiler.version}"
ffmt += " {compiler_flags}"
vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + vfmt + ffmt
format_string = nfmt + "{@version}" + ffmt + vfmt
def fmt(s, depth=0):
"""Formatter function for all output specs"""

View File

@@ -4,7 +4,7 @@
import re
import sys
from typing import Dict, Optional, Tuple
from typing import Dict, Optional
import llnl.string
import llnl.util.lang
@@ -181,11 +181,7 @@ def checksum(parser, args):
print()
if args.add_to_package:
path = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = add_versions_to_pkg(path, version_lines)
tty.msg(f"Added {num_versions_added} new versions to {pkg.name} in {path}")
if not args.batch and sys.stdin.isatty():
editor(path)
add_versions_to_package(pkg, version_lines, args.batch)
def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -231,9 +227,20 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
tty.die("Invalid checksums found.")
def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]:
"""Returns a tuple of number of versions added and the package's modified contents."""
def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
"""
Add checksummed versions to a package's version directives and open the
user's editor so they can double-check the additions.
Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_lines (str): A string of rendered version lines.
is_batch (bool): if True, skip opening the user's editor to review the changes.
"""
# Get filename and path for package
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = 0
version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
@@ -245,34 +252,33 @@ def _update_version_statements(package_src: str, version_lines: str) -> Tuple[in
if match:
new_versions.append((Version(match.group(1)), ver_line))
split_contents = version_statement_re.split(package_src)
with open(filename, "r+", encoding="utf-8") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)
for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break
for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break
# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))
# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))
if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
num_versions_added += 1
if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
num_versions_added += 1
elif parsed_version == new_versions[0][0]:
new_versions.pop(0)
elif parsed_version == new_versions[0][0]:
new_versions.pop(0)
return num_versions_added, "".join(split_contents)
# Seek back to the start of the file so we can rewrite the file contents.
f.seek(0)
f.writelines("".join(split_contents))
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")
def add_versions_to_pkg(path: str, version_lines: str) -> int:
"""Add new versions to a package.py file. Returns the number of versions added."""
with open(path, "r", encoding="utf-8") as f:
package_src = f.read()
num_versions_added, package_src = _update_version_statements(package_src, version_lines)
if num_versions_added > 0:
with open(path, "w", encoding="utf-8") as f:
f.write(package_src)
return num_versions_added
if sys.stdout.isatty() and not is_batch:
editor(filename)
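To make the regex splitting above concrete, here is a minimal run of the same two patterns against a toy package source (the package body is hypothetical):

import re

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

package_src = 'class Toy:\n    version("1.2", sha256="abc")\n    version("1.0", sha256="def")\n'

# re.split with a capturing group keeps the captured version(...) statements,
# so the result alternates plain text with version statements.
for part in version_statement_re.split(package_src):
    m = version_re.match(part)
    if m:
        print("found version:", m.group(1))  # prints 1.2, then 1.0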

View File

@@ -4,15 +4,12 @@
import json
import os
import re
import shutil
import sys
from typing import Dict
from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util import tty
import spack.binary_distribution as bindist
import spack.ci as spack_ci
@@ -21,22 +18,12 @@
import spack.cmd.common.arguments
import spack.config as cfg
import spack.environment as ev
import spack.error
import spack.fetch_strategy
import spack.hash_types as ht
import spack.mirrors.mirror
import spack.package_base
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.executable
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
import spack.version
description = "manage continuous integration pipelines"
section = "build"
@@ -45,7 +32,6 @@
SPACK_COMMAND = "spack"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100
BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")
def deindent(desc):
@@ -190,11 +176,6 @@ def setup_parser(subparser):
reproduce.add_argument(
"-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
)
reproduce.add_argument(
"--use-local-head",
help="Use the HEAD of the local Spack instead of reproducing a commit",
action="store_true",
)
gpg_group = reproduce.add_mutually_exclusive_group(required=False)
gpg_group.add_argument(
"--gpg-file", help="Path to public GPG key for validating binary cache installs"
@@ -205,16 +186,6 @@ def setup_parser(subparser):
reproduce.set_defaults(func=ci_reproduce)
# Verify checksums inside of ci workflows
verify_versions = subparsers.add_parser(
"verify-versions",
description=deindent(ci_verify_versions.__doc__),
help=spack.cmd.first_line(ci_verify_versions.__doc__),
)
verify_versions.add_argument("from_ref", help="git ref from which start looking at changes")
verify_versions.add_argument("to_ref", help="git ref to end looking at changes")
verify_versions.set_defaults(func=ci_verify_versions)
def ci_generate(args):
"""generate jobs file from a CI-aware spack file
@@ -451,7 +422,7 @@ def ci_rebuild(args):
# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--only=package"]
root_install_args = install_args + ["--keep-stage", "--only=package"]
if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
@@ -488,7 +459,8 @@ def ci_rebuild(args):
job_spec.to_dict(hash=ht.dag_hash),
)
# Copy logs and archived files from the install metadata (.spack) directory to artifacts now
# We generated the "spack install ..." command to "--keep-stage", copy
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
# If the installation succeeded and we're running stand-alone tests for
@@ -636,12 +608,7 @@ def ci_reproduce(args):
gpg_key_url = None
return spack_ci.reproduce_ci_job(
args.job_url,
args.working_dir,
args.autostart,
gpg_key_url,
args.runtime,
args.use_local_head,
args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
)
@@ -683,159 +650,6 @@ def _gitlab_artifacts_url(url: str) -> str:
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
def validate_standard_versions(
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
) -> bool:
"""Get and test the checksum of a package version based on a tarball.
Args:
pkg (spack.package_base.PackageBase): Spack package for which to validate version checksums
versions (spack.version.VersionList): list of package versions to validate
Returns: bool: True if all checksums are valid, False if any check failed.
"""
url_dict: Dict[spack.version.StandardVersion, str] = {}
for version in versions:
url = pkg.find_valid_url_for_version(version)
url_dict[version] = url
version_hashes = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, fetch_options=pkg.fetch_options
)
valid_checksums = True
for version, sha in version_hashes.items():
if sha != pkg.versions[version]["sha256"]:
tty.error(
f"Invalid checksum found {pkg.name}@{version}\n"
f" [package.py] {pkg.versions[version]['sha256']}\n"
f" [Downloaded] {sha}"
)
valid_checksums = False
continue
tty.info(f"Validated {pkg.name}@{version} --> {sha}")
return valid_checksums
def validate_git_versions(
pkg: spack.package_base.PackageBase, versions: spack.version.VersionList
) -> bool:
"""Get and test the commit and tag of a package version based on a git repository.
Args:
pkg (spack.package_base.PackageBase): Spack package for which to validate a version
versions (spack.version.VersionList): list of package versions to validate
Returns: bool: True if all commits and tags are valid, False if any check failed.
"""
valid_commit = True
for version in versions:
fetcher = spack.fetch_strategy.for_package_version(pkg, version)
with spack.stage.Stage(fetcher) as stage:
known_commit = pkg.versions[version]["commit"]
try:
stage.fetch()
except spack.error.FetchError:
tty.error(
f"Invalid commit for {pkg.name}@{version}\n"
f" {known_commit} could not be checked out in the git repository."
)
valid_commit = False
continue
# Test if the specified tag matches the commit in the package.py
# We retrieve the commit associated with a tag and compare it to the
# commit that is located in the package.py file.
if "tag" in pkg.versions[version]:
tag = pkg.versions[version]["tag"]
try:
with fs.working_dir(stage.source_path):
found_commit = fetcher.git(
"rev-list", "-n", "1", tag, output=str, error=str
).strip()
except spack.util.executable.ProcessError:
tty.error(
f"Invalid tag for {pkg.name}@{version}\n"
f" {tag} could not be found in the git repository."
)
valid_commit = False
continue
if found_commit != known_commit:
tty.error(
f"Mismatched tag <-> commit found for {pkg.name}@{version}\n"
f" [package.py] {known_commit}\n"
f" [Downloaded] {found_commit}"
)
valid_commit = False
continue
# If we have downloaded the repository, found the commit, and compared
# the tag (if specified) we can conclude that the version is pointing
# at what we would expect.
tty.info(f"Validated {pkg.name}@{version} --> {known_commit}")
return valid_commit
def ci_verify_versions(args):
"""validate version checksum & commits between git refs
This command takes from_ref and to_ref arguments, parses the git diff
between the two refs to determine which packages have been modified, and
verifies the new checksums inside of them.
"""
with fs.working_dir(spack.paths.prefix):
# We use HEAD^1 explicitly on the merge commit created by
# GitHub Actions. However HEAD~1 is a safer default for the helper function.
files = spack.util.git.get_modified_files(from_ref=args.from_ref, to_ref=args.to_ref)
# Get a list of package names from the modified files.
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
failed_version = False
for pkg_name, path in pkgs:
spec = spack.spec.Spec(pkg_name)
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
# Skip checking manual download packages and trust the maintainers
if pkg.manual_download:
tty.warn(f"Skipping manual download package: {pkg_name}")
continue
# Store versions checksums / commits for future loop
checksums_version_dict = {}
commits_version_dict = {}
for version in pkg.versions:
# If the package version defines a sha256 we'll use that as the high entropy
# string to detect which versions have been added between from_ref and to_ref
if "sha256" in pkg.versions[version]:
checksums_version_dict[pkg.versions[version]["sha256"]] = version
# If a package version instead defines a commit we'll use that as a
# high entropy string to detect new versions.
elif "commit" in pkg.versions[version]:
commits_version_dict[pkg.versions[version]["commit"]] = version
# TODO: enforce every version have a commit or a sha256 defined if not
# an infinite version (there are a lot of packages where this doesn't work yet).
with fs.working_dir(spack.paths.prefix):
added_checksums = spack_ci.get_added_versions(
checksums_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
)
added_commits = spack_ci.get_added_versions(
commits_version_dict, path, from_ref=args.from_ref, to_ref=args.to_ref
)
if added_checksums:
failed_version = not validate_standard_versions(pkg, added_checksums) or failed_version
if added_commits:
failed_version = not validate_git_versions(pkg, added_commits) or failed_version
if failed_version:
sys.exit(1)
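For reference, the BUILTIN pattern defined near the top of this file extracts package names from modified paths like so (the file list is hypothetical):

import re

BUILTIN = re.compile(r"var\/spack\/repos\/builtin\/packages\/([^\/]+)\/package\.py")

files = ["var/spack/repos/builtin/packages/zlib/package.py", "lib/spack/spack/ci.py"]
pkgs = [(m.group(1), p) for p in files for m in [BUILTIN.search(p)] if m]
print(pkgs)  # [('zlib', 'var/spack/repos/builtin/packages/zlib/package.py')]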
def ci(parser, args):
if args.func:
return args.func(args)

View File

@@ -528,6 +528,7 @@ def __call__(self, parser, namespace, values, option_string):
# the const from the constructor or a value from the CLI.
# Note that this is only called if the argument is actually
# specified on the command line.
spack.config.CONFIG.ensure_scope_ordering()
spack.config.set(self.config_path, self.const, scope="command_line")

View File

@@ -350,12 +350,9 @@ def _config_change(config_path, match_spec_str=None):
if spack.config.get(key_path, scope=scope):
ideal_scope_to_modify = scope
break
# If we find our key in a specific scope, that's the one we want
# to modify. Otherwise we use the default write scope.
write_scope = ideal_scope_to_modify or spack.config.default_modify_scope()
update_path = f"{key_path}:[{str(spec)}]"
spack.config.add(update_path, scope=write_scope)
spack.config.add(update_path, scope=ideal_scope_to_modify)
else:
raise ValueError("'config change' can currently only change 'require' sections")

View File

@@ -2,11 +2,23 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import re
import sys
from datetime import datetime
from glob import glob
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack
import spack.paths
import spack.platforms
import spack.spec
import spack.store
import spack.util.git
from spack.util.executable import which
description = "debugging commands for troubleshooting Spack"
section = "developer"
@@ -15,9 +27,63 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
sp.add_parser("report", help="print information useful for bug reports")
def _debug_tarball_suffix():
now = datetime.now()
suffix = now.strftime("%Y-%m-%d-%H%M%S")
git = spack.util.git.git()
if not git:
return "nobranch-nogit-%s" % suffix
with working_dir(spack.paths.prefix):
if not os.path.isdir(".git"):
return "nobranch.nogit.%s" % suffix
# Get symbolic branch name and strip any special chars (mainly '/')
symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
symbolic = re.sub(r"[^\w.-]", "-", symbolic)
# Get the commit hash too.
commit = git("rev-parse", "--short", "HEAD", output=str).strip()
if symbolic == commit:
return "nobranch.%s.%s" % (commit, suffix)
else:
return "%s.%s.%s" % (symbolic, commit, suffix)
def create_db_tarball(args):
tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(str(spack.store.STORE.root))
transform_args = []
# Currently --transform and -s are not supported by Windows native tar
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
elif sys.platform != "win32":
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(str(spack.store.STORE.root))
with working_dir(wd):
files = [spack.store.STORE.db._index_path]
files += glob("%s/*/*/*/.spack/spec.json" % base)
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]
args = ["-czf", tarball_path]
args += transform_args
args += files
tar(*args)
tty.msg("Created %s" % tarball_name)
def report(args):
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
@@ -29,5 +95,5 @@ def report(args):
def debug(parser, args):
if args.debug_command == "report":
report(args)
action = {"create-db-tarball": create_db_tarball, "report": report}
action[args.debug_command](args)

View File

@@ -55,7 +55,7 @@ def dependencies(parser, args):
env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env)
format_string = "{name}{@version}{/hash:7}{%compiler}"
format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty():
tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
deps = spack.store.STORE.db.installed_relatives(

View File

@@ -93,7 +93,7 @@ def dependents(parser, args):
env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env)
format_string = "{name}{@version}{/hash:7}{%compiler}"
format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty():
tty.msg("Dependents of %s" % spec.cformat(format_string))
deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)

View File

@@ -3,13 +3,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
from typing import Optional
import llnl.util.tty as tty
import spack.cmd
import spack.config
import spack.environment
import spack.fetch_strategy
import spack.repo
import spack.spec
@@ -33,33 +31,37 @@ def setup_parser(subparser):
"--no-clone",
action="store_false",
dest="clone",
default=None,
help="do not clone, the package already exists at the source path",
)
clone_group.add_argument(
"--clone",
action="store_true",
dest="clone",
default=True,
help=(
"(default) clone the package unless the path already exists, "
"use --force to overwrite"
),
default=None,
help="clone the package even if the path already exists",
)
subparser.add_argument(
"-f", "--force", help="remove any files or directories that block cloning source code"
)
subparser.add_argument(
"-r",
"--recursive",
action="store_true",
help="traverse nodes of the graph to mark everything up to the root as a develop spec",
)
arguments.add_common_arguments(subparser, ["spec"])
def _update_config(spec, path):
find_fn = lambda section: spec.name in section
entry = {"spec": str(spec)}
if path != spec.name:
entry["path"] = path
def change_fn(section):
section[spec.name] = entry
spack.config.change_or_add("develop", find_fn, change_fn)
def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
# "steal" the source code via staging API. We ask for a stage
# to be created, then copy it afterwards somewhere else. It would be
@@ -81,43 +83,44 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
package.stage.steal_source(abspath)
def assure_concrete_spec(env: spack.environment.Environment, spec: spack.spec.Spec):
version = spec.versions.concrete_range_as_version
if not version:
# first check environment for a matching concrete spec
matching_specs = env.all_matching_specs(spec)
if matching_specs:
version = matching_specs[0].version
test_spec = spack.spec.Spec(f"{spec}@{version}")
for m_spec in matching_specs:
if not m_spec.satisfies(test_spec):
raise SpackError(
f"{spec.name}: has multiple concrete instances in the graph that can't be"
" satisified by a single develop spec. To use `spack develop` ensure one"
" of the following:"
f"\n a) {spec.name} nodes can satisfy the same develop spec (minimally "
"this means they all share the same version)"
f"\n b) Provide a concrete develop spec ({spec.name}@[version]) to clearly"
" indicate what should be developed"
)
else:
# look up the maximum version so infinity versions are preferred for develop
version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])
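A small illustration of why max() picks what we want here, assuming Spack's spack.version module is importable: "infinity" versions such as develop compare greater than any numeric version, so they are preferred:

from spack.version import Version

versions = [Version("2.1"), Version("develop"), Version("1.9")]
print(max(versions))  # develop -- infinity versions sort above numeric ones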
def develop(parser, args):
# Note: we could put develop specs in any scope, but I assume
# users would only ever want to do this for either (a) an active
# env or (b) a specified config file (e.g. that is included by
# an environment)
# TODO: when https://github.com/spack/spack/pull/35307 is merged,
# an active env is not required if a scope is specified
env = spack.cmd.require_active_env(cmd_name="develop")
if not args.spec:
if args.clone is False:
raise SpackError("No spec provided to spack develop command")
# download all dev specs
for name, entry in env.dev_specs.items():
path = entry.get("path", name)
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, force: bool = False):
"""
Handle checking, cloning or overwriting source code
"""
assert spec.versions
if os.path.exists(abspath):
msg = "Skipping developer download of %s" % entry["spec"]
msg += " because its path already exists."
tty.msg(msg)
continue
if clone:
_clone(spec, src_path, force)
# Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
# are currently supported.
spec = spack.spec.parse_with_version_concrete(entry["spec"])
_retrieve_develop_source(spec, abspath)
if not clone and not os.path.exists(src_path):
raise SpackError(f"Provided path {src_path} does not exist")
if not env.dev_specs:
tty.warn("No develop specs to download")
return
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
raise SpackError("spack develop requires at most one named spec")
spec = specs[0]
version = spec.versions.concrete_range_as_version
if not version:
@@ -126,114 +129,40 @@ def setup_src_code(spec: spack.spec.Spec, src_path: str, clone: bool = True, for
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])
# If user does not specify --path, we choose to create a directory in the
# active environment's directory, named after the spec
path = args.path or spec.name
if not os.path.isabs(path):
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
else:
abspath = path
def _update_config(spec, path):
find_fn = lambda section: spec.name in section
# clone default: only if the path doesn't exist
clone = args.clone
if clone is None:
clone = not os.path.exists(abspath)
entry = {"spec": str(spec)}
if path and path != spec.name:
entry["path"] = path
if not clone and not os.path.exists(abspath):
raise SpackError("Provided path %s does not exist" % abspath)
def change_fn(section):
section[spec.name] = entry
if clone:
if os.path.exists(abspath):
if args.force:
shutil.rmtree(abspath)
else:
msg = "Path %s already exists and cannot be cloned to." % abspath
msg += " Use `spack develop -f` to overwrite."
raise SpackError(msg)
spack.config.change_or_add("develop", find_fn, change_fn)
def update_env(
env: spack.environment.Environment,
spec: spack.spec.Spec,
specified_path: Optional[str] = None,
build_dir: Optional[str] = None,
):
"""
Update the spack.yaml file with additions or changes from a develop call
"""
tty.debug(f"Updating develop config for {env.name} transactionally")
if not specified_path:
dev_entry = env.dev_specs.get(spec.name)
if dev_entry:
specified_path = dev_entry.get("path", None)
_retrieve_develop_source(spec, abspath)
tty.debug("Updating develop config for {0} transactionally".format(env.name))
with env.write_transaction():
if build_dir is not None:
if args.build_directory is not None:
spack.config.add(
f"packages:{spec.name}:package_attributes:build_directory:{build_dir}",
"packages:{}:package_attributes:build_directory:{}".format(
spec.name, args.build_directory
),
env.scope_name,
)
# add develop spec and update path
_update_config(spec, specified_path)
def _clone(spec: spack.spec.Spec, abspath: str, force: bool = False):
if os.path.exists(abspath):
if force:
shutil.rmtree(abspath)
else:
msg = f"Skipping developer download of {spec.name}"
msg += f" because its path {abspath} already exists."
tty.msg(msg)
return
# cloning can take a while and it's nice to get a message for the longer clones
tty.msg(f"Cloning source code for {spec}")
_retrieve_develop_source(spec, abspath)
def _abs_code_path(
env: spack.environment.Environment, spec: spack.spec.Spec, path: Optional[str] = None
):
src_path = path if path else spec.name
return spack.util.path.canonicalize_path(src_path, default_wd=env.path)
def _dev_spec_generator(args, env):
"""
Generator function to loop over all the develop specs, based on how the command is called.
If no specs are supplied, loop over the develop specs listed in the environment.
"""
if not args.spec:
if args.clone is False:
raise SpackError("No spec provided to spack develop command")
for name, entry in env.dev_specs.items():
path = entry.get("path", name)
abspath = spack.util.path.canonicalize_path(path, default_wd=env.path)
# Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
# are currently supported.
spec = spack.spec.parse_with_version_concrete(entry["spec"])
yield spec, abspath
else:
specs = spack.cmd.parse_specs(args.spec)
if (args.path or args.build_directory) and len(specs) > 1:
raise SpackError(
"spack develop requires at most one named spec when using the --path or"
" --build-directory arguments"
)
for spec in specs:
if args.recursive:
concrete_specs = env.all_matching_specs(spec)
if not concrete_specs:
tty.warn(
f"{spec.name} has no matching concrete specs in the environment and "
"will be skipped. `spack develop --recursive` requires a concretized"
" environment"
)
else:
for s in concrete_specs:
for node_spec in s.traverse(direction="parents", root=True):
tty.debug(f"Recursive develop for {node_spec.name}")
yield node_spec, _abs_code_path(env, node_spec, args.path)
else:
yield spec, _abs_code_path(env, spec, args.path)
def develop(parser, args):
env = spack.cmd.require_active_env(cmd_name="develop")
for spec, abspath in _dev_spec_generator(args, env):
assure_concrete_spec(env, spec)
setup_src_code(spec, abspath, clone=args.clone, force=args.force)
update_env(env, spec, args.path, args.build_directory)
_update_config(spec, path)

View File

@@ -73,7 +73,7 @@
boxlib @B{dim=2} boxlib built for 2 dimensions
libdwarf @g{%intel} ^libelf@g{%gcc}
libdwarf, built with intel compiler, linked to libelf built with gcc
mvapich2 @B{fabrics=psm,mrail,sock} @g{%gcc}
mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
mvapich2, built with gcc compiler, with support for multiple fabrics
"""

View File

@@ -383,10 +383,8 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
query = " ".join(str(s) for s in args.constraint_specs)
msg = f"the constraint '{query}' matches multiple packages:\n"
for s in specs:
spec_fmt = (
"{hash:7} {name}{@version}{compiler_flags}{variants}"
"{arch=architecture} {%compiler}"
)
spec_fmt = "{hash:7} {name}{@version}{%compiler}"
spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
msg += "\t" + s.cformat(spec_fmt) + "\n"
tty.die(msg, "In this context exactly *one* match is needed.")

View File

@@ -6,9 +6,8 @@
import os
import re
import sys
import warnings
from itertools import islice, zip_longest
from typing import Callable, Dict, List, Optional
from typing import Dict, List, Optional
import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -17,9 +16,6 @@
import spack.paths
import spack.repo
import spack.util.git
import spack.util.spack_yaml
from spack.spec_parser import SPEC_TOKENIZER, SpecTokens
from spack.tokenize import Token
from spack.util.executable import Executable, which
description = "runs source code style checks on spack"
@@ -202,13 +198,6 @@ def setup_parser(subparser):
action="append",
help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
)
subparser.add_argument(
"--spec-strings",
action="store_true",
help="upgrade spec strings in Python, JSON and YAML files for compatibility with Spack "
"v1.0 and v0.x. Example: spack style --spec-strings $(git ls-files). Note: this flag "
"will be removed in Spack v1.0.",
)
subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
@@ -518,196 +507,7 @@ def _bootstrap_dev_dependencies():
spack.bootstrap.ensure_environment_dependencies()
IS_PROBABLY_COMPILER = re.compile(r"%[a-zA-Z_][a-zA-Z0-9\-]")
def _spec_str_reorder_compiler(idx: int, blocks: List[List[Token]]) -> None:
# only move the compiler to the back if it exists and is not already at the end
if not 0 <= idx < len(blocks) - 1:
return
# if there's only whitespace after the compiler, don't move it
if all(token.kind == SpecTokens.WS for block in blocks[idx + 1 :] for token in block):
return
# rotate left and always add at least one WS token between compiler and previous token
compiler_block = blocks.pop(idx)
if compiler_block[0].kind != SpecTokens.WS:
compiler_block.insert(0, Token(SpecTokens.WS, " "))
# delete the WS tokens from the new first block if it was at the very start, to prevent leading
# WS tokens.
while idx == 0 and blocks[0][0].kind == SpecTokens.WS:
blocks[0].pop(0)
blocks.append(compiler_block)
def _spec_str_format(spec_str: str) -> Optional[str]:
"""Given any string, try to parse as spec string, and rotate the compiler token to the end
of each spec instance. Returns the formatted string if it was changed, otherwise None."""
# We parse blocks of tokens that include leading whitespace, and move the compiler block to
# the end when we hit a dependency ^... or the end of a string.
# [@3.1][ +foo][ +bar][ %gcc@3.1][ +baz]
# [@3.1][ +foo][ +bar][ +baz][ %gcc@3.1]
current_block: List[Token] = []
blocks: List[List[Token]] = []
compiler_block_idx = -1
in_edge_attr = False
for token in SPEC_TOKENIZER.tokenize(spec_str):
if token.kind == SpecTokens.UNEXPECTED:
# parsing error, we cannot fix this string.
return None
elif token.kind in (SpecTokens.COMPILER, SpecTokens.COMPILER_AND_VERSION):
# multiple compilers are not supported in Spack v0.x, so early return
if compiler_block_idx != -1:
return None
current_block.append(token)
blocks.append(current_block)
current_block = []
compiler_block_idx = len(blocks) - 1
elif token.kind in (
SpecTokens.START_EDGE_PROPERTIES,
SpecTokens.DEPENDENCY,
SpecTokens.UNQUALIFIED_PACKAGE_NAME,
SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME,
):
_spec_str_reorder_compiler(compiler_block_idx, blocks)
compiler_block_idx = -1
if token.kind == SpecTokens.START_EDGE_PROPERTIES:
in_edge_attr = True
current_block.append(token)
blocks.append(current_block)
current_block = []
elif token.kind == SpecTokens.END_EDGE_PROPERTIES:
in_edge_attr = False
current_block.append(token)
blocks.append(current_block)
current_block = []
elif in_edge_attr:
current_block.append(token)
elif token.kind in (
SpecTokens.VERSION_HASH_PAIR,
SpecTokens.GIT_VERSION,
SpecTokens.VERSION,
SpecTokens.PROPAGATED_BOOL_VARIANT,
SpecTokens.BOOL_VARIANT,
SpecTokens.PROPAGATED_KEY_VALUE_PAIR,
SpecTokens.KEY_VALUE_PAIR,
SpecTokens.DAG_HASH,
):
current_block.append(token)
blocks.append(current_block)
current_block = []
elif token.kind == SpecTokens.WS:
current_block.append(token)
else:
raise ValueError(f"unexpected token {token}")
if current_block:
blocks.append(current_block)
_spec_str_reorder_compiler(compiler_block_idx, blocks)
new_spec_str = "".join(token.value for block in blocks for token in block)
return new_spec_str if spec_str != new_spec_str else None
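The intended effect of the rotation, shown on a hypothetical spec string (a usage sketch assuming _spec_str_format above is importable):

old = "foo@3.1 %gcc@11 +bar"
print(_spec_str_format(old))  # "foo@3.1 +bar %gcc@11" -- compiler moved to the end
print(_spec_str_format("foo+bar"))  # None -- string already in the target form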
SpecStrHandler = Callable[[str, int, int, str, str], None]
def _spec_str_default_handler(path: str, line: int, col: int, old: str, new: str):
"""A SpecStrHandler that prints formatted spec strings and their locations."""
print(f"{path}:{line}:{col}: `{old}` -> `{new}`")
def _spec_str_fix_handler(path: str, line: int, col: int, old: str, new: str):
"""A SpecStrHandler that updates formatted spec strings in files."""
with open(path, "r", encoding="utf-8") as f:
lines = f.readlines()
new_line = lines[line - 1].replace(old, new)
if new_line == lines[line - 1]:
tty.warn(f"{path}:{line}:{col}: could not apply fix: `{old}` -> `{new}`")
return
lines[line - 1] = new_line
print(f"{path}:{line}:{col}: fixed `{old}` -> `{new}`")
with open(path, "w", encoding="utf-8") as f:
f.writelines(lines)
def _spec_str_ast(path: str, tree: ast.AST, handler: SpecStrHandler) -> None:
"""Walk the AST of a Python file and apply handler to formatted spec strings."""
has_constant = sys.version_info >= (3, 8)
for node in ast.walk(tree):
if has_constant and isinstance(node, ast.Constant) and isinstance(node.value, str):
current_str = node.value
elif not has_constant and isinstance(node, ast.Str):
current_str = node.s
else:
continue
if not IS_PROBABLY_COMPILER.search(current_str):
continue
new = _spec_str_format(current_str)
if new is not None:
handler(path, node.lineno, node.col_offset, current_str, new)
def _spec_str_json_and_yaml(path: str, data: dict, handler: SpecStrHandler) -> None:
"""Walk a YAML or JSON data structure and apply handler to formatted spec strings."""
queue = [data]
seen = set()
while queue:
current = queue.pop(0)
if id(current) in seen:
continue
seen.add(id(current))
if isinstance(current, dict):
queue.extend(current.values())
queue.extend(current.keys())
elif isinstance(current, list):
queue.extend(current)
elif isinstance(current, str) and IS_PROBABLY_COMPILER.search(current):
new = _spec_str_format(current)
if new is not None:
mark = getattr(current, "_start_mark", None)
if mark:
line, col = mark.line + 1, mark.column + 1
else:
line, col = 0, 0
handler(path, line, col, current, new)
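A quick illustration of the traversal above on plain Python data, using the default print handler (the data is hypothetical; plain strings carry no YAML marks, so line and column fall back to 0):

data = {"specs": ["hdf5@1.14 %gcc@12 +mpi"], "packages": {"all": {}}}
_spec_str_json_and_yaml("spack.yaml", data, _spec_str_default_handler)
# prints: spack.yaml:0:0: `hdf5@1.14 %gcc@12 +mpi` -> `hdf5@1.14 +mpi %gcc@12`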
def _check_spec_strings(
paths: List[str], handler: SpecStrHandler = _spec_str_default_handler
) -> None:
"""Open Python, JSON and YAML files, and format their string literals that look like spec
strings. A handler is called for each formatting, which can be used to print or apply fixes."""
for path in paths:
is_json_or_yaml = path.endswith(".json") or path.endswith(".yaml") or path.endswith(".yml")
is_python = path.endswith(".py")
if not is_json_or_yaml and not is_python:
continue
try:
with open(path, "r", encoding="utf-8") as f:
# skip files that are likely too large to be user code or config
if os.fstat(f.fileno()).st_size > 1024 * 1024:
warnings.warn(f"skipping {path}: too large.")
continue
if is_json_or_yaml:
_spec_str_json_and_yaml(path, spack.util.spack_yaml.load_config(f), handler)
elif is_python:
_spec_str_ast(path, ast.parse(f.read()), handler)
except (OSError, spack.util.spack_yaml.SpackYAMLError, SyntaxError, ValueError):
warnings.warn(f"skipping {path}")
continue
def style(parser, args):
if args.spec_strings:
if not args.files:
tty.die("No files provided to check spec strings.")
handler = _spec_str_fix_handler if args.fix else _spec_str_default_handler
return _check_spec_strings(args.files, handler)
# save initial working directory for relativizing paths later
args.initial_working_dir = os.getcwd()

View File

@@ -17,7 +17,6 @@
pytest = None # type: ignore
import llnl.util.filesystem
import llnl.util.tty as tty
import llnl.util.tty.color as color
from llnl.util.tty.colify import colify
@@ -217,7 +216,7 @@ def unit_test(parser, args, unknown_args):
# Ensure clingo is available before switching to the
# mock configuration used by unit tests
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()
spack.bootstrap.ensure_core_dependencies()
if pytest is None:
spack.bootstrap.ensure_environment_dependencies()
import pytest
@@ -237,12 +236,6 @@ def unit_test(parser, args, unknown_args):
pytest_root = spack.extensions.load_extension(args.extension)
if args.numprocesses is not None and args.numprocesses > 1:
try:
import xdist # noqa: F401
except ImportError:
tty.error("parallel unit-test requires pytest-xdist module")
return 1
pytest_args.extend(
[
"--dist",

View File

@@ -2,48 +2,35 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import io
from typing import List, Optional
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import visit_directory_tree
import spack.cmd
import spack.environment as ev
import spack.spec
import spack.store
import spack.verify
import spack.verify_libraries
from spack.cmd.common import arguments
description = "verify spack installations on disk"
description = "check that all spack packages are on disk as installed"
section = "admin"
level = "long"
MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None
def setup_parser(subparser):
setup_parser.parser = subparser
def setup_parser(subparser: argparse.ArgumentParser):
global MANIFEST_SUBPARSER
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command")
MANIFEST_SUBPARSER = sp.add_parser(
"manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__
)
MANIFEST_SUBPARSER.add_argument(
subparser.add_argument(
"-l", "--local", action="store_true", help="verify only locally installed packages"
)
MANIFEST_SUBPARSER.add_argument(
subparser.add_argument(
"-j", "--json", action="store_true", help="ouptut json-formatted errors"
)
MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages")
MANIFEST_SUBPARSER.add_argument(
subparser.add_argument("-a", "--all", action="store_true", help="verify all packages")
subparser.add_argument(
"specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify"
)
manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group()
manifest_sp_type.add_argument(
type = subparser.add_mutually_exclusive_group()
type.add_argument(
"-s",
"--specs",
action="store_const",
@@ -52,7 +39,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
default="specs",
help="treat entries as specs (default)",
)
manifest_sp_type.add_argument(
type.add_argument(
"-f",
"--files",
action="store_const",
@@ -62,67 +49,14 @@ def setup_parser(subparser: argparse.ArgumentParser):
help="treat entries as absolute filenames\n\ncannot be used with '-a'",
)
libraries_subparser = sp.add_parser(
"libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
)
arguments.add_common_arguments(libraries_subparser, ["constraint"])
def verify(parser, args):
cmd = args.verify_command
if cmd == "libraries":
return verify_libraries(args)
elif cmd == "manifest":
return verify_manifest(args)
parser.error("invalid verify subcommand")
def verify_libraries(args):
"""verify that shared libraries of install packages can be located in rpaths (Linux only)"""
specs_from_db = [s for s in args.specs(installed=True) if not s.external]
tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")
errors = 0
for spec in specs_from_db:
try:
pkg = spec.package
except Exception:
tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
if error_msg is not None:
errors += 1
tty.error(error_msg)
if errors:
tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
return 1
def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
"""Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
[*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
)
visit_directory_tree(spec.prefix, visitor)
if not visitor.problems:
return None
output = io.StringIO()
visitor.write(output, indent=4, brief=True)
message = output.getvalue().rstrip()
return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}"
def verify_manifest(args):
"""verify that install directories have not been modified since installation"""
local = args.local
if args.type == "files":
if args.all:
MANIFEST_SUBPARSER.error("cannot use --all with --files")
setup_parser.parser.print_help()
return 1
for file in args.specs_or_files:
results = spack.verify.check_file_manifest(file)
@@ -153,7 +87,8 @@ def verify_manifest(args):
env = ev.active_environment()
specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
else:
MANIFEST_SUBPARSER.error("use --all or specify specs to verify")
setup_parser.parser.print_help()
return 1
for spec in specs:
tty.debug("Verifying package %s")

View File

@@ -32,10 +32,9 @@
import copy
import functools
import os
import os.path
import re
import sys
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Tuple, Union
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
import jsonschema
@@ -43,6 +42,7 @@
import spack.error
import spack.paths
import spack.platforms
import spack.schema
import spack.schema.bootstrap
import spack.schema.cdash
@@ -54,20 +54,17 @@
import spack.schema.develop
import spack.schema.env
import spack.schema.env_vars
import spack.schema.include
import spack.schema.merged
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
import spack.schema.repos
import spack.schema.upstreams
import spack.schema.view
import spack.util.remote_file_cache as rfc_util
import spack.util.spack_yaml as syaml
from spack.util.cpus import cpus_available
from spack.util.spack_yaml import get_mark_from_yaml_data
from .enums import ConfigScopePriority
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
import spack.util.web as web_util
from spack.util.cpus import cpus_available
#: Dict from section names -> schema for that section
SECTION_SCHEMAS: Dict[str, Any] = {
@@ -75,7 +72,6 @@
"concretizer": spack.schema.concretizer.schema,
"definitions": spack.schema.definitions.schema,
"env_vars": spack.schema.env_vars.schema,
"include": spack.schema.include.schema,
"view": spack.schema.view.schema,
"develop": spack.schema.develop.schema,
"mirrors": spack.schema.mirrors.schema,
@@ -123,17 +119,6 @@
#: Type used for raw YAML configuration
YamlConfigDict = Dict[str, Any]
#: prefix for name of included configuration scopes
INCLUDE_SCOPE_PREFIX = "include"
#: safeguard for recursive includes -- maximum include depth
MAX_RECURSIVE_INCLUDES = 100
def _include_cache_location():
"""Location to cache included configuration files."""
return os.path.join(spack.paths.user_cache_path, "includes")
class ConfigScope:
def __init__(self, name: str) -> None:
@@ -141,25 +126,6 @@ def __init__(self, name: str) -> None:
self.writable = False
self.sections = syaml.syaml_dict()
#: names of any included scopes
self._included_scopes: Optional[List["ConfigScope"]] = None
@property
def included_scopes(self) -> List["ConfigScope"]:
"""Memoized list of included scopes, in the order they appear in this scope."""
if self._included_scopes is None:
self._included_scopes = []
includes = self.get_section("include")
if includes:
include_paths = [included_path(data) for data in includes["include"]]
for path in include_paths:
included_scope = include_path_scope(path)
if included_scope:
self._included_scopes.append(included_scope)
return self._included_scopes
def get_section_filename(self, section: str) -> str:
raise NotImplementedError
@@ -442,18 +408,26 @@ def _method(self, *args, **kwargs):
return _method
ScopeWithOptionalPriority = Union[ConfigScope, Tuple[int, ConfigScope]]
ScopeWithPriority = Tuple[int, ConfigScope]
class Configuration:
"""A hierarchical configuration, merging a number of scopes at different priorities."""
"""A full Spack configuration, from a hierarchy of config files.
This class makes it easy to add a new scope on top of an existing one.
"""
# convert to typing.OrderedDict when we drop 3.6, or OrderedDict when we reach 3.9
scopes: lang.PriorityOrderedMapping[str, ConfigScope]
scopes: Dict[str, ConfigScope]
def __init__(self) -> None:
self.scopes = lang.PriorityOrderedMapping()
def __init__(self, *scopes: ConfigScope) -> None:
"""Initialize a configuration with an initial list of scopes.
Args:
scopes: list of scopes to add to this
Configuration, ordered from lowest to highest precedence
"""
self.scopes = collections.OrderedDict()
for scope in scopes:
self.push_scope(scope)
self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
def ensure_unwrapped(self) -> "Configuration":
@@ -461,59 +435,36 @@ def ensure_unwrapped(self) -> "Configuration":
return self
def highest(self) -> ConfigScope:
"""Scope with the highest precedence"""
return next(self.scopes.reversed_values()) # type: ignore
"""Scope with highest precedence"""
return next(reversed(self.scopes.values())) # type: ignore
@_config_mutator
def push_scope(
self, scope: ConfigScope, priority: Optional[int] = None, _depth: int = 0
) -> None:
"""Adds a scope to the Configuration, at a given priority.
def ensure_scope_ordering(self):
"""Ensure that scope order matches documented precedent"""
# FIXME: We also need to consider that custom configurations and other orderings
# may not be preserved correctly
if "command_line" in self.scopes:
# TODO (when dropping python 3.6): self.scopes.move_to_end
self.scopes["command_line"] = self.remove_scope("command_line")
If a priority is not given, it is assumed to be the current highest priority.
@_config_mutator
def push_scope(self, scope: ConfigScope) -> None:
"""Add a higher precedence scope to the Configuration."""
tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}", level=2)
self.scopes[scope.name] = scope
Args:
scope: scope to be added
priority: priority of the scope
"""
# TODO: As a follow on to #48784, change this to create a graph of the
# TODO: includes AND ensure properly sorted such that the order included
# TODO: at the highest level is reflected in the value of an option that
# TODO: is set in multiple included files.
# before pushing the scope itself, push any included scopes recursively, at same priority
for included_scope in reversed(scope.included_scopes):
if _depth + 1 > MAX_RECURSIVE_INCLUDES: # make sure we're not recursing endlessly
mark = ""
if hasattr(included_scope, "path") and syaml.marked(included_scope.path):
mark = included_scope.path._start_mark # type: ignore
raise RecursiveIncludeError(
f"Maximum include recursion exceeded in {included_scope.name}", str(mark)
)
# record this inclusion so that remove_scope() can use it
self.push_scope(included_scope, priority=priority, _depth=_depth + 1)
tty.debug(f"[CONFIGURATION: PUSH SCOPE]: {str(scope)}, priority={priority}", level=2)
self.scopes.add(scope.name, value=scope, priority=priority)
@_config_mutator
def pop_scope(self) -> ConfigScope:
"""Remove the highest precedence scope and return it."""
name, scope = self.scopes.popitem(last=True) # type: ignore[call-arg]
tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
return scope
@_config_mutator
def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
"""Removes a scope by name, and returns it. If the scope does not exist, returns None."""
try:
scope = self.scopes.remove(scope_name)
tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {str(scope)}", level=2)
except KeyError as e:
tty.debug(f"[CONFIGURATION: REMOVE SCOPE]: {e}", level=2)
return None
# transitively remove included scopes
for included_scope in scope.included_scopes:
assert (
included_scope.name in self.scopes
), f"Included scope '{included_scope.name}' was never added to configuration!"
self.remove_scope(included_scope.name)
"""Remove scope by name; has no effect when ``scope_name`` does not exist"""
scope = self.scopes.pop(scope_name, None)
tty.debug(f"[CONFIGURATION: POP SCOPE]: {str(scope)}", level=2)
return scope
@property
@@ -522,13 +473,15 @@ def writable_scopes(self) -> Generator[ConfigScope, None, None]:
return (s for s in self.scopes.values() if s.writable)
def highest_precedence_scope(self) -> ConfigScope:
"""Writable scope with the highest precedence."""
return next(s for s in self.scopes.reversed_values() if s.writable)
"""Writable scope with highest precedence."""
return next(s for s in reversed(self.scopes.values()) if s.writable) # type: ignore
def highest_precedence_non_platform_scope(self) -> ConfigScope:
"""Writable non-platform scope with the highest precedence"""
"""Writable non-platform scope with highest precedence"""
return next(
s for s in self.scopes.reversed_values() if s.writable and not s.is_platform_dependent
s
for s in reversed(self.scopes.values()) # type: ignore
if s.writable and not s.is_platform_dependent
)
def matching_scopes(self, reg_expr) -> List[ConfigScope]:
@@ -795,7 +748,7 @@ def override(
"""
if isinstance(path_or_scope, ConfigScope):
overrides = path_or_scope
CONFIG.push_scope(path_or_scope, priority=None)
CONFIG.push_scope(path_or_scope)
else:
base_name = _OVERRIDES_BASE_NAME
# Ensure the new override gets a unique scope name
@@ -809,7 +762,7 @@ def override(
break
overrides = InternalConfigScope(scope_name)
CONFIG.push_scope(overrides, priority=None)
CONFIG.push_scope(overrides)
CONFIG.set(path_or_scope, value, scope=scope_name)
try:
@@ -819,86 +772,13 @@ def override(
assert scope is overrides
def _add_platform_scope(
cfg: Configuration, name: str, path: str, priority: ConfigScopePriority, writable: bool = True
) -> None:
def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
"""Add a platform-specific subdirectory for the current platform."""
import spack.platforms # circular dependency
platform = spack.platforms.host().name
scope = DirectoryConfigScope(
f"{name}/{platform}", os.path.join(path, platform), writable=writable
)
cfg.push_scope(scope, priority=priority)
#: An included configuration path whose relevance may be conditioned on a
#: limited python expression that evaluates to a boolean; the path may also
#: be explicitly marked as optional.
class IncludePath(NamedTuple):
path: str
when: str
sha256: str
optional: bool
def included_path(entry: Union[str, dict]) -> IncludePath:
"""Convert the included path entry into an IncludePath.
Args:
entry: include configuration entry
Returns: converted entry, where an empty ``when`` means the path is
not conditionally included
"""
if isinstance(entry, str):
return IncludePath(path=entry, sha256="", when="", optional=False)
path = entry["path"]
sha256 = entry.get("sha256", "")
when = entry.get("when", "")
optional = entry.get("optional", False)
return IncludePath(path=path, sha256=sha256, when=when, optional=optional)
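For example, string and dictionary entries normalize to the same tuple shape (hypothetical include entries):

print(included_path("scopes/local.yaml"))
# IncludePath(path='scopes/local.yaml', when='', sha256='', optional=False)
print(included_path({"path": "https://example.com/cfg.yaml", "optional": True}))
# IncludePath(path='https://example.com/cfg.yaml', when='', sha256='', optional=True)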
def include_path_scope(include: IncludePath) -> Optional[ConfigScope]:
"""Instantiate an appropriate configuration scope for the given path.
Args:
include: optional include path
Returns: configuration scope
Raises:
ValueError: the included path has an unsupported URL scheme, is required
but does not exist, or the configuration stage directory argument is missing
ConfigFileError: unable to access remote configuration file(s)
"""
# circular dependencies
import spack.spec
if (not include.when) or spack.spec.eval_conditional(include.when):
config_path = rfc_util.local_path(include.path, include.sha256, _include_cache_location)
if not config_path:
raise ConfigFileError(f"Unable to fetch remote configuration from {include.path}")
if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
config_name = f"{INCLUDE_SCOPE_PREFIX}:{os.path.basename(config_path)}"
tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
return DirectoryConfigScope(config_name, config_path)
if os.path.exists(config_path):
# files are assumed to be SingleFileScopes
config_name = f"{INCLUDE_SCOPE_PREFIX}:{config_path}"
tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
return SingleFileScope(config_name, config_path, spack.schema.merged.schema)
if not include.optional:
path = f" at ({config_path})" if config_path != include.path else ""
raise ValueError(f"Required path ({include.path}) does not exist{path}")
return None
cfg.push_scope(scope)
def config_paths_from_entry_points() -> List[Tuple[str, str]]:
@@ -926,17 +806,18 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
return config_paths
def create_incremental() -> Generator[Configuration, None, None]:
def create() -> Configuration:
"""Singleton Configuration instance.
This constructs one instance associated with this module and returns
it. It is bundled inside a function so that configuration can be
initialized lazily.
"""
cfg = Configuration()
# first do the builtin, hardcoded defaults
cfg = create_from(
(ConfigScopePriority.BUILTIN, InternalConfigScope("_builtin", CONFIG_DEFAULTS))
)
builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
cfg.push_scope(builtin)
# Builtin paths to configuration files in Spack
configuration_paths = [
@@ -966,29 +847,16 @@ def create_incremental() -> Generator[Configuration, None, None]:
# add each scope and its platform-specific directory
for name, path in configuration_paths:
cfg.push_scope(DirectoryConfigScope(name, path), priority=ConfigScopePriority.CONFIG_FILES)
# Each scope can have per-platform overrides in subdirectories
_add_platform_scope(cfg, name, path, priority=ConfigScopePriority.CONFIG_FILES)
cfg.push_scope(DirectoryConfigScope(name, path))
# yield the config incrementally so that each config level's init code can get
# data from the one below. This can be tricky, but it enables us to have a
# single unified config system.
#
# TODO: think about whether we want to restrict what types of config can be used
# at each level. e.g., we may want to just more forcibly disallow remote
# config (which uses ssl and other config options) for some of the scopes,
# to make the bootstrap issues more explicit, even if allowing config scope
# init to reference lower scopes is more flexible.
yield cfg
# Each scope can have per-platform overrides in subdirectories
_add_platform_scope(cfg, name, path)
def create() -> Configuration:
"""Create a configuration using create_incremental(), return the last yielded result."""
return list(create_incremental())[-1]
return cfg
#: This is the singleton configuration instance for Spack.
CONFIG: Configuration = lang.Singleton(create_incremental) # type: ignore
CONFIG: Configuration = lang.Singleton(create) # type: ignore
def add_from_file(filename: str, scope: Optional[str] = None) -> None:
@@ -1084,11 +952,10 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
Accepts the path syntax described in ``get()``.
"""
result = CONFIG.set(path, value, scope)
return result
return CONFIG.set(path, value, scope)
def scopes() -> lang.PriorityOrderedMapping[str, ConfigScope]:
def scopes() -> Dict[str, ConfigScope]:
"""Convenience function to get list of configuration scopes."""
return CONFIG.scopes
@@ -1542,7 +1409,7 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
@contextlib.contextmanager
def use_configuration(
*scopes_or_paths: Union[ScopeWithOptionalPriority, str]
*scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]:
"""Use the configuration scopes passed as arguments within the context manager.
@@ -1557,7 +1424,7 @@ def use_configuration(
global CONFIG
# Normalize input and construct a Configuration object
configuration = create_from(*scopes_or_paths)
configuration = _config_from(scopes_or_paths)
CONFIG.clear_caches(), configuration.clear_caches()
saved_config, CONFIG = CONFIG, configuration
@@ -1568,44 +1435,137 @@ def use_configuration(
CONFIG = saved_config
def _normalize_input(entry: Union[ScopeWithOptionalPriority, str]) -> ScopeWithPriority:
if isinstance(entry, tuple):
return entry
default_priority = ConfigScopePriority.CONFIG_FILES
if isinstance(entry, ConfigScope):
return default_priority, entry
# Otherwise we need to construct it
path = os.path.normpath(entry)
assert os.path.isdir(path), f'"{path}" must be a directory'
name = os.path.basename(path)
return default_priority, DirectoryConfigScope(name, path)
@lang.memoized
def create_from(*scopes_or_paths: Union[ScopeWithOptionalPriority, str]) -> Configuration:
"""Creates a configuration object from the scopes passed in input.
def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuration:
scopes = []
for scope_or_path in scopes_or_paths:
# If we have a config scope we are already done
if isinstance(scope_or_path, ConfigScope):
scopes.append(scope_or_path)
continue
# Otherwise we need to construct it
path = os.path.normpath(scope_or_path)
assert os.path.isdir(path), f'"{path}" must be a directory'
name = os.path.basename(path)
scopes.append(DirectoryConfigScope(name, path))
configuration = Configuration(*scopes)
return configuration
def raw_github_gitlab_url(url: str) -> str:
"""Transform a github URL to the raw form to avoid undesirable html.
Args:
*scopes_or_paths: either a tuple of (priority, ConfigScope), or a ConfigScope, or a string
If priority is not given, it is assumed to be ConfigScopePriority.CONFIG_FILES. If a
string is given, a DirectoryConfigScope is created from it.
url: url to be converted to raw form
Examples:
>>> builtin_scope = InternalConfigScope("_builtin", {"config": {"build_jobs": 1}})
>>> cl_scope = InternalConfigScope("command_line", {"config": {"build_jobs": 10}})
>>> cfg = create_from(
... (ConfigScopePriority.COMMAND_LINE, cl_scope),
... (ConfigScopePriority.BUILTIN, builtin_scope)
... )
Returns:
Raw github/gitlab url or the original url
"""
scopes_with_priority = [_normalize_input(x) for x in scopes_or_paths]
result = Configuration()
for priority, scope in scopes_with_priority:
result.push_scope(scope, priority=priority)
return result
# Note we rely on GitHub to redirect the 'raw' URL returned here to the
# actual URL under https://raw.githubusercontent.com/ with '/blob'
# removed, and/or '/blame' if needed.
if "github" in url or "gitlab" in url:
return url.replace("/blob/", "/raw/")
return url
def collect_urls(base_url: str) -> list:
"""Return a list of configuration URLs.
Arguments:
base_url: URL for a configuration (yaml) file or a directory
containing yaml file(s)
Returns:
List of configuration file(s) or empty list if none
"""
if not base_url:
return []
extension = ".yaml"
if base_url.endswith(extension):
return [base_url]
# Collect configuration URLs if the base_url is a "directory".
_, links = web_util.spider(base_url, 0)
return [link for link in links if link.endswith(extension)]
def fetch_remote_configs(url: str, dest_dir: str, skip_existing: bool = True) -> str:
"""Retrieve configuration file(s) at the specified URL.
Arguments:
url: URL for a configuration (yaml) file or a directory containing
yaml file(s)
dest_dir: destination directory
skip_existing: Skip files that already exist in dest_dir if
``True``; otherwise, replace those files
Returns:
Path to the corresponding file if the URL is or contains a
single file and it is the only file in the destination directory,
or the root (dest_dir) directory if multiple configuration files
exist or are retrieved.
"""
def _fetch_file(url):
raw = raw_github_gitlab_url(url)
tty.debug(f"Reading config from url {raw}")
return web_util.fetch_url_text(raw, dest_dir=dest_dir)
if not url:
raise ConfigFileError("Cannot retrieve configuration without a URL")
# Return the local path to the cached configuration file OR to the
# directory containing the cached configuration files.
config_links = collect_urls(url)
existing_files = os.listdir(dest_dir) if os.path.isdir(dest_dir) else []
paths = []
for config_url in config_links:
basename = os.path.basename(config_url)
if skip_existing and basename in existing_files:
tty.warn(
f"Will not fetch configuration from {config_url} since a "
f"version already exists in {dest_dir}"
)
path = os.path.join(dest_dir, basename)
else:
path = _fetch_file(config_url)
if path:
paths.append(path)
if paths:
return dest_dir if len(paths) > 1 else paths[0]
raise ConfigFileError(f"Cannot retrieve configuration (yaml) from {url}")
def get_mark_from_yaml_data(obj):
"""Try to get ``spack.util.spack_yaml`` mark from YAML data.
We try the object, and if that fails we try its first member (if it's a container).
Returns:
mark if one is found, otherwise None.
"""
# mark of the object itself
mark = getattr(obj, "_start_mark", None)
if mark:
return mark
# mark of first member if it is a container
if isinstance(obj, (list, dict)):
first_member = next(iter(obj), None)
if first_member:
mark = getattr(first_member, "_start_mark", None)
return mark
def determine_number_of_jobs(
@@ -1712,7 +1672,3 @@ def get_path(path, data):
# give up and return None if nothing worked
return None
class RecursiveIncludeError(spack.error.SpackError):
"""Too many levels of recursive includes."""

View File

@@ -1126,7 +1126,7 @@ def _add(
installation_time:
Date and time of installation
allow_missing: if True, don't warn when installation is not found on disk
This is useful when installing specs without build/test deps.
This is useful when installing specs without build deps.
"""
if not spec.concrete:
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
@@ -1146,8 +1146,10 @@ def _add(
edge.spec,
explicit=False,
installation_time=installation_time,
# allow missing build / test only deps
allow_missing=allow_missing or edge.depflag & (dt.BUILD | dt.TEST) == edge.depflag,
# allow missing build-only deps. This prevents excessive warnings when a spec is
# installed, and its build dep is missing a build dep; there's no need to install
# the build dep's build dep first, and there's no need to warn about it missing.
allow_missing=allow_missing or edge.depflag == dt.BUILD,
)
# Make sure the directory layout agrees whether the spec is installed

View File

@@ -310,7 +310,7 @@ def find_windows_kit_roots() -> List[str]:
@staticmethod
def find_windows_kit_bin_paths(
kit_base: Union[Optional[str], Optional[list]] = None,
kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]:
"""Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
@@ -325,7 +325,7 @@ def find_windows_kit_bin_paths(
@staticmethod
def find_windows_kit_lib_paths(
kit_base: Union[Optional[str], Optional[list]] = None,
kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]:
"""Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base

View File

@@ -7,7 +7,6 @@
import collections
import concurrent.futures
import os
import pathlib
import re
import sys
import traceback
@@ -16,7 +15,6 @@
import llnl.util.filesystem
import llnl.util.lang
import llnl.util.symlink
import llnl.util.tty
import spack.error
@@ -72,21 +70,13 @@ def dedupe_paths(paths: List[str]) -> List[str]:
"""Deduplicate paths based on inode and device number. In case the list contains first a
symlink and then the directory it points to, the symlink is replaced with the directory path.
This ensures that we pick for example ``/usr/bin`` over ``/bin`` if the latter is a symlink to
the former."""
the former."""
seen: Dict[Tuple[int, int], str] = {}
linked_parent_check = lambda x: any(
[llnl.util.symlink.islink(str(y)) for y in pathlib.Path(x).parents]
)
for path in paths:
identifier = file_identifier(path)
if identifier not in seen:
seen[identifier] = path
# we also want to deprioritize paths if they contain a symlink in any parent
# (not just the basedir): e.g. oneapi has "latest/bin",
# where "latest" is a symlink to 2025.0"
elif not (llnl.util.symlink.islink(path) or linked_parent_check(path)):
elif not os.path.islink(path):
seen[identifier] = path
return list(seen.values())
@@ -253,7 +243,7 @@ def prefix_from_path(self, *, path: str) -> str:
raise NotImplementedError("must be implemented by derived classes")
def detect_specs(
self, *, pkg: Type["spack.package_base.PackageBase"], paths: Iterable[str]
self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
) -> List["spack.spec.Spec"]:
"""Given a list of files matching the search patterns, returns a list of detected specs.

View File

@@ -462,7 +462,8 @@ def _execute_extends(pkg):
if dep_spec.name == "python" and not pkg.name == "python-venv":
_depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))
pkg.extendees[dep_spec.name] = (dep_spec, when_spec)
# TODO: the values of the extendees dictionary are not used. Remove in next refactor.
pkg.extendees[dep_spec.name] = (dep_spec, None)
return _execute_extends
@@ -567,7 +568,7 @@ def patch(
"""
def _execute_patch(
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency],
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency]
) -> None:
pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep

View File

@@ -25,7 +25,7 @@
}
def _check_concrete(spec: "spack.spec.Spec") -> None:
def _check_concrete(spec):
"""If the spec is not concrete, raise a ValueError"""
if not spec.concrete:
raise ValueError("Specs passed to a DirectoryLayout must be concrete!")
@@ -51,7 +51,7 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:
spec = _get_spec(prefix)
if spec:
spec.set_prefix(prefix)
spec.prefix = prefix
specs.append(spec)
continue
@@ -84,7 +84,7 @@ class DirectoryLayout:
def __init__(
self,
root: str,
root,
*,
projections: Optional[Dict[str, str]] = None,
hash_length: Optional[int] = None,
@@ -120,17 +120,17 @@ def __init__(
self.manifest_file_name = "install_manifest.json"
@property
def hidden_file_regexes(self) -> Tuple[str]:
def hidden_file_regexes(self):
return ("^{0}$".format(re.escape(self.metadata_dir)),)
def relative_path_for_spec(self, spec: "spack.spec.Spec") -> str:
def relative_path_for_spec(self, spec):
_check_concrete(spec)
projection = spack.projections.get_projection(self.projections, spec)
path = spec.format_path(projection)
return str(Path(path))
def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, "w", encoding="utf-8") as f:
@@ -138,7 +138,7 @@ def write_spec(self, spec: "spack.spec.Spec", path: str) -> None:
# the full provenance, so it's available if we want it later
spec.to_json(f, hash=ht.dag_hash)
def write_host_environment(self, spec: "spack.spec.Spec") -> None:
def write_host_environment(self, spec):
"""The host environment is a json file with os, kernel, and spack
versioning. We use it in the case that an analysis later needs to
easily access this information.
@@ -148,7 +148,7 @@ def write_host_environment(self, spec: "spack.spec.Spec") -> None:
with open(env_file, "w", encoding="utf-8") as fd:
sjson.dump(environ, fd)
def read_spec(self, path: str) -> "spack.spec.Spec":
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
try:
with open(path, encoding="utf-8") as f:
@@ -159,28 +159,26 @@ def read_spec(self, path: str) -> "spack.spec.Spec":
# Too late for conversion; spec_file_path() already called.
spec = spack.spec.Spec.from_yaml(f)
else:
raise SpecReadError(f"Did not recognize spec file extension: {extension}")
raise SpecReadError(
"Did not recognize spec file extension:" " {0}".format(extension)
)
except Exception as e:
if spack.config.get("config:debug"):
raise
raise SpecReadError(f"Unable to read file: {path}", f"Cause: {e}")
raise SpecReadError("Unable to read file: %s" % path, "Cause: " + str(e))
# Specs read from actual installations are always concrete
spec._mark_concrete()
return spec
def spec_file_path(self, spec: "spack.spec.Spec") -> str:
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
return yaml_path if os.path.exists(yaml_path) else json_path
def deprecated_file_path(
self,
deprecated_spec: "spack.spec.Spec",
deprecator_spec: Optional["spack.spec.Spec"] = None,
) -> str:
def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
"""Gets full path to spec file for deprecated spec
If the deprecator_spec is provided, use that. Otherwise, assume
@@ -214,16 +212,16 @@ def deprecated_file_path(
return yaml_path if os.path.exists(yaml_path) else json_path
def metadata_path(self, spec: "spack.spec.Spec") -> str:
def metadata_path(self, spec):
return os.path.join(spec.prefix, self.metadata_dir)
def env_metadata_path(self, spec: "spack.spec.Spec") -> str:
def env_metadata_path(self, spec):
return os.path.join(self.metadata_path(spec), "install_environment.json")
def build_packages_path(self, spec: "spack.spec.Spec") -> str:
def build_packages_path(self, spec):
return os.path.join(self.metadata_path(spec), self.packages_dir)
def create_install_directory(self, spec: "spack.spec.Spec") -> None:
def create_install_directory(self, spec):
_check_concrete(spec)
# Create install directory with properly configured permissions
@@ -241,7 +239,7 @@ def create_install_directory(self, spec: "spack.spec.Spec") -> None:
self.write_spec(spec, self.spec_file_path(spec))
def ensure_installed(self, spec: "spack.spec.Spec") -> None:
def ensure_installed(self, spec):
"""
Throws InconsistentInstallDirectoryError if:
1. spec prefix does not exist
@@ -268,7 +266,7 @@ def ensure_installed(self, spec: "spack.spec.Spec") -> None:
"Spec file in %s does not match hash!" % spec_file_path
)
def path_for_spec(self, spec: "spack.spec.Spec") -> str:
def path_for_spec(self, spec):
"""Return absolute path from the root to a directory for the spec."""
_check_concrete(spec)
@@ -279,13 +277,23 @@ def path_for_spec(self, spec: "spack.spec.Spec") -> str:
assert not path.startswith(self.root)
return os.path.join(self.root, path)
def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = False) -> None:
def remove_install_directory(self, spec, deprecated=False):
"""Removes a prefix and any empty parent directories from the root.
Raises RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
assert path.startswith(self.root)
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
if sys.platform == "win32":
kwargs = {
"ignore_errors": False,
"onerror": fs.readonly_file_handler(ignore_errors=False),
}
else:
kwargs = {} # the default value for ignore_errors is false
if deprecated:
if os.path.exists(path):
try:
@@ -296,16 +304,7 @@ def remove_install_directory(self, spec: "spack.spec.Spec", deprecated: bool = F
raise RemoveFailedError(spec, path, e) from e
elif os.path.exists(path):
try:
if sys.platform == "win32":
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
shutil.rmtree(
path,
ignore_errors=False,
onerror=fs.readonly_file_handler(ignore_errors=False),
)
else:
shutil.rmtree(path)
shutil.rmtree(path, **kwargs)
except OSError as e:
raise RemoveFailedError(spec, path, e) from e

View File

@@ -12,13 +12,3 @@ class InstallRecordStatus(enum.Flag):
DEPRECATED = enum.auto()
MISSING = enum.auto()
ANY = INSTALLED | DEPRECATED | MISSING
class ConfigScopePriority(enum.IntEnum):
"""Priorities of the different kind of config scopes used by Spack"""
BUILTIN = 0
CONFIG_FILES = 1
CUSTOM = 2
ENVIRONMENT = 3
COMMAND_LINE = 4

View File

@@ -166,7 +166,7 @@ def __init__(
" ".join(self._install_target(s.safe_name()) for s in item.prereqs),
item.target.spec_hash(),
item.target.unsafe_format(
"{name}{@version}{variants}{ arch=architecture} {%compiler}"
"{name}{@version}{%compiler}{variants}{arch=architecture}"
),
item.buildcache_flag,
)

View File

@@ -10,6 +10,8 @@
import re
import shutil
import stat
import urllib.parse
import urllib.request
import warnings
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union
@@ -30,6 +32,7 @@
import spack.paths
import spack.repo
import spack.schema.env
import spack.schema.merged
import spack.spec
import spack.spec_list
import spack.store
@@ -40,6 +43,7 @@
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url
from spack import traverse
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
@@ -47,8 +51,6 @@
from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables
from ..enums import ConfigScopePriority
SpecPair = spack.concretize.SpecPair
#: environment variable used to indicate the active environment
@@ -385,7 +387,6 @@ def create_in_dir(
# dev paths in this environment to refer to their original
# locations.
_rewrite_relative_dev_paths_on_relocation(env, init_file_dir)
_rewrite_relative_repos_paths_on_relocation(env, init_file_dir)
return env
@@ -402,8 +403,8 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
dev_path = substitute_path_variables(entry["path"])
expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)
# Skip if the substituted and expanded path is the same (e.g. when absolute)
if entry["path"] == expanded_path:
# Skip if the expanded path is the same (e.g. when absolute)
if dev_path == expanded_path:
continue
tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))
@@ -418,34 +419,6 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
env._re_read()
def _rewrite_relative_repos_paths_on_relocation(env, init_file_dir):
"""When initializing the environment from a manifest file and we plan
to store the environment in a different directory, we have to rewrite
relative repo paths to absolute ones and expand environment variables."""
with env:
repos_specs = spack.config.get("repos", default={}, scope=env.scope_name)
if not repos_specs:
return
for i, entry in enumerate(repos_specs):
repo_path = substitute_path_variables(entry)
expanded_path = spack.util.path.canonicalize_path(repo_path, default_wd=init_file_dir)
# Skip if the substituted and expanded path is the same (e.g. when absolute)
if entry == expanded_path:
continue
tty.debug("Expanding repo path for {0} to {1}".format(entry, expanded_path))
repos_specs[i] = expanded_path
spack.config.set("repos", repos_specs, scope=env.scope_name)
env.repos_specs = None
# If we changed the environment's spack.yaml scope, that will not be reflected
# in the manifest that we read
env._re_read()
def environment_dir_from_name(name: str, exists_ok: bool = True) -> str:
"""Returns the directory associated with a named environment.
@@ -573,6 +546,13 @@ def _write_yaml(data, str_or_file):
syaml.dump_config(data, str_or_file, default_flow_style=False)
def _eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
valid_variables = spack.spec.get_host_environment()
valid_variables.update({"re": re, "env": os.environ})
return eval(string, valid_variables)
def _is_dev_spec_and_has_changed(spec):
"""Check if the passed spec is a dev build and whether it has changed since the
last installation"""
@@ -1005,7 +985,7 @@ def _process_definition(self, entry):
"""Process a single spec definition item."""
when_string = entry.get("when")
if when_string is not None:
when = spack.spec.eval_conditional(when_string)
when = _eval_conditional(when_string)
assert len([x for x in entry if x != "when"]) == 1
else:
when = True
@@ -1128,6 +1108,11 @@ def user_specs(self):
@property
def dev_specs(self):
if not self._dev_specs:
self._dev_specs = self._read_dev_specs()
return self._dev_specs
def _read_dev_specs(self):
dev_specs = {}
dev_config = spack.config.get("develop", {})
for name, entry in dev_config.items():
@@ -1545,6 +1530,9 @@ def _get_specs_to_concretize(
return new_user_specs, kept_user_specs, specs_to_concretize
def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
# Avoid cyclic dependency
import spack.solver.asp
# Exit early if the set of concretized specs is the set of user specs
new_user_specs, _, specs_to_concretize = self._get_specs_to_concretize()
if not new_user_specs:
@@ -2404,8 +2392,6 @@ def invalidate_repository_cache(self):
def __enter__(self):
self._previous_active = _active_environment
if self._previous_active:
deactivate()
activate(self)
return self
@@ -2655,23 +2641,20 @@ def _ensure_env_dir():
# error handling for bad manifests is handled on other code paths
return
# TODO: make this recursive
includes = manifest[TOP_LEVEL_KEY].get("include", [])
for include in includes:
included_path = spack.config.included_path(include)
path = included_path.path
if os.path.isabs(path):
if os.path.isabs(include):
continue
abspath = pathlib.Path(os.path.normpath(environment_dir / path))
abspath = pathlib.Path(os.path.normpath(environment_dir / include))
common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
if common_path != environment_dir:
tty.debug(f"Will not copy relative include file from outside environment: {path}")
tty.debug(f"Will not copy relative include from outside environment: {include}")
continue
orig_abspath = os.path.normpath(envfile.parent / path)
orig_abspath = os.path.normpath(envfile.parent / include)
if not os.path.exists(orig_abspath):
tty.warn(f"Included file does not exist; will not copy: '{path}'")
tty.warn(f"Included file does not exist; will not copy: '{include}'")
continue
fs.touchp(abspath)
@@ -2894,7 +2877,7 @@ def extract_name(_item):
continue
condition_str = item.get("when", "True")
if not spack.spec.eval_conditional(condition_str):
if not _eval_conditional(condition_str):
continue
yield idx, item
@@ -2955,20 +2938,127 @@ def __iter__(self):
def __str__(self):
return str(self.manifest_file)
@property
def included_config_scopes(self) -> List[spack.config.ConfigScope]:
"""List of included configuration scopes from the manifest.
Scopes are listed in the YAML file in order from highest to
lowest precedence, so configuration from earlier scopes will take
precedence over later ones.
This routine returns them in the order they should be pushed onto
the internal scope stack (so, in reverse, from lowest to highest).
Returns: Configuration scopes associated with the environment manifest
Raises:
SpackEnvironmentError: if the manifest includes a remote file but
no configuration stage directory has been identified
"""
scopes: List[spack.config.ConfigScope] = []
# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = self[TOP_LEVEL_KEY].get("include", [])
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
config_path = substitute_path_variables(config_path)
include_url = urllib.parse.urlparse(config_path)
# If the scheme is not valid, config_path is not a URL
# of a type Spack is generally aware of
if spack.util.url.validate_scheme(include_url.scheme):
# Transform file:// URLs to direct includes.
if include_url.scheme == "file":
config_path = urllib.request.url2pathname(include_url.path)
# Any other URL should be fetched.
elif include_url.scheme in ("http", "https", "ftp"):
# Stage any remote configuration file(s)
staged_configs = (
os.listdir(self.config_stage_dir)
if os.path.exists(self.config_stage_dir)
else []
)
remote_path = urllib.request.url2pathname(include_url.path)
basename = os.path.basename(remote_path)
if basename in staged_configs:
# Do NOT re-stage configuration files over existing
# ones with the same name since there is a risk of
# losing changes (e.g., from 'spack config update').
tty.warn(
"Will not re-stage configuration from {0} to avoid "
"losing changes to the already staged file of the "
"same name.".format(remote_path)
)
# Note that the configuration stage directory
# is flattened to ensure a single copy of each
# configuration file.
config_path = self.config_stage_dir
if basename.endswith(".yaml"):
config_path = os.path.join(config_path, basename)
else:
staged_path = spack.config.fetch_remote_configs(
config_path, str(self.config_stage_dir), skip_existing=True
)
if not staged_path:
raise SpackEnvironmentError(
"Unable to fetch remote configuration {0}".format(config_path)
)
config_path = staged_path
elif include_url.scheme:
raise ValueError(
f"Unsupported URL scheme ({include_url.scheme}) for "
f"environment include: {config_path}"
)
# treat relative paths as relative to the environment
if not os.path.isabs(config_path):
config_path = os.path.join(self.manifest_dir, config_path)
config_path = os.path.normpath(os.path.realpath(config_path))
if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
config_name = f"env:{self.name}:{os.path.basename(config_path)}"
tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
elif os.path.exists(config_path):
# files are assumed to be SingleFileScopes
config_name = f"env:{self.name}:{config_path}"
tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
scopes.append(
spack.config.SingleFileScope(
config_name, config_path, spack.schema.merged.schema
)
)
else:
missing.append(config_path)
continue
if missing:
msg = "Detected {0} missing include path(s):".format(len(missing))
msg += "\n {0}".format("\n ".join(missing))
raise spack.config.ConfigFileError(msg)
return scopes
@property
def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""A list of all configuration scopes for the environment manifest. On the first call this
instantiates all the scopes, on subsequent calls it returns the cached list."""
if self._config_scopes is not None:
return self._config_scopes
scopes: List[spack.config.ConfigScope] = [
*self.included_config_scopes,
spack.config.SingleFileScope(
self.scope_name,
str(self.manifest_file),
spack.schema.env.schema,
yaml_path=[TOP_LEVEL_KEY],
)
),
]
ensure_no_disallowed_env_config_mods(scopes)
self._config_scopes = scopes
@@ -2977,12 +3067,14 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope, priority=ConfigScopePriority.ENVIRONMENT)
spack.config.CONFIG.push_scope(scope)
spack.config.CONFIG.ensure_scope_ordering()
def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
spack.config.CONFIG.ensure_scope_ordering()
@contextlib.contextmanager
def use_config(self):

View File

@@ -8,7 +8,6 @@
import llnl.util.tty as tty
from llnl.util.tty.color import colorize
import spack.config
import spack.environment as ev
import spack.repo
import spack.schema.environment
@@ -159,8 +158,7 @@ def activate(
# become PATH variables.
#
with env.manifest.use_config():
env_vars_yaml = spack.config.get("env_vars", None)
env_vars_yaml = env.manifest.configuration.get("env_vars", None)
if env_vars_yaml:
env_mods.extend(spack.schema.environment.parse(env_vars_yaml))
@@ -197,8 +195,7 @@ def deactivate() -> EnvironmentModifications:
if active is None:
return env_mods
with active.manifest.use_config():
env_vars_yaml = spack.config.get("env_vars", None)
env_vars_yaml = active.manifest.configuration.get("env_vars", None)
if env_vars_yaml:
env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())

View File

@@ -9,8 +9,7 @@
import shutil
import stat
import sys
import tempfile
from typing import Callable, Dict, List, Optional
from typing import Callable, Dict, Optional
from typing_extensions import Literal
@@ -78,7 +77,7 @@ def view_copy(
# Order of this dict is somewhat irrelevant
prefix_to_projection = {
str(s.prefix): view.get_projection_for_spec(s)
s.prefix: view.get_projection_for_spec(s)
for s in spec.traverse(root=True, order="breadth")
if not s.external
}
@@ -185,7 +184,7 @@ def __init__(
def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
self._link(src, dst, self, spec)
def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
def add_specs(self, *specs, **kwargs):
"""
Add given specs to view.
@@ -200,19 +199,19 @@ def add_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
"""
raise NotImplementedError
def add_standalone(self, spec: spack.spec.Spec) -> bool:
def add_standalone(self, spec):
"""
Add (link) a standalone package into this view.
"""
raise NotImplementedError
def check_added(self, spec: spack.spec.Spec) -> bool:
def check_added(self, spec):
"""
Check if the given concrete spec is active in this view.
"""
raise NotImplementedError
def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
def remove_specs(self, *specs, **kwargs):
"""
Removes given specs from view.
@@ -231,25 +230,25 @@ def remove_specs(self, *specs: spack.spec.Spec, **kwargs) -> None:
"""
raise NotImplementedError
def remove_standalone(self, spec: spack.spec.Spec) -> None:
def remove_standalone(self, spec):
"""
Remove (unlink) a standalone package from this view.
"""
raise NotImplementedError
def get_projection_for_spec(self, spec: spack.spec.Spec) -> str:
def get_projection_for_spec(self, spec):
"""
Get the projection in this view for a spec.
"""
raise NotImplementedError
def get_all_specs(self) -> List[spack.spec.Spec]:
def get_all_specs(self):
"""
Get all specs currently active in this view.
"""
raise NotImplementedError
def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
def get_spec(self, spec):
"""
Return the actual spec linked in this view (i.e. do not look it up
in the database by name).
@@ -263,7 +262,7 @@ def get_spec(self, spec: spack.spec.Spec) -> Optional[spack.spec.Spec]:
"""
raise NotImplementedError
def print_status(self, *specs: spack.spec.Spec, **kwargs) -> None:
def print_status(self, *specs, **kwargs):
"""
Print a short summary about the given specs, detailing whether..
* ..they are active in the view.
@@ -643,7 +642,7 @@ def print_status(self, *specs, **kwargs):
specs.sort()
abbreviated = [
s.cformat("{name}{@version}{compiler_flags}{variants}{%compiler}")
s.cformat("{name}{@version}{%compiler}{compiler_flags}{variants}")
for s in specs
]
@@ -694,7 +693,7 @@ def _sanity_check_view_projection(self, specs):
raise ConflictingSpecsError(current_spec, conflicting_spec)
seen[metadata_dir] = current_spec
def add_specs(self, *specs, **kwargs) -> None:
def add_specs(self, *specs: spack.spec.Spec) -> None:
"""Link a root-to-leaf topologically ordered list of specs into the view."""
assert all((s.concrete for s in specs))
if len(specs) == 0:
@@ -709,10 +708,7 @@ def add_specs(self, *specs, **kwargs) -> None:
def skip_list(file):
return os.path.basename(file) == spack.store.STORE.layout.metadata_dir
# Determine if the root is on a case-insensitive filesystem
normalize_paths = is_folder_on_case_insensitive_filesystem(self._root)
visitor = SourceMergeVisitor(ignore=skip_list, normalize_paths=normalize_paths)
visitor = SourceMergeVisitor(ignore=skip_list)
# Gather all the directories to be made and files to be linked
for spec in specs:
@@ -831,7 +827,7 @@ def get_projection_for_spec(self, spec):
#####################
# utility functions #
#####################
def get_spec_from_file(filename) -> Optional[spack.spec.Spec]:
def get_spec_from_file(filename):
try:
with open(filename, "r", encoding="utf-8") as f:
return spack.spec.Spec.from_yaml(f)
@@ -888,8 +884,3 @@ def get_dependencies(specs):
class ConflictingProjectionsError(SpackError):
"""Raised when a view has a projections file and is given one manually."""
def is_folder_on_case_insensitive_filesystem(path: str) -> bool:
with tempfile.NamedTemporaryFile(dir=path, prefix=".sentinel") as sentinel:
return os.path.exists(os.path.join(path, os.path.basename(sentinel.name).upper()))

View File

@@ -482,7 +482,7 @@ class SimpleDAG(DotGraphBuilder):
"""Simple DOT graph, with nodes colored uniformly and edges without properties"""
def node_entry(self, node):
format_option = "{name}{@version}{/hash:7}{%compiler}"
format_option = "{name}{@version}{%compiler}{/hash:7}"
return node.dag_hash(), f'[label="{node.format(format_option)}"]'
def edge_entry(self, edge):
@@ -515,7 +515,7 @@ def visit(self, edge):
super().visit(edge)
def node_entry(self, node):
node_str = node.format("{name}{@version}{/hash:7}{%compiler}")
node_str = node.format("{name}{@version}{%compiler}{/hash:7}")
options = f'[label="{node_str}", group="build_dependencies", fillcolor="coral"]'
if node.dag_hash() in self.main_unified_space:
options = f'[label="{node_str}", group="main_psid"]'

View File

@@ -6,7 +6,7 @@
import spack.deptypes as dt
import spack.repo
HASHES = []
hashes = []
class SpecHashDescriptor:
@@ -23,7 +23,7 @@ def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
self.depflag = depflag
self.package_hash = package_hash
self.name = name
HASHES.append(self)
hashes.append(self)
# Allow spec hashes to have an alternate computation method
self.override = override
@@ -43,9 +43,13 @@ def __repr__(self):
)
#: The DAG hash includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="hash"
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")
#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
)

View File

@@ -2,14 +2,198 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import fnmatch
import io
import os
import re
from typing import Dict, List, Union
import llnl.util.tty as tty
from llnl.util.filesystem import visit_directory_tree
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
from llnl.util.lang import stable_partition
import spack.config
import spack.error
import spack.verify_libraries
import spack.util.elf as elf
#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
#: default search paths of the dynamic linker.
ALLOW_UNRESOLVED = [
# kernel
"linux-vdso.so.*",
"libselinux.so.*",
# musl libc
"ld-musl-*.so.*",
# glibc
"ld-linux*.so.*",
"ld64.so.*",
"libanl.so.*",
"libc.so.*",
"libdl.so.*",
"libm.so.*",
"libmemusage.so.*",
"libmvec.so.*",
"libnsl.so.*",
"libnss_compat.so.*",
"libnss_db.so.*",
"libnss_dns.so.*",
"libnss_files.so.*",
"libnss_hesiod.so.*",
"libpcprofile.so.*",
"libpthread.so.*",
"libresolv.so.*",
"librt.so.*",
"libSegFault.so.*",
"libthread_db.so.*",
"libutil.so.*",
# gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
# but the binaries we copy from the compiler don't have an $ORIGIN rpath.
"libasan.so.*",
"libatomic.so.*",
"libcc1.so.*",
"libgcc_s.so.*",
"libgfortran.so.*",
"libgomp.so.*",
"libitm.so.*",
"liblsan.so.*",
"libquadmath.so.*",
"libssp.so.*",
"libstdc++.so.*",
"libtsan.so.*",
"libubsan.so.*",
# systemd
"libudev.so.*",
# cuda driver
"libcuda.so.*",
]
def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
return (
child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
and parent.is_little_endian == child.is_little_endian
and parent.is_64_bit == child.is_64_bit
and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
)
def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
try:
with open(candidate_path, "rb") as g:
return is_compatible(current_elf, elf.parse_elf(g))
except (OSError, elf.ElfParsingError):
return False
class Problem:
def __init__(
self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
) -> None:
self.resolved = resolved
self.unresolved = unresolved
self.relative_rpaths = relative_rpaths
class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
def __init__(self, allow_unresolved_patterns: List[str]) -> None:
self.problems: Dict[str, Problem] = {}
self._allow_unresolved_regex = re.compile(
"|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
)
def allow_unresolved(self, needed: bytes) -> bool:
try:
name = needed.decode("utf-8")
except UnicodeDecodeError:
return False
return bool(self._allow_unresolved_regex.match(name))
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# We work with byte strings for paths.
path = os.path.join(root, rel_path).encode("utf-8")
# For $ORIGIN interpolation: should not have trailing dir separator.
origin = os.path.dirname(path)
# Retrieve the needed libs + rpaths.
try:
with open(path, "rb") as f:
parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
except (OSError, elf.ElfParsingError):
# Not dealing with an invalid ELF file.
return
# If there's no needed libs all is good
if not parsed_elf.has_needed:
return
# Get the needed libs and rpaths (notice: byte strings)
# Don't force an encoding cause paths are just a bag of bytes.
needed_libs = parsed_elf.dt_needed_strs
rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []
# We only interpolate $ORIGIN, not $LIB and $PLATFORM, since they're not
# really supported in general. Also remove empty paths.
rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]
# Do not allow relative rpaths (they are relative to the current working directory)
rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)
# If there's a / in the needed lib, it's opened directly, otherwise it needs
# a search.
direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)
# Do not allow relative paths in direct libs (they are relative to the current working
# directory)
direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)
resolved: Dict[bytes, bytes] = {}
for lib in search_libs:
if self.allow_unresolved(lib):
continue
for rpath in rpaths:
candidate = os.path.join(rpath, lib)
if candidate_matches(parsed_elf, candidate):
resolved[lib] = candidate
break
else:
unresolved.append(lib)
# Check if directly opened libs are compatible
for lib in direct_libs:
if candidate_matches(parsed_elf, lib):
resolved[lib] = lib
else:
unresolved.append(lib)
if unresolved or relative_rpaths:
self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
pass
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# There can be binaries in .spack/test which shouldn't be checked.
if rel_path == ".spack":
return False
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
return False
class CannotLocateSharedLibraries(spack.error.SpackError):
pass
def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
try:
return byte_str.decode("utf-8")
except UnicodeDecodeError:
return byte_str
def post_install(spec, explicit):
@@ -20,23 +204,36 @@ def post_install(spec, explicit):
if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
return
visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
[*spack.verify_libraries.ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
visitor = ResolveSharedElfLibDepsVisitor(
[*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
)
visit_directory_tree(spec.prefix, visitor)
# All good?
if not visitor.problems:
return
output = io.StringIO("not all executables and libraries can resolve their dependencies:\n")
visitor.write(output)
# For now just list the issues (print it in ldd style, except we don't recurse)
output = io.StringIO()
output.write("not all executables and libraries can resolve their dependencies:\n")
for path, problem in visitor.problems.items():
output.write(path)
output.write("\n")
for needed, full_path in problem.resolved.items():
output.write(" ")
if needed == full_path:
output.write(maybe_decode(needed))
else:
output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
output.write("\n")
for not_found in problem.unresolved:
output.write(f" {maybe_decode(not_found)} => not found\n")
for relative_rpath in problem.relative_rpaths:
output.write(f" {maybe_decode(relative_rpath)} => relative rpath\n")
message = output.getvalue().strip()
if policy == "error":
raise CannotLocateSharedLibraries(message)
tty.warn(message)
class CannotLocateSharedLibraries(spack.error.SpackError):
pass

View File

@@ -21,6 +21,7 @@
from llnl.util.lang import nullcontext
from llnl.util.tty.color import colorize
import spack.build_environment
import spack.config
import spack.error
import spack.package_base
@@ -397,7 +398,7 @@ def stand_alone_tests(self, kwargs):
Args:
kwargs (dict): arguments to be used by the test process
"""
import spack.build_environment # avoid circular dependency
import spack.build_environment
spack.build_environment.start_build_process(self.pkg, test_process, kwargs)
@@ -462,8 +463,6 @@ def write_tested_status(self):
@contextlib.contextmanager
def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbose: bool = False):
import spack.build_environment # avoid circular dependency
wdir = "." if work_dir is None else work_dir
tester = pkg.tester
assert test_name and test_name.startswith(

View File

@@ -47,8 +47,6 @@
import spack.util.environment
import spack.util.lock
from .enums import ConfigScopePriority
#: names of profile statistics
stat_names = pstats.Stats.sort_arg_dict_default
@@ -874,19 +872,14 @@ def add_command_line_scopes(
scopes = ev.environment_path_scopes(name, path)
if scopes is None:
if os.path.isdir(path): # directory with config files
cfg.push_scope(
spack.config.DirectoryConfigScope(name, path, writable=False),
priority=ConfigScopePriority.CUSTOM,
)
spack.config._add_platform_scope(
cfg, name, path, priority=ConfigScopePriority.CUSTOM, writable=False
)
cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
spack.config._add_platform_scope(cfg, name, path, writable=False)
continue
else:
raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
for scope in scopes:
cfg.push_scope(scope, priority=ConfigScopePriority.CUSTOM)
cfg.push_scope(scope)
def _main(argv=None):
@@ -959,9 +952,7 @@ def _main(argv=None):
# Push scopes from the command line last
if args.config_scopes:
add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
spack.config.CONFIG.push_scope(
spack.config.InternalConfigScope("command_line"), priority=ConfigScopePriority.COMMAND_LINE
)
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
setup_main_options(args)
# ------------------------------------------------------------------------
@@ -1007,7 +998,6 @@ def finish_parse_and_run(parser, cmd_name, main_args, env_format_error):
args, unknown = parser.parse_known_args(main_args.command)
# we need to inherit verbose since the install command checks for it
args.verbose = main_args.verbose
args.lines = main_args.lines
# Now that we know what command this is and what its args are, determine
# whether we can continue with a bad environment and raise if not.

View File

@@ -330,17 +330,18 @@ class BaseConfiguration:
default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None:
# Module where type(self) is defined
m = inspect.getmodule(self)
assert m is not None # make mypy happy
self.module = m
# Spec for which we want to generate a module file
self.spec = spec
self.name = module_set_name
self.explicit = explicit
# Dictionary of configuration options that should be applied to the spec
# Dictionary of configuration options that should be applied
# to the spec
self.conf = merge_config_rules(self.module.configuration(self.name), self.spec)
@property
def module(self):
return inspect.getmodule(self)
@property
def projections(self):
"""Projection from specs to module names"""
@@ -564,12 +565,6 @@ def __init__(self, configuration):
def spec(self):
return self.conf.spec
@tengine.context_property
def tags(self):
if not hasattr(self.spec.package, "tags"):
return []
return self.spec.package.tags
@tengine.context_property
def timestamp(self):
return datetime.datetime.now()
@@ -780,6 +775,10 @@ def __init__(
) -> None:
self.spec = spec
# This class is meant to be derived. Get the module of the
# actual writer.
self.module = inspect.getmodule(self)
assert self.module is not None # make mypy happy
m = self.module
# Create the triplet of configuration/layout/context
@@ -817,10 +816,6 @@ def __init__(
name = type(self).__name__
raise ModulercHeaderNotDefined(msg.format(name))
@property
def module(self):
return inspect.getmodule(self)
def _get_template(self):
"""Gets the template that will be rendered for this spec."""
# Get templates and put them in the order of importance:

View File

@@ -48,7 +48,6 @@
import spack.store
import spack.url
import spack.util.environment
import spack.util.executable
import spack.util.path
import spack.util.web
import spack.variant
@@ -126,10 +125,9 @@ def windows_establish_runtime_linkage(self):
# Spack should in general not modify things it has not installed
# we can reasonably expect externals to have their link interface properly established
if sys.platform == "win32" and not self.spec.external:
win_rpath = fsys.WindowsSimulatedRPath(self)
win_rpath.add_library_dependent(*self.win_add_library_dependent())
win_rpath.add_rpath(*self.win_add_rpath())
win_rpath.establish_link()
self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
self.win_rpath.add_rpath(*self.win_add_rpath())
self.win_rpath.establish_link()
#: Registers which are the detectable packages, by repo and package name
@@ -744,6 +742,7 @@ def __init__(self, spec):
# Set up timing variables
self._fetch_time = 0.0
self.win_rpath = fsys.WindowsSimulatedRPath(self)
super().__init__()
def __getitem__(self, key: str) -> "PackageBase":
@@ -1288,13 +1287,12 @@ def extendee_spec(self):
if not self.extendees:
return None
deps = []
# If the extendee is in the spec's deps already, return that.
deps = [
dep
for dep in self.spec.dependencies(deptype=("link", "run"))
for d, when in self.extendees.values()
if dep.satisfies(d) and self.spec.satisfies(when)
]
for dep in self.spec.traverse(deptype=("link", "run")):
if dep.name in self.extendees:
deps.append(dep)
if deps:
assert len(deps) == 1
@@ -1371,14 +1369,6 @@ def prefix(self):
def home(self):
return self.prefix
@property
def command(self) -> spack.util.executable.Executable:
"""Returns the main executable for this package."""
path = os.path.join(self.home.bin, self.spec.name)
if fsys.is_exe(path):
return spack.util.executable.Executable(path)
raise RuntimeError(f"Unable to locate {self.spec.name} command in {self.home.bin}")
@property # type: ignore[misc]
@memoized
def compiler(self):

View File

@@ -83,7 +83,6 @@ def __init__(
level: int,
working_dir: str,
reverse: bool = False,
ordering_key: Optional[Tuple[str, int]] = None,
) -> None:
"""Initialize a new Patch instance.
@@ -93,7 +92,6 @@ def __init__(
level: patch level
working_dir: relative path *within* the stage to change to
reverse: reverse the patch
ordering_key: key used to ensure patches are applied in a consistent order
"""
# validate level (must be an integer >= 0)
if not isinstance(level, int) or not level >= 0:
@@ -107,13 +105,6 @@ def __init__(
self.working_dir = working_dir
self.reverse = reverse
# The ordering key is passed when executing package.py directives, and is only relevant
# after a solve to build concrete specs with consistently ordered patches. For concrete
# specs read from a file, we add patches in the order of their 'patches' variants, and the
# ordering_key is irrelevant. In that case, use a default value so we don't need to branch
# on whether ordering_key is None where it's used, just to make static analysis happy.
self.ordering_key: Tuple[str, int] = ordering_key or ("", 0)
def apply(self, stage: "spack.stage.Stage") -> None:
"""Apply a patch to source in a stage.
@@ -211,8 +202,9 @@ def __init__(
msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
raise ValueError(msg)
super().__init__(pkg, abs_path, level, working_dir, reverse, ordering_key)
super().__init__(pkg, abs_path, level, working_dir, reverse)
self.path = abs_path
self.ordering_key = ordering_key
@property
def sha256(self) -> str:
@@ -274,11 +266,13 @@ def __init__(
archive_sha256: sha256 sum of the *archive*, if the patch is compressed
(only required for compressed URL patches)
"""
super().__init__(pkg, url, level, working_dir, reverse, ordering_key)
super().__init__(pkg, url, level, working_dir, reverse)
self.url = url
self._stage: Optional["spack.stage.Stage"] = None
self.ordering_key = ordering_key
if allowed_archive(self.url) and not archive_sha256:
raise spack.error.PatchDirectiveError(
"Compressed patches require 'archive_sha256' "

View File

@@ -108,8 +108,6 @@ def _get_user_cache_path():
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
default_misc_cache_path = os.path.join(user_cache_path, "cache")
#: concretization cache for Spack concretizations
default_conc_cache_path = os.path.join(default_misc_cache_path, "concretization")
# Below paths pull configuration from the host environment.
#

View File

@@ -283,21 +283,21 @@ def relocate_text_bin(binaries: Iterable[str], prefix_to_prefix: PrefixToPrefix)
def is_macho_magic(magic: bytes) -> bool:
return (
# In order of popularity: 64-bit mach-o le/be, 32-bit mach-o le/be.
magic.startswith(b"\xcf\xfa\xed\xfe")
or magic.startswith(b"\xfe\xed\xfa\xcf")
or magic.startswith(b"\xce\xfa\xed\xfe")
or magic.startswith(b"\xfe\xed\xfa\xce")
magic.startswith(b"\xCF\xFA\xED\xFE")
or magic.startswith(b"\xFE\xED\xFA\xCF")
or magic.startswith(b"\xCE\xFA\xED\xFE")
or magic.startswith(b"\xFE\xED\xFA\xCE")
# universal binaries: 0xcafebabe be (most common?) or 0xbebafeca le (not sure if exists).
# Here we need to disambiguate mach-o and JVM class files. In mach-o the next 4 bytes are
# the number of binaries; in JVM class files it's the java version number. We assume there
# are less than 10 binaries in a universal binary.
or (magic.startswith(b"\xca\xfe\xba\xbe") and int.from_bytes(magic[4:8], "big") < 10)
or (magic.startswith(b"\xbe\xba\xfe\xca") and int.from_bytes(magic[4:8], "little") < 10)
or (magic.startswith(b"\xCA\xFE\xBA\xBE") and int.from_bytes(magic[4:8], "big") < 10)
or (magic.startswith(b"\xBE\xBA\xFE\xCA") and int.from_bytes(magic[4:8], "little") < 10)
)
def is_elf_magic(magic: bytes) -> bool:
return magic.startswith(b"\x7fELF")
return magic.startswith(b"\x7FELF")
def is_binary(filename: str) -> bool:

View File

@@ -32,7 +32,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack
import spack.caches
import spack.config
import spack.error
@@ -50,8 +49,6 @@
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
ROOT_PYTHON_NAMESPACE = "spack.pkg"
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
def python_package_for_repo(namespace):
"""Returns the full namespace of a repository, given its relative one
@@ -912,52 +909,19 @@ def __reduce__(self):
return RepoPath.unmarshal, self.marshal()
def _parse_package_api_version(
config: Dict[str, Any],
min_api: Tuple[int, int] = spack.min_package_api_version,
max_api: Tuple[int, int] = spack.package_api_version,
) -> Tuple[int, int]:
api = config.get("api")
if api is None:
package_api = (1, 0)
else:
if not isinstance(api, str):
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
api_match = _API_REGEX.match(api)
if api_match is None:
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
package_api = (int(api_match.group(1)), int(api_match.group(2)))
if min_api <= package_api <= max_api:
return package_api
min_str = ".".join(str(i) for i in min_api)
max_str = ".".join(str(i) for i in max_api)
curr_str = ".".join(str(i) for i in package_api)
raise BadRepoError(
f"Package API v{curr_str} is not supported by this version of Spack ("
f"must be between v{min_str} and v{max_str})"
)
class Repo:
"""Class representing a package repository in the filesystem.
Each package repository must have a top-level configuration file called `repo.yaml`.
Each package repository must have a top-level configuration file
called `repo.yaml`.
It contains the following keys:
Currently, `repo.yaml` must define:
`namespace`:
A Python namespace where the repository's packages should live.
`subdirectory`:
An optional subdirectory name where packages are placed
`api`:
A string of the form vX.Y that indicates the Package API version. The default is "v1.0".
For the repo to be compatible with the current version of Spack, the version must be
greater than or equal to :py:data:`spack.min_package_api_version` and less than or equal to
:py:data:`spack.package_api_version`.
"""
def __init__(
@@ -994,7 +958,7 @@ def check(condition, msg):
f"{os.path.join(root, repo_config_name)} must define a namespace.",
)
self.namespace: str = config["namespace"]
self.namespace = config["namespace"]
check(
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
@@ -1007,14 +971,12 @@ def check(condition, msg):
# Keep name components around for checking prefixes.
self._names = self.full_namespace.split(".")
packages_dir: str = config.get("subdirectory", packages_dir_name)
packages_dir = config.get("subdirectory", packages_dir_name)
self.packages_path = os.path.join(self.root, packages_dir)
check(
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
)
self.package_api = _parse_package_api_version(config)
# Class attribute overrides by package name
self.overrides = overrides or {}
@@ -1064,7 +1026,7 @@ def is_prefix(self, fullname: str) -> bool:
parts = fullname.split(".")
return self._names[: len(parts)] == parts
def _read_config(self) -> Dict[str, Any]:
def _read_config(self) -> Dict[str, str]:
"""Check for a YAML config file in this db's root directory."""
try:
with open(self.config_file, encoding="utf-8") as reponame_file:
@@ -1406,8 +1368,6 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
config.write(f" namespace: '{namespace}'\n")
if subdir != packages_dir_name:
config.write(f" subdirectory: '{subdir}'\n")
x, y = spack.package_api_version
config.write(f" api: v{x}.{y}\n")
except OSError as e:
# try to clean up.

View File

@@ -177,7 +177,7 @@ def build_report_for_package(self, report_dir, package, duration):
# something went wrong before the cdash "configure" phase b/c we have an exception and only
# "update" was encountered.
# dump the report in the configure line so teams can see what the issue is
if len(phases_encountered) == 1 and package.get("exception"):
if len(phases_encountered) == 1 and package["exception"]:
# TODO this mapping is not ideal since these are pre-configure errors
# we need to determine if a more appropriate cdash phase can be utilized
# for now we will add a message to the log explaining this

View File

@@ -7,7 +7,8 @@
import warnings
import jsonschema
import jsonschema.validators
import llnl.util.lang
from spack.error import SpecSyntaxError
@@ -17,59 +18,59 @@ class DeprecationMessage(typing.NamedTuple):
error: bool
def _validate_spec(validator, is_spec, instance, schema):
"""Check if all additional keys are valid specs."""
import spack.spec_parser
# jsonschema is imported lazily as it is heavy to import
# and increases the start-up time
def _make_validator():
def _validate_spec(validator, is_spec, instance, schema):
"""Check if the attributes on instance are valid specs."""
import spack.spec_parser
if not validator.is_type(instance, "object"):
return
if not validator.is_type(instance, "object"):
return
properties = schema.get("properties") or {}
for spec_str in instance:
try:
spack.spec_parser.parse(spec_str)
except SpecSyntaxError:
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
for spec_str in instance:
if spec_str in properties:
continue
try:
spack.spec_parser.parse(spec_str)
except SpecSyntaxError:
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
def _deprecated_properties(validator, deprecated, instance, schema):
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
return
if not deprecated:
return
deprecations = {
name: DeprecationMessage(message=x["message"], error=x["error"])
for x in deprecated
for name in x["names"]
}
# Get a list of the deprecated properties; return if there are none
issues = [entry for entry in instance if entry in deprecations]
if not issues:
return
# Process issues
errors = []
for name in issues:
msg = deprecations[name].message.format(name=name)
if deprecations[name].error:
errors.append(msg)
else:
warnings.warn(msg)
if errors:
yield jsonschema.ValidationError("\n".join(errors))
return jsonschema.validators.extend(
jsonschema.Draft7Validator,
{"validate_spec": _validate_spec, "deprecatedProperties": _deprecated_properties},
)
def _deprecated_properties(validator, deprecated, instance, schema):
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
return
if not deprecated:
return
deprecations = {
name: DeprecationMessage(message=x["message"], error=x["error"])
for x in deprecated
for name in x["names"]
}
# Get a list of the deprecated properties; return if there are none
issues = [entry for entry in instance if entry in deprecations]
if not issues:
return
# Process issues
errors = []
for name in issues:
msg = deprecations[name].message.format(name=name)
if deprecations[name].error:
errors.append(msg)
else:
warnings.warn(msg)
if errors:
yield jsonschema.ValidationError("\n".join(errors))
Validator = jsonschema.validators.extend(
jsonschema.Draft7Validator,
{"additionalKeysAreSpecs": _validate_spec, "deprecatedProperties": _deprecated_properties},
)
Validator = llnl.util.lang.Singleton(_make_validator)
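For readers unfamiliar with this jsonschema pattern, here is a self-contained sketch (toy keyword and schema, not Spack's) showing how `jsonschema.validators.extend` wires a custom keyword like the ones above into a Draft7 validator:

import jsonschema
import jsonschema.validators

def _all_keys_uppercase(validator, enabled, instance, schema):
    # custom keyword: yields one error per non-uppercase key
    if not enabled or not validator.is_type(instance, "object"):
        return
    for key in instance:
        if not key.isupper():
            yield jsonschema.ValidationError(f"the key '{key}' is not uppercase")

ToyValidator = jsonschema.validators.extend(
    jsonschema.Draft7Validator, {"allKeysUppercase": _all_keys_uppercase}
)

errors = list(ToyValidator({"allKeysUppercase": True}).iter_errors({"ok": 1, "OK": 2}))
assert [e.message for e in errors] == ["the key 'ok' is not uppercase"]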
def _append(string: str) -> bool:

View File

@@ -84,11 +84,7 @@
"duplicates": {
"type": "object",
"properties": {
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]},
"max_dupes": {
"type": "object",
"additional_properties": {"type": "integer", "minimum": 1},
},
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
"static_analysis": {"type": "boolean"},

View File

@@ -58,15 +58,6 @@
{"type": "string"}, # deprecated
]
},
"concretization_cache": {
"type": "object",
"properties": {
"enable": {"type": "boolean"},
"url": {"type": "string"},
"entry_limit": {"type": "integer", "minimum": 0},
"size_limit": {"type": "integer", "minimum": 0},
},
},
"install_hash_length": {"type": "integer", "minimum": 1},
"install_path_scheme": {"type": "string"}, # deprecated
"build_stage": {

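For reference, a hedged sketch (all values illustrative, path made up) of a config fragment that the `concretization_cache` schema above accepts; the limits mirror the solver defaults of 1000 entries and 3e8 bytes:

config_fragment = {
    "config": {
        "concretization_cache": {
            "enable": True,
            "url": "~/.spack/concretization-cache",  # illustrative location
            "entry_limit": 1000,
            "size_limit": 300_000_000,
        }
    }
}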
View File

@@ -3,12 +3,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for Cray descriptive manifest: this describes a set of
installed packages on the system and also specifies dependency
relationships between them (so this provides more information than
external entries in packages configuration).
installed packages on the system and also specifies dependency
relationships between them (so this provides more information than
external entries in packages configuration).
This does not specify a configuration - it is an input format
that is consumed and transformed into Spack DB records.
This does not specify a configuration - it is an input format
that is consumed and transformed into Spack DB records.
"""
from typing import Any, Dict

View File

@@ -29,7 +29,11 @@
# merged configuration scope schemas
spack.schema.merged.properties,
# extra environment schema properties
{"specs": spec_list_schema, "include_concrete": include_concrete},
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spec_list_schema,
"include_concrete": include_concrete,
},
),
}
}

View File

@@ -1,41 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for include.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/include.py
:lines: 12-
"""
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"include": {
"type": "array",
"default": [],
"additionalProperties": False,
"items": {
"anyOf": [
{
"type": "object",
"properties": {
"when": {"type": "string"},
"path": {"type": "string"},
"sha256": {"type": "string"},
"optional": {"type": "boolean"},
},
"required": ["path"],
"additionalProperties": False,
},
{"type": "string"},
]
},
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack include configuration file schema",
"properties": properties,
}
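As a usage illustration for this schema (paths and the `when` condition are made up), `include` entries may be bare strings or objects with a required `path`:

include_fragment = {
    "include": [
        "local-overrides.yaml",  # bare string form
        {
            "path": "site/linux.yaml",
            "when": "platform == 'linux'",  # illustrative condition
            "optional": True,
        },
    ]
}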

View File

@@ -21,7 +21,6 @@
import spack.schema.definitions
import spack.schema.develop
import spack.schema.env_vars
import spack.schema.include
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
@@ -41,7 +40,6 @@
spack.schema.definitions.properties,
spack.schema.develop.properties,
spack.schema.env_vars.properties,
spack.schema.include.properties,
spack.schema.mirrors.properties,
spack.schema.modules.properties,
spack.schema.packages.properties,
@@ -50,6 +48,7 @@
spack.schema.view.properties,
)
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",

View File

@@ -39,7 +39,7 @@
"load": array_of_strings,
"suffixes": {
"type": "object",
"additionalKeysAreSpecs": True,
"validate_spec": True,
"additionalProperties": {"type": "string"}, # key
},
"environment": spack.schema.environment.definition,
@@ -48,44 +48,40 @@
projections_scheme = spack.schema.projections.properties["projections"]
common_props = {
"verbose": {"type": "boolean", "default": False},
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
"include": array_of_strings,
"exclude": array_of_strings,
"exclude_implicits": {"type": "boolean", "default": False},
"defaults": array_of_strings,
"hide_implicits": {"type": "boolean", "default": False},
"naming_scheme": {"type": "string"},
"projections": projections_scheme,
"all": module_file_configuration,
}
tcl_configuration = {
module_type_configuration: Dict = {
"type": "object",
"default": {},
"additionalKeysAreSpecs": True,
"properties": {**common_props},
"validate_spec": True,
"properties": {
"verbose": {"type": "boolean", "default": False},
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
"include": array_of_strings,
"exclude": array_of_strings,
"exclude_implicits": {"type": "boolean", "default": False},
"defaults": array_of_strings,
"hide_implicits": {"type": "boolean", "default": False},
"naming_scheme": {"type": "string"},
"projections": projections_scheme,
"all": module_file_configuration,
},
"additionalProperties": module_file_configuration,
}
lmod_configuration = {
"type": "object",
"default": {},
"additionalKeysAreSpecs": True,
"properties": {
**common_props,
# deep copies: a shallow .copy() would share the nested "properties" dict,
# so the lmod-specific update below would also mutate the tcl schema
tcl_configuration = copy.deepcopy(module_type_configuration)
lmod_configuration = copy.deepcopy(module_type_configuration)
lmod_configuration["properties"].update(
{
"core_compilers": array_of_strings,
"hierarchy": array_of_strings,
"core_specs": array_of_strings,
"filter_hierarchy_specs": {
"type": "object",
"additionalKeysAreSpecs": True,
"validate_spec": True,
"additionalProperties": array_of_strings,
},
},
"additionalProperties": module_file_configuration,
}
}
)
module_config_properties = {
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},

View File

@@ -5,12 +5,9 @@
import collections.abc
import copy
import enum
import errno
import functools
import hashlib
import io
import itertools
import json
import os
import pathlib
import pprint
@@ -20,25 +17,12 @@
import typing
import warnings
from contextlib import contextmanager
from typing import (
IO,
Callable,
Dict,
Iterator,
List,
NamedTuple,
Optional,
Set,
Tuple,
Type,
Union,
)
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union
import archspec.cpu
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import current_file_position
from llnl.util.lang import elide_list
import spack
@@ -52,24 +36,19 @@
import spack.error
import spack.package_base
import spack.package_prefs
import spack.patch
import spack.paths
import spack.platforms
import spack.repo
import spack.solver.splicing
import spack.spec
import spack.store
import spack.util.crypto
import spack.util.hash
import spack.util.libc
import spack.util.module_cmd as md
import spack.util.path
import spack.util.timer
import spack.variant as vt
import spack.version as vn
import spack.version.git_ref_lookup
from spack import traverse
from spack.util.file_cache import FileCache
from .core import (
AspFunction,
@@ -557,363 +536,6 @@ def format_unsolved(unsolved_specs):
msg += "\n\t(No candidate specs from solver)"
return msg
def to_dict(self, test: bool = False) -> dict:
"""Produces dict representation of Result object
Does not include anything related to unsatisfiability as we
are only interested in storing satisfiable results
"""
serial_node_arg = (
lambda node_dict: f"""{{"id": "{node_dict.id}", "pkg": "{node_dict.pkg}"}}"""
)
ret = dict()
ret["asp"] = self.asp
ret["criteria"] = self.criteria
ret["optimal"] = self.optimal
ret["warnings"] = self.warnings
ret["nmodels"] = self.nmodels
ret["abstract_specs"] = [str(x) for x in self.abstract_specs]
ret["satisfiable"] = self.satisfiable
serial_answers = []
for answer in self.answers:
serial_answer = answer[:2]
serial_answer_dict = {}
for node, spec in answer[2].items():
serial_answer_dict[serial_node_arg(node)] = spec.to_dict()
serial_answer = serial_answer + (serial_answer_dict,)
serial_answers.append(serial_answer)
ret["answers"] = serial_answers
ret["specs_by_input"] = {}
input_specs = {} if not self.specs_by_input else self.specs_by_input
for input, spec in input_specs.items():
ret["specs_by_input"][str(input)] = spec.to_dict()
return ret
@staticmethod
def from_dict(obj: dict):
"""Returns Result object from compatible dictionary"""
def _dict_to_node_argument(dict):
id = dict["id"]
pkg = dict["pkg"]
return NodeArgument(id=id, pkg=pkg)
def _str_to_spec(spec_str):
return spack.spec.Spec(spec_str)
def _dict_to_spec(spec_dict):
loaded_spec = spack.spec.Spec.from_dict(spec_dict)
_ensure_external_path_if_external(loaded_spec)
spack.spec.Spec.ensure_no_deprecated(loaded_spec)
return loaded_spec
asp = obj.get("asp")
spec_list = obj.get("abstract_specs")
if not spec_list:
raise RuntimeError("Invalid json for concretization Result object")
spec_list = [_str_to_spec(x) for x in spec_list]
result = Result(spec_list, asp)
result.criteria = obj.get("criteria")
result.optimal = obj.get("optimal")
result.warnings = obj.get("warnings")
result.nmodels = obj.get("nmodels")
result.satisfiable = obj.get("satisfiable")
result._unsolved_specs = []
answers = []
for answer in obj.get("answers", []):
loaded_answer = answer[:2]
answer_node_dict = {}
for node, spec in answer[2].items():
answer_node_dict[_dict_to_node_argument(json.loads(node))] = _dict_to_spec(spec)
loaded_answer.append(answer_node_dict)
answers.append(tuple(loaded_answer))
result.answers = answers
result._concrete_specs_by_input = {}
result._concrete_specs = []
for input, spec in obj.get("specs_by_input", {}).items():
result._concrete_specs_by_input[_str_to_spec(input)] = _dict_to_spec(spec)
result._concrete_specs.append(_dict_to_spec(spec))
return result
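The serializer above embeds each `NodeArgument` key as a small JSON string, since JSON object keys must themselves be strings; a stand-alone round-trip sketch (local `NodeArgument` stand-in for the solver's type):

import json
from typing import NamedTuple

class NodeArgument(NamedTuple):
    id: str
    pkg: str

def node_key(node: NodeArgument) -> str:
    # mirrors serial_node_arg: the node becomes a JSON string usable as a dict key
    return f'{{"id": "{node.id}", "pkg": "{node.pkg}"}}'

def node_from_key(key: str) -> NodeArgument:
    d = json.loads(key)
    return NodeArgument(id=d["id"], pkg=d["pkg"])

n = NodeArgument(id="0", pkg="zlib")
assert node_from_key(node_key(n)) == n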
class ConcretizationCache:
"""Store for Spack concretization results and statistics
Serializes solver Result objects and statistics to JSON and stores
them at a given endpoint in a cache keyed by the sha256 of the
ASP problem and the involved control files.
"""
def __init__(self, root: Union[str, None] = None):
root = root or spack.config.get(
"config:concretization_cache:url", spack.paths.default_conc_cache_path
)
self.root = pathlib.Path(spack.util.path.canonicalize_path(root))
self._fc = FileCache(self.root)
self._cache_manifest = ".cache_manifest"
self._manifest_queue: List[Tuple[pathlib.Path, int]] = []
def cleanup(self):
"""Prunes the concretization cache according to configured size and entry
count limits. Cleanup is done in FIFO ordering."""
# TODO: determine a better default
entry_limit = spack.config.get("config:concretization_cache:entry_limit", 1000)
bytes_limit = spack.config.get("config:concretization_cache:size_limit", 3e8)
# lock the entire cache as we're removing a lot of data from the
# manifest and cache itself
with self._fc.read_transaction(self._cache_manifest) as f:
count, cache_bytes = self._extract_cache_metadata(f)
if not count or not cache_bytes:
return
entry_count = int(count)
manifest_bytes = int(cache_bytes)
# move beyond the metadata entry
f.readline()
if entry_count > entry_limit and entry_limit > 0:
with self._fc.write_transaction(self._cache_manifest) as (old, new):
# prune the oldest 10% or until we have removed 10% of
# total bytes starting from oldest entry
# TODO: make this configurable?
prune_count = entry_limit // 10
lines_to_prune = [f.readline() for _ in range(prune_count)]  # readlines() takes a byte-size hint, not a line count
for i, line in enumerate(lines_to_prune):
sha, cache_entry_bytes = self._parse_manifest_entry(line)
if sha and cache_entry_bytes:
cache_path = self._cache_path_from_hash(sha)
if self._fc.remove(cache_path):
entry_count -= 1
manifest_bytes -= int(cache_entry_bytes)
else:
tty.warn(
f"Invalid concretization cache entry: '{line}' on line: {i+1}"
)
self._write_manifest(f, entry_count, manifest_bytes)
elif manifest_bytes > bytes_limit and bytes_limit > 0:
with self._fc.write_transaction(self._cache_manifest) as (old, new):
# take 10% of current size off
prune_amount = bytes_limit // 10
total_pruned = 0
i = 0
while total_pruned < prune_amount:
sha, manifest_cache_bytes = self._parse_manifest_entry(f.readline())
if sha and manifest_cache_bytes:
entry_bytes = int(manifest_cache_bytes)
cache_path = self.root / sha[:2] / sha
if self._safe_remove(cache_path):
entry_count -= 1
manifest_bytes -= entry_bytes
total_pruned += entry_bytes
else:
tty.warn(
"Invalid concretization cache entry "
f"'{sha} {manifest_cache_bytes}' on line: {i}"
)
i += 1
self._write_manifest(f, entry_count, manifest_bytes)
for cache_dir in self.root.iterdir():
if cache_dir.is_dir() and not any(cache_dir.iterdir()):
self._safe_remove(cache_dir)
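A toy sketch (hypothetical helper, not part of Spack) of the FIFO policy `cleanup` applies: when over the entry limit, drop the oldest ~10% of entries, oldest first:

from typing import List, Tuple

def prune_fifo(entries: List[Tuple[str, int]], entry_limit: int) -> List[Tuple[str, int]]:
    """entries are (sha256, bytes) pairs, oldest first; returns the survivors."""
    if entry_limit <= 0 or len(entries) <= entry_limit:
        return entries
    prune_count = max(1, entry_limit // 10)  # oldest 10%, at least one
    return entries[prune_count:]

assert len(prune_fifo([("a", 1)] * 12, 10)) == 11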
def cache_entries(self):
"""Generator producing cache entries"""
for cache_dir in self.root.iterdir():
# ensure component is cache entry directory
# not metadata file
if cache_dir.is_dir():
for cache_entry in cache_dir.iterdir():
if not cache_entry.is_dir():
yield cache_entry
else:
raise RuntimeError(
"Improperly formed concretization cache. "
f"Directory {cache_entry.name} is improperly located "
"within the concretization cache."
)
def _parse_manifest_entry(self, line):
"""Returns parsed manifest entry lines
with handling for invalid reads."""
if line:
cache_values = line.strip("\n").split(" ")
if len(cache_values) < 2:
tty.warn(f"Invalid cache entry at {line}")
return None, None
return None, None
def _write_manifest(self, manifest_file, entry_count, entry_bytes):
"""Writes new concretization cache manifest file.
Arguments:
manifest_file: IO stream opened for readin
and writing wrapping the manifest file
with cursor at calltime set to location
where manifest should be truncated
entry_count: new total entry count
entry_bytes: new total entry bytes count
"""
persisted_entries = manifest_file.readlines()
manifest_file.truncate(0)
manifest_file.write(f"{entry_count} {entry_bytes}\n")
manifest_file.writelines(persisted_entries)
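To pin down the manifest layout implied by the reader and writer above, a toy round-trip (shortened fake hashes; real entries use full sha256 digests): a `<entry_count> <total_bytes>` header line, then one `<sha256> <bytes>` line per entry:

import io

manifest = io.StringIO()
manifest.write("2 300\n")      # header: entry count, total bytes
manifest.write("aa11 100\n")   # oldest entry first (FIFO pruning order)
manifest.write("bb22 200\n")

manifest.seek(0)
count, total = manifest.readline().split()
entries = [line.split() for line in manifest]
assert int(count) == len(entries)
assert int(total) == sum(int(size) for _, size in entries)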
def _results_from_cache(self, cache_entry_buffer: IO[str]) -> Union[Result, None]:
"""Returns a Results object from the concretizer cache
Reads the cache hit and uses `Result`'s own deserializer
to produce a new Result object
"""
with current_file_position(cache_entry_buffer, 0):
cache_str = cache_entry_buffer.read()
# TODO: Should this be an error if None?
# Same for _stats_from_cache
if cache_str:
cache_entry = json.loads(cache_str)
result_json = cache_entry["results"]
return Result.from_dict(result_json)
return None
def _stats_from_cache(self, cache_entry_buffer: IO[str]) -> Union[List, None]:
"""Returns concretization statistic from the
concretization associated with the cache.
Deserialzes the the json representation of the
statistics covering the cached concretization run
and returns the Python data structures
"""
with current_file_position(cache_entry_buffer, 0):
cache_str = cache_entry_buffer.read()
if cache_str:
return json.loads(cache_str)["statistics"]
return None
def _extract_cache_metadata(self, cache_stream: IO[str]):
"""Extracts and returns cache entry count and bytes count from head of manifest
file"""
# make sure we're always reading from the beginning of the stream
# concretization cache manifest data lives at the top of the file
with current_file_position(cache_stream, 0):
return self._parse_manifest_entry(cache_stream.readline())
def _prefix_digest(self, problem: str) -> Tuple[str, str]:
"""Return the first two characters of, and the full, sha256 of the given asp problem"""
prob_digest = hashlib.sha256(problem.encode()).hexdigest()
prefix = prob_digest[:2]
return prefix, prob_digest
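The two-character prefix shards cache entries into subdirectories so no single directory grows too large; a self-contained sketch of the same path scheme:

import hashlib
import pathlib

def cache_path(problem: str) -> pathlib.Path:
    digest = hashlib.sha256(problem.encode()).hexdigest()
    # shard by the first two hex characters, then the full digest
    return pathlib.Path(digest[:2]) / digest

p = cache_path("node(0, zlib).")
assert p.parts[0] == p.parts[1][:2] and len(p.parts[1]) == 64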
def _cache_path_from_problem(self, problem: str) -> pathlib.Path:
"""Returns a Path object representing the path to the cache
entry for the given problem"""
prefix, digest = self._prefix_digest(problem)
return pathlib.Path(prefix) / digest
def _cache_path_from_hash(self, hash: str) -> pathlib.Path:
"""Returns a Path object representing the cache entry
corresponding to the given sha256 hash"""
return pathlib.Path(hash[:2]) / hash
def _lock_prefix_from_cache_path(self, cache_path: str):
"""Returns the bit location corresponding to a given cache entry path
for file locking"""
return spack.util.hash.base32_prefix_bits(
spack.util.hash.b32_hash(cache_path), spack.util.crypto.bit_length(sys.maxsize)
)
def flush_manifest(self):
"""Updates the concretization cache manifest file after a cache write operation
Updates the current byte count and entry counts and writes to the head of the
manifest file"""
manifest_file = self.root / self._cache_manifest
manifest_file.touch(exist_ok=True)
with open(manifest_file, "r+", encoding="utf-8") as f:
# check if manifest is empty
count, cache_bytes = self._extract_cache_metadata(f)
if not count or not cache_bytes:
# cache is uninitialized
count = 0
cache_bytes = 0
f.seek(0, io.SEEK_END)
for manifest_update in self._manifest_queue:
entry_path, entry_bytes = manifest_update
count += 1
cache_bytes += entry_bytes
f.write(f"{entry_path.name} {entry_bytes}")
f.seek(0, io.SEEK_SET)
new_stats = f"{int(count)+1} {int(cache_bytes)}\n"
f.write(new_stats)
def _register_cache_update(self, cache_path: pathlib.Path, bytes_written: int):
"""Adds manifest entry to update queue for later updates to the manifest"""
self._manifest_queue.append((cache_path, bytes_written))
def _safe_remove(self, cache_dir: pathlib.Path):
"""Removes cache entries with handling for the case where the entry has been
removed already or there are multiple cache entries in a directory"""
try:
if cache_dir.is_dir():
cache_dir.rmdir()
else:
cache_dir.unlink()
return True
except FileNotFoundError:
# This is acceptable, removal is idempotent
pass
except OSError as e:
if e.errno == errno.ENOTEMPTY:
# there exists another cache entry in this directory, don't clean yet
pass
return False
def store(self, problem: str, result: Result, statistics: List, test: bool = False):
"""Creates entry in concretization cache for problem if none exists,
storing the concretization Result object and statistics in the cache
as serialized json joined as a single file.
Hash membership is computed based on the sha256 of the provided asp
problem.
"""
cache_path = self._cache_path_from_problem(problem)
if self._fc.init_entry(cache_path):
# if an entry for this conc hash exists already, we don't want
# to overwrite it, just exit
tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
return
with self._fc.write_transaction(cache_path) as (old, new):
if old:
# Entry for this conc hash exists already, do not overwrite
tty.debug(f"Cache entry {cache_path} exists, will not be overwritten")
return
cache_dict = {"results": result.to_dict(test=test), "statistics": statistics}
bytes_written = new.write(json.dumps(cache_dict))
self._register_cache_update(cache_path, bytes_written)
def fetch(self, problem: str) -> Union[Tuple[Result, List], Tuple[None, None]]:
"""Returns the concretization cache result for a lookup based on the given problem.
Checks the concretization cache for the given problem, and either returns the
Python objects cached on disk representing the concretization results and statistics
or returns (None, None) if no cache entry was found.
"""
cache_path = self._cache_path_from_problem(problem)
result, statistics = None, None
with self._fc.read_transaction(cache_path) as f:
if f:
result = self._results_from_cache(f)
statistics = self._stats_from_cache(f)
if result and statistics:
tty.debug(f"Concretization cache hit at {str(cache_path)}")
return result, statistics
tty.debug(f"Concretization cache miss at {str(cache_path)}")
return None, None
CONC_CACHE: ConcretizationCache = llnl.util.lang.Singleton(
lambda: ConcretizationCache()
) # type: ignore
def _normalize_packages_yaml(packages_yaml):
normalized_yaml = copy.copy(packages_yaml)
@@ -1182,15 +804,6 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
if sys.platform == "win32":
tty.debug("Ensuring basic dependencies {win-sdk, wgl} available")
spack.bootstrap.core.ensure_winsdk_external_or_raise()
control_files = ["concretize.lp", "heuristic.lp", "display.lp"]
if not setup.concretize_everything:
control_files.append("when_possible.lp")
if using_libc_compatibility():
control_files.append("libc_compatibility.lp")
else:
control_files.append("os_compatibility.lp")
if setup.enable_splicing:
control_files.append("splices.lp")
timer.start("setup")
asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
@@ -1200,133 +813,123 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
return Result(specs), None, None
timer.stop("setup")
timer.start("cache-check")
timer.start("ordering")
# ensure deterministic output
problem_repr = "\n".join(sorted(asp_problem.split("\n")))
timer.stop("ordering")
timer.start("load")
# Add the problem instance
self.control.add("base", [], asp_problem)
# Load the file itself
parent_dir = os.path.dirname(__file__)
full_path = lambda x: os.path.join(parent_dir, x)
abs_control_files = [full_path(x) for x in control_files]
for ctrl_file in abs_control_files:
with open(ctrl_file, "r", encoding="utf-8") as f:
problem_repr += "\n" + f.read()
self.control.load(os.path.join(parent_dir, "concretize.lp"))
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
self.control.load(os.path.join(parent_dir, "display.lp"))
if not setup.concretize_everything:
self.control.load(os.path.join(parent_dir, "when_possible.lp"))
result = None
conc_cache_enabled = spack.config.get("config:concretization_cache:enable", True)
if conc_cache_enabled:
result, concretization_stats = CONC_CACHE.fetch(problem_repr)
# Binary compatibility is based on libc on Linux, and on the os tag elsewhere
if using_libc_compatibility():
self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
else:
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
if setup.enable_splicing:
self.control.load(os.path.join(parent_dir, "splices.lp"))
timer.stop("cache-check")
if not result:
timer.start("load")
# Add the problem instance
self.control.add("base", [], asp_problem)
# Load the files
[self.control.load(lp) for lp in abs_control_files]
timer.stop("load")
timer.stop("load")
# Grounding is the first step in the solve -- it turns our facts
# and first-order logic rules into propositional logic.
timer.start("ground")
self.control.ground([("base", [])])
timer.stop("ground")
# Grounding is the first step in the solve -- it turns our facts
# and first-order logic rules into propositional logic.
timer.start("ground")
self.control.ground([("base", [])])
timer.stop("ground")
# With a grounded program, we can run the solve.
models = [] # stable models if things go well
cores = [] # unsatisfiable cores if they do not
# With a grounded program, we can run the solve.
models = [] # stable models if things go well
cores = [] # unsatisfiable cores if they do not
def on_model(model):
models.append((model.cost, model.symbols(shown=True, terms=True)))
def on_model(model):
models.append((model.cost, model.symbols(shown=True, terms=True)))
solve_kwargs = {
"assumptions": setup.assumptions,
"on_model": on_model,
"on_core": cores.append,
}
solve_kwargs = {
"assumptions": setup.assumptions,
"on_model": on_model,
"on_core": cores.append,
}
if clingo_cffi():
solve_kwargs["on_unsat"] = cores.append
if clingo_cffi():
solve_kwargs["on_unsat"] = cores.append
timer.start("solve")
time_limit = spack.config.CONFIG.get("concretizer:timeout", -1)
error_on_timeout = spack.config.CONFIG.get("concretizer:error_on_timeout", True)
# Spack uses 0 to set no time limit, clingo API uses -1
if time_limit == 0:
time_limit = -1
with self.control.solve(**solve_kwargs, async_=True) as handle:
finished = handle.wait(time_limit)
if not finished:
specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
header = (
f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
)
if error_on_timeout:
raise UnsatisfiableSpecError(f"{header}, stopping concretization")
warnings.warn(f"{header}, using the best configuration found so far")
handle.cancel()
timer.start("solve")
time_limit = spack.config.CONFIG.get("concretizer:timeout", -1)
error_on_timeout = spack.config.CONFIG.get("concretizer:error_on_timeout", True)
# Spack uses 0 to set no time limit, clingo API uses -1
if time_limit == 0:
time_limit = -1
with self.control.solve(**solve_kwargs, async_=True) as handle:
finished = handle.wait(time_limit)
if not finished:
specs_str = ", ".join(llnl.util.lang.elide_list([str(s) for s in specs], 4))
header = f"Spack is taking more than {time_limit} seconds to solve for {specs_str}"
if error_on_timeout:
raise UnsatisfiableSpecError(f"{header}, stopping concretization")
warnings.warn(f"{header}, using the best configuration found so far")
handle.cancel()
solve_result = handle.get()
timer.stop("solve")
solve_result = handle.get()
timer.stop("solve")
# once done, construct the solve result
result = Result(specs)
result.satisfiable = solve_result.satisfiable
# once done, construct the solve result
result = Result(specs)
result.satisfiable = solve_result.satisfiable
if result.satisfiable:
timer.start("construct_specs")
# get the best model
builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
min_cost, best_model = min(models)
if result.satisfiable:
timer.start("construct_specs")
# get the best model
builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
min_cost, best_model = min(models)
# first check for errors
error_handler = ErrorHandler(best_model, specs)
error_handler.raise_if_errors()
# first check for errors
error_handler = ErrorHandler(best_model, specs)
error_handler.raise_if_errors()
# build specs from spec attributes in the model
spec_attrs = [
(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")
]
answers = builder.build_specs(spec_attrs)
# build specs from spec attributes in the model
spec_attrs = [(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")]
answers = builder.build_specs(spec_attrs)
# add best spec to the results
result.answers.append((list(min_cost), 0, answers))
# add best spec to the results
result.answers.append((list(min_cost), 0, answers))
# get optimization criteria
criteria_args = extract_args(best_model, "opt_criterion")
result.criteria = build_criteria_names(min_cost, criteria_args)
# get optimization criteria
criteria_args = extract_args(best_model, "opt_criterion")
result.criteria = build_criteria_names(min_cost, criteria_args)
# record the number of models the solver considered
result.nmodels = len(models)
# record the number of models the solver considered
result.nmodels = len(models)
# record the possible dependencies in the solve
result.possible_dependencies = setup.pkgs
timer.stop("construct_specs")
timer.stop()
elif cores:
result.control = self.control
result.cores.extend(cores)
# record the possible dependencies in the solve
result.possible_dependencies = setup.pkgs
timer.stop("construct_specs")
timer.stop()
elif cores:
result.control = self.control
result.cores.extend(cores)
result.raise_if_unsat()
if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: the solver completed but produced specs"
" that do not satisfy the request. Please report a bug at "
f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
)
if conc_cache_enabled:
CONC_CACHE.store(problem_repr, result, self.control.statistics, test=setup.tests)
concretization_stats = self.control.statistics
if output.timers:
timer.write_tty()
print()
if output.stats:
print("Statistics:")
pprint.pprint(concretization_stats)
return result, timer, concretization_stats
pprint.pprint(self.control.statistics)
result.raise_if_unsat()
if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
"Internal Spack error: the solver completed but produced specs"
" that do not satisfy the request. Please report a bug at "
f"https://github.com/spack/spack/issues\n\t{unsolved_str}"
)
return result, timer, self.control.statistics
class ConcreteSpecsByHash(collections.abc.Mapping):
@@ -1768,7 +1371,7 @@ def effect_rules(self):
return
self.gen.h2("Imposed requirements")
for name in sorted(self._effect_cache):
for name in self._effect_cache:
cache = self._effect_cache[name]
for (spec_str, _), (effect_id, requirements) in cache.items():
self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
@@ -1821,8 +1424,8 @@ def define_variant(
elif isinstance(values, vt.DisjointSetsOfValues):
union = set()
for sid, s in enumerate(sorted(values.sets)):
for value in sorted(s):
for sid, s in enumerate(values.sets):
for value in s:
pkg_fact(fn.variant_value_from_disjoint_sets(vid, value, sid))
union.update(s)
values = union
@@ -2003,7 +1606,7 @@ def package_provider_rules(self, pkg):
self.gen.fact(fn.pkg_fact(pkg.name, fn.possible_provider(vpkg_name)))
for when, provided in pkg.provided.items():
for vpkg in sorted(provided):
for vpkg in provided:
if vpkg.name not in self.possible_virtuals:
continue
@@ -2018,8 +1621,8 @@ def package_provider_rules(self, pkg):
condition_id = self.condition(
when, required_name=pkg.name, msg="Virtuals are provided together"
)
for set_id, virtuals_together in enumerate(sorted(sets_of_virtuals)):
for name in sorted(virtuals_together):
for set_id, virtuals_together in enumerate(sets_of_virtuals):
for name in virtuals_together:
self.gen.fact(
fn.pkg_fact(pkg.name, fn.provided_together(condition_id, set_id, name))
)
@@ -2053,16 +1656,13 @@ def track_dependencies(input_spec, requirements):
return requirements + [fn.attr("track_dependencies", input_spec.name)]
def dependency_holds(input_spec, requirements):
result = remove_node(input_spec, requirements) + [
return remove_node(input_spec, requirements) + [
fn.attr(
"dependency_holds", pkg.name, input_spec.name, dt.flag_to_string(t)
)
for t in dt.ALL_FLAGS
if t & depflag
]
if input_spec.name not in pkg.extendees:
return result
return result + [fn.attr("extends", pkg.name, input_spec.name)]
context = ConditionContext()
context.source = ConstraintOrigin.append_type_suffix(
@@ -2129,7 +1729,7 @@ def package_splice_rules(self, pkg):
for map in pkg.variants.values():
for k in map:
filt_match_variants.add(k)
filt_match_variants = sorted(filt_match_variants)
filt_match_variants = list(filt_match_variants)
variant_constraints = self._gen_match_variant_splice_constraints(
pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants
)
@@ -2234,8 +1834,8 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
spec.attach_git_version_lookup()
when_spec = spec
if virtual and spec.name != pkg_name:
when_spec = spack.spec.Spec(f"^[virtuals={pkg_name}] {spec.name}")
if virtual:
when_spec = spack.spec.Spec(pkg_name)
try:
context = ConditionContext()
@@ -2659,7 +2259,7 @@ def define_package_versions_and_validate_preferences(
):
"""Declare any versions in specs not declared in packages."""
packages_yaml = spack.config.get("packages")
for pkg_name in sorted(possible_pkgs):
for pkg_name in possible_pkgs:
pkg_cls = self.pkg_class(pkg_name)
# All the versions from the corresponding package.py file. Since concepts
@@ -2987,7 +2587,7 @@ def define_variant_values(self):
"""
# Tell the concretizer about possible values from specs seen in spec_clauses().
# We might want to order these facts by pkg and name if we are debugging.
for pkg_name, variant_def_id, value in sorted(self.variant_values_from_specs):
for pkg_name, variant_def_id, value in self.variant_values_from_specs:
try:
vid = self.variant_ids_by_def_id[variant_def_id]
except KeyError:
@@ -3025,8 +2625,6 @@ def concrete_specs(self):
# Declare as possible parts of specs that are not in package.py
# - Add versions to possible versions
# - Add OS to possible OS's
# is traverse deterministic?
for dep in spec.traverse():
self.possible_versions[dep.name].add(dep.version)
if isinstance(dep.version, vn.GitVersion):
@@ -3264,7 +2862,7 @@ def define_runtime_constraints(self):
recorder.consume_facts()
def literal_specs(self, specs):
for spec in sorted(specs):
for spec in specs:
self.gen.h2("Spec: %s" % str(spec))
condition_id = next(self._id_counter)
trigger_id = next(self._id_counter)
@@ -3432,7 +3030,8 @@ def __init__(self):
self.asp_problem = []
def fact(self, atom: AspFunction) -> None:
self.asp_problem.append(f"{atom}.\n")
symbol = atom.symbol() if hasattr(atom, "symbol") else atom
self.asp_problem.append(f"{str(symbol)}.\n")
def append(self, rule: str) -> None:
self.asp_problem.append(rule)
@@ -3764,7 +3363,7 @@ def consume_facts(self):
# on the available compilers)
self._setup.pkg_version_rules(runtime_pkg)
for imposed_spec, when_spec in sorted(self.runtime_conditions):
for imposed_spec, when_spec in self.runtime_conditions:
msg = f"{when_spec} requires {imposed_spec} at runtime"
_ = self._setup.condition(when_spec, imposed_spec=imposed_spec, msg=msg)
@@ -4103,11 +3702,11 @@ def build_specs(self, function_tuples):
roots = [spec.root for spec in self._specs.values()]
roots = dict((id(r), r) for r in roots)
for root in roots.values():
_inject_patches_variant(root)
spack.spec.Spec.inject_patches_variant(root)
# Add external paths to specs with just external modules
for s in self._specs.values():
_ensure_external_path_if_external(s)
spack.spec.Spec.ensure_external_path_if_external(s)
for s in self._specs.values():
_develop_specs_from_env(s, ev.active_environment())
@@ -4179,92 +3778,6 @@ def execute_explicit_splices(self):
return specs
def _inject_patches_variant(root: spack.spec.Spec) -> None:
# This dictionary will store object IDs rather than Specs as keys
# since the Spec __hash__ will change as patches are added to them
spec_to_patches: Dict[int, Set[spack.patch.Patch]] = {}
for s in root.traverse():
# After concretizing, assign namespaces to anything left.
# Note that this doesn't count as a "change". The repository
# configuration is constant throughout a spack run, and
# normalize and concretize evaluate Packages using Repo.get(),
# which respects precedence. So, a namespace assignment isn't
# changing how a package name would have been interpreted and
# we can do it as late as possible to allow as much
# compatibility across repositories as possible.
if s.namespace is None:
s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace
if s.concrete:
continue
# Add any patches from the package to the spec.
node_patches = {
patch
for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items()
if s.satisfies(cond)
for patch in patch_list
}
if node_patches:
spec_to_patches[id(s)] = node_patches
# Also record all patches required on dependencies by depends_on(..., patch=...)
for dspec in root.traverse_edges(deptype=dt.ALL, cover="edges", root=False):
if dspec.spec.concrete:
continue
pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
edge_patches: List[spack.patch.Patch] = []
for cond, deps_by_name in pkg_deps.items():
if not dspec.parent.satisfies(cond):
continue
dependency = deps_by_name.get(dspec.spec.name)
if not dependency:
continue
for pcond, patch_list in dependency.patches.items():
if dspec.spec.satisfies(pcond):
edge_patches.extend(patch_list)
if edge_patches:
spec_to_patches.setdefault(id(dspec.spec), set()).update(edge_patches)
for spec in root.traverse():
if id(spec) not in spec_to_patches:
continue
patches = list(spec_to_patches[id(spec)])
variant: vt.MultiValuedVariant = spec.variants.setdefault(
"patches", vt.MultiValuedVariant("patches", ())
)
variant.value = tuple(p.sha256 for p in patches)
# FIXME: Monkey patches variant to store patches order
ordered_hashes = [(*p.ordering_key, p.sha256) for p in patches if p.ordering_key]
ordered_hashes.sort()
tty.debug(
f"Ordered hashes [{spec.name}]: "
+ ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
)
setattr(
variant, "_patches_in_order_of_appearance", [sha256 for _, _, sha256 in ordered_hashes]
)
def _ensure_external_path_if_external(spec: spack.spec.Spec) -> None:
if not spec.external_modules or spec.external_path:
return
# Get the path from the module the package can override the default
# (this is mostly needed for Cray)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
package = pkg_cls(spec)
spec.external_path = getattr(package, "external_prefix", None) or md.path_from_modules(
spec.external_modules
)
def _develop_specs_from_env(spec, env):
dev_info = env.dev_specs.get(spec.name, {}) if env else {}
if not dev_info:
@@ -4621,9 +4134,6 @@ def solve_with_stats(
reusable_specs.extend(self.selector.reusable_specs(specs))
setup = SpackSolverSetup(tests=tests)
output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
CONC_CACHE.flush_manifest()
CONC_CACHE.cleanup()
return self.driver.solve(
setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
)
@@ -4693,9 +4203,6 @@ def solve_in_rounds(
for spec in result.specs:
reusable_specs.extend(spec.traverse())
CONC_CACHE.flush_manifest()
CONC_CACHE.cleanup()
class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
"""There was an issue with the spec that was requested (i.e. a user error)."""

View File

@@ -524,16 +524,6 @@ error(10, "'{0}' is not a valid dependency for any package in the DAG", Package)
:- attr("node", node(ID, Package)),
not needed(node(ID, Package)).
% Extensions depending on each other must all extend the same node (e.g. all Python packages
% depending on each other must depend on the same Python interpreter)
error(100, "{0} and {1} must depend on the same {2}", ExtensionParent, ExtensionChild, ExtendeePackage)
:- depends_on(ExtensionParent, ExtensionChild),
attr("extends", ExtensionParent, ExtendeePackage),
depends_on(ExtensionParent, node(X, ExtendeePackage)),
depends_on(ExtensionChild, node(Y, ExtendeePackage)),
X != Y.
#defined dependency_type/2.
%-----------------------------------------------------------------------------
@@ -597,13 +587,6 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
attr("virtual_on_incoming_edges", ProviderNode, Virtual)
:- attr("virtual_on_edge", _, ProviderNode, Virtual).
% This is needed to allow requirement on virtuals,
% when a virtual root is requested
attr("virtual_on_incoming_edges", ProviderNode, Virtual)
:- attr("virtual_root", node(min_dupe_id, Virtual)),
attr("root", ProviderNode),
provider(ProviderNode, node(min_dupe_id, Virtual)).
% dependencies on virtuals also imply that the virtual is a virtual node
1 { attr("virtual_node", node(0..X-1, Virtual)) : max_dupes(Virtual, X) }
:- node_depends_on_virtual(PackageNode, Virtual).
@@ -960,14 +943,12 @@ error(100, "Cannot set variant '{0}' for package '{1}' because the variant condi
build(node(ID, Package)).
% at most one variant value for single-valued variants.
error(100, "'{0}' requires conflicting variant values 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2)
error(100, "'{0}' required multiple values for single-valued variant '{1}'", Package, Variant)
:- attr("node", node(ID, Package)),
node_has_variant(node(ID, Package), Variant, _),
variant_single_value(node(ID, Package), Variant),
attr("variant_value", node(ID, Package), Variant, Value1),
attr("variant_value", node(ID, Package), Variant, Value2),
Value1 < Value2,
build(node(ID, Package)).
build(node(ID, Package)),
2 { attr("variant_value", node(ID, Package), Variant, Value) }.
error(100, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
:- attr("node", node(ID, Package)),

View File

@@ -31,19 +31,16 @@ class AspObject:
"""Object representing a piece of ASP code."""
def _id(thing: Any) -> Union[str, int, AspObject]:
def _id(thing: Any) -> Union[str, AspObject]:
"""Quote string if needed for it to be a valid identifier."""
if isinstance(thing, bool):
return f'"{thing}"'
elif isinstance(thing, (AspObject, int)):
if isinstance(thing, AspObject):
return thing
elif isinstance(thing, bool):
return f'"{str(thing)}"'
elif isinstance(thing, int):
return str(thing)
else:
if isinstance(thing, str):
# escape characters that cannot be in clingo strings
thing = thing.replace("\\", r"\\")
thing = thing.replace("\n", r"\n")
thing = thing.replace('"', r"\"")
return f'"{thing}"'
return f'"{str(thing)}"'
class AspVar(AspObject):
@@ -93,9 +90,26 @@ def __call__(self, *args: Any) -> "AspFunction":
"""
return AspFunction(self.name, self.args + args)
def _argify(self, arg: Any) -> Any:
"""Turn the argument into an appropriate clingo symbol"""
if isinstance(arg, bool):
return clingo().String(str(arg))
elif isinstance(arg, int):
return clingo().Number(arg)
elif isinstance(arg, AspFunction):
return clingo().Function(arg.name, [self._argify(x) for x in arg.args], positive=True)
elif isinstance(arg, AspVar):
return clingo().Variable(arg.name)
return clingo().String(str(arg))
def symbol(self):
"""Return a clingo symbol for this function"""
return clingo().Function(
self.name, [self._argify(arg) for arg in self.args], positive=True
)
def __str__(self) -> str:
args = f"({','.join(str(_id(arg)) for arg in self.args)})"
return f"{self.name}{args}"
return f"{self.name}({', '.join(str(_id(arg)) for arg in self.args)})"
def __repr__(self) -> str:
return str(self)
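And, assuming the `clingo` Python package is installed, a minimal demonstration of constructing the same kind of symbol that `symbol()` returns; the rendering in the comment is what clingo prints for this symbol:

import clingo

sym = clingo.Function(
    "attr",
    [clingo.String("node"), clingo.Number(0), clingo.String("zlib")],
    positive=True,
)
print(sym)  # attr("node",0,"zlib")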

View File

@@ -117,7 +117,7 @@ error(0, "Cannot find a valid provider for virtual {0}", Virtual, startcauses, C
condition_holds(Cause, node(CID, TriggerPkg)).
% At most one variant value for single-valued variants
error(0, "'{0}' requires conflicting variant values 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X)
error(0, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2, startcauses, Cause1, X, Cause2, X)
:- attr("node", node(X, Package)),
node_has_variant(node(X, Package), Variant, VariantID),
variant_single_value(node(X, Package), Variant),

View File

@@ -468,29 +468,16 @@ def possible_packages_facts(self, gen, fn):
gen.newline()
gen.h2("Packages with at multiple possible nodes (build-tools)")
default = spack.config.CONFIG.get("concretizer:duplicates:max_dupes:default", 2)
for package_name in sorted(self.possible_dependencies() & build_tools):
max_dupes = spack.config.CONFIG.get(
f"concretizer:duplicates:max_dupes:{package_name}", default
)
gen.fact(fn.max_dupes(package_name, max_dupes))
if max_dupes > 1:
gen.fact(fn.multiple_unification_sets(package_name))
gen.fact(fn.max_dupes(package_name, 2))
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (link-run virtuals)")
for package_name in sorted(self._link_run_virtuals):
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (other virtuals)")
for package_name in sorted(self.possible_virtuals() - self._link_run_virtuals):
max_dupes = spack.config.CONFIG.get(
f"concretizer:duplicates:max_dupes:{package_name}", default
)
gen.fact(fn.max_dupes(package_name, max_dupes))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))

View File

@@ -10,7 +10,7 @@
import spack.error
import spack.package_base
import spack.spec
from spack.util.spack_yaml import get_mark_from_yaml_data
from spack.config import get_mark_from_yaml_data
class RequirementKind(enum.Enum):
@@ -66,29 +66,18 @@ def rules_from_package_py(self, pkg: spack.package_base.PackageBase) -> List[Req
return rules
def rules_from_virtual(self, virtual_str: str) -> List[RequirementRule]:
kind, requests = self._raw_yaml_data(virtual_str, section="require", virtual=True)
result = self._rules_from_requirements(virtual_str, requests, kind=kind)
kind, requests = self._raw_yaml_data(virtual_str, section="prefer", virtual=True)
result.extend(self._rules_from_preferences(virtual_str, preferences=requests, kind=kind))
kind, requests = self._raw_yaml_data(virtual_str, section="conflict", virtual=True)
result.extend(self._rules_from_conflicts(virtual_str, conflicts=requests, kind=kind))
return result
requirements = self.config.get("packages", {}).get(virtual_str, {}).get("require", [])
return self._rules_from_requirements(
virtual_str, requirements, kind=RequirementKind.VIRTUAL
)
def rules_from_require(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
kind, requirements = self._raw_yaml_data(pkg.name, section="require")
kind, requirements = self._raw_yaml_data(pkg, section="require")
return self._rules_from_requirements(pkg.name, requirements, kind=kind)
def rules_from_prefer(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
kind, preferences = self._raw_yaml_data(pkg.name, section="prefer")
return self._rules_from_preferences(pkg.name, preferences=preferences, kind=kind)
def _rules_from_preferences(
self, pkg_name: str, *, preferences, kind: RequirementKind
) -> List[RequirementRule]:
result = []
kind, preferences = self._raw_yaml_data(pkg, section="prefer")
for item in preferences:
spec, condition, message = self._parse_prefer_conflict_item(item)
result.append(
@@ -97,7 +86,7 @@ def _rules_from_preferences(
# require:
# - any_of: [spec_str, "@:"]
RequirementRule(
pkg_name=pkg_name,
pkg_name=pkg.name,
policy="any_of",
requirements=[spec, spack.spec.Spec("@:")],
kind=kind,
@@ -108,13 +97,8 @@ def _rules_from_preferences(
return result
def rules_from_conflict(self, pkg: spack.package_base.PackageBase) -> List[RequirementRule]:
kind, conflicts = self._raw_yaml_data(pkg.name, section="conflict")
return self._rules_from_conflicts(pkg.name, conflicts=conflicts, kind=kind)
def _rules_from_conflicts(
self, pkg_name: str, *, conflicts, kind: RequirementKind
) -> List[RequirementRule]:
result = []
kind, conflicts = self._raw_yaml_data(pkg, section="conflict")
for item in conflicts:
spec, condition, message = self._parse_prefer_conflict_item(item)
result.append(
@@ -123,7 +107,7 @@ def _rules_from_conflicts(
# require:
# - one_of: [spec_str, "@:"]
RequirementRule(
pkg_name=pkg_name,
pkg_name=pkg.name,
policy="one_of",
requirements=[spec, spack.spec.Spec("@:")],
kind=kind,
@@ -145,14 +129,10 @@ def _parse_prefer_conflict_item(self, item):
message = item.get("message")
return spec, condition, message
def _raw_yaml_data(self, pkg_name: str, *, section: str, virtual: bool = False):
def _raw_yaml_data(self, pkg: spack.package_base.PackageBase, *, section: str):
config = self.config.get("packages")
data = config.get(pkg_name, {}).get(section, [])
data = config.get(pkg.name, {}).get(section, [])
kind = RequirementKind.PACKAGE
if virtual:
return RequirementKind.VIRTUAL, data
if not data:
data = config.get("all", {}).get(section, [])
kind = RequirementKind.DEFAULT
@@ -185,8 +165,7 @@ def _rules_from_requirements(
# validate specs from YAML first, and fail with line numbers if parsing fails.
constraints = [
parse_spec_from_yaml_string(constraint, named=kind == RequirementKind.VIRTUAL)
for constraint in constraints
parse_spec_from_yaml_string(constraint) for constraint in constraints
]
when_str = requirement.get("when")
when = parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()
@@ -224,7 +203,7 @@ def reject_requirement_constraint(
s.validate_or_raise()
except spack.error.SpackError as e:
tty.debug(
f"[{__name__}] Rejecting the default '{constraint}' requirement "
f"[SETUP] Rejecting the default '{constraint}' requirement "
f"on '{pkg_name}': {str(e)}",
level=2,
)
@@ -232,37 +211,21 @@ def reject_requirement_constraint(
return False
def parse_spec_from_yaml_string(string: str, *, named: bool = False) -> spack.spec.Spec:
def parse_spec_from_yaml_string(string: str) -> spack.spec.Spec:
"""Parse a spec from YAML and add file/line info to errors, if it's available.
Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
add file/line information for debugging using file/line annotations from the string.
Args:
Arguments:
string: a string representing a ``Spec`` from config YAML.
named: if True, the spec must have a name
"""
try:
result = spack.spec.Spec(string)
return spack.spec.Spec(string)
except spack.error.SpecSyntaxError as e:
mark = get_mark_from_yaml_data(string)
if mark:
msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
raise spack.error.SpecSyntaxError(msg) from e
raise e
if named is True and not result.name:
msg = f"expected a named spec, but got '{string}' instead"
mark = get_mark_from_yaml_data(string)
# Add a hint in case the intended name was parsed as a dependency
deps = result.dependencies()
if len(deps) == 1:
msg = f"{msg}. Did you mean '{deps[0]}'?"
if mark:
msg = f"{mark.name}:{mark.line + 1}: {msg}"
raise spack.error.SpackError(msg)
return result

View File

@@ -52,7 +52,6 @@
import enum
import io
import itertools
import json
import os
import pathlib
import platform
@@ -97,7 +96,9 @@
import spack.spec_parser
import spack.store
import spack.traverse
import spack.util.executable
import spack.util.hash
import spack.util.module_cmd as md
import spack.util.prefix
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
@@ -174,17 +175,15 @@
#: Spec(Spec("string").format()) == Spec("string)"
DEFAULT_FORMAT = (
"{name}{@versions}"
"{compiler_flags}"
"{%compiler.name}{@compiler.versions}{compiler_flags}"
"{variants}{ namespace=namespace_if_anonymous}{ arch=architecture}{/abstract_hash}"
" {%compiler.name}{@compiler.versions}"
)
#: Display format, which eliminates extra `@=` in the output, for readability.
DISPLAY_FORMAT = (
"{name}{@version}"
"{compiler_flags}"
"{%compiler.name}{@compiler.version}{compiler_flags}"
"{variants}{ namespace=namespace_if_anonymous}{ arch=architecture}{/abstract_hash}"
" {%compiler.name}{@compiler.version}"
)
#: Regular expression to pull spec contents out of clearsigned signature
@@ -799,7 +798,7 @@ def update_deptypes(self, depflag: dt.DepFlag) -> bool:
self.depflag = new
return True
def update_virtuals(self, virtuals: Iterable[str]) -> bool:
def update_virtuals(self, virtuals: Tuple[str, ...]) -> bool:
"""Update the list of provided virtuals"""
old = self.virtuals
self.virtuals = tuple(sorted(set(virtuals).union(self.virtuals)))
@@ -1109,6 +1108,28 @@ def clear(self):
self.edges.clear()
def _command_default_handler(spec: "Spec"):
"""Default handler when looking for the 'command' attribute.
Tries to search for ``spec.name`` in the ``spec.home.bin`` directory.
Parameters:
spec: spec that is being queried
Returns:
Executable: An executable of the command
Raises:
RuntimeError: If the command is not found
"""
home = getattr(spec.package, "home")
path = os.path.join(home.bin, spec.name)
if fs.is_exe(path):
return spack.util.executable.Executable(path)
raise RuntimeError(f"Unable to locate {spec.name} command in {home.bin}")
def _headers_default_handler(spec: "Spec"):
"""Default handler when looking for the 'headers' attribute.
@@ -1312,7 +1333,9 @@ class SpecBuildInterface(lang.ObjectWrapper):
home = ForwardQueryToPackage("home", default_handler=None)
headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
command = ForwardQueryToPackage("command", default_handler=None, _indirect=True)
command = ForwardQueryToPackage(
"command", default_handler=_command_default_handler, _indirect=True
)
def __init__(
self,
@@ -1490,7 +1513,7 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
self.abstract_hash = None
# initial values for all spec hash types
for h in ht.HASHES:
for h in ht.hashes:
setattr(self, h.attr, None)
# cache for spec's prefix, computed lazily by prefix property
@@ -2083,34 +2106,32 @@ def long_spec(self):
def short_spec(self):
"""Returns a version of the spec with the dependencies hashed
instead of completely enumerated."""
return self.format(
"{name}{@version}{variants}{ arch=architecture}"
"{/hash:7}{%compiler.name}{@compiler.version}"
)
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
spec_format += "{variants}{ arch=architecture}{/hash:7}"
return self.format(spec_format)
@property
def cshort_spec(self):
"""Returns an auto-colorized version of ``self.short_spec``."""
return self.cformat(
"{name}{@version}{variants}{ arch=architecture}"
"{/hash:7}{%compiler.name}{@compiler.version}"
)
spec_format = "{name}{@version}{%compiler.name}{@compiler.version}"
spec_format += "{variants}{ arch=architecture}{/hash:7}"
return self.cformat(spec_format)
@property
def prefix(self) -> spack.util.prefix.Prefix:
def prefix(self):
if not self._concrete:
raise spack.error.SpecError(f"Spec is not concrete: {self}")
raise spack.error.SpecError("Spec is not concrete: " + str(self))
if self._prefix is None:
_, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
upstream, record = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
if record and record.path:
self.set_prefix(record.path)
self.prefix = record.path
else:
self.set_prefix(spack.store.STORE.layout.path_for_spec(self))
assert self._prefix is not None
self.prefix = spack.store.STORE.layout.path_for_spec(self)
return self._prefix
def set_prefix(self, value: str) -> None:
@prefix.setter
def prefix(self, value):
self._prefix = spack.util.prefix.Prefix(llnl.path.convert_to_platform_path(value))
def spec_hash(self, hash):
@@ -2124,9 +2145,7 @@ def spec_hash(self, hash):
if hash.override is not None:
return hash.override(self)
node_dict = self.to_node_dict(hash=hash)
json_text = json.dumps(
node_dict, ensure_ascii=True, indent=None, separators=(",", ":"), sort_keys=False
)
json_text = sjson.dump(node_dict)
# This implements "frankenhashes", preserving the last 7 characters of the
# original hash when splicing so that we can avoid relocation issues
out = spack.util.hash.b32_hash(json_text)
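The compact, key-order-preserving JSON encoding above is what keeps the hash stable across runs; a sketch with sha256 standing in for Spack's `b32_hash`:

import hashlib
import json

def stable_hash(node_dict: dict) -> str:
    text = json.dumps(
        node_dict, ensure_ascii=True, indent=None, separators=(",", ":"), sort_keys=False
    )
    # sha256 here is a stand-in for spack.util.hash.b32_hash
    return hashlib.sha256(text.encode()).hexdigest()

assert stable_hash({"name": "zlib", "version": "1.3"}) == stable_hash(
    {"name": "zlib", "version": "1.3"}
)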
@@ -2173,16 +2192,30 @@ def package_hash(self):
def dag_hash(self, length=None):
"""This is Spack's default hash, used to identify installations.
Same as the full hash (includes package hash and build/link/run deps).
Tells us when package files and any dependencies have changed.
NOTE: Versions of Spack prior to 0.18 only included link and run deps.
NOTE: Versions of Spack prior to 1.0 did not include test deps.
"""
return self._cached_hash(ht.dag_hash, length)
def process_hash(self, length=None):
"""Hash used to transfer specs among processes.
This hash includes build and test dependencies and is only used to
serialize a spec and pass it around among processes.
"""
return self._cached_hash(ht.process_hash, length)
def dag_hash_bit_prefix(self, bits):
"""Get the first <bits> bits of the DAG hash as an integer type."""
return spack.util.hash.base32_prefix_bits(self.dag_hash(), bits)
def process_hash_bit_prefix(self, bits):
"""Get the first <bits> bits of the DAG hash as an integer type."""
return spack.util.hash.base32_prefix_bits(self.process_hash(), bits)
def _lookup_hash(self):
"""Lookup just one spec with an abstract hash, returning a spec from the the environment,
store, or finally, binary caches."""
@@ -2673,7 +2706,7 @@ def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
return name, depflag
def spec_and_dependency_types(
s: Union[Spec, Tuple[Spec, str]],
s: Union[Spec, Tuple[Spec, str]]
) -> Tuple[Spec, dt.DepFlag]:
"""Given a non-string key in the literal, extracts the spec
and its dependency types.
@@ -2702,7 +2735,7 @@ def spec_and_dependency_types(
return spec_builder(spec_dict)
@staticmethod
def from_dict(data) -> "Spec":
def from_dict(data):
"""Construct a spec from JSON/YAML.
Args:
@@ -2725,7 +2758,7 @@ def from_dict(data) -> "Spec":
return spec
@staticmethod
def from_yaml(stream) -> "Spec":
def from_yaml(stream):
"""Construct a spec from YAML.
Args:
@@ -2735,7 +2768,7 @@ def from_yaml(stream) -> "Spec":
return Spec.from_dict(data)
@staticmethod
def from_json(stream) -> "Spec":
def from_json(stream):
"""Construct a spec from JSON.
Args:
@@ -2745,7 +2778,7 @@ def from_json(stream) -> "Spec":
data = sjson.load(stream)
return Spec.from_dict(data)
except Exception as e:
raise sjson.SpackJSONError("error parsing JSON spec:", e) from e
raise sjson.SpackJSONError("error parsing JSON spec:", str(e)) from e
@staticmethod
def extract_json_from_clearsig(data):
@@ -2809,6 +2842,94 @@ def _patches_assigned(self):
return True
@staticmethod
def inject_patches_variant(root):
# This dictionary will store object IDs rather than Specs as keys
# since the Spec __hash__ will change as patches are added to them
spec_to_patches = {}
for s in root.traverse():
# After concretizing, assign namespaces to anything left.
# Note that this doesn't count as a "change". The repository
# configuration is constant throughout a spack run, and
# normalize and concretize evaluate Packages using Repo.get(),
# which respects precedence. So, a namespace assignment isn't
# changing how a package name would have been interpreted and
# we can do it as late as possible to allow as much
# compatibility across repositories as possible.
if s.namespace is None:
s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace
if s.concrete:
continue
# Add any patches from the package to the spec.
patches = set()
for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items():
if s.satisfies(cond):
for patch in patch_list:
patches.add(patch)
if patches:
spec_to_patches[id(s)] = patches
# Also record all patches required on dependencies by
# depends_on(..., patch=...)
for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
if dspec.spec.concrete:
continue
pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
patches = []
for cond, deps_by_name in pkg_deps.items():
if not dspec.parent.satisfies(cond):
continue
dependency = deps_by_name.get(dspec.spec.name)
if not dependency:
continue
for pcond, patch_list in dependency.patches.items():
if dspec.spec.satisfies(pcond):
patches.extend(patch_list)
if patches:
all_patches = spec_to_patches.setdefault(id(dspec.spec), set())
for patch in patches:
all_patches.add(patch)
for spec in root.traverse():
if id(spec) not in spec_to_patches:
continue
patches = list(lang.dedupe(spec_to_patches[id(spec)]))
mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.value = tuple(p.sha256 for p in patches)
# FIXME: Monkey-patches mvar to store the patch order
full_order_keys = list(tuple(p.ordering_key) + (p.sha256,) for p in patches)
ordered_hashes = sorted(full_order_keys)
tty.debug(
"Ordered hashes [{0}]: ".format(spec.name)
+ ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
)
mvar._patches_in_order_of_appearance = list(t[-1] for t in ordered_hashes)
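A toy illustration of the ordering above, with made-up ordering keys and shortened hashes, showing that patches sort by (ordering key, sha256) and only the sha256 sums are retained:

    full_order_keys = [("pkg-b", 1, "aaaa"), ("pkg-a", 0, "ffff")]  # hypothetical keys
    ordered_hashes = sorted(full_order_keys)
    assert [t[-1] for t in ordered_hashes] == ["ffff", "aaaa"]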
@staticmethod
def ensure_external_path_if_external(external_spec):
if external_spec.external_modules and not external_spec.external_path:
compiler = spack.compilers.compiler_for_spec(
external_spec.compiler, external_spec.architecture
)
for mod in compiler.modules:
md.load_module(mod)
# Get the path from the module; the package can override the default
# (this is mostly needed for Cray)
pkg_cls = spack.repo.PATH.get_pkg_class(external_spec.name)
package = pkg_cls(external_spec)
external_spec.external_path = getattr(
package, "external_prefix", md.path_from_modules(external_spec.external_modules)
)
@staticmethod
def ensure_no_deprecated(root):
"""Raise if a deprecated spec is in the dag.
@@ -3288,20 +3409,12 @@ def _intersects_dependencies(self, other):
# These two loops handle cases where there is an overly restrictive
# vpkg in one spec for a provider in the other (e.g., mpi@3: is not
# compatible with mpich2)
for spec in self.traverse():
if (
spack.repo.PATH.is_virtual(spec.name)
and spec.name in other_index
and not other_index.providers_for(spec)
):
for spec in self.virtual_dependencies():
if spec.name in other_index and not other_index.providers_for(spec):
return False
for spec in other.traverse():
if (
spack.repo.PATH.is_virtual(spec.name)
and spec.name in self_index
and not self_index.providers_for(spec)
):
for spec in other.virtual_dependencies():
if spec.name in self_index and not self_index.providers_for(spec):
return False
return True
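A hedged sketch of the incompatibility these loops catch, using the mpi/mpich2 case from the comment above (package names assumed to come from a mock repo in which mpich2 provides only mpi@:2):

    lhs = Spec("callpath ^mpi@3:")   # requires an mpi@3: provider
    rhs = Spec("callpath ^mpich2")   # pins a provider that offers only mpi@:2
    assert not lhs.intersects(rhs)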
@@ -3445,6 +3558,10 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
for rhs in other.traverse(root=False)
)
def virtual_dependencies(self):
"""Return list of any virtual deps in this spec."""
return [spec for spec in self.traverse() if spack.repo.PATH.is_virtual(spec.name)]
@property # type: ignore[misc] # decorated prop not supported in mypy
def patches(self):
"""Return patch objects for any patch sha256 sums on this Spec.
@@ -3549,11 +3666,11 @@ def _dup(self, other: "Spec", deps: Union[bool, dt.DepTypes, dt.DepFlag] = True)
if self._concrete:
self._dunder_hash = other._dunder_hash
for h in ht.HASHES:
for h in ht.hashes:
setattr(self, h.attr, getattr(other, h.attr, None))
else:
self._dunder_hash = None
for h in ht.HASHES:
for h in ht.hashes:
setattr(self, h.attr, None)
return changed
@@ -3634,16 +3751,18 @@ def __getitem__(self, name: str):
csv = query_parameters.pop().strip()
query_parameters = re.split(r"\s*,\s*", csv)
# Consider all direct dependencies and transitive runtime dependencies
order = itertools.chain(
self.edges_to_dependencies(depflag=dt.ALL),
self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"),
order = lambda: itertools.chain(
self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
)
# Consider runtime dependencies and direct build/test deps before transitive dependencies,
# and prefer matches closest to the root.
try:
edge = next((e for e in order if e.spec.name == name or name in e.virtuals))
except StopIteration as e:
raise KeyError(f"No spec with name {name} in {self}") from e
edge = next((e for e in order() if e.spec.name == name or name in e.virtuals))
except StopIteration:
raise KeyError(f"No spec with name {name} in {self}")
if self._concrete:
return SpecBuildInterface(
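A usage sketch of the lookup order described above (assumes a concrete spec whose DAG contains a zlib dependency and an mpi provider):

    mpi = spec["mpi"]      # matched by name or by provided virtual, nearest match first
    spec["zlib"].prefix    # any dependency is reachable the same way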
@@ -3666,11 +3785,8 @@ def __contains__(self, spec):
# if anonymous or same name, we only have to look at the root
if not spec.name or spec.name == self.name:
return self.satisfies(spec)
try:
dep = self[spec.name]
except KeyError:
return False
return dep.satisfies(spec)
else:
return any(s.satisfies(spec) for s in self.traverse(root=False))
def eq_dag(self, other, deptypes=True, vs=None, vo=None):
"""True if the full dependency DAGs of specs are equal."""
@@ -3745,6 +3861,16 @@ def _cmp_iter(self):
# serialized before the hash change and one after, are considered different.
yield self.dag_hash() if self.concrete else None
# This needs to be in _cmp_iter so that no specs with different process hashes
# are considered the same by `__hash__` or `__eq__`.
#
# TODO: We should eventually unify the `_cmp_*` methods with `to_node_dict` so
# TODO: there aren't two sources of truth, but this needs some thought, since
# TODO: they exist for speed. We should benchmark whether it's really worth
# TODO: having two types of hashing now that we use `json` instead of `yaml` for
# TODO: spec hashing.
yield self.process_hash() if self.concrete else None
def deps():
for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
yield dep.spec.name
@@ -4398,7 +4524,7 @@ def clear_caches(self, ignore: Tuple[str, ...] = ()) -> None:
"""
Clears all cached hashes in a Spec, while preserving other properties.
"""
for h in ht.HASHES:
for h in ht.hashes:
if h.attr not in ignore:
if hasattr(self, h.attr):
setattr(self, h.attr, None)
@@ -4407,12 +4533,18 @@ def clear_caches(self, ignore: Tuple[str, ...] = ()) -> None:
setattr(self, attr, None)
def __hash__(self):
# If the spec is concrete, we leverage the dag hash and just use a 64-bit prefix of it.
# The dag hash has the advantage that it's computed once per concrete spec, and it's saved
# -- so if we read concrete specs we don't need to recompute the whole hash.
# If the spec is concrete, we leverage the process hash and just use
# a 64-bit prefix of it. The process hash has the advantage that it's
# computed once per concrete spec, and it's saved -- so if we read
# concrete specs we don't need to recompute the whole hash. This is
# good for large, unchanging specs.
#
# We use the process hash instead of the DAG hash here because the DAG
# hash includes the package hash, which can cause infinite recursion,
# and which isn't defined unless the spec has a known package.
if self.concrete:
if not self._dunder_hash:
self._dunder_hash = self.dag_hash_bit_prefix(64)
self._dunder_hash = self.process_hash_bit_prefix(64)
return self._dunder_hash
# This is the normal hash for lazy_lexicographic_ordering. It's
@@ -4421,7 +4553,7 @@ def __hash__(self):
return hash(lang.tuplify(self._cmp_iter))
def __reduce__(self):
return Spec.from_dict, (self.to_dict(hash=ht.dag_hash),)
return Spec.from_dict, (self.to_dict(hash=ht.process_hash),)
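A round-trip sketch implied by __reduce__ above: pickling serializes the spec through to_dict/from_dict, so a copy received in another process compares equal (spec assumed concrete):

    import pickle
    clone = pickle.loads(pickle.dumps(spec))
    assert clone == spec and clone.dag_hash() == spec.dag_hash()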
def attach_git_version_lookup(self):
# Add a git lookup method for GitVersions
@@ -4564,6 +4696,17 @@ def constrain(self, other: "VariantMap") -> bool:
return changed
@property
def concrete(self):
"""Returns True if the spec is concrete in terms of variants.
Returns:
bool: True or False
"""
return self.spec._concrete or all(
v in self for v in spack.repo.PATH.get_pkg_class(self.spec.fullname).variant_names()
)
def copy(self) -> "VariantMap":
clone = VariantMap(self.spec)
for name, variant in self.items():
@@ -4690,51 +4833,33 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
return merged_spec
def reconstruct_virtuals_on_edges(spec: Spec) -> None:
"""Reconstruct virtuals on edges. Used to read from old DB and reindex."""
virtuals_needed: Dict[str, Set[str]] = {}
virtuals_provided: Dict[str, Set[str]] = {}
for edge in spec.traverse_edges(cover="edges", root=False):
parent_key = edge.parent.dag_hash()
if parent_key not in virtuals_needed:
# Construct which virtuals are needed by parent
virtuals_needed[parent_key] = set()
try:
parent_pkg = edge.parent.package
except Exception as e:
warnings.warn(
f"cannot reconstruct virtual dependencies on {edge.parent.name}: {e}"
)
continue
def reconstruct_virtuals_on_edges(spec):
"""Reconstruct virtuals on edges. Used to read from old DB and reindex.
virtuals_needed[parent_key].update(
name
for name, when_deps in parent_pkg.dependencies_by_name(when=True).items()
if spack.repo.PATH.is_virtual(name)
and any(edge.parent.satisfies(x) for x in when_deps)
Args:
spec: spec on which we want to reconstruct virtuals
"""
# Collect all possible virtuals
possible_virtuals = set()
for node in spec.traverse():
try:
possible_virtuals.update(
{x for x in node.package.dependencies if spack.repo.PATH.is_virtual(x)}
)
if not virtuals_needed[parent_key]:
except Exception as e:
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
continue
child_key = edge.spec.dag_hash()
if child_key not in virtuals_provided:
virtuals_provided[child_key] = set()
try:
child_pkg = edge.spec.package
except Exception as e:
warnings.warn(
f"cannot reconstruct virtual dependencies on {edge.parent.name}: {e}"
)
continue
virtuals_provided[child_key].update(x.name for x in child_pkg.virtuals_provided)
if not virtuals_provided[child_key]:
# Assume all incoming edges to provider are marked with virtuals=
for vspec in possible_virtuals:
try:
provider = spec[vspec]
except KeyError:
# Virtual not in the DAG
continue
virtuals_to_add = virtuals_needed[parent_key] & virtuals_provided[child_key]
if virtuals_to_add:
edge.update_virtuals(virtuals_to_add)
for edge in provider.edges_from_dependents():
edge.update_virtuals([vspec])
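A hedged usage sketch for the reindex path (`old_record` is a hypothetical pre-virtuals database entry):

    spec = Spec.from_dict(old_record)    # edges lack virtuals= annotations
    reconstruct_virtuals_on_edges(spec)  # e.g. edges into an mpich node gain {"mpi"}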
class SpecfileReaderBase:
@@ -4743,7 +4868,7 @@ def from_node_dict(cls, node):
spec = Spec()
name, node = cls.name_and_data(node)
for h in ht.HASHES:
for h in ht.hashes:
setattr(spec, h.attr, node.get(h.name, None))
spec.name = name
@@ -4926,7 +5051,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
"""
for dep_name, elt in deps.items():
if isinstance(elt, dict):
for h in ht.HASHES:
for h in ht.hashes:
if h.name in elt:
dep_hash, deptypes = elt[h.name], elt["type"]
hash_type = h.name
@@ -4969,7 +5094,7 @@ def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
dep_name = dep["name"]
if isinstance(elt, dict):
# new format: elements of dependency spec are keyed.
for h in ht.HASHES:
for h in ht.hashes:
if h.name in elt:
dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
break
@@ -5079,13 +5204,6 @@ def get_host_environment() -> Dict[str, Any]:
}
def eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
valid_variables = get_host_environment()
valid_variables.update({"re": re, "env": os.environ})
return eval(string, valid_variables)
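A usage sketch, assuming the host-environment dict exposes names such as `platform` and `target` alongside the injected `re` and `env`:

    eval_conditional("platform == 'linux'")              # host platform check
    eval_conditional("re.match(r'zen.*', str(target))")  # regex access via `re`
    eval_conditional("env.get('CI', '') != ''")          # environment variables via `env`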
class SpecParseError(spack.error.SpecError):
"""Wrapper for ParseError for when we're parsing specs."""
@@ -5244,10 +5362,8 @@ def __init__(self, spec):
class AmbiguousHashError(spack.error.SpecError):
def __init__(self, msg, *specs):
spec_fmt = (
"{namespace}.{name}{@version}{compiler_flags}{variants}"
"{ arch=architecture}{/hash:7}{%compiler}"
)
spec_fmt = "{namespace}.{name}{@version}{%compiler}{compiler_flags}"
spec_fmt += "{variants}{ arch=architecture}{/hash:7}"
specs_str = "\n " + "\n ".join(spec.format(spec_fmt) for spec in specs)
super().__init__(msg + specs_str)

View File

@@ -60,17 +60,13 @@
import pathlib
import re
import sys
import traceback
import warnings
from typing import Iterator, List, Optional, Tuple
from typing import Iterator, List, Optional
from llnl.util.tty import color
import spack.deptypes
import spack.error
import spack.paths
import spack.spec
import spack.util.spack_yaml
import spack.version
from spack.tokenize import Token, TokenBase, Tokenizer
@@ -208,32 +204,6 @@ def __init__(self, tokens: List[Token], text: str):
super().__init__(message)
def _warn_about_variant_after_compiler(literal_str: str, issues: List[str]):
"""Issue a warning if variant or other token is preceded by a compiler token. The warning is
only issued if it's actionable: either we know the config file it originates from, or we have
call site that's not internal to Spack."""
ignore = [spack.paths.lib_path, spack.paths.bin_path]
mark = spack.util.spack_yaml.get_mark_from_yaml_data(literal_str)
issue_str = ", ".join(issues)
error = f"{issue_str} in `{literal_str}`"
# warning from config file
if mark:
warnings.warn(f"{mark.name}:{mark.line + 1}: {error}")
return
# warning from hopefully package.py
for frame in reversed(traceback.extract_stack()):
if frame.lineno and not any(frame.filename.startswith(path) for path in ignore):
warnings.warn_explicit(
error,
category=spack.error.SpackAPIWarning,
filename=frame.filename,
lineno=frame.lineno,
)
return
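An illustration of the parse-order issue this helper reports (spec string hypothetical): in `zlib %gcc@12 +shared`, the `+shared` token follows the compiler token, so parsing would warn that it should come first.

    # would emit: "`+shared` should go before `%gcc@12` in `zlib %gcc@12 +shared`"
    spack.spec.Spec("zlib %gcc@12 +shared")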
class SpecParser:
"""Parse text into specs"""
@@ -272,31 +242,26 @@ def add_dependency(dep, **edge_properties):
raise SpecParsingError(str(e), self.ctx.current_token, self.literal_str) from e
initial_spec = initial_spec or spack.spec.Spec()
root_spec, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
root_spec = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
while True:
if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
edge_properties.setdefault("depflag", 0)
edge_properties.setdefault("virtuals", ())
dependency, warnings = self._parse_node(root_spec)
parser_warnings.extend(warnings)
dependency = self._parse_node(root_spec)
add_dependency(dependency, **edge_properties)
elif self.ctx.accept(SpecTokens.DEPENDENCY):
dependency, warnings = self._parse_node(root_spec)
parser_warnings.extend(warnings)
dependency = self._parse_node(root_spec)
add_dependency(dependency, depflag=0, virtuals=())
else:
break
if parser_warnings:
_warn_about_variant_after_compiler(self.literal_str, parser_warnings)
return root_spec
def _parse_node(self, root_spec):
dependency, parser_warnings = SpecNodeParser(self.ctx, self.literal_str).parse()
dependency = SpecNodeParser(self.ctx, self.literal_str).parse()
if dependency is None:
msg = (
"the dependency sigil and any optional edge attributes must be followed by a "
@@ -305,7 +270,7 @@ def _parse_node(self, root_spec):
raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
if root_spec.concrete:
raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
return dependency, parser_warnings
return dependency
def all_specs(self) -> List["spack.spec.Spec"]:
"""Return all the specs that remain to be parsed"""
@@ -325,7 +290,7 @@ def __init__(self, ctx, literal_str):
def parse(
self, initial_spec: Optional["spack.spec.Spec"] = None
) -> Tuple["spack.spec.Spec", List[str]]:
) -> Optional["spack.spec.Spec"]:
"""Parse a single spec node from a stream of tokens
Args:
@@ -334,15 +299,12 @@ def parse(
Return
The object passed as argument
"""
parser_warnings: List[str] = []
last_compiler = None
if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
return initial_spec
if initial_spec is None:
initial_spec = spack.spec.Spec()
if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
return initial_spec, parser_warnings
# If we start with a package name we have a named spec; we cannot
# accept another package name afterwards in a node
if self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME):
@@ -356,7 +318,7 @@ def parse(
initial_spec.namespace = namespace
elif self.ctx.accept(SpecTokens.FILENAME):
return FileParser(self.ctx).parse(initial_spec), parser_warnings
return FileParser(self.ctx).parse(initial_spec)
def raise_parsing_error(string: str, cause: Optional[Exception] = None):
"""Raise a spec parsing error with token context."""
@@ -369,12 +331,6 @@ def add_flag(name: str, value: str, propagate: bool):
except Exception as e:
raise_parsing_error(str(e), e)
def warn_if_after_compiler(token: str):
"""Register a warning for %compiler followed by +variant that will in the future apply
to the compiler instead of the current root."""
if last_compiler:
parser_warnings.append(f"`{token}` should go before `{last_compiler}`")
while True:
if self.ctx.accept(SpecTokens.COMPILER):
if self.has_compiler:
@@ -383,7 +339,6 @@ def warn_if_after_compiler(token: str):
compiler_name = self.ctx.current_token.value[1:]
initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
self.has_compiler = True
last_compiler = self.ctx.current_token.value
elif self.ctx.accept(SpecTokens.COMPILER_AND_VERSION):
if self.has_compiler:
@@ -394,7 +349,6 @@ def warn_if_after_compiler(token: str):
compiler_name.strip(), compiler_version
)
self.has_compiler = True
last_compiler = self.ctx.current_token.value
elif (
self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
@@ -409,17 +363,14 @@ def warn_if_after_compiler(token: str):
)
initial_spec.attach_git_version_lookup()
self.has_version = True
warn_if_after_compiler(self.ctx.current_token.value)
elif self.ctx.accept(SpecTokens.BOOL_VARIANT):
variant_value = self.ctx.current_token.value[0] == "+"
add_flag(self.ctx.current_token.value[1:].strip(), variant_value, propagate=False)
warn_if_after_compiler(self.ctx.current_token.value)
elif self.ctx.accept(SpecTokens.PROPAGATED_BOOL_VARIANT):
variant_value = self.ctx.current_token.value[0:2] == "++"
add_flag(self.ctx.current_token.value[2:].strip(), variant_value, propagate=True)
warn_if_after_compiler(self.ctx.current_token.value)
elif self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
match = SPLIT_KVP.match(self.ctx.current_token.value)
@@ -427,7 +378,6 @@ def warn_if_after_compiler(token: str):
name, _, value = match.groups()
add_flag(name, strip_quotes_and_unescape(value), propagate=False)
warn_if_after_compiler(self.ctx.current_token.value)
elif self.ctx.accept(SpecTokens.PROPAGATED_KEY_VALUE_PAIR):
match = SPLIT_KVP.match(self.ctx.current_token.value)
@@ -435,19 +385,17 @@ def warn_if_after_compiler(token: str):
name, _, value = match.groups()
add_flag(name, strip_quotes_and_unescape(value), propagate=True)
warn_if_after_compiler(self.ctx.current_token.value)
elif self.ctx.expect(SpecTokens.DAG_HASH):
if initial_spec.abstract_hash:
break
self.ctx.accept(SpecTokens.DAG_HASH)
initial_spec.abstract_hash = self.ctx.current_token.value[1:]
warn_if_after_compiler(self.ctx.current_token.value)
else:
break
return initial_spec, parser_warnings
return initial_spec
class FileParser:
@@ -537,18 +485,23 @@ def parse_one_or_raise(
text (str): text to be parsed
initial_spec: buffer where to parse the spec. If None a new one will be created.
"""
parser = SpecParser(text)
stripped_text = text.strip()
parser = SpecParser(stripped_text)
result = parser.next_spec(initial_spec)
next_token = parser.ctx.next_token
last_token = parser.ctx.current_token
if next_token:
message = f"expected a single spec, but got more:\n{text}"
underline = f"\n{' ' * next_token.start}{'^' * len(next_token.value)}"
message += color.colorize(f"@*r{{{underline}}}")
if last_token is not None and last_token.end != len(stripped_text):
message = "a single spec was requested, but parsed more than one:"
message += f"\n{text}"
if last_token is not None:
underline = f"\n{' ' * last_token.end}{'^' * (len(text) - last_token.end)}"
message += color.colorize(f"@*r{{{underline}}}")
raise ValueError(message)
if result is None:
raise ValueError("expected a single spec, but got none")
message = "a single spec was requested, but none was parsed:"
message += f"\n{text}"
raise ValueError(message)
return result
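A usage sketch of the single-spec contract enforced above:

    spec = parse_one_or_raise("zlib@1.2.13 +shared")  # ok: exactly one spec
    parse_one_or_raise("zlib cmake")                  # ValueError: parsed more than one
    parse_one_or_raise("   ")                         # ValueError: none was parsed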

View File

@@ -1,7 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test ABI-based splicing of dependencies"""
""" Test ABI-based splicing of dependencies """
from typing import List

View File

@@ -129,7 +129,7 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
)
def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
spec = Spec(
f"pkg-a foobar=bar target={root_target_range} %gcc@10 ^pkg-b target={dep_target_range}"
f"pkg-a %gcc@10 foobar=bar target={root_target_range} ^pkg-b target={dep_target_range}"
)
with spack.concretize.disable_compiler_existence_check():
spec = spack.concretize.concretize_one(spec)

View File

@@ -200,11 +200,7 @@ def dummy_prefix(tmpdir):
@pytest.mark.requires_executables(*required_executables)
@pytest.mark.maybeslow
@pytest.mark.usefixtures(
"default_config",
"cache_directory",
"install_dir_default_layout",
"temporary_mirror",
"mutable_mock_env_path",
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
)
def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
"""
@@ -276,11 +272,7 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures(
"default_config",
"cache_directory",
"install_dir_default_layout",
"temporary_mirror",
"mutable_mock_env_path",
"default_config", "cache_directory", "install_dir_default_layout", "temporary_mirror"
)
def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
"""
@@ -577,6 +569,7 @@ def test_FetchCacheError_only_accepts_lists_of_errors():
def test_FetchCacheError_pretty_printing_multiple():
e = bindist.FetchCacheError([RuntimeError("Oops!"), TypeError("Trouble!")])
str_e = str(e)
print("'" + str_e + "'")
assert "Multiple errors" in str_e
assert "Error 1: RuntimeError: Oops!" in str_e
assert "Error 2: TypeError: Trouble!" in str_e
@@ -1140,7 +1133,7 @@ def test_get_valid_spec_file_no_json(tmp_path, filename):
def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config, capsys):
layout_version = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION + 1
spec = Spec("gmake@4.4.1 arch=linux-ubuntu23.04-zen2 %gcc@13.1.0")
spec = Spec("gmake@4.4.1%gcc@13.1.0 arch=linux-ubuntu23.04-zen2")
spec._mark_concrete()
spec_dict = spec.to_dict()
spec_dict["buildcache_layout_version"] = layout_version

View File

@@ -388,7 +388,7 @@ def test_wrapper_variables(
root = spack.concretize.concretize_one("dt-diamond")
for s in root.traverse():
s.set_prefix(f"/{s.name}-prefix/")
s.prefix = "/{0}-prefix/".format(s.name)
dep_pkg = root["dt-diamond-left"].package
dep_lib_paths = ["/test/path/to/ex1.so", "/test/path/to/subdir/ex2.so"]
@@ -396,7 +396,7 @@ def test_wrapper_variables(
dep_libs = LibraryList(dep_lib_paths)
dep2_pkg = root["dt-diamond-right"].package
dep2_pkg.spec.set_prefix(str(installation_dir_with_headers))
dep2_pkg.spec.prefix = str(installation_dir_with_headers)
setattr(dep_pkg, "libs", dep_libs)
try:
@@ -542,7 +542,7 @@ def test_build_jobs_sequential_is_sequential():
spack.config.determine_number_of_jobs(
parallel=False,
max_cpus=8,
config=spack.config.create_from(
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
),
@@ -556,7 +556,7 @@ def test_build_jobs_command_line_overrides():
spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=1,
config=spack.config.create_from(
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
),
@@ -567,7 +567,7 @@ def test_build_jobs_command_line_overrides():
spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=100,
config=spack.config.create_from(
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
),
@@ -581,7 +581,7 @@ def test_build_jobs_defaults():
spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.create_from(
config=spack.config.Configuration(
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
),
)
@@ -591,7 +591,7 @@ def test_build_jobs_defaults():
spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.create_from(
config=spack.config.Configuration(
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
),
)

View File

@@ -403,8 +403,8 @@ def test_autoreconf_search_path_args_multiple(default_mock_concretization, tmpdi
aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal"))
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.set_prefix(str(tmpdir.join("fst")))
build_dep_two.set_prefix(str(tmpdir.join("snd")))
build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
"-I",
aclocal_fst,
@@ -422,8 +422,8 @@ def test_autoreconf_search_path_args_skip_automake(default_mock_concretization,
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.name = "automake"
build_dep_one.set_prefix(str(tmpdir.join("fst")))
build_dep_two.set_prefix(str(tmpdir.join("snd")))
build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal_snd]
@@ -434,7 +434,7 @@ def test_autoreconf_search_path_args_external_order(default_mock_concretization,
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.external_path = str(tmpdir.join("fst"))
build_dep_two.set_prefix(str(tmpdir.join("snd")))
build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
"-I",
aclocal_snd,
@@ -447,8 +447,8 @@ def test_autoreconf_search_path_skip_nonexisting(default_mock_concretization, tm
"""Skip -I flags for non-existing directories"""
spec = default_mock_concretization("dttop")
build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.set_prefix(str(tmpdir.join("fst")))
build_dep_two.set_prefix(str(tmpdir.join("snd")))
build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == []

View File

@@ -210,6 +210,7 @@ def check_args_contents(cc, args, must_contain, must_not_contain):
"""
with set_env(SPACK_TEST_COMMAND="dump-args"):
cc_modified_args = cc(*args, output=str).strip().split("\n")
print(cc_modified_args)
for a in must_contain:
assert a in cc_modified_args
for a in must_not_contain:

View File

@@ -18,7 +18,6 @@
import spack.repo as repo
import spack.util.git
from spack.test.conftest import MockHTTPResponse
from spack.version import Version
pytestmark = [pytest.mark.usefixtures("mock_packages")]
@@ -31,43 +30,6 @@ def repro_dir(tmp_path):
yield result
def test_get_added_versions_new_checksum(mock_git_package_changes):
repo_path, filename, commits = mock_git_package_changes
checksum_versions = {
"3f6576971397b379d4205ae5451ff5a68edf6c103b2f03c4188ed7075fbb5f04": Version("2.1.5"),
"a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a": Version("2.1.4"),
"6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200": Version("2.0.7"),
"86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8": Version("2.0.0"),
}
with fs.working_dir(str(repo_path)):
added_versions = ci.get_added_versions(
checksum_versions, filename, from_ref=commits[-1], to_ref=commits[-2]
)
assert len(added_versions) == 1
assert added_versions[0] == Version("2.1.5")
def test_get_added_versions_new_commit(mock_git_package_changes):
repo_path, filename, commits = mock_git_package_changes
checksum_versions = {
"74253725f884e2424a0dd8ae3f69896d5377f325": Version("2.1.6"),
"3f6576971397b379d4205ae5451ff5a68edf6c103b2f03c4188ed7075fbb5f04": Version("2.1.5"),
"a0293475e6a44a3f6c045229fe50f69dc0eebc62a42405a51f19d46a5541e77a": Version("2.1.4"),
"6c0853bb27738b811f2b4d4af095323c3d5ce36ceed6b50e5f773204fb8f7200": Version("2.0.7"),
"86993903527d9b12fc543335c19c1d33a93797b3d4d37648b5addae83679ecd8": Version("2.0.0"),
}
with fs.working_dir(str(repo_path)):
added_versions = ci.get_added_versions(
checksum_versions, filename, from_ref=commits[2], to_ref=commits[1]
)
assert len(added_versions) == 1
assert added_versions[0] == Version("2.1.6")
def test_pipeline_dag(config, tmpdir):
r"""Test creation, pruning, and traversal of PipelineDAG using the
following package dependency graph:
@@ -385,6 +347,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
for key, val in expectations.items():
affected_specs = ci.get_spec_filter_list(e1, touched, dependent_traverse_depth=key)
affected_pkg_names = set([s.name for s in affected_specs])
print(f"{key}: {affected_pkg_names}")
assert affected_pkg_names == val

View File

@@ -12,7 +12,7 @@
build_env = SpackCommand("build-env")
@pytest.mark.parametrize("pkg", [("pkg-c",), ("pkg-c", "--")])
@pytest.mark.parametrize("pkg", [("zlib",), ("zlib", "--")])
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_it_just_runs(pkg):
build_env(*pkg)
@@ -38,7 +38,7 @@ def test_build_env_requires_a_spec(args):
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_dump(shell_as, shell, tmpdir):
with tmpdir.as_cwd():
build_env("--dump", _out_file, "pkg-c")
build_env("--dump", _out_file, "zlib")
with open(_out_file, encoding="utf-8") as f:
if shell == "pwsh":
assert any(line.startswith("$Env:PATH") for line in f.readlines())
@@ -51,7 +51,7 @@ def test_dump(shell_as, shell, tmpdir):
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_pickle(tmpdir):
with tmpdir.as_cwd():
build_env("--pickle", _out_file, "pkg-c")
build_env("--pickle", _out_file, "zlib")
environment = pickle.load(open(_out_file, "rb"))
assert isinstance(environment, dict)
assert "PATH" in environment

View File

@@ -148,7 +148,7 @@ def test_update_key_index(
s = spack.concretize.concretize_one("libdwarf")
# Install a package
install("--fake", s.name)
install(s.name)
# Put installed package in the buildcache, which, because we're signing
# it, should result in the public key getting pushed to the buildcache
@@ -178,7 +178,7 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
s = spack.concretize.concretize_one("libdwarf")
# Install and generate build cache index
PackageInstaller([s.package], fake=True, explicit=True).install()
PackageInstaller([s.package], explicit=True).install()
metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")
@@ -214,11 +214,13 @@ def verify_mirror_contents():
if in_env_pkg in p:
found_pkg = True
assert found_pkg, f"Expected to find {in_env_pkg} in {dest_mirror_dir}"
if not found_pkg:
print("Expected to find {0} in {1}".format(in_env_pkg, dest_mirror_dir))
assert False
# Install a package and put it in the buildcache
s = spack.concretize.concretize_one(out_env_pkg)
install("--fake", s.name)
install(s.name)
buildcache("push", "-u", "-f", src_mirror_url, s.name)
env("create", "test")

View File

@@ -3,7 +3,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import pathlib
import pytest
@@ -23,15 +22,7 @@
@pytest.fixture
def no_add(monkeypatch):
def add_versions_to_pkg(pkg, version_lines, open_in_editor):
raise AssertionError("Should not be called")
monkeypatch.setattr(spack.cmd.checksum, "add_versions_to_pkg", add_versions_to_pkg)
@pytest.fixture
def can_fetch_versions(monkeypatch, no_add):
def can_fetch_versions(monkeypatch):
"""Fake successful version detection."""
def fetch_remote_versions(pkg, concurrency):
@@ -54,7 +45,7 @@ def url_exists(url, curl=None):
@pytest.fixture
def cannot_fetch_versions(monkeypatch, no_add):
def cannot_fetch_versions(monkeypatch):
"""Fake unsuccessful version detection."""
def fetch_remote_versions(pkg, concurrency):
@@ -97,6 +88,7 @@ def test_checksum_args(arguments, expected):
(["--batch", "preferred-test"], "version of preferred-test"),
(["--latest", "preferred-test"], "Found 1 version"),
(["--preferred", "preferred-test"], "Found 1 version"),
(["--add-to-package", "preferred-test"], "Added 0 new versions to"),
(["--verify", "preferred-test"], "Verified 1 of 1"),
(["--verify", "zlib", "1.2.13"], "1.2.13 [-] No previous checksum"),
],
@@ -279,12 +271,15 @@ def test_checksum_interactive_unrecognized_command():
assert interactive_version_filter(v.copy(), input=input) == v
def test_checksum_versions(mock_packages, can_fetch_versions, monkeypatch):
def test_checksum_versions(mock_packages, can_fetch_versions):
pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
versions = [str(v) for v in pkg_cls.versions]
output = spack_checksum("zlib", *versions)
assert "Found 3 versions" in output
assert "version(" in output
output = spack_checksum("--add-to-package", "zlib", *versions)
assert "Found 3 versions" in output
assert "Added 0 new versions to" in output
def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
@@ -292,6 +287,7 @@ def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
assert "Could not find any remote versions" in output
output = spack_checksum("--add-to-package", "preferred-test", "99.99.99", fail_on_error=False)
assert "Could not find any remote versions" in output
assert "Added 1 new versions to" not in output
def test_checksum_deprecated_version(mock_packages, can_fetch_versions):
@@ -301,6 +297,8 @@ def test_checksum_deprecated_version(mock_packages, can_fetch_versions):
"--add-to-package", "deprecated-versions", "1.1.0", fail_on_error=False
)
assert "Version 1.1.0 is deprecated" in output
# TODO alecbcs: broken assertion.
# assert "Added 0 new versions to" not in output
def test_checksum_url(mock_packages, config):
@@ -339,52 +337,3 @@ def test_checksum_manual_download_fails(mock_packages, monkeypatch):
monkeypatch.setattr(spack.package_base.PackageBase, "download_instr", error)
with pytest.raises(ManualDownloadRequiredError, match=error):
spack_checksum(name, *versions)
def test_update_package_contents(tmp_path: pathlib.Path):
"""Test that the package.py file is updated with the new versions."""
pkg_path = tmp_path / "package.py"
pkg_path.write_text(
"""\
from spack.package import *
class Zlib(Package):
homepage = "http://zlib.net"
url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"
version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1")
version("1.2.8", sha256="36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d")
version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
variant("pic", default=True, description="test")
def install(self, spec, prefix):
make("install")
"""
)
version_lines = """\
version("1.2.13", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")
version("1.2.5", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")
version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
"""
# two new versions are added
assert spack.cmd.checksum.add_versions_to_pkg(str(pkg_path), version_lines) == 2
assert (
pkg_path.read_text()
== """\
from spack.package import *
class Zlib(Package):
homepage = "http://zlib.net"
url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"
version("1.2.13", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890") # FIXME
version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1")
version("1.2.8", sha256="36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d")
version("1.2.5", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890") # FIXME
version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
variant("pic", default=True, description="test")
def install(self, spec, prefix):
make("install")
"""
)

View File

@@ -22,17 +22,12 @@
import spack.hash_types as ht
import spack.main
import spack.paths as spack_paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.spack_yaml as syaml
import spack.version
from spack.ci import gitlab as gitlab_generator
from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig
from spack.ci.generator_registry import generator
from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
from spack.database import INDEX_JSON_FILE
from spack.error import SpackError
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.spec import Spec
@@ -175,9 +170,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
url: https://my.fake.cdash
project: Not used
site: Nothing
""",
"--artifacts-root",
str(tmp_path / "my_artifacts_root"),
"""
)
yaml_contents = syaml.load(outputfile.read_text())
@@ -199,7 +192,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
assert "variables" in yaml_contents
assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"]
assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "my_artifacts_root"
assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "jobs_scratch_dir"
def test_ci_generate_with_env_missing_section(ci_generate_test, tmp_path, mock_binary_index):
@@ -871,7 +864,7 @@ def test_push_to_build_cache(
logs_dir = scratch / "logs_dir"
logs_dir.mkdir()
ci.copy_stage_logs_to_artifacts(concrete_spec, str(logs_dir))
assert "spack-build-out.txt.gz" in os.listdir(logs_dir)
assert "spack-build-out.txt" in os.listdir(logs_dir)
dl_dir = scratch / "download_dir"
buildcache_cmd("download", "--spec-file", json_path, "--path", str(dl_dir))
@@ -1069,7 +1062,7 @@ def test_ci_rebuild_index(
with open(tmp_path / "spec.json", "w", encoding="utf-8") as f:
f.write(concrete_spec.to_json(hash=ht.dag_hash))
install_cmd("--fake", "--add", "-f", str(tmp_path / "spec.json"))
install_cmd("--add", "-f", str(tmp_path / "spec.json"))
buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")
ci_cmd("rebuild-index")
@@ -1329,50 +1322,44 @@ def test_ci_reproduce(
env.concretize()
env.write()
def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
with working_dir(tmp_path), ev.Environment(".") as env:
if not os.path.exists(repro_dir):
repro_dir.mkdir()
repro_dir.mkdir()
job_spec = env.concrete_roots()[0]
with open(repro_dir / "archivefiles.json", "w", encoding="utf-8") as f:
f.write(job_spec.to_json(hash=ht.dag_hash))
artifacts_root = repro_dir / "jobs_scratch_dir"
pipeline_path = artifacts_root / "pipeline.yml"
job_spec = env.concrete_roots()[0]
with open(repro_dir / "archivefiles.json", "w", encoding="utf-8") as f:
f.write(job_spec.to_json(hash=ht.dag_hash))
ci_cmd(
"generate",
"--output-file",
str(pipeline_path),
"--artifacts-root",
str(artifacts_root),
artifacts_root = repro_dir / "scratch_dir"
pipeline_path = artifacts_root / "pipeline.yml"
ci_cmd(
"generate",
"--output-file",
str(pipeline_path),
"--artifacts-root",
str(artifacts_root),
)
job_name = gitlab_generator.get_job_name(job_spec)
with open(repro_dir / "repro.json", "w", encoding="utf-8") as f:
f.write(
json.dumps(
{
"job_name": job_name,
"job_spec_json": "archivefiles.json",
"ci_project_dir": str(repro_dir),
}
)
)
job_name = gitlab_generator.get_job_name(job_spec)
with open(repro_dir / "install.sh", "w", encoding="utf-8") as f:
f.write("#!/bin/sh\n\n#fake install\nspack install blah\n")
with open(repro_dir / "repro.json", "w", encoding="utf-8") as f:
f.write(
json.dumps(
{
"job_name": job_name,
"job_spec_json": "archivefiles.json",
"ci_project_dir": str(repro_dir),
}
)
)
with open(repro_dir / "spack_info.txt", "w", encoding="utf-8") as f:
f.write(f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n")
with open(repro_dir / "install.sh", "w", encoding="utf-8") as f:
f.write("#!/bin/sh\n\n#fake install\nspack install blah\n")
with open(repro_dir / "spack_info.txt", "w", encoding="utf-8") as f:
if merge_commit_test:
f.write(
f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n"
)
else:
f.write(f"\ncommit {last_two_git_commits[1]}\n\n")
return "jobs_scratch_dir"
def fake_download_and_extract_artifacts(url, work_dir):
pass
monkeypatch.setattr(ci, "download_and_extract_artifacts", fake_download_and_extract_artifacts)
rep_out = ci_cmd(
@@ -1388,64 +1375,6 @@ def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
# Make sure we tell the user where it is when not in interactive mode
assert f"$ {repro_dir}/start.sh" in rep_out
# Ensure the correct commits are used
assert f"checkout_commit: {last_two_git_commits[0]}" in rep_out
assert f"merge_commit: {last_two_git_commits[1]}" in rep_out
# Test re-running in dirty working dir
with pytest.raises(SpackError, match=f"{repro_dir}"):
rep_out = ci_cmd(
"reproduce-build",
"https://example.com/api/v1/projects/1/jobs/2/artifacts",
"--working-dir",
str(repro_dir),
output=str,
)
# Cleanup between tests
shutil.rmtree(repro_dir)
# Test --use-local-head
rep_out = ci_cmd(
"reproduce-build",
"https://example.com/api/v1/projects/1/jobs/2/artifacts",
"--use-local-head",
"--working-dir",
str(repro_dir),
output=str,
)
# Make sure we are checking out the HEAD commit without a merge commit
assert "checkout_commit: HEAD" in rep_out
assert "merge_commit: None" in rep_out
# Test the case where the spack_info.txt is not a merge commit
monkeypatch.setattr(
ci,
"download_and_extract_artifacts",
lambda url, wd: fake_download_and_extract_artifacts(url, wd, False),
)
# Cleanup between tests
shutil.rmtree(repro_dir)
rep_out = ci_cmd(
"reproduce-build",
"https://example.com/api/v1/projects/1/jobs/2/artifacts",
"--working-dir",
str(repro_dir),
output=str,
)
# Make sure the script was generated
assert (repro_dir / "start.sh").exists()
# Make sure we tell the user where it is when not in interactive mode
assert f"$ {repro_dir}/start.sh" in rep_out
# Ensure the correct commit is used (different than HEAD)
assert f"checkout_commit: {last_two_git_commits[1]}" in rep_out
assert "merge_commit: None" in rep_out
@pytest.mark.parametrize(
"url_in,url_out",
@@ -1845,216 +1774,3 @@ def test_ci_generate_alternate_target(
assert pipeline_doc.startswith("unittestpipeline")
assert "externaltest" in pipeline_doc
@pytest.fixture
def fetch_versions_match(monkeypatch):
"""Fake successful checksums returned from downloaded tarballs."""
def get_checksums_for_versions(url_by_version, package_name, **kwargs):
pkg_cls = spack.repo.PATH.get_pkg_class(package_name)
return {v: pkg_cls.versions[v]["sha256"] for v in url_by_version}
monkeypatch.setattr(spack.stage, "get_checksums_for_versions", get_checksums_for_versions)
@pytest.fixture
def fetch_versions_invalid(monkeypatch):
"""Fake successful checksums returned from downloaded tarballs."""
def get_checksums_for_versions(url_by_version, package_name, **kwargs):
return {
v: "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
for v in url_by_version
}
monkeypatch.setattr(spack.stage, "get_checksums_for_versions", get_checksums_for_versions)
@pytest.mark.parametrize("versions", [["2.1.4"], ["2.1.4", "2.1.5"]])
def test_ci_validate_standard_versions_valid(capfd, mock_packages, fetch_versions_match, versions):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v in versions]
assert spack.cmd.ci.validate_standard_versions(pkg, version_list)
out, err = capfd.readouterr()
for version in versions:
assert f"Validated diff-test@{version}" in out
@pytest.mark.parametrize("versions", [["2.1.4"], ["2.1.4", "2.1.5"]])
def test_ci_validate_standard_versions_invalid(
capfd, mock_packages, fetch_versions_invalid, versions
):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v in versions]
assert spack.cmd.ci.validate_standard_versions(pkg, version_list) is False
out, err = capfd.readouterr()
for version in versions:
assert f"Invalid checksum found diff-test@{version}" in err
@pytest.mark.parametrize("versions", [[("1.0", -2)], [("1.1", -4), ("2.0", -6)]])
def test_ci_validate_git_versions_valid(
capfd, monkeypatch, mock_packages, mock_git_version_info, versions
):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v, _ in versions]
repo_path, filename, commits = mock_git_version_info
version_commit_dict = {
spack.version.Version(v): {"tag": f"v{v}", "commit": commits[c]} for v, c in versions
}
pkg_class = spec.package_class
monkeypatch.setattr(pkg_class, "git", repo_path)
monkeypatch.setattr(pkg_class, "versions", version_commit_dict)
assert spack.cmd.ci.validate_git_versions(pkg, version_list)
out, err = capfd.readouterr()
for version in version_list:
assert f"Validated diff-test@{version}" in out
@pytest.mark.parametrize("versions", [[("1.0", -3)], [("1.1", -5), ("2.0", -5)]])
def test_ci_validate_git_versions_bad_tag(
capfd, monkeypatch, mock_packages, mock_git_version_info, versions
):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v, _ in versions]
repo_path, filename, commits = mock_git_version_info
version_commit_dict = {
spack.version.Version(v): {"tag": f"v{v}", "commit": commits[c]} for v, c in versions
}
pkg_class = spec.package_class
monkeypatch.setattr(pkg_class, "git", repo_path)
monkeypatch.setattr(pkg_class, "versions", version_commit_dict)
assert spack.cmd.ci.validate_git_versions(pkg, version_list) is False
out, err = capfd.readouterr()
for version in version_list:
assert f"Mismatched tag <-> commit found for diff-test@{version}" in err
@pytest.mark.parametrize("versions", [[("1.0", -2)], [("1.1", -4), ("2.0", -6), ("3.0", -6)]])
def test_ci_validate_git_versions_invalid(
capfd, monkeypatch, mock_packages, mock_git_version_info, versions
):
spec = spack.spec.Spec("diff-test")
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
version_list = [spack.version.Version(v) for v, _ in versions]
repo_path, filename, commits = mock_git_version_info
version_commit_dict = {
spack.version.Version(v): {
"tag": f"v{v}",
"commit": "abcdefabcdefabcdefabcdefabcdefabcdefabc",
}
for v, c in versions
}
pkg_class = spec.package_class
monkeypatch.setattr(pkg_class, "git", repo_path)
monkeypatch.setattr(pkg_class, "versions", version_commit_dict)
assert spack.cmd.ci.validate_git_versions(pkg, version_list) is False
out, err = capfd.readouterr()
for version in version_list:
assert f"Invalid commit for diff-test@{version}" in err
@pytest.fixture
def verify_standard_versions_valid(monkeypatch):
def validate_standard_versions(pkg, versions):
for version in versions:
print(f"Validated {pkg.name}@{version}")
return True
monkeypatch.setattr(spack.cmd.ci, "validate_standard_versions", validate_standard_versions)
@pytest.fixture
def verify_git_versions_valid(monkeypatch):
def validate_git_versions(pkg, versions):
for version in versions:
print(f"Validated {pkg.name}@{version}")
return True
monkeypatch.setattr(spack.cmd.ci, "validate_git_versions", validate_git_versions)
@pytest.fixture
def verify_standard_versions_invalid(monkeypatch):
def validate_standard_versions(pkg, versions):
for version in versions:
print(f"Invalid checksum found {pkg.name}@{version}")
return False
monkeypatch.setattr(spack.cmd.ci, "validate_standard_versions", validate_standard_versions)
@pytest.fixture
def verify_git_versions_invalid(monkeypatch):
def validate_git_versions(pkg, versions):
for version in versions:
print(f"Invalid commit for {pkg.name}@{version}")
return False
monkeypatch.setattr(spack.cmd.ci, "validate_git_versions", validate_git_versions)
def test_ci_verify_versions_valid(
monkeypatch,
mock_packages,
mock_git_package_changes,
verify_standard_versions_valid,
verify_git_versions_valid,
):
repo_path, _, commits = mock_git_package_changes
monkeypatch.setattr(spack.paths, "prefix", repo_path)
out = ci_cmd("verify-versions", commits[-1], commits[-3])
assert "Validated diff-test@2.1.5" in out
assert "Validated diff-test@2.1.6" in out
def test_ci_verify_versions_standard_invalid(
monkeypatch,
mock_packages,
mock_git_package_changes,
verify_standard_versions_invalid,
verify_git_versions_invalid,
):
repo_path, _, commits = mock_git_package_changes
monkeypatch.setattr(spack.paths, "prefix", repo_path)
out = ci_cmd("verify-versions", commits[-1], commits[-3], fail_on_error=False)
assert "Invalid checksum found diff-test@2.1.5" in out
assert "Invalid commit for diff-test@2.1.6" in out
def test_ci_verify_versions_manual_package(monkeypatch, mock_packages, mock_git_package_changes):
repo_path, _, commits = mock_git_package_changes
monkeypatch.setattr(spack.paths, "prefix", repo_path)
pkg_class = spack.spec.Spec("diff-test").package_class
monkeypatch.setattr(pkg_class, "manual_download", True)
out = ci_cmd("verify-versions", commits[-1], commits[-2])
assert "Skipping manual download package: diff-test" in out

View File

@@ -5,7 +5,6 @@
import filecmp
import os
import shutil
import textwrap
import pytest
@@ -260,25 +259,15 @@ def test_update_completion_arg(shell, tmpdir, monkeypatch):
def test_updated_completion_scripts(shell, tmpdir):
"""Make sure our shell tab completion scripts remain up-to-date."""
width = 72
lines = textwrap.wrap(
msg = (
"It looks like Spack's command-line interface has been modified. "
"If differences are more than your global 'include:' scopes, please "
"update Spack's shell tab completion scripts by running:",
width,
"Please update Spack's shell tab completion scripts by running:\n\n"
" spack commands --update-completion\n\n"
"and adding the changed files to your pull request."
)
lines.append("\n spack commands --update-completion\n")
lines.extend(
textwrap.wrap(
"and adding the changed files (minus your global 'include:' scopes) "
"to your pull request.",
width,
)
)
msg = "\n".join(lines)
header = os.path.join(spack.paths.share_path, shell, f"spack-completion.{shell}")
script = f"spack-completion.{shell}"
script = "spack-completion.{0}".format(shell)
old_script = os.path.join(spack.paths.share_path, script)
new_script = str(tmpdir.join(script))

View File

@@ -213,7 +213,7 @@ def test_config_add_update_dict(mutable_empty_config):
def test_config_with_c_argument(mutable_empty_config):
# I don't know how to add a spack argument to a Spack Command, so we test this way
config_file = "config:install_tree:root:/path/to/config.yaml"
config_file = "config:install_root:root:/path/to/config.yaml"
parser = spack.main.make_argument_parser()
args = parser.parse_args(["-c", config_file])
assert config_file in args.config_vars
@@ -221,7 +221,7 @@ def test_config_with_c_argument(mutable_empty_config):
# Add the path to the config
config("add", args.config_vars[0], scope="command_line")
output = config("get", "config")
assert "config:\n install_tree:\n root: /path/to/config.yaml" in output
assert "config:\n install_root:\n root: /path/to/config.yaml" in output
def test_config_add_ordered_dict(mutable_empty_config):
@@ -335,7 +335,7 @@ def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):
def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
config("add", "packages:all:require:['%gcc']")
config("add", "packages:all:compiler:[gcc]")
# contents to add to file
contents = """spack:
@@ -357,7 +357,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
expected = """packages:
all:
target: [x86_64]
require: ['%gcc']
compiler: [gcc]
"""
assert expected == output
@@ -606,6 +606,7 @@ def test_config_prefer_upstream(
packages = syaml.load(open(cfg_file, encoding="utf-8"))["packages"]
# Make sure only the non-default variants are set.
assert packages["all"] == {"compiler": ["gcc@=10.2.1"]}
assert packages["boost"] == {"variants": "+debug +graph", "version": ["1.63.0"]}
assert packages["dependency-install"] == {"version": ["2.0"]}
# Ensure that neither variant gets listed for hdf5, since they conflict

View File

@@ -2,16 +2,52 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import pytest
import spack
import spack.platforms
import spack.spec
from spack.database import INDEX_JSON_FILE
from spack.main import SpackCommand
from spack.util.executable import which
debug = SpackCommand("debug")
@pytest.mark.db
def test_create_db_tarball(tmpdir, database):
with tmpdir.as_cwd():
debug("create-db-tarball")
# get the first non-dotfile to avoid coverage files in the directory
files = os.listdir(os.getcwd())
tarball_name = next(
f for f in files if not f.startswith(".") and not f.startswith("tests")
)
# debug command made an archive
assert os.path.exists(tarball_name)
# print contents of archive
tar = which("tar")
contents = tar("tzf", tarball_name, output=str)
# DB file is included
assert INDEX_JSON_FILE in contents
# specfiles from all installs are included
for spec in database.query():
# externals won't have a specfile
if spec.external:
continue
spec_suffix = "%s/.spack/spec.json" % spec.dag_hash()
assert spec_suffix in contents
def test_report():
out = debug("report")
host_platform = spack.platforms.host()

View File

@@ -15,21 +15,18 @@
deprecate = SpackCommand("deprecate")
find = SpackCommand("find")
# Unit tests should not be affected by the user's managed environments
pytestmark = pytest.mark.usefixtures("mutable_mock_env_path")
def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.10")
all_installed = spack.store.STORE.db.query("libelf")
all_installed = spack.store.STORE.db.query()
assert len(all_installed) == 2
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.STORE.db.query("libelf")
all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert all_available == all_installed
assert non_deprecated == spack.store.STORE.db.query("libelf@0.8.13")
@@ -42,24 +39,24 @@ def test_deprecate_fails_no_such_package(mock_packages, mock_archive, mock_fetch
output = deprecate("-y", "libelf@0.8.10", "libelf@0.8.13", fail_on_error=False)
assert "Spec 'libelf@0.8.10' matches no installed packages" in output
install("--fake", "libelf@0.8.10")
install("libelf@0.8.10")
output = deprecate("-y", "libelf@0.8.10", "libelf@0.8.13", fail_on_error=False)
assert "Spec 'libelf@0.8.13' matches no installed packages" in output
def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mockery, monkeypatch):
"""Tests that the -i option allows us to deprecate in favor of a spec
that is not yet installed.
"""
install("--fake", "libelf@0.8.10")
to_deprecate = spack.store.STORE.db.query("libelf")
def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that the ```-i`` option allows us to deprecate in favor of a spec
that is not yet installed."""
install("libelf@0.8.10")
to_deprecate = spack.store.STORE.db.query()
assert len(to_deprecate) == 1
deprecate("-y", "-i", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.STORE.db.query("libelf")
deprecated = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.DEPRECATED)
non_deprecated = spack.store.STORE.db.query()
deprecated = spack.store.STORE.db.query(installed=InstallRecordStatus.DEPRECATED)
assert deprecated == to_deprecate
assert len(non_deprecated) == 1
assert non_deprecated[0].satisfies("libelf@0.8.13")
@@ -67,8 +64,8 @@ def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mock
def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Test that the deprecate command deprecates all dependencies properly."""
install("--fake", "libdwarf@20130729 ^libelf@0.8.13")
install("--fake", "libdwarf@20130207 ^libelf@0.8.10")
install("libdwarf@20130729 ^libelf@0.8.13")
install("libdwarf@20130207 ^libelf@0.8.10")
new_spec = spack.concretize.concretize_one("libdwarf@20130729^libelf@0.8.13")
old_spec = spack.concretize.concretize_one("libdwarf@20130207^libelf@0.8.10")
@@ -84,14 +81,14 @@ def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery
assert all_available == all_installed
assert sorted(all_available) == sorted(deprecated + non_deprecated)
assert sorted(non_deprecated) == sorted(new_spec.traverse())
assert sorted(deprecated) == sorted([old_spec, old_spec["libelf"]])
assert sorted(non_deprecated) == sorted(list(new_spec.traverse()))
assert sorted(deprecated) == sorted(list(old_spec.traverse()))
def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that we can still uninstall deprecated packages."""
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.10")
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
@@ -107,9 +104,9 @@ def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_m
def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that we can re-deprecate a spec to change its deprecator."""
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.12")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.12")
install("libelf@0.8.10")
deprecated_spec = spack.concretize.concretize_one("libelf@0.8.10")
@@ -120,8 +117,8 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, i
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.STORE.db.query("libelf")
all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert len(non_deprecated) == 2
assert len(all_available) == 3
@@ -132,9 +129,9 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, i
def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that when a deprecator spec is deprecated, its deprecatee specs
are updated to point to the new deprecator."""
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.12")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.12")
install("libelf@0.8.10")
first_deprecated_spec = spack.concretize.concretize_one("libelf@0.8.10")
second_deprecated_spec = spack.concretize.concretize_one("libelf@0.8.12")
@@ -147,8 +144,8 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_m
deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")
non_deprecated = spack.store.STORE.db.query("libelf")
all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert len(non_deprecated) == 1
assert len(all_available) == 3
@@ -161,8 +158,8 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_m
def test_concretize_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that the concretizer throws an error if we concretize to a
deprecated spec"""
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.10")
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")

View File

@@ -127,15 +127,16 @@ def test_dev_build_before_until(tmpdir, install_mockery):
assert not_installed in out
def _print_spack_short_spec(*args):
print(f"SPACK_SHORT_SPEC={os.environ['SPACK_SHORT_SPEC']}")
def print_spack_cc(*args):
# Eat the arguments and print the CC environment variable for the test to check
print(os.environ.get("CC", ""))
def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
monkeypatch.setattr(os, "execvp", _print_spack_short_spec)
monkeypatch.setattr(os, "execvp", print_spack_cc)
with tmpdir.as_cwd():
output = dev_build("-b", "edit", "--drop-in", "sh", "dev-build-test-install@0.0.0")
assert "SPACK_SHORT_SPEC=dev-build-test-install@0.0.0" in output
assert os.path.join("lib", "spack", "env") in output
def test_dev_build_fails_already_installed(tmpdir, install_mockery):

View File

@@ -16,7 +16,6 @@
import spack.stage
import spack.util.git
import spack.util.path
from spack.error import SpackError
from spack.main import SpackCommand
add = SpackCommand("add")
@@ -160,7 +159,6 @@ def check_path(stage, dest):
# Create path to allow develop to modify env
fs.mkdirp(abspath)
develop("--no-clone", "-p", path, "mpich@1.0")
self.check_develop(e, spack.spec.Spec("mpich@=1.0"), path)
# Remove path to ensure develop with no args runs staging code
os.rmdir(abspath)
@@ -220,40 +218,6 @@ def test_develop_full_git_repo(
assert len(commits) > 1
def test_recursive(mutable_mock_env_path, install_mockery, mock_fetch):
env("create", "test")
with ev.read("test") as e:
add("indirect-mpich@1.0")
e.concretize()
specs = e.all_specs()
assert len(specs) > 1
develop("--recursive", "mpich")
expected_dev_specs = ["mpich", "direct-mpich", "indirect-mpich"]
for spec in expected_dev_specs:
assert spec in e.dev_specs
def test_develop_fails_with_multiple_concrete_versions(
mutable_mock_env_path, install_mockery, mock_fetch
):
env("create", "test")
with ev.read("test") as e:
add("indirect-mpich@1.0")
add("indirect-mpich@0.9")
e.unify = False
e.concretize()
with pytest.raises(SpackError) as develop_error:
develop("indirect-mpich", fail_on_error=True)
error_str = "has multiple concrete instances in the graph"
assert error_str in str(develop_error.value)
def test_concretize_dev_path_with_at_symbol_in_env(mutable_mock_env_path, tmpdir, mock_packages):
spec_like = "develop-test@develop"

Some files were not shown because too many files have changed in this diff