Compare commits

..

7 Commits

Author          SHA1        Message                                       Date
Cameron Smith   9dead10d70  fix style                                     2024-01-11 19:21:57 -05:00
Angel Castillo  63ba7db2d2  find mpi exec (#42050)                        2024-01-11 14:31:06 -05:00
Cameron Smith   19c0208c1a  pumi: fix style                               2024-01-10 10:34:12 -05:00
Cameron Smith   9682347254  pumi: fix mpi test paths                      2024-01-10 10:34:12 -05:00
Cameron Smith   f4f7309504  pumi: test dir fixes (thank you @tldahlgren)  2024-01-10 10:34:11 -05:00
Cameron Smith   071a34df27  double quotes                                 2024-01-10 10:34:11 -05:00
Cameron Smith   8d35a8498b  pumi: fix path to smoketest input data        2024-01-10 10:34:11 -05:00
623 changed files with 4962 additions and 24800 deletions

View File

@@ -1,6 +0,0 @@
<!--
Remember that `spackbot` can help with your PR in multiple ways:
- `@spackbot help` shows all the commands that are currently available
- `@spackbot fix style` tries to push a commit to fix style issues in this PR
- `@spackbot re-run pipeline` runs the pipelines again, if you have write access to the repository
-->

View File

@@ -43,7 +43,7 @@ jobs:
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # @v2.1.0
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits

View File

@@ -57,7 +57,7 @@ jobs:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
- uses: docker/metadata-action@dbef88086f6cef02e264edb7dbf63250c17cef6c
id: docker_meta
with:
images: |
@@ -118,5 +118,7 @@ jobs:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}

View File

@@ -40,7 +40,7 @@ jobs:
with:
fetch-depth: 0
# For pull requests it's not necessary to checkout the code
- uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd
- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
id: filter
with:
# See https://github.com/dorny/paths-filter/issues/56 for the syntax used below

View File

@@ -1,4 +1,4 @@
black==24.1.1
black==23.12.1
clingo==5.6.2
flake8==7.0.0
isort==5.13.2

View File

@@ -91,7 +91,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,linux,${{ matrix.concretizer }}
# Test shell integration
@@ -122,7 +122,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: shelltests,linux
@@ -181,7 +181,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044 # @v2.1.0
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
with:
flags: unittests,linux,clingo
# Run unit tests on MacOS
@@ -216,6 +216,6 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,macos

View File

@@ -33,7 +33,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,windows
unit-tests-cmd:
@@ -57,7 +57,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@e0b68c6749509c5f83f984dd99a76a1c1a231044
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,windows
build-abseil:

View File

@@ -1130,10 +1130,6 @@ A version specifier can also be a list of ranges and specific versions,
separated by commas. For example, ``@1.0:1.5,=1.7.1`` matches any version
in the range ``1.0:1.5`` and the specific version ``1.7.1``.
^^^^^^^^^^^^
Git versions
^^^^^^^^^^^^
For packages with a ``git`` attribute, ``git`` references
may be specified instead of a numerical version i.e. branches, tags
and commits. Spack will stage and build based off the ``git``
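For illustration, a hypothetical ``package.py`` using a ``git`` attribute and
git-based versions might look like the following (the package name, URL, and
references are placeholders, not taken from this changeset):

.. code-block:: python

   from spack.package import *

   class Example(Package):
       """Hypothetical package illustrating git-based versions."""

       git = "https://github.com/example/example.git"

       version("main", branch="main")     # track a branch
       version("2.1.0", tag="v2.1.0")     # pin a tag
       # a specific commit can also be pinned, e.g. version("exp", commit="<sha>")

With such a definition, a spec like ``example@main`` is staged from the git
repository rather than from a release tarball.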

View File

@@ -199,7 +199,6 @@ def setup(sphinx):
("py:class", "contextlib.contextmanager"),
("py:class", "module"),
("py:class", "_io.BufferedReader"),
("py:class", "_io.BytesIO"),
("py:class", "unittest.case.TestCase"),
("py:class", "_frozen_importlib_external.SourceFileLoader"),
("py:class", "clingo.Control"),
@@ -216,7 +215,6 @@ def setup(sphinx):
("py:class", "spack.spec.InstallStatus"),
("py:class", "spack.spec.SpecfileReaderBase"),
("py:class", "spack.install_test.Pb"),
("py:class", "spack.filesystem_view.SimpleFilesystemView"),
]
# The reST default role (used for this markup: `text`) to use for all documents.

View File

@@ -142,21 +142,6 @@ user's prompt to begin with the environment name in brackets.
$ spack env activate -p myenv
[myenv] $ ...
The ``activate`` command can also be used to create a new environment, if it is
not already defined, by adding the ``--create`` flag. Managed and anonymous
environments (anonymous environments are explained in the next section) can
both be created using the same flags that ``spack env create`` accepts. If an
environment already exists, Spack will simply activate it and ignore the
create-specific flags.
.. code-block:: console
$ spack env activate --create -p myenv
# ...
# [creates if myenv does not exist yet]
# ...
[myenv] $ ...
To deactivate an environment, use the command:
.. code-block:: console
@@ -472,11 +457,11 @@ a ``packages.yaml`` file) could contain:
.. code-block:: yaml
spack:
# ...
...
packages:
all:
compiler: [intel]
# ...
...
This configuration sets the default compiler for all packages to
``intel``.
@@ -822,7 +807,7 @@ directories.
.. code-block:: yaml
spack:
# ...
...
view:
mpis:
root: /path/to/view
@@ -866,7 +851,7 @@ automatically named ``default``, so that
.. code-block:: yaml
spack:
# ...
...
view: True
is equivalent to
@@ -874,7 +859,7 @@ is equivalent to
.. code-block:: yaml
spack:
# ...
...
view:
default:
root: .spack-env/view
@@ -884,7 +869,7 @@ and
.. code-block:: yaml
spack:
# ...
...
view: /path/to/view
is equivalent to
@@ -892,7 +877,7 @@ is equivalent to
.. code-block:: yaml
spack:
# ...
...
view:
default:
root: /path/to/view

View File

@@ -623,7 +623,7 @@ Fortran.
compilers:
- compiler:
# ...
...
paths:
cc: /usr/bin/clang
cxx: /usr/bin/clang++

View File

@@ -198,7 +198,7 @@ set by changing the ``roots`` key of the configuration.
my_custom_lmod_modules:
roots:
lmod: /path/to/install/custom/lmod/modules
# ...
...
This configuration will create two module sets. The default module set
will install its ``tcl`` modules to ``/path/to/install/tcl/modules``

View File

@@ -487,56 +487,6 @@ present. For instance with a configuration like:
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Conflicts and strong preferences
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If the semantics of requirements are too strong, you can also express "strong preferences" and "conflicts"
from configuration files:
.. code-block:: yaml
packages:
all:
prefer:
- '%clang'
conflict:
- '+shared'
The ``prefer`` and ``conflict`` sections can be used whenever a ``require`` section is allowed.
The argument is always a list of constraints, and each constraint can be either a simple string,
or a more complex object:
.. code-block:: yaml
packages:
all:
conflict:
- spec: '%clang'
when: 'target=x86_64_v3'
message: 'reason why clang cannot be used'
The ``spec`` attribute is mandatory, while both ``when`` and ``message`` are optional.
.. note::
Requirements allow for expressing both "strong preferences" and "conflicts".
The syntax for doing so, though, may not be immediately clear. For
instance, if we want to prevent any package from using ``%clang``, we can set:
.. code-block:: yaml
packages:
all:
require:
- one_of: ['%clang', '@:']
Since only one of the requirements must hold, and ``@:`` is always true, the rule above is
equivalent to a conflict. For "strong preferences" we need to substitute the ``one_of`` policy
with ``any_of``.
.. _package-preferences:
-------------------
@@ -657,11 +607,10 @@ You can assign class-level attributes in the configuration:
packages:
mpileaks:
package_attributes:
# Override existing attributes
url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
# ... or add new ones
x: 1
# Override existing attributes
url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
# ... or add new ones
x: 1
Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
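As a hypothetical sketch of how such attributes could then be read from within
``package.py`` (the class and output are illustrative, not part of this
changeset):

.. code-block:: python

   class Mpileaks(Package):
       def install(self, spec, prefix):
           # 'url' was overridden and 'x' was added via package_attributes above
           print(f"fetching from {self.url} (x = {self.x})")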

View File

@@ -6979,18 +6979,3 @@ you probably care most about are:
You may also care about `license exceptions
<https://spdx.org/licenses/exceptions-index.html>`_ that use the ``WITH`` operator,
e.g. ``Apache-2.0 WITH LLVM-exception``.
Many of the licenses that are currently in the Spack repositories have been
automatically determined. While this is great for adding license information
in bulk and is most likely correct, there are sometimes edge cases that
require manual intervention. To determine which licenses are validated and
which are not, there is the ``checked_by`` parameter in the license directive:
.. code-block:: python
license("<license>", when="<when>", checked_by="<github username>")
When you have validated a license, either explicitly or as part of packaging
a new package, please set the ``checked_by`` parameter to your GitHub
username to signal that the license has been manually verified.
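As a concrete, hypothetical instance of the directive described above (the
license identifier, constraint, and username are placeholders):

.. code-block:: python

   # in a package.py; all values below are illustrative
   license("Apache-2.0 WITH LLVM-exception", when="@14:", checked_by="my-github-handle")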

View File

@@ -810,7 +810,7 @@ generated by ``spack ci generate``. You also want your generated rebuild jobs
.. code-block:: yaml
spack:
# ...
...
ci:
pipeline-gen:
- build-job:

View File

@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.24.0
python-levenshtein==0.23.0
docutils==0.20.1
pygments==2.17.2
urllib3==2.2.0
pytest==8.0.0
urllib3==2.1.0
pytest==7.4.4
isort==5.13.2
black==24.1.1
black==23.12.1
flake8==7.0.0
mypy==1.8.0

View File

@@ -1377,89 +1377,120 @@ def traverse_tree(
yield (source_path, dest_path)
def lexists_islink_isdir(path):
"""Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
if sys.platform == "win32":
if not os.path.lexists(path):
return False, False, False
return os.path.lexists(path), islink(path), os.path.isdir(path)
# First try to lstat, so we know if it's a link or not.
try:
lst = os.lstat(path)
except (IOError, OSError):
return False, False, False
is_link = stat.S_ISLNK(lst.st_mode)
# Check whether file is a dir.
if not is_link:
is_dir = stat.S_ISDIR(lst.st_mode)
return True, is_link, is_dir
# Check whether symlink points to a dir.
try:
st = os.stat(path)
is_dir = stat.S_ISDIR(st.st_mode)
except (IOError, OSError):
# Dangling symlink (i.e. it lexists but not exists)
is_dir = False
return True, is_link, is_dir
class BaseDirectoryVisitor:
"""Base class and interface for :py:func:`visit_directory_tree`."""
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_file(self, root, rel_path, depth):
"""Handle the non-symlink file at ``os.path.join(root, rel_path)``
Parameters:
root: root directory
rel_path: relative path to current file from ``root``
root (str): root directory
rel_path (str): relative path to current file from ``root``
depth (int): depth of current file from the ``root`` directory"""
pass
def visit_symlinked_file(self, root: str, rel_path: str, depth) -> None:
"""Handle the symlink to a file at ``os.path.join(root, rel_path)``. Note: ``rel_path`` is
the location of the symlink, not to what it is pointing to. The symlink may be dangling.
def visit_symlinked_file(self, root, rel_path, depth):
"""Handle the symlink to a file at ``os.path.join(root, rel_path)``.
Note: ``rel_path`` is the location of the symlink, not to what it is
pointing to. The symlink may be dangling.
Parameters:
root: root directory
rel_path: relative path to current symlink from ``root``
depth: depth of current symlink from the ``root`` directory"""
root (str): root directory
rel_path (str): relative path to current symlink from ``root``
depth (int): depth of current symlink from the ``root`` directory"""
pass
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_dir(self, root, rel_path, depth):
"""Return True from this function to recurse into the directory at
os.path.join(root, rel_path). Return False in order not to recurse further.
Parameters:
root: root directory
rel_path: relative path to current directory from ``root``
depth: depth of current directory from the ``root`` directory
root (str): root directory
rel_path (str): relative path to current directory from ``root``
depth (int): depth of current directory from the ``root`` directory
Returns:
bool: ``True`` when the directory should be recursed into. ``False`` when
not"""
return False
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
"""Return ``True`` to recurse into the symlinked directory and ``False`` in order not to.
Note: ``rel_path`` is the path to the symlink itself. Following symlinked directories
blindly can cause infinite recursion due to cycles.
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""Return ``True`` to recurse into the symlinked directory and ``False`` in
order not to. Note: ``rel_path`` is the path to the symlink itself.
Following symlinked directories blindly can cause infinite recursion due to
cycles.
Parameters:
root: root directory
rel_path: relative path to current symlink from ``root``
depth: depth of current symlink from the ``root`` directory
root (str): root directory
rel_path (str): relative path to current symlink from ``root``
depth (int): depth of current symlink from the ``root`` directory
Returns:
bool: ``True`` when the directory should be recursed into. ``False`` when
not"""
return False
def after_visit_dir(self, root: str, rel_path: str, depth: int) -> None:
"""Called after recursion into ``rel_path`` finished. This function is not called when
``rel_path`` was not recursed into.
def after_visit_dir(self, root, rel_path, depth):
"""Called after recursion into ``rel_path`` finished. This function is not
called when ``rel_path`` was not recursed into.
Parameters:
root: root directory
rel_path: relative path to current directory from ``root``
depth: depth of current directory from the ``root`` directory"""
root (str): root directory
rel_path (str): relative path to current directory from ``root``
depth (int): depth of current directory from the ``root`` directory"""
pass
def after_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> None:
"""Called after recursion into ``rel_path`` finished. This function is not called when
``rel_path`` was not recursed into.
def after_visit_symlinked_dir(self, root, rel_path, depth):
"""Called after recursion into ``rel_path`` finished. This function is not
called when ``rel_path`` was not recursed into.
Parameters:
root: root directory
rel_path: relative path to current symlink from ``root``
depth: depth of current symlink from the ``root`` directory"""
root (str): root directory
rel_path (str): relative path to current symlink from ``root``
depth (int): depth of current symlink from the ``root`` directory"""
pass
def visit_directory_tree(
root: str, visitor: BaseDirectoryVisitor, rel_path: str = "", depth: int = 0
):
"""Recurses the directory root depth-first through a visitor pattern using the interface from
:py:class:`BaseDirectoryVisitor`
def visit_directory_tree(root, visitor, rel_path="", depth=0):
"""Recurses the directory root depth-first through a visitor pattern using the
interface from :py:class:`BaseDirectoryVisitor`
Parameters:
root: path of directory to recurse into
visitor: what visitor to use
rel_path: current relative path from the root
depth: current depth from the root
root (str): path of directory to recurse into
visitor (BaseDirectoryVisitor): what visitor to use
rel_path (str): current relative path from the root
depth (str): current depth from the root
"""
dir = os.path.join(root, rel_path)
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)
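# A minimal, hypothetical sketch of the visitor interface documented above
# (not part of this changeset): count regular files without following
# symlinked directories. Assumes BaseDirectoryVisitor and visit_directory_tree
# are imported from llnl.util.filesystem when used outside this module.
class FileCounter(BaseDirectoryVisitor):
    def __init__(self):
        self.count = 0

    def visit_file(self, root, rel_path, depth):
        self.count += 1

    def before_visit_dir(self, root, rel_path, depth):
        return True  # recurse into real directories

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        return False  # avoid cycles: never follow symlinked directories

# usage (path is illustrative):
#   counter = FileCounter()
#   visit_directory_tree("/some/prefix", counter)
#   print(counter.count)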
@@ -1467,19 +1498,26 @@ def visit_directory_tree(
for f in dir_entries:
rel_child = os.path.join(rel_path, f.name)
islink = f.is_symlink()
# On Windows, symlinks to directories are distinct from symlinks to files, and it is
# possible to create a broken symlink to a directory (e.g. using os.symlink without
# `target_is_directory=True`), invoking `isdir` on a symlink on Windows that is broken in
# this manner will result in an error. In this case we can work around the issue by reading
# the target and resolving the directory ourselves
# On Windows, symlinks to directories are distinct from
# symlinks to files, and it is possible to create a
# broken symlink to a directory (e.g. using os.symlink
# without `target_is_directory=True`), invoking `isdir`
# on a symlink on Windows that is broken in this manner
# will result in an error. In this case we can work around
# the issue by reading the target and resolving the
# directory ourselves
try:
isdir = f.is_dir()
except OSError as e:
if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
# if path is a symlink, determine destination and evaluate file vs directory
# if path is a symlink, determine destination and
# evaluate file vs directory
link_target = resolve_link_target_relative_to_the_link(f)
# link_target might be relative but resolve_link_target_relative_to_the_link
# will ensure that if so, that it is relative to the CWD and therefore makes sense
# link_target might be relative but
# resolve_link_target_relative_to_the_link
# will ensure that if so, that it is relative
# to the CWD and therefore
# makes sense
isdir = os.path.isdir(link_target)
else:
raise e

View File

@@ -8,7 +8,7 @@
import filecmp
import os
import shutil
from typing import Callable, Dict, List, Optional, Tuple
from collections import OrderedDict
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, touch, traverse_tree
@@ -51,32 +51,32 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
- A list of merge conflicts in dst/
"""
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
def __init__(self, ignore=None):
self.ignore = ignore if ignore is not None else lambda f: False
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
# bit to the relative path in the destination dir.
self.projection: str = ""
# When mapping <src root> to <dst root>/<projection>, we need
# to prepend the <projection> bit to the relative path in the
# destination dir.
self.projection = ""
# Two files f and g conflict if they are not os.path.samefile(f, g) and they are both
# projected to the same destination file. These conflicts are not necessarily fatal, and
# can be resolved or ignored. For example <prefix>/LICENSE or
# <site-packages>/<namespace>/__init__.py conflicts can be ignored).
self.file_conflicts: List[MergeConflict] = []
# When a file blocks another file, the conflict can sometimes
# be resolved / ignored (e.g. <prefix>/LICENSE or
# or <site-packages>/<namespace>/__init__.py conflicts can be
# ignored).
self.file_conflicts = []
# When we have to create a dir where a file is, or a file where a dir is, we have fatal
# errors, listed here.
self.fatal_conflicts: List[MergeConflict] = []
# When we have to create a dir where a file is, or a file
# where a dir is, we have fatal errors, listed here.
self.fatal_conflicts = []
# What directories we have to make; this is an ordered dict, so that we have a fast lookup
# and can run mkdir in order.
self.directories: Dict[str, Tuple[str, str]] = {}
# What directories we have to make; this is an ordered set,
# so that we have a fast lookup and can run mkdir in order.
self.directories = OrderedDict()
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
# are guaranteed to be grouped by src_root in the order they were visited.
self.files: Dict[str, Tuple[str, str]] = {}
# Files to link. Maps dst_rel to (src_root, src_rel)
self.files = OrderedDict()
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_dir(self, root, rel_path, depth):
"""
Register a directory if dst / rel_path is not blocked by a file or ignored.
"""
@@ -104,7 +104,7 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
self.directories[proj_rel_path] = (root, rel_path)
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""
Replace symlinked dirs with actual directories when possible in low depths,
otherwise handle it as a file (i.e. we link to the symlink).
@@ -136,56 +136,40 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
self.visit_file(root, rel_path, depth)
return False
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
def visit_file(self, root, rel_path, depth):
proj_rel_path = os.path.join(self.projection, rel_path)
if self.ignore(rel_path):
pass
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
src_a_root, src_a_relpath = self.directories[proj_rel_path]
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(*self.directories[proj_rel_path]),
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
)
elif proj_rel_path in self.files:
# When two files project to the same path, they conflict iff they are distinct.
# If they are the same (i.e. one links to the other), register regular files rather
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
# file, not the symlink.
src_a = os.path.join(*self.files[proj_rel_path])
src_b = os.path.join(root, rel_path)
try:
samefile = os.path.samefile(src_a, src_b)
except OSError:
samefile = False
if not samefile:
# Distinct files produce a conflict.
self.file_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
# In some cases we can resolve file-file conflicts
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.file_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
return
if not symlink:
# Remove the link in favor of the actual file. The del is necessary to maintain the
# order of the files dict, which is grouped by root.
del self.files[proj_rel_path]
self.files[proj_rel_path] = (root, rel_path)
)
else:
# Otherwise register this file to be linked.
self.files[proj_rel_path] = (root, rel_path)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_symlinked_file(self, root, rel_path, depth):
# Treat symlinked files as ordinary files (without "dereferencing")
self.visit_file(root, rel_path, depth, symlink=True)
self.visit_file(root, rel_path, depth)
def set_projection(self, projection: str) -> None:
def set_projection(self, projection):
self.projection = os.path.normpath(projection)
# Todo, is this how to check in general for empty projection?
@@ -213,19 +197,24 @@ def set_projection(self, projection: str) -> None:
class DestinationMergeVisitor(BaseDirectoryVisitor):
"""DestinatinoMergeVisitor takes a SourceMergeVisitor and:
"""DestinatinoMergeVisitor takes a SourceMergeVisitor
and:
a. registers additional conflicts when merging to the destination prefix
b. removes redundant mkdir operations when directories already exist in the destination prefix.
a. registers additional conflicts when merging
to the destination prefix
b. removes redundant mkdir operations when
directories already exist in the destination
prefix.
This also makes sure that symlinked directories in the target prefix will never be merged with
This also makes sure that symlinked directories
in the target prefix will never be merged with
directories in the sources directories.
"""
def __init__(self, source_merge_visitor: SourceMergeVisitor):
def __init__(self, source_merge_visitor):
self.src = source_merge_visitor
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_dir(self, root, rel_path, depth):
# If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper
if rel_path in self.src.files:
@@ -247,7 +236,7 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# don't descend into it.
return False
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
def before_visit_symlinked_dir(self, root, rel_path, depth):
"""
Symlinked directories in the destination prefix should
be seen as files; we should not accidentally merge
@@ -273,7 +262,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_file(self, root, rel_path, depth):
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
@@ -291,7 +280,7 @@ def visit_file(self, root: str, rel_path: str, depth: int) -> None:
)
)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
def visit_symlinked_file(self, root, rel_path, depth):
# Treat symlinked files as ordinary files (without "dereferencing")
self.visit_file(root, rel_path, depth)
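# A hypothetical sketch of how the two visitors above might be combined to plan
# a view merge; the import paths and directories are assumptions, not taken
# from this changeset.
import os

from llnl.util.filesystem import visit_directory_tree
from llnl.util.link_tree import DestinationMergeVisitor, SourceMergeVisitor

src = SourceMergeVisitor()
visit_directory_tree("/opt/spack/prefix", src)                   # plan dirs/files to link
visit_directory_tree("/opt/view", DestinationMergeVisitor(src))  # register destination conflicts

if src.fatal_conflicts:
    raise RuntimeError("cannot merge: fatal conflicts found")
for dst_rel, (src_root, src_rel) in src.files.items():
    print(f"would link {os.path.join(src_root, src_rel)} -> {dst_rel}")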

View File

@@ -244,7 +244,7 @@ def _search_duplicate_specs_in_externals(error_cls):
+ lines
+ ["as they might result in non-deterministic hashes"]
)
except (TypeError, AttributeError):
except TypeError:
details = []
errors.append(error_cls(summary=error_msg, details=details))
@@ -292,6 +292,12 @@ def _avoid_mismatched_variants(error_cls):
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
def make_error(config_data, summary):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])
for pkg_name in packages_yaml:
# 'all:' must be more forgiving, since it is setting defaults for everything
if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]:
@@ -311,7 +317,7 @@ def _avoid_mismatched_variants(error_cls):
f"Setting a preference for the '{pkg_name}' package to the "
f"non-existing variant '{variant.name}'"
)
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
errors.append(make_error(preferences, summary))
continue
# Variant cannot accept this value
@@ -323,41 +329,11 @@ def _avoid_mismatched_variants(error_cls):
f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
f"to the invalid value '{str(variant)}'"
)
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
errors.append(make_error(preferences, summary))
return errors
@config_packages
def _wrongly_named_spec(error_cls):
"""Warns if the wrong name is used for an external spec"""
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
for pkg_name in packages_yaml:
if pkg_name == "all":
continue
externals = packages_yaml[pkg_name].get("externals", [])
is_virtual = spack.repo.PATH.is_virtual(pkg_name)
for entry in externals:
spec = spack.spec.Spec(entry["spec"])
regular_pkg_is_wrong = not is_virtual and pkg_name != spec.name
virtual_pkg_is_wrong = is_virtual and not any(
p.name == spec.name for p in spack.repo.PATH.providers_for(pkg_name)
)
if regular_pkg_is_wrong or virtual_pkg_is_wrong:
summary = f"Wrong external spec detected for '{pkg_name}': {spec}"
errors.append(_make_config_error(entry, summary, error_cls=error_cls))
return errors
def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])
#: Sanity checks on package directives
package_directives = AuditClass(
group="packages",

View File

@@ -5,6 +5,7 @@
import codecs
import collections
import errno
import hashlib
import io
import itertools
@@ -22,7 +23,8 @@
import urllib.parse
import urllib.request
import warnings
from contextlib import closing
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
from urllib.error import HTTPError, URLError
@@ -48,7 +50,6 @@
import spack.stage
import spack.store
import spack.traverse as traverse
import spack.util.archive
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
@@ -1132,46 +1133,205 @@ def generate_key_index(key_prefix, tmpdir=None):
shutil.rmtree(tmpdir)
@contextmanager
def gzip_compressed_tarfile(path):
"""Create a reproducible, compressed tarfile"""
# Create gzip compressed tarball of the install prefix
# 1) Use explicit empty filename and mtime 0 for gzip header reproducibility.
# If the filename="" is dropped, Python will use fileobj.name instead.
# This should effectively mimic `gzip --no-name`.
# 2) On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing(
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum)
) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile(
name="", mode="w", fileobj=outer_checksum
) as tar:
yield tar, inner_checksum, outer_checksum
def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str:
"""Compute tarfile entry name as the relative path from the (system) root."""
return _path(*_path(absolute_path).parts[1:]).as_posix()
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
"""Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.
Only adds regular files, symlinks and dirs. Skips devices, fifos. Preserves hardlinks.
Normalizes permissions like git. Tar entries are added in depth-first pre-order, with
dir entries partitioned by file | dir, and sorted alphabetically, for reproducibility.
Partitioning ensures only one dir is in memory at a time, and sorting improves compression.
Args:
tar: tarfile object to add files to
prefix: absolute install prefix of spec"""
if not os.path.isabs(prefix) or not os.path.isdir(prefix):
raise ValueError(f"prefix '{prefix}' must be an absolute path to a directory")
hardlink_to_tarinfo_name: Dict[Tuple[int, int], str] = dict()
stat_key = lambda stat: (stat.st_dev, stat.st_ino)
try: # skip buildinfo file if it exists
files_to_skip = [stat_key(os.lstat(buildinfo_file_name(prefix)))]
skip = lambda entry: stat_key(entry.stat(follow_symlinks=False)) in files_to_skip
except OSError:
skip = lambda entry: False
files_to_skip = []
spack.util.archive.reproducible_tarfile_from_prefix(
tar,
prefix,
# Spack <= 0.21 did not include parent directories, leading to issues when tarballs are
# used in runtimes like AWS lambda.
include_parent_directories=True,
skip=skip,
)
# First add all directories leading up to `prefix` (Spack <= 0.21 did not do this, leading to
# issues when tarballs are used in runtimes like AWS lambda). Skip the file system root.
parent_dirs = reversed(pathlib.Path(prefix).parents)
next(parent_dirs) # skip the root: slices are supported from python 3.10
for parent_dir in parent_dirs:
dir_info = tarfile.TarInfo(_tarinfo_name(str(parent_dir)))
dir_info.type = tarfile.DIRTYPE
dir_info.mode = 0o755
tar.addfile(dir_info)
dir_stack = [prefix]
while dir_stack:
dir = dir_stack.pop()
# Add the dir before its contents
dir_info = tarfile.TarInfo(_tarinfo_name(dir))
dir_info.type = tarfile.DIRTYPE
dir_info.mode = 0o755
tar.addfile(dir_info)
# Sort by name: reproducible & improves compression
with os.scandir(dir) as it:
entries = sorted(it, key=lambda entry: entry.name)
new_dirs = []
for entry in entries:
if entry.is_dir(follow_symlinks=False):
new_dirs.append(entry.path)
continue
file_info = tarfile.TarInfo(_tarinfo_name(entry.path))
s = entry.stat(follow_symlinks=False)
# Skip existing binary distribution files.
id = stat_key(s)
if id in files_to_skip:
continue
# Normalize the mode
file_info.mode = 0o644 if s.st_mode & 0o100 == 0 else 0o755
if entry.is_symlink():
file_info.type = tarfile.SYMTYPE
file_info.linkname = os.readlink(entry.path)
tar.addfile(file_info)
elif entry.is_file(follow_symlinks=False):
# Deduplicate hardlinks
if s.st_nlink > 1:
if id in hardlink_to_tarinfo_name:
file_info.type = tarfile.LNKTYPE
file_info.linkname = hardlink_to_tarinfo_name[id]
tar.addfile(file_info)
continue
hardlink_to_tarinfo_name[id] = file_info.name
# If file not yet seen, copy it.
file_info.type = tarfile.REGTYPE
file_info.size = s.st_size
with open(entry.path, "rb") as f:
tar.addfile(file_info, f)
dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical
class ChecksumWriter(io.BufferedIOBase):
"""Checksum writer computes a checksum while writing to a file."""
myfileobj = None
def __init__(self, fileobj, algorithm=hashlib.sha256):
self.fileobj = fileobj
self.hasher = algorithm()
self.length = 0
def hexdigest(self):
return self.hasher.hexdigest()
def write(self, data):
if isinstance(data, (bytes, bytearray)):
length = len(data)
else:
data = memoryview(data)
length = data.nbytes
if length > 0:
self.fileobj.write(data)
self.hasher.update(data)
self.length += length
return length
def read(self, size=-1):
raise OSError(errno.EBADF, "read() on write-only object")
def read1(self, size=-1):
raise OSError(errno.EBADF, "read1() on write-only object")
def peek(self, n):
raise OSError(errno.EBADF, "peek() on write-only object")
@property
def closed(self):
return self.fileobj is None
def close(self):
fileobj = self.fileobj
if fileobj is None:
return
self.fileobj.close()
self.fileobj = None
def flush(self):
self.fileobj.flush()
def fileno(self):
return self.fileobj.fileno()
def rewind(self):
raise OSError("Can't rewind while computing checksum")
def readable(self):
return False
def writable(self):
return True
def seekable(self):
return True
def tell(self):
return self.fileobj.tell()
def seek(self, offset, whence=io.SEEK_SET):
# In principle forward seek is possible with b"0" padding,
# but this is not implemented.
if offset == 0 and whence == io.SEEK_CUR:
return
raise OSError("Can't seek while computing checksum")
def readline(self, size=-1):
raise OSError(errno.EBADF, "readline() on write-only object")
def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
with spack.util.archive.gzip_compressed_tarfile(tarfile_path) as (
tar,
inner_checksum,
outer_checksum,
):
with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum):
# Tarball the install prefix
tarfile_of_spec_prefix(tar, binaries_dir)
# Serialize buildinfo for the tarball
bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
tarinfo = tarfile.TarInfo(
name=spack.util.archive.default_path_to_name(buildinfo_file_name(binaries_dir))
)
tarinfo = tarfile.TarInfo(name=_tarinfo_name(buildinfo_file_name(binaries_dir)))
tarinfo.type = tarfile.REGTYPE
tarinfo.size = len(bstring)
tarinfo.mode = 0o644

View File

@@ -542,7 +542,7 @@ def verify_patchelf(patchelf: "spack.util.executable.Executable") -> bool:
return version >= spack.version.Version("0.13.1")
def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
def ensure_patchelf_in_path_or_raise() -> None:
"""Ensure patchelf is in the PATH or raise."""
# The old concretizer is not smart and we're doing its job: if the latest patchelf
# does not concretize because the compiler doesn't support C++17, we try to

View File

@@ -146,7 +146,7 @@ def mypy_root_spec() -> str:
def black_root_spec() -> str:
"""Return the root spec used to bootstrap black"""
return _root_spec("py-black@:24.1.0")
return _root_spec("py-black@:23.1.0")
def flake8_root_spec() -> str:

View File

@@ -217,9 +217,6 @@ def clean_environment():
env.unset("R_HOME")
env.unset("R_ENVIRON")
env.unset("LUA_PATH")
env.unset("LUA_CPATH")
# Affects GNU make, can e.g. indirectly inhibit enabling parallel build
# env.unset('MAKEFLAGS')
@@ -555,55 +552,58 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
"""
module = ModuleChangePropagator(pkg)
m = module
if context == Context.BUILD:
module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
jobs = determine_number_of_jobs(parallel=pkg.parallel)
m.make_jobs = jobs
jobs = determine_number_of_jobs(parallel=pkg.parallel)
module.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable("make", jobs)
m.gmake = MakeExecutable("gmake", jobs)
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
m.nmake = Executable("nmake")
m.msbuild = Executable("msbuild")
# analog to configure for win32
m.cscript = Executable("cscript")
# TODO: make these build deps that can be installed if not found.
module.make = MakeExecutable("make", jobs)
module.gmake = MakeExecutable("gmake", jobs)
module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
module.nmake = Executable("nmake")
module.msbuild = Executable("msbuild")
# analog to configure for win32
module.cscript = Executable("cscript")
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable("./configure")
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
module.configure = Executable("./configure")
# Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
# Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc., so don't restrict it to context == build.)
link_dir = spack.paths.build_env_path
module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
m.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
m.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
# Useful directories within the prefix are encapsulated in
# a Prefix object.
module.prefix = pkg.prefix
m.prefix = pkg.prefix
# Platform-specific library suffix.
module.dso_suffix = dso_suffix
m.dso_suffix = dso_suffix
def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
compiler_path = kwargs.get("compiler", module.spack_cc)
compiler_path = kwargs.get("compiler", m.spack_cc)
compiler = Executable(compiler_path)
return _static_to_shared_library(
pkg.spec.architecture, compiler, static_lib, shared_lib, **kwargs
)
module.static_to_shared_library = static_to_shared_library
m.static_to_shared_library = static_to_shared_library
module.propagate_changes_to_mro()
@@ -972,8 +972,8 @@ def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
self.should_set_package_py_globals = (
self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT
)
# In a build context, the root needs build-specific globals set.
self.needs_build_context = UseMode.ROOT
# In a build context, the root and direct build deps need build-specific globals set.
self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT
def set_all_package_py_globals(self):
"""Set the globals in modules of package.py files."""

View File

@@ -199,8 +199,6 @@ def initconfig_mpi_entries(self):
mpiexec = "/usr/bin/srun"
else:
mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
elif hasattr(spec["mpi"].package, "mpiexec"):
mpiexec = spec["mpi"].package.mpiexec
else:
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
if not os.path.exists(mpiexec):

View File

@@ -15,7 +15,6 @@
import spack.build_environment
import spack.builder
import spack.deptypes as dt
import spack.package_base
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
@@ -32,68 +31,8 @@ def _extract_primary_generator(generator):
primary generator from the generator string which may contain an
optional secondary generator.
"""
return _primary_generator_extractor.match(generator).group(1)
def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]) -> None:
"""Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
``find_python_hints`` for context."""
if not getattr(pkg, "find_python_hints", False):
return
pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
if len(pythons) != 1:
return
try:
python_executable = pythons[0].package.command.path
except RuntimeError:
return
args.extend(
[
CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
CMakeBuilder.define("Python_EXECUTABLE", python_executable),
CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
]
)
def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[str]) -> None:
"""Set a few default defines for CMake, depending on its version."""
cmakes = pkg.spec.dependencies("cmake", dt.BUILD)
if len(cmakes) != 1:
return
cmake = cmakes[0]
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
try:
ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False
if cmake.satisfies("@3.9:"):
args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
# Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
# find_package may search there. This is not what we want.
# Do not populate CMake User Package Registry
if cmake.satisfies("@3.15:"):
# see https://cmake.org/cmake/help/latest/policy/CMP0090.html
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
elif cmake.satisfies("@3.1:"):
# see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
# Do not use CMake User/System Package Registry
# https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
if cmake.satisfies("@3.16:"):
args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
elif cmake.satisfies("@3.1:3.15"):
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
primary_generator = _primary_generator_extractor.match(generator).group(1)
return primary_generator
def generator(*names: str, default: Optional[str] = None):
@@ -147,13 +86,6 @@ class CMakePackage(spack.package_base.PackageBase):
#: Legacy buildsystem attribute used to deserialize and install old specs
legacy_buildsystem = "cmake"
#: When this package depends on Python and ``find_python_hints`` is set to True, pass the
#: defines {Python3,Python,PYTHON}_EXECUTABLE explicitly, so that CMake locates the right
#: Python in its builtin FindPython3, FindPython, and FindPythonInterp modules. Spack does
#: CMake's job because CMake's modules by default only search for Python versions known at the
#: time of release.
find_python_hints = True
build_system("cmake")
with when("build_system=cmake"):
@@ -309,9 +241,9 @@ def std_cmake_args(self):
"""Standard cmake arguments provided as a property for
convenience of package writers
"""
args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
args += getattr(self.pkg, "cmake_flag_args", [])
return args
std_cmake_args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
std_cmake_args += getattr(self.pkg, "cmake_flag_args", [])
return std_cmake_args
@staticmethod
def std_args(pkg, generator=None):
@@ -331,6 +263,11 @@ def std_args(pkg, generator=None):
except KeyError:
build_type = "RelWithDebInfo"
try:
ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False
define = CMakeBuilder.define
args = [
"-G",
@@ -339,6 +276,10 @@ def std_args(pkg, generator=None):
define("CMAKE_BUILD_TYPE", build_type),
]
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
if pkg.spec.satisfies("^cmake@3.9:"):
args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
if primary_generator == "Unix Makefiles":
args.append(define("CMAKE_VERBOSE_MAKEFILE", True))
@@ -347,9 +288,6 @@ def std_args(pkg, generator=None):
[define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
)
_conditional_cmake_defaults(pkg, args)
_maybe_set_python_hints(pkg, args)
# Set up CMake rpath
args.extend(
[

View File

@@ -218,7 +218,7 @@ def pset_components(self):
"+inspector": " intel-inspector",
"+itac": " intel-itac intel-ta intel-tc" " intel-trace-analyzer intel-trace-collector",
# Trace Analyzer and Collector
"+vtune": " intel-vtune",
"+vtune": " intel-vtune"
# VTune, ..-profiler since 2020, ..-amplifier before
}.items():
if variant in self.spec:

View File

@@ -29,12 +29,15 @@ class LuaPackage(spack.package_base.PackageBase):
with when("build_system=lua"):
depends_on("lua-lang")
with when("^[virtuals=lua-lang] lua"):
extends("lua")
with when("^[virtuals=lua-lang] lua-luajit"):
extends("lua-luajit+lualinks")
with when("^[virtuals=lua-lang] lua-luajit-openresty"):
extends("lua-luajit-openresty+lualinks")
extends("lua", when="^lua")
with when("^lua-luajit"):
extends("lua-luajit")
depends_on("luajit")
depends_on("lua-luajit+lualinks")
with when("^lua-luajit-openresty"):
extends("lua-luajit-openresty")
depends_on("luajit")
depends_on("lua-luajit-openresty+lualinks")
@property
def lua(self):

View File

@@ -149,7 +149,7 @@ def std_args(pkg):
else:
default_library = "shared"
return [
args = [
"-Dprefix={0}".format(pkg.prefix),
# If we do not specify libdir explicitly, Meson chooses something
# like lib/x86_64-linux-gnu, which causes problems when trying to
@@ -163,6 +163,8 @@ def std_args(pkg):
"-Dwrap_mode=nodownload",
]
return args
@property
def build_dirname(self):
"""Returns the directory name to use when building the package."""

View File

@@ -35,9 +35,9 @@ def _misc_cache():
#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_misc_cache)
)
MISC_CACHE: Union[
spack.util.file_cache.FileCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_misc_cache)
def fetch_cache_location():
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_fetch_cache)
)
FETCH_CACHE: Union[
spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_fetch_cache)

View File

@@ -7,7 +7,9 @@
get_job_name = lambda needs_entry: (
needs_entry.get("job")
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
else needs_entry if isinstance(needs_entry, str) else None
else needs_entry
if isinstance(needs_entry, str)
else None
)

View File

@@ -7,14 +7,13 @@
import glob
import hashlib
import json
import multiprocessing
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple, Union
from typing import Dict, List, Optional, Tuple
import llnl.util.tty as tty
from llnl.string import plural
@@ -327,30 +326,8 @@ def _progress(i: int, total: int):
return ""
class NoPool:
def map(self, func, args):
return [func(a) for a in args]
def starmap(self, func, args):
return [func(*a) for a in args]
def __enter__(self):
return self
def __exit__(self, *args):
pass
MaybePool = Union[multiprocessing.pool.Pool, NoPool]
def _make_pool() -> MaybePool:
"""Can't use threading because it's unsafe, and can't use spawned processes because of globals.
That leaves only forking"""
if multiprocessing.get_start_method() == "fork":
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
else:
return NoPool()
def _make_pool():
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
def push_fn(args):
@@ -686,7 +663,7 @@ def _push_oci(
base_image: Optional[ImageReference],
installed_specs_with_deps: List[Spec],
tmpdir: str,
pool: MaybePool,
pool: multiprocessing.pool.Pool,
force: bool = False,
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
"""Push specs to an OCI registry
@@ -802,10 +779,11 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
return config if "spec" in config else None
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
request = urllib.request.Request(url=image_ref.tags_url())
response = spack.oci.opener.urlopen(request)
spack.oci.opener.ensure_status(request, response, 200)
def _update_index_oci(
image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool
) -> None:
response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url()))
spack.oci.opener.ensure_status(response, 200)
tags = json.load(response)["tags"]
# Fetch all image config files in parallel

View File

@@ -5,7 +5,6 @@
import re
import sys
from typing import Dict, Optional
import llnl.string
import llnl.util.lang
@@ -18,15 +17,10 @@
import spack.util.crypto
import spack.util.web as web_util
from spack.cmd.common import arguments
from spack.package_base import (
ManualDownloadRequiredError,
PackageBase,
deprecated_version,
preferred_version,
)
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.version import StandardVersion, Version
from spack.version import Version
description = "checksum available versions of a package"
section = "packaging"
@@ -90,30 +84,28 @@ def checksum(parser, args):
spec = spack.spec.Spec(args.package)
# Get the package we're going to generate checksums for
pkg: PackageBase = spack.repo.PATH.get_pkg_class(spec.name)(spec)
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
# Skip manually downloaded packages
if pkg.manual_download:
raise ManualDownloadRequiredError(pkg.download_instr)
versions = [Version(v) for v in args.versions]
versions = [StandardVersion.from_string(v) for v in args.versions]
# Define placeholder for remote versions. This'll help reduce redundant work if we need to
# check for the existence of remote versions more than once.
remote_versions: Optional[Dict[StandardVersion, str]] = None
# Define placeholder for remote versions.
# This'll help reduce redundant work if we need to check for the existence
# of remote versions more than once.
remote_versions = None
# Add latest version if requested
if args.latest:
remote_versions = pkg.fetch_remote_versions(concurrency=args.jobs)
remote_versions = pkg.fetch_remote_versions(args.jobs)
if len(remote_versions) > 0:
versions.append(max(remote_versions.keys()))
latest_version = sorted(remote_versions.keys(), reverse=True)[0]
versions.append(latest_version)
# Add preferred version if requested (todo: exclude git versions)
# Add preferred version if requested
if args.preferred:
versions.append(preferred_version(pkg))
# Store a dict of the form version -> URL
url_dict: Dict[StandardVersion, str] = {}
url_dict = {}
for version in versions:
if deprecated_version(pkg, version):
@@ -123,16 +115,16 @@ def checksum(parser, args):
if url is not None:
url_dict[version] = url
continue
# If we get here, it's because no valid url was provided by the package. Do expensive
# fallback to try to recover
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions(concurrency=args.jobs)
remote_versions = pkg.fetch_remote_versions(args.jobs)
if version in remote_versions:
url_dict[version] = remote_versions[version]
if len(versions) <= 0:
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions(concurrency=args.jobs)
remote_versions = pkg.fetch_remote_versions(args.jobs)
url_dict = remote_versions
# A spidered URL can differ from the package.py *computed* URL, pointing to different tarballs.

View File

@@ -6,7 +6,6 @@
import json
import os
import shutil
from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -158,9 +157,7 @@ def setup_parser(subparser):
description=deindent(ci_reproduce.__doc__),
help=spack.cmd.first_line(ci_reproduce.__doc__),
)
reproduce.add_argument(
"job_url", help="URL of GitLab job web page or artifact", type=_gitlab_artifacts_url
)
reproduce.add_argument("job_url", help="URL of job artifacts bundle")
reproduce.add_argument(
"--runtime",
help="Container runtime to use.",
@@ -795,6 +792,11 @@ def ci_reproduce(args):
artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
instructions for reproducing the build locally
"""
job_url = args.job_url
work_dir = args.working_dir
autostart = args.autostart
runtime = args.runtime
# Allow passing GPG key for reproducing protected CI jobs
if args.gpg_file:
gpg_key_url = url_util.path_to_file_url(args.gpg_file)
@@ -803,47 +805,7 @@ def ci_reproduce(args):
else:
gpg_key_url = None
return spack_ci.reproduce_ci_job(
args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
)
def _gitlab_artifacts_url(url: str) -> str:
"""Take a URL either to the URL of the job in the GitLab UI, or to the artifacts zip file,
and output the URL to the artifacts zip file."""
parsed = urlparse(url)
if not parsed.scheme or not parsed.netloc:
raise ValueError(url)
parts = parsed.path.split("/")
if len(parts) < 2:
raise ValueError(url)
# Just use API endpoints verbatim, they're probably generated by Spack.
if parts[1] == "api":
return url
# If it's a URL to the job in the Gitlab UI, we may need to append the artifacts path.
minus_idx = parts.index("-")
# Remove repeated slashes in the remainder
rest = [p for p in parts[minus_idx + 1 :] if p]
# Now the format is jobs/X or jobs/X/artifacts/download
if len(rest) < 2 or rest[0] != "jobs":
raise ValueError(url)
if len(rest) == 2:
# replace jobs/X with jobs/X/artifacts/download
rest.extend(("artifacts", "download"))
# Replace the parts and unparse.
parts[minus_idx + 1 :] = rest
# Don't allow fragments / queries
return urlunparse(parsed._replace(path="/".join(parts), fragment="", query=""))
return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)
def ci(parser, args):

View File

@@ -76,10 +76,6 @@ def setup_parser(subparser):
)
add_parser.add_argument("-f", "--file", help="file from which to set all config values")
change_parser = sp.add_parser("change", help="swap variants etc. on specs in config")
change_parser.add_argument("path", help="colon-separated path to config section with specs")
change_parser.add_argument("--match-spec", help="only change constraints that match this")
prefer_upstream_parser = sp.add_parser(
"prefer-upstream", help="set package preferences from upstream"
)
@@ -122,7 +118,7 @@ def _get_scope_and_section(args):
if not section and not scope:
env = ev.active_environment()
if env:
scope = env.scope_name
scope = env.env_file_config_scope_name()
# set scope defaults
elif not scope:
@@ -267,98 +263,6 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
def _config_change_requires_scope(path, spec, scope, match_spec=None):
"""Return whether or not anything changed."""
require = spack.config.get(path, scope=scope)
if not require:
return False
changed = False
def override_cfg_spec(spec_str):
nonlocal changed
init_spec = spack.spec.Spec(spec_str)
# Overridden spec cannot be anonymous
init_spec.name = spec.name
if match_spec and not init_spec.satisfies(match_spec):
# If there is a match_spec, don't change constraints that
# don't match it
return spec_str
elif not init_spec.intersects(spec):
changed = True
return str(spack.spec.Spec.override(init_spec, spec))
else:
# Don't override things if they intersect, otherwise we'd
# be e.g. attaching +debug to every single version spec
return spec_str
if isinstance(require, str):
new_require = override_cfg_spec(require)
else:
new_require = []
for item in require:
if "one_of" in item:
item["one_of"] = [override_cfg_spec(x) for x in item["one_of"]]
elif "any_of" in item:
item["any_of"] = [override_cfg_spec(x) for x in item["any_of"]]
elif "spec" in item:
item["spec"] = override_cfg_spec(item["spec"])
elif isinstance(item, str):
item = override_cfg_spec(item)
else:
raise ValueError(f"Unexpected requirement: ({type(item)}) {str(item)}")
new_require.append(item)
spack.config.set(path, new_require, scope=scope)
return changed
def _config_change(config_path, match_spec_str=None):
all_components = spack.config.process_config_path(config_path)
key_components = all_components[:-1]
key_path = ":".join(key_components)
spec = spack.spec.Spec(syaml.syaml_str(all_components[-1]))
match_spec = None
if match_spec_str:
match_spec = spack.spec.Spec(match_spec_str)
if key_components[-1] == "require":
# Extract the package name from the config path, which allows
# args.spec to be anonymous if desired
pkg_name = key_components[1]
spec.name = pkg_name
changed = False
for scope in spack.config.writable_scope_names():
changed |= _config_change_requires_scope(key_path, spec, scope, match_spec=match_spec)
if not changed:
existing_requirements = spack.config.get(key_path)
if isinstance(existing_requirements, str):
raise spack.config.ConfigError(
"'config change' needs to append a requirement,"
" but existing require: config is not a list"
)
ideal_scope_to_modify = None
for scope in spack.config.writable_scope_names():
if spack.config.get(key_path, scope=scope):
ideal_scope_to_modify = scope
break
update_path = f"{key_path}:[{str(spec)}]"
spack.config.add(update_path, scope=ideal_scope_to_modify)
else:
raise ValueError("'config change' can currently only change 'require' sections")
def config_change(args):
_config_change(args.path, args.match_spec)
def config_update(args):
# Read the configuration files
spack.config.CONFIG.get_config(args.section, scope=args.scope)
@@ -586,6 +490,5 @@ def config(parser, args):
"update": config_update,
"revert": config_revert,
"prefer-upstream": config_prefer_upstream,
"change": config_change,
}
action[args.config_command](args)
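The core of _config_change_requires_scope above is a per-constraint decision: leave requirements that already intersect the new spec, rewrite the ones that do not, and skip anything excluded by --match-spec. A standalone sketch of that rule, with plain callables standing in for spack.spec.Spec methods:

def change_constraint(existing, new, *, intersects, override, match=None, satisfies=None):
    """Return (possibly rewritten constraint, whether it changed)."""
    if match is not None and not satisfies(existing, match):
        return existing, False            # --match-spec: leave non-matching constraints alone
    if intersects(existing, new):
        return existing, False            # already compatible with the new constraint
    return override(existing, new), True  # otherwise rewrite it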

View File

@@ -64,9 +64,8 @@ class {class_name}({base_class_name}):
# maintainers("github_user1", "github_user2")
# FIXME: Add the SPDX identifier of the project's license below.
# See https://spdx.org/licenses/ for a list. Upon manually verifying
# the license, set checked_by to your Github username.
license("UNKNOWN", checked_by="github_user1")
# See https://spdx.org/licenses/ for a list.
license("UNKNOWN")
{versions}

View File

@@ -54,104 +54,6 @@
]
#
# env create
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument(
"env_name",
metavar="env",
help=(
"name of managed environment or directory of the anonymous env "
"(when using --dir/-d) to activate"
),
)
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)
subparser.add_argument(
"--keep-relative",
action="store_true",
help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
)
view_opts = subparser.add_mutually_exclusive_group()
view_opts.add_argument(
"--without-view", action="store_true", help="do not maintain a view for this environment"
)
view_opts.add_argument(
"--with-view",
help="specify that this environment should maintain a view at the"
" specified path (by default the view is maintained in the"
" environment directory)",
)
subparser.add_argument(
"envfile",
nargs="?",
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)
def env_create(args):
if args.with_view:
# Expand relative paths provided on the command line to the current working directory
# This way we interpret `spack env create --with-view ./view --dir ./env` as
# a view in $PWD/view, not $PWD/env/view. This is different from specifying a relative
# path in the manifest, which is resolved relative to the manifest file's location.
with_view = os.path.abspath(args.with_view)
elif args.without_view:
with_view = False
else:
# Note that 'None' means unspecified, in which case the Environment
# object could choose to enable a view by default. False means that
# the environment should not include a view.
with_view = None
env = _env_create(
args.env_name,
init_file=args.envfile,
dir=args.dir,
with_view=with_view,
keep_relative=args.keep_relative,
)
# Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views()
def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
"""Create a new environment, with an optional yaml description.
Arguments:
name_or_path (str): name of the environment to create, or path to it
init_file (str or file): optional initialization file -- can be
a JSON lockfile (*.lock, *.json) or YAML manifest file
dir (bool): if True, create an environment in a directory instead
of a named environment
keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the
new environment is in a different location
"""
if not dir:
env = ev.create(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % (name_or_path))
return env
env = ev.create_in_dir(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment in %s" % env.path)
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % env.path)
return env
#
# env activate
#
@@ -216,46 +118,22 @@ def env_activate_setup_parser(subparser):
help="decorate the command line prompt when activating",
)
subparser.add_argument(
env_options = subparser.add_mutually_exclusive_group()
env_options.add_argument(
"--temp",
action="store_true",
default=False,
help="create and activate an environment in a temporary directory",
)
subparser.add_argument(
"--create",
action="store_true",
default=False,
help="create and activate the environment if it doesn't exist",
env_options.add_argument(
"-d", "--dir", default=None, help="activate the environment in this directory"
)
subparser.add_argument(
"--envfile",
nargs="?",
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)
subparser.add_argument(
"--keep-relative",
action="store_true",
help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
)
subparser.add_argument(
"-d",
"--dir",
default=False,
action="store_true",
help="activate environment based on the directory supplied",
)
subparser.add_argument(
env_options.add_argument(
metavar="env",
dest="env_name",
dest="activate_env",
nargs="?",
default=None,
help=(
"name of managed environment or directory of the anonymous env"
" (when using --dir/-d) to activate"
),
help="name of environment to activate",
)
@@ -284,17 +162,11 @@ def env_activate(args):
if args.env or args.no_env or args.env_dir:
tty.die("Calling spack env activate with --env, --env-dir and --no-env is ambiguous")
# special parser error handling relative to the --temp flag
temp_conflicts = iter([args.keep_relative, args.dir, args.env_name, args.with_view])
if args.temp and any(temp_conflicts):
tty.die(
"spack env activate --temp cannot be combined with managed environments, --with-view,"
" --keep-relative, or --dir."
)
env_name_or_dir = args.activate_env or args.dir
# When executing `spack env activate` without further arguments, activate
# the default environment. It's created when it doesn't exist yet.
if not args.env_name and not args.temp:
if not env_name_or_dir and not args.temp:
short_name = "default"
if not ev.exists(short_name):
ev.create(short_name)
@@ -313,25 +185,17 @@ def env_activate(args):
_tty_info(f"Created and activated temporary environment in {env_path}")
# Managed environment
elif ev.exists(args.env_name) and not args.dir:
env_path = ev.root(args.env_name)
short_name = args.env_name
elif ev.exists(env_name_or_dir) and not args.dir:
env_path = ev.root(env_name_or_dir)
short_name = env_name_or_dir
# Environment directory
elif ev.is_env_dir(args.env_name):
env_path = os.path.abspath(args.env_name)
elif ev.is_env_dir(env_name_or_dir):
env_path = os.path.abspath(env_name_or_dir)
short_name = os.path.basename(env_path)
# create the environment if the user requested it, then call env_activate again
elif args.create:
tty.set_msg_enabled(False)
env_create(args)
tty.set_msg_enabled(True)
env_activate(args)
return
else:
tty.die("No such environment: '%s'" % args.env_name)
tty.die("No such environment: '%s'" % env_name_or_dir)
env_prompt = "[%s]" % short_name
@@ -426,6 +290,97 @@ def env_deactivate(args):
sys.stdout.write(cmds)
#
# env create
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument("create_env", metavar="env", help="name of environment to create")
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)
subparser.add_argument(
"--keep-relative",
action="store_true",
help="copy relative develop paths verbatim into the new environment"
" when initializing from envfile",
)
view_opts = subparser.add_mutually_exclusive_group()
view_opts.add_argument(
"--without-view", action="store_true", help="do not maintain a view for this environment"
)
view_opts.add_argument(
"--with-view",
help="specify that this environment should maintain a view at the"
" specified path (by default the view is maintained in the"
" environment directory)",
)
subparser.add_argument(
"envfile",
nargs="?",
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)
def env_create(args):
if args.with_view:
# Expand relative paths provided on the command line to the current working directory
# This way we interpret `spack env create --with-view ./view --dir ./env` as
# a view in $PWD/view, not $PWD/env/view. This is different from specifying a relative
# path in the manifest, which is resolved relative to the manifest file's location.
with_view = os.path.abspath(args.with_view)
elif args.without_view:
with_view = False
else:
# Note that 'None' means unspecified, in which case the Environment
# object could choose to enable a view by default. False means that
# the environment should not include a view.
with_view = None
env = _env_create(
args.create_env,
init_file=args.envfile,
dir=args.dir,
with_view=with_view,
keep_relative=args.keep_relative,
)
# Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views()
def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
"""Create a new environment, with an optional yaml description.
Arguments:
name_or_path (str): name of the environment to create, or path to it
init_file (str or file): optional initialization file -- can be
a JSON lockfile (*.lock, *.json) or YAML manifest file
dir (bool): if True, create an environment in a directory instead
of a named environment
keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the
new environment is in a different location
"""
if not dir:
env = ev.create(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % (name_or_path))
return env
env = ev.create_in_dir(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment in %s" % env.path)
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % env.path)
return env
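The with_view handling in env_create above collapses the CLI flags into three states; a minimal sketch of that resolution (argument names mirror the options, everything else is illustrative):

import os

def resolve_view(with_view=None, without_view=False):
    """Return a view path, False for no view, or None for 'let the Environment decide'."""
    if with_view:
        return os.path.abspath(with_view)  # relative paths are relative to $PWD, not the env dir
    if without_view:
        return False
    return None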
#
# env remove
#

View File

@@ -18,14 +18,7 @@
def setup_parser(subparser):
setup_parser.parser = subparser
subparser.epilog = """
Outside of an environment, the command concretizes specs and graphs them, unless the
--installed option is given. In that case specs are matched from the current DB.
If an environment is active, specs are matched from the currently available concrete specs
in the lockfile.
"""
method = subparser.add_mutually_exclusive_group()
method.add_argument(
"-a", "--ascii", action="store_true", help="draw graph as ascii to stdout (default)"
@@ -48,40 +41,39 @@ def setup_parser(subparser):
)
subparser.add_argument(
"-i", "--installed", action="store_true", help="graph specs from the DB"
"-i",
"--installed",
action="store_true",
help="graph installed specs, or specs in the active env (implies --dot)",
)
arguments.add_common_arguments(subparser, ["deptype", "specs"])
def graph(parser, args):
env = ev.active_environment()
if args.installed and env:
tty.die("cannot use --installed with an active environment")
if args.installed and args.specs:
tty.die("cannot specify specs with --installed")
if args.color and not args.dot:
tty.die("the --color option can be used only with --dot")
if args.installed:
if not args.specs:
specs = spack.store.STORE.db.query()
args.dot = True
env = ev.active_environment()
if env:
specs = env.concrete_roots()
else:
result = []
for item in args.specs:
result.extend(spack.store.STORE.db.query(item))
specs = list(set(result))
elif env:
specs = env.concrete_roots()
if args.specs:
specs = env.all_matching_specs(*args.specs)
specs = spack.store.STORE.db.query()
else:
specs = spack.cmd.parse_specs(args.specs, concretize=not args.static)
if not specs:
tty.die("no spec matching the query")
setup_parser.parser.print_help()
return 1
if args.static:
args.dot = True
static_graph_dot(specs, depflag=args.deptype)
return
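A simplified precedence sketch of the spec selection in graph() above; callables stand in for Spack's database, environment, and parser, and the real command additionally rejects --installed inside an active environment:

def select_specs(installed, cli_specs, active_env, db_query, env_matching, parse):
    if installed:
        return db_query(cli_specs)      # match from the installation database
    if active_env:
        return env_matching(cli_specs)  # match concrete specs from the lockfile
    return parse(cli_specs)             # otherwise concretize whatever was given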

View File

@@ -290,11 +290,11 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
def _dump_log_on_error(e: spack.build_environment.InstallError):
e.print_context()
assert e.pkg, "Expected InstallError to include the associated package"
if not os.path.exists(e.pkg.log_path):
if not os.path.exists(e.pkg.build_log_path):
tty.error("'spack install' created no log.")
else:
sys.stderr.write("Full build log:\n")
with open(e.pkg.log_path, errors="replace") as log:
with open(e.pkg.build_log_path, errors="replace") as log:
shutil.copyfileobj(log, sys.stderr)

View File

@@ -292,11 +292,9 @@ def head(n, span_id, title, anchor=None):
out.write("<dd>\n")
out.write(
", ".join(
(
d
if d not in pkg_names
else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
)
d
if d not in pkg_names
else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
for d in deps
)
)

View File

@@ -1,71 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import gzip
import io
import os
import shutil
import sys
import spack.cmd
import spack.spec
import spack.util.compression as compression
from spack.cmd.common import arguments
from spack.main import SpackCommandError
description = "print out logs for packages"
section = "basic"
level = "long"
def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["spec"])
def _dump_byte_stream_to_stdout(instream: io.BufferedIOBase) -> None:
# Reopen stdout in binary mode so we don't have to worry about encoding
outstream = os.fdopen(sys.stdout.fileno(), "wb", closefd=False)
shutil.copyfileobj(instream, outstream)
def _logs(cmdline_spec: spack.spec.Spec, concrete_spec: spack.spec.Spec):
if concrete_spec.installed:
log_path = concrete_spec.package.install_log_path
elif os.path.exists(concrete_spec.package.stage.path):
# TODO: `spack logs` cannot currently show the logs while a package is being built, as the
# combined log file is only written after the build is finished.
log_path = concrete_spec.package.log_path
else:
raise SpackCommandError(f"{cmdline_spec} is not installed or staged")
try:
stream = open(log_path, "rb")
except OSError as e:
if e.errno == errno.ENOENT:
raise SpackCommandError(f"No logs are available for {cmdline_spec}") from e
raise SpackCommandError(f"Error reading logs for {cmdline_spec}: {e}") from e
with stream as f:
ext = compression.extension_from_magic_numbers_by_stream(f, decompress=False)
if ext and ext != "gz":
raise SpackCommandError(f"Unsupported storage format for {log_path}: {ext}")
# If the log file is gzip compressed, wrap it with a decompressor
_dump_byte_stream_to_stdout(gzip.GzipFile(fileobj=f) if ext == "gz" else f)
def logs(parser, args):
specs = spack.cmd.parse_specs(args.spec)
if not specs:
raise SpackCommandError("You must supply a spec.")
if len(specs) != 1:
raise SpackCommandError("Too many specs. Supply only one.")
concrete_spec = spack.cmd.matching_spec_from_env(specs[0])
_logs(specs[0], concrete_spec)
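The log streaming above works on raw bytes so compressed and plain logs share one code path. A self-contained sketch of the same idea, detecting gzip by magic number instead of using Spack's compression helper:

import gzip
import os
import shutil
import sys

def dump_possibly_gzipped(path: str) -> None:
    """Stream a (possibly gzip-compressed) log file to stdout as raw bytes."""
    with open(path, "rb") as f:
        is_gzip = f.read(2) == b"\x1f\x8b"  # gzip magic number
        f.seek(0)
        stream = gzip.GzipFile(fileobj=f) if is_gzip else f
        out = os.fdopen(sys.stdout.fileno(), "wb", closefd=False)
        shutil.copyfileobj(stream, out)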

View File

@@ -7,7 +7,6 @@
import re
import subprocess
import sys
import tempfile
from typing import Dict, List, Set
import spack.compiler
@@ -16,7 +15,7 @@
import spack.util.executable
from spack.compiler import Compiler
from spack.error import SpackError
from spack.version import Version, VersionRange
from spack.version import Version
avail_fc_version: Set[str] = set()
fc_path: Dict[str, str] = dict()
@@ -293,15 +292,6 @@ def setup_custom_environment(self, pkg, env):
else:
env.set_path(env_var, int_env[env_var].split(os.pathsep))
# certain versions of ifx (2021.3.0:2023.1.0) do not play well with env:TMP
# that has a "." character in the path
# Work around by pointing tmp to the stage for the duration of the build
if self.fc and Version(self.fc_version(self.fc)).satisfies(
VersionRange("2021.3.0", "2023.1.0")
):
new_tmp = tempfile.mkdtemp(dir=pkg.stage.path)
env.set("TMP", new_tmp)
env.set("CC", self.cc)
env.set("CXX", self.cxx)
env.set("FC", self.fc)
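For context, the removed workaround above only redirected TMP for the ifx releases affected by the "."-in-path issue. A hedged sketch of that version gating, using plain tuples instead of Spack's Version/VersionRange:

import tempfile

def maybe_redirect_tmp(fc_version: tuple, stage_path: str, env: dict) -> None:
    """Point TMP at a fresh dir inside the stage only for affected ifx versions."""
    if (2021, 3, 0) <= fc_version <= (2023, 1, 0):
        env["TMP"] = tempfile.mkdtemp(dir=stage_path)  # stage_path is assumed to exist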

View File

@@ -826,6 +826,7 @@ def __init__(self, spec):
class InsufficientArchitectureInfoError(spack.error.SpackError):
"""Raised when details on architecture cannot be collected from the
system"""

View File

@@ -63,11 +63,10 @@
from spack.util.cpus import cpus_available
#: Dict from section names -> schema for that section
SECTION_SCHEMAS: Dict[str, Any] = {
SECTION_SCHEMAS = {
"compilers": spack.schema.compilers.schema,
"concretizer": spack.schema.concretizer.schema,
"definitions": spack.schema.definitions.schema,
"view": spack.schema.view.schema,
"develop": spack.schema.develop.schema,
"mirrors": spack.schema.mirrors.schema,
"repos": spack.schema.repos.schema,
@@ -82,7 +81,7 @@
# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
_ALL_SCHEMAS: Dict[str, Any] = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
#: Path to the default configuration
@@ -639,6 +638,7 @@ def get(self, path: str, default: Optional[Any] = None, scope: Optional[str] = N
We use ``:`` as the separator, like YAML objects.
"""
# TODO: Currently only handles maps. Think about lists if needed.
parts = process_config_path(path)
section = parts.pop(0)
@@ -883,9 +883,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:
has_existing_value = True
path = ""
override = False
value = components[-1]
if not isinstance(value, syaml.syaml_str):
value = syaml.load_config(value)
value = syaml.load_config(components[-1])
for idx, name in enumerate(components[:-1]):
# First handle double colons in constructing path
colon = "::" if override else ":" if path else ""
@@ -907,7 +905,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:
# construct value from this point down
for component in reversed(components[idx + 1 : -1]):
value: Dict[str, str] = {component: value} # type: ignore[no-redef]
value = {component: value}
break
if override:
@@ -918,7 +916,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:
# append values to lists
if isinstance(existing, list) and not isinstance(value, list):
value: List[str] = [value] # type: ignore[no-redef]
value = [value]
# merge value into existing
new = merge_yaml(existing, value)
@@ -951,8 +949,7 @@ def scopes() -> Dict[str, ConfigScope]:
def writable_scopes() -> List[ConfigScope]:
"""
Return list of writable scopes. Higher-priority scopes come first in the
list.
Return list of writable scopes
"""
return list(
reversed(
@@ -1097,7 +1094,7 @@ def read_config_file(
data = syaml.load_config(f)
if data:
if schema is None:
if not schema:
key = next(iter(data))
schema = _ALL_SCHEMAS[key]
validate(data, schema)
@@ -1339,141 +1336,56 @@ def they_are(t):
return copy.copy(source)
class ConfigPath:
quoted_string = "(?:\"[^\"]+\")|(?:'[^']+')"
unquoted_string = "[^:'\"]+"
element = rf"(?:(?:{quoted_string})|(?:{unquoted_string}))"
next_key_pattern = rf"({element}[+-]?)(?:\:|$)"
@staticmethod
def _split_front(string, extract):
m = re.match(extract, string)
if not m:
return None, None
token = m.group(1)
return token, string[len(token) :]
@staticmethod
def _validate(path):
"""Example valid config paths:
x:y:z
x:"y":z
x:y+:z
x:y::z
x:y+::z
x:y:
x:y::
"""
first_key, path = ConfigPath._split_front(path, ConfigPath.next_key_pattern)
if not first_key:
raise ValueError(f"Config path does not start with a parse-able key: {path}")
path_elements = [first_key]
path_index = 1
while path:
separator, path = ConfigPath._split_front(path, r"(\:+)")
if not separator:
raise ValueError(f"Expected separator for {path}")
path_elements[path_index - 1] += separator
if not path:
break
element, remainder = ConfigPath._split_front(path, ConfigPath.next_key_pattern)
if not element:
# If we can't parse something as a key, then it must be a
# value (if it's valid).
try:
syaml.load_config(path)
except spack.util.spack_yaml.SpackYAMLError as e:
raise ValueError(
"Remainder of path is not a valid key"
f" and does not parse as a value {path}"
) from e
element = path
path = None # The rest of the path was consumed into the value
else:
path = remainder
path_elements.append(element)
path_index += 1
return path_elements
@staticmethod
def process(path):
result = []
quote = "['\"]"
seen_override_in_path = False
path_elements = ConfigPath._validate(path)
last_element_idx = len(path_elements) - 1
for i, element in enumerate(path_elements):
override = False
append = False
prepend = False
quoted = False
if element.endswith("::") or (element.endswith(":") and i == last_element_idx):
if seen_override_in_path:
raise syaml.SpackYAMLError(
"Meaningless second override indicator `::' in path `{0}'".format(path), ""
)
override = True
seen_override_in_path = True
element = element.rstrip(":")
if element.endswith("+"):
prepend = True
elif element.endswith("-"):
append = True
element = element.rstrip("+-")
if re.match(f"^{quote}", element):
quoted = True
element = element.strip("'\"")
if any([append, prepend, override, quoted]):
element = syaml.syaml_str(element)
if append:
element.append = True
if prepend:
element.prepend = True
if override:
element.override = True
result.append(element)
return result
def process_config_path(path: str) -> List[str]:
"""Process a path argument to config.set() that may contain overrides ('::' or
trailing ':')
Colons will be treated as static strings if inside of quotes,
e.g. `this:is:a:path:'value:with:colon'` will yield:
Note: quoted value path components will be processed as a single value (escaping colons);
quoted path components outside of the value will be considered ill-formed and will
raise.
e.g. `this:is:a:path:'value:with:colon'` will yield:
[this, is, a, path, value:with:colon]
The path may consist only of keys (e.g. for a `get`) or may end in a value.
Keys are always strings: if a user encloses a key in quotes, the quotes
should be removed. Values with quotes should be treated as strings,
but without quotes, may be parsed as a different yaml object (e.g.
'{}' is a dict, but '"{}"' is a string).
This function does not know whether the final element of the path is a
key or value, so:
* It must strip the quotes, in case it is a key (so we look for "key" and
not '"key"'))
* It must indicate somehow that the quotes were stripped, in case it is a
value (so that we don't process '"{}"' as a YAML dict)
Therefore, all elements with quotes are stripped, and then also converted
to ``syaml_str`` (if treating the final element as a value, the caller
should not parse it in this case).
[this, is, a, path, value:with:colon]
"""
return ConfigPath.process(path)
result = []
if path.startswith(":"):
raise syaml.SpackYAMLError(f"Illegal leading `:' in path `{path}'", "")
seen_override_in_path = False
while path:
front, sep, path = path.partition(":")
if (sep and not path) or path.startswith(":"):
if seen_override_in_path:
raise syaml.SpackYAMLError(
f"Meaningless second override indicator `::' in path `{path}'", ""
)
path = path.lstrip(":")
front = syaml.syaml_str(front)
front.override = True # type: ignore[attr-defined]
seen_override_in_path = True
elif front.endswith("+"):
front = front.rstrip("+")
front = syaml.syaml_str(front)
front.prepend = True # type: ignore[attr-defined]
elif front.endswith("-"):
front = front.rstrip("-")
front = syaml.syaml_str(front)
front.append = True # type: ignore[attr-defined]
result.append(front)
quote = "['\"]"
not_quote = "[^'\"]"
if re.match(f"^{quote}", path):
m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
if not m:
raise ValueError("Quotes indicate value, but there are additional path entries")
result.append(m.group(1))
break
return result
#
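A few hedged examples of the path syntax described above, assuming a Spack checkout on the import path; the exact return types differ between the two implementations shown in this diff, but the components and override marking are the documented behavior:

import spack.config as cfg

print(cfg.process_config_path("x:y:z"))    # -> components ['x', 'y', 'z']
parts = cfg.process_config_path("x:y::z")  # '::' marks an override on 'y'
print([str(p) for p in parts], getattr(parts[1], "override", False))
# A quoted trailing component stays a single element instead of being split on ':'
print(cfg.process_config_path("this:is:a:path:'value:with:colon'")[-1])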

View File

@@ -71,7 +71,7 @@
"almalinux:9": {
"bootstrap": {
"template": "container/almalinux_9.dockerfile",
"image": "quay.io/almalinuxorg/almalinux:9"
"image": "quay.io/almalinux/almalinux:9"
},
"os_package_manager": "dnf_epel",
"build": "spack/almalinux9",
@@ -79,13 +79,13 @@
"develop": "latest"
},
"final": {
"image": "quay.io/almalinuxorg/almalinux:9"
"image": "quay.io/almalinux/almalinux:9"
}
},
"almalinux:8": {
"bootstrap": {
"template": "container/almalinux_8.dockerfile",
"image": "quay.io/almalinuxorg/almalinux:8"
"image": "quay.io/almalinux/almalinux:8"
},
"os_package_manager": "dnf_epel",
"build": "spack/almalinux8",
@@ -93,7 +93,7 @@
"develop": "latest"
},
"final": {
"image": "quay.io/almalinuxorg/almalinux:8"
"image": "quay.io/almalinux/almalinux:8"
}
},
"centos:stream": {

View File

@@ -36,9 +36,6 @@
#: Default dependency type if none is specified
DEFAULT: DepFlag = BUILD | LINK
#: A flag with no dependency types set
NONE: DepFlag = 0
#: An iterator of all flag components
ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)

View File

@@ -21,6 +21,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util.lang import dedupe
from llnl.util.link_tree import ConflictingSpecsError
from llnl.util.symlink import symlink
@@ -82,30 +83,17 @@
lockfile_name = "spack.lock"
#: Name of the directory where environments store repos, logs, views, configs
#: Name of the directory where environments store repos, logs, views
env_subdir_name = ".spack-env"
def env_root_path() -> str:
def env_root_path():
"""Override default root path if the user specified it"""
return spack.util.path.canonicalize_path(
spack.config.get("config:environments_root", default=default_env_path)
)
def environment_name(path: Union[str, pathlib.Path]) -> str:
"""Human-readable representation of the environment.
This is the path for directory environments, and just the name
for managed environments.
"""
path_str = str(path)
if path_str.startswith(env_root_path()):
return os.path.basename(path_str)
else:
return path_str
def check_disallowed_env_config_mods(scopes):
for scope in scopes:
with spack.config.use_configuration(scope):
@@ -191,8 +179,9 @@ def validate_env_name(name):
def activate(env, use_env_repo=False):
"""Activate an environment.
To activate an environment, we add its manifest's configuration scope to the
existing Spack configuration, and we set active to the current environment.
To activate an environment, we add its configuration scope to the
existing Spack configuration, and we set active to the current
environment.
Arguments:
env (Environment): the environment to activate
@@ -209,7 +198,7 @@ def activate(env, use_env_repo=False):
# below.
install_tree_before = spack.config.get("config:install_tree")
upstreams_before = spack.config.get("upstreams")
env.manifest.prepare_config_scope()
prepare_config_scope(env)
install_tree_after = spack.config.get("config:install_tree")
upstreams_after = spack.config.get("upstreams")
if install_tree_before != install_tree_after or upstreams_before != upstreams_after:
@@ -237,7 +226,7 @@ def deactivate():
if hasattr(_active_environment, "store_token"):
spack.store.restore(_active_environment.store_token)
delattr(_active_environment, "store_token")
_active_environment.manifest.deactivate_config_scope()
deactivate_config_scope(_active_environment)
# use _repo so we only remove if a repo was actually constructed
if _active_environment._repo:
@@ -374,12 +363,12 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
to store the environment in a different directory, we have to rewrite
relative paths to absolute ones."""
with env:
dev_specs = spack.config.get("develop", default={}, scope=env.scope_name)
dev_specs = spack.config.get("develop", default={}, scope=env.env_file_config_scope_name())
if not dev_specs:
return
for name, entry in dev_specs.items():
dev_path = substitute_path_variables(entry["path"])
expanded_path = spack.util.path.canonicalize_path(dev_path, default_wd=init_file_dir)
dev_path = entry["path"]
expanded_path = os.path.normpath(os.path.join(init_file_dir, entry["path"]))
# Skip if the expanded path is the same (e.g. when absolute)
if dev_path == expanded_path:
@@ -389,7 +378,7 @@ def _rewrite_relative_dev_paths_on_relocation(env, init_file_dir):
dev_specs[name]["path"] = expanded_path
spack.config.set("develop", dev_specs, scope=env.scope_name)
spack.config.set("develop", dev_specs, scope=env.env_file_config_scope_name())
env._dev_specs = None
# If we changed the environment's spack.yaml scope, that will not be reflected
@@ -610,33 +599,39 @@ def content_hash(self, specs):
return spack.util.hash.b32_hash(contents)
def get_projection_for_spec(self, spec):
"""Get projection for spec. This function does not require the view
to exist on the filesystem."""
return self._view(self.root).get_projection_for_spec(spec)
"""Get projection for spec relative to view root
def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
Getting the projection from the underlying root will get the temporary
projection. This gives the permanent projection relative to the root
symlink.
"""
Returns a view object for the *underlying* view directory. This means that the
self.root symlink is followed, and that the view has to exist on the filesystem
(unless ``new``). This function is useful when writing to the view.
view = self.view()
view_path = view.get_projection_for_spec(spec)
rel_path = os.path.relpath(view_path, self._current_root)
return os.path.join(self.root, rel_path)
def view(self, new=None):
"""
Generate the FilesystemView object for this ViewDescriptor
By default, this method returns a FilesystemView object rooted at the
current underlying root of this ViewDescriptor (self._current_root)
Raise if new is None and there is no current view
Arguments:
new: If a string, create a FilesystemView rooted at that path. Default None. This
should only be used to regenerate the view, and cannot be used to access specs.
new (str or None): If a string, create a FilesystemView
rooted at that path. Default None. This should only be used to
regenerate the view, and cannot be used to access specs.
"""
root = new if new else self._current_root
if not root:
# This can only be hit if we write a future bug
raise SpackEnvironmentViewError(
msg = (
"Attempting to get nonexistent view from environment. "
f"View root is at {self.root}"
"View root is at %s" % self.root
)
return self._view(root)
def _view(self, root: str) -> SimpleFilesystemView:
"""Returns a view object for a given root dir."""
raise SpackEnvironmentViewError(msg)
return SimpleFilesystemView(
root,
spack.store.STORE.layout,
@@ -662,26 +657,30 @@ def __contains__(self, spec):
return True
def specs_for_view(self, concrete_roots: List[Spec]) -> List[Spec]:
"""Flatten the DAGs of the concrete roots, keep only unique, selected, and installed specs
in topological order from root to leaf."""
if self.link == "all":
deptype = dt.LINK | dt.RUN
elif self.link == "run":
deptype = dt.RUN
def specs_for_view(self, concretized_root_specs):
"""
From the list of concretized user specs in the environment, flatten
the dags, and filter selected, installed specs, remove duplicates on dag hash.
"""
# With deps, requires traversal
if self.link == "all" or self.link == "run":
deptype = ("run") if self.link == "run" else ("link", "run")
specs = list(
traverse.traverse_nodes(
concretized_root_specs, deptype=deptype, key=traverse.by_dag_hash
)
)
else:
deptype = dt.NONE
specs = traverse.traverse_nodes(
concrete_roots, order="topo", deptype=deptype, key=traverse.by_dag_hash
)
specs = list(dedupe(concretized_root_specs, key=traverse.by_dag_hash))
# Filter selected, installed specs
with spack.store.STORE.db.read_transaction():
return [s for s in specs if s in self and s.installed]
specs = [s for s in specs if s in self and s.installed]
def regenerate(self, concrete_roots: List[Spec]) -> None:
specs = self.specs_for_view(concrete_roots)
return specs
def regenerate(self, concretized_root_specs):
specs = self.specs_for_view(concretized_root_specs)
# To ensure there are no conflicts with packages being installed
# that cannot be resolved or have repos that have been removed
@@ -698,14 +697,14 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
old_root = self._current_root
if new_root == old_root:
tty.debug(f"View at {self.root} does not need regeneration.")
tty.debug("View at %s does not need regeneration." % self.root)
return
_error_on_nonempty_view_dir(new_root)
# construct view at new_root
if specs:
tty.msg(f"Updating view at {self.root}")
tty.msg("Updating view at {0}".format(self.root))
view = self.view(new=new_root)
@@ -715,7 +714,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
# Create a new view
try:
fs.mkdirp(new_root)
view.add_specs(*specs)
view.add_specs(*specs, with_dependencies=False)
# create symlink from tmp_symlink_name to new_root
if os.path.exists(tmp_symlink_name):
@@ -729,7 +728,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
try:
shutil.rmtree(new_root, ignore_errors=True)
os.unlink(tmp_symlink_name)
except OSError:
except (IOError, OSError):
pass
# Give an informative error message for the typical error case: two specs, same package
@@ -770,17 +769,6 @@ def _create_environment(path):
return Environment(path)
def env_subdir_path(manifest_dir: Union[str, pathlib.Path]) -> str:
"""Path to where the environment stores repos, logs, views, configs.
Args:
manifest_dir: directory containing the environment manifest file
Returns: directory the environment uses to manage its files
"""
return os.path.join(str(manifest_dir), env_subdir_name)
class Environment:
"""A Spack environment, which bundles together configuration and a list of specs."""
@@ -792,13 +780,12 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
manifest_dir: directory with the "spack.yaml" associated with the environment
"""
self.path = os.path.abspath(str(manifest_dir))
self.name = environment_name(self.path)
self.env_subdir_path = env_subdir_path(self.path)
self.txlock = lk.Lock(self._transaction_lock_path)
self._unify = None
self.new_specs: List[Spec] = []
self.new_installs: List[Spec] = []
self.views: Dict[str, ViewDescriptor] = {}
#: Specs from "spack.yaml"
@@ -815,15 +802,9 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
self._previous_active = None
self._dev_specs = None
# Load the manifest file contents into memory
self._load_manifest_file()
def _load_manifest_file(self):
"""Instantiate and load the manifest file contents into memory."""
with lk.ReadTransaction(self.txlock):
self.manifest = EnvironmentManifestFile(self.path)
with self.manifest.use_config():
self._read()
self.manifest = EnvironmentManifestFile(manifest_dir)
self._read()
@property
def unify(self):
@@ -841,10 +822,19 @@ def __reduce__(self):
def _re_read(self):
"""Reinitialize the environment object."""
self.clear(re_read=True)
self._load_manifest_file()
self.manifest = EnvironmentManifestFile(self.path)
self._read(re_read=True)
def _read(self):
self._construct_state_from_manifest()
def _read(self, re_read=False):
# If the manifest has included files, then some of the information
# (e.g., definitions) MAY be in those files. So we need to ensure
# the config is populated with any associated spec lists in order
# to fully construct the manifest state.
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
if includes and not re_read:
prepare_config_scope(self)
self._construct_state_from_manifest(re_read)
if os.path.exists(self.lock_path):
with open(self.lock_path) as f:
@@ -871,67 +861,38 @@ def _process_definition(self, item):
else:
self.spec_lists[name] = user_specs
def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
"""Process view option(s), which can be boolean, string, or None.
A boolean environment view option takes precedence over any that may
be included. So ``view: True`` results in the default view only. And
``view: False`` means the environment will have no view.
Args:
env_view: view option provided in the manifest or configuration
"""
def add_view(name, values):
"""Add the view with the name and the string or dict values."""
if isinstance(values, str):
self.views[name] = ViewDescriptor(self.path, values)
elif isinstance(values, dict):
self.views[name] = ViewDescriptor.from_dict(self.path, values)
else:
tty.error(f"Cannot add view named {name} for {type(values)} values {values}")
# If the configuration specifies 'view: False' then we are done
# processing views. If this is called with the environment's own view
# option (versus an included view), then the environment has NO views.
if env_view is False:
return
# If the configuration specifies 'view: True' then only the default
# view will be created for the environment and we are done processing
# views.
if env_view is True:
add_view(default_view_name, self.view_path_default)
return
# Otherwise, the configuration has a subdirectory or dictionary.
if isinstance(env_view, str):
add_view(default_view_name, env_view)
elif env_view:
for name, values in env_view.items():
add_view(name, values)
# If we reach this point without an explicit view option then we
# provide the default view.
if self.views == dict():
self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)
def _construct_state_from_manifest(self):
"""Set up user specs and views from the manifest file."""
def _construct_state_from_manifest(self, re_read=False):
"""Read manifest file and set up user specs."""
self.spec_lists = collections.OrderedDict()
self.views = {}
for item in spack.config.get("definitions", []):
self._process_definition(item)
if not re_read:
for item in spack.config.get("definitions", []):
self._process_definition(item)
env_configuration = self.manifest[TOP_LEVEL_KEY]
for item in env_configuration.get("definitions", []):
self._process_definition(item)
spec_list = env_configuration.get(user_speclist_name, [])
user_specs = SpecList(
user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
)
self.spec_lists[user_speclist_name] = user_specs
self._process_view(spack.config.get("view", True))
enable_view = env_configuration.get("view")
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(enable_view, str):
self.views = {default_view_name: ViewDescriptor(self.path, enable_view)}
elif enable_view:
path = self.path
self.views = dict(
(name, ViewDescriptor.from_dict(path, values))
for name, values in enable_view.items()
)
else:
self.views = {}
@property
def user_specs(self):
@@ -960,8 +921,10 @@ def clear(self, re_read=False):
"""Clear the contents of the environment
Arguments:
re_read: If ``True``, do not clear ``new_specs``. This value cannot be read from yaml,
and needs to be maintained when re-reading an existing environment.
re_read (bool): If True, do not clear ``new_specs`` nor
``new_installs`` values. These values cannot be read from
yaml, and need to be maintained when re-reading an existing
environment.
"""
self.spec_lists = collections.OrderedDict()
self.spec_lists[user_speclist_name] = SpecList()
@@ -975,6 +938,24 @@ def clear(self, re_read=False):
if not re_read:
# things that cannot be recreated from file
self.new_specs = [] # write packages for these on write()
self.new_installs = [] # write modules for these on write()
@property
def internal(self):
"""Whether this environment is managed by Spack."""
return self.path.startswith(env_root_path())
@property
def name(self):
"""Human-readable representation of the environment.
This is the path for directory environments, and just the name
for managed environments.
"""
if self.internal:
return os.path.basename(self.path)
else:
return self.path
@property
def active(self):
@@ -1003,9 +984,23 @@ def _lock_backup_v1_path(self):
"""Path to backup of v1 lockfile before conversion to v2"""
return self.lock_path + ".backup.v1"
@property
def env_subdir_path(self):
"""Path to directory where the env stores repos, logs, views."""
return os.path.join(self.path, env_subdir_name)
@property
def repos_path(self):
return os.path.join(self.env_subdir_path, "repos")
return os.path.join(self.path, env_subdir_name, "repos")
@property
def log_path(self):
return os.path.join(self.path, env_subdir_name, "logs")
@property
def config_stage_dir(self):
"""Directory for any staged configuration file(s)."""
return os.path.join(self.env_subdir_path, "config")
@property
def view_path_default(self):
@@ -1018,10 +1013,122 @@ def repo(self):
self._repo = make_repo_path(self.repos_path)
return self._repo
@property
def scope_name(self):
def included_config_scopes(self):
"""List of included configuration scopes from the environment.
Scopes are listed in the YAML file in order from highest to
lowest precedence, so configuration from earlier scope will take
precedence over later ones.
This routine returns them in the order they should be pushed onto
the internal scope stack (so, in reverse, from lowest to highest).
"""
scopes = []
# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
config_path = substitute_path_variables(config_path)
include_url = urllib.parse.urlparse(config_path)
# Transform file:// URLs to direct includes.
if include_url.scheme == "file":
config_path = urllib.request.url2pathname(include_url.path)
# Any other URL should be fetched.
elif include_url.scheme in ("http", "https", "ftp"):
# Stage any remote configuration file(s)
staged_configs = (
os.listdir(self.config_stage_dir)
if os.path.exists(self.config_stage_dir)
else []
)
remote_path = urllib.request.url2pathname(include_url.path)
basename = os.path.basename(remote_path)
if basename in staged_configs:
# Do NOT re-stage configuration files over existing
# ones with the same name since there is a risk of
# losing changes (e.g., from 'spack config update').
tty.warn(
"Will not re-stage configuration from {0} to avoid "
"losing changes to the already staged file of the "
"same name.".format(remote_path)
)
# Recognize the configuration stage directory
# is flattened to ensure a single copy of each
# configuration file.
config_path = self.config_stage_dir
if basename.endswith(".yaml"):
config_path = os.path.join(config_path, basename)
else:
staged_path = spack.config.fetch_remote_configs(
config_path, self.config_stage_dir, skip_existing=True
)
if not staged_path:
raise SpackEnvironmentError(
"Unable to fetch remote configuration {0}".format(config_path)
)
config_path = staged_path
elif include_url.scheme:
raise ValueError(
f"Unsupported URL scheme ({include_url.scheme}) for "
f"environment include: {config_path}"
)
# treat relative paths as relative to the environment
if not os.path.isabs(config_path):
config_path = os.path.join(self.path, config_path)
config_path = os.path.normpath(os.path.realpath(config_path))
if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
config_name = "env:%s:%s" % (self.name, os.path.basename(config_path))
tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
scope = spack.config.ConfigScope(config_name, config_path)
elif os.path.exists(config_path):
# files are assumed to be SingleFileScopes
config_name = "env:%s:%s" % (self.name, config_path)
tty.debug(
"Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
)
scope = spack.config.SingleFileScope(
config_name, config_path, spack.schema.merged.schema
)
else:
missing.append(config_path)
continue
scopes.append(scope)
if missing:
msg = "Detected {0} missing include path(s):".format(len(missing))
msg += "\n {0}".format("\n ".join(missing))
raise spack.config.ConfigFileError(msg)
return scopes
def env_file_config_scope_name(self):
"""Name of the config scope of this environment's manifest file."""
return self.manifest.scope_name
return "env:%s" % self.name
def env_file_config_scope(self):
"""Get the configuration scope for the environment's manifest file."""
config_name = self.env_file_config_scope_name()
return spack.config.SingleFileScope(
config_name, self.manifest_path, spack.schema.env.schema, [TOP_LEVEL_KEY]
)
def config_scopes(self):
"""A list of all configuration scopes for this environment."""
return check_disallowed_env_config_mods(
self.included_config_scopes() + [self.env_file_config_scope()]
)
def destroy(self):
"""Remove this environment from Spack entirely."""
@@ -1121,7 +1228,7 @@ def change_existing_spec(
for idx, spec in matches:
override_spec = Spec.override(spec, change_spec)
self.spec_lists[list_name].replace(idx, str(override_spec))
self.spec_lists[list_name].specs[idx] = override_spec
if list_name == user_speclist_name:
self.manifest.override_user_spec(str(override_spec), idx=idx)
else:
@@ -1129,6 +1236,7 @@ def change_existing_spec(
str(spec), override=str(override_spec), list_name=list_name
)
self.update_stale_references(from_list=list_name)
self._construct_state_from_manifest()
def remove(self, query_spec, list_name=user_speclist_name, force=False):
"""Remove specs from an environment that match a query_spec"""
@@ -1699,8 +1807,8 @@ def _add_concrete_spec(self, spec, concrete, new=True):
self.concretized_order.append(h)
self.specs_by_hash[h] = concrete
def _dev_specs_that_need_overwrite(self):
"""Return the hashes of all specs that need to be reinstalled due to source code change."""
def _get_overwrite_specs(self):
# Find all dev specs that were modified.
changed_dev_specs = [
s
for s in traverse.traverse_nodes(
@@ -1725,6 +1833,21 @@ def _dev_specs_that_need_overwrite(self):
if depth == 0 or spec.installed
]
def _install_log_links(self, spec):
if not spec.external:
# Make sure log directory exists
log_path = self.log_path
fs.mkdirp(log_path)
with fs.working_dir(self.path):
# Link the resulting log file into logs dir
build_log_link = os.path.join(
log_path, "%s-%s.log" % (spec.name, spec.dag_hash(7))
)
if os.path.lexists(build_log_link):
os.remove(build_log_link)
symlink(spec.package.build_log_path, build_log_link)
def _partition_roots_by_install_status(self):
"""Partition root specs into those that do not have to be passed to the
installer, and those that should be, taking into account development
@@ -1758,18 +1881,58 @@ def install_all(self, **install_args):
"""
self.install_specs(None, **install_args)
def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
roots = self.concrete_roots()
specs = specs if specs is not None else roots
def install_specs(self, specs=None, **install_args):
tty.debug("Assessing installation status of environment packages")
# If "spack install" is invoked repeatedly for a large environment
# where all specs are already installed, the operation can take
# a large amount of time due to repeatedly acquiring and releasing
# locks. As a small optimization, drop already installed root specs.
installed_roots, uninstalled_roots = self._partition_roots_by_install_status()
if specs:
specs_to_install = [s for s in specs if s not in installed_roots]
specs_dropped = [s for s in specs if s in installed_roots]
else:
specs_to_install = uninstalled_roots
specs_dropped = installed_roots
# Extend the set of specs to overwrite with modified dev specs and their parents
install_args["overwrite"] = (
install_args.get("overwrite", []) + self._dev_specs_that_need_overwrite()
)
# We need to repeat the work of the installer thanks to the above optimization:
# Already installed root specs should be marked explicitly installed in the
# database.
if specs_dropped:
with spack.store.STORE.db.write_transaction(): # do all in one transaction
for spec in specs_dropped:
spack.store.STORE.db.update_explicit(spec, True)
installs = [(spec.package, {**install_args, "explicit": spec in roots}) for spec in specs]
if not specs_to_install:
tty.msg("All of the packages are already installed")
else:
tty.debug("Processing {0} uninstalled specs".format(len(specs_to_install)))
PackageInstaller(installs).install()
specs_to_overwrite = self._get_overwrite_specs()
tty.debug("{0} specs need to be overwritten".format(len(specs_to_overwrite)))
install_args["overwrite"] = install_args.get("overwrite", []) + specs_to_overwrite
installs = []
for spec in specs_to_install:
pkg_install_args = install_args.copy()
pkg_install_args["explicit"] = spec in self.roots()
installs.append((spec.package, pkg_install_args))
try:
builder = PackageInstaller(installs)
builder.install()
finally:
# Ensure links are set appropriately
for spec in specs_to_install:
if spec.installed:
self.new_installs.append(spec)
try:
self._install_log_links(spec)
except OSError as e:
tty.warn(
"Could not install log links for {0}: {1}".format(spec.name, str(e))
)
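The optimization above boils down to partitioning requested roots by install status so already-installed roots are only re-marked explicit in the database rather than re-locked and re-processed. A minimal sketch of that partitioning, with a predicate standing in for the database check:

def plan_installs(requested, roots, is_installed):
    """Split specs into those handed to the installer and those only re-marked explicit."""
    targets = list(requested) if requested is not None else list(roots)
    to_install = [s for s in targets if not is_installed(s)]
    already_installed = [s for s in targets if is_installed(s)]
    return to_install, already_installed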
def all_specs_generator(self) -> Iterable[Spec]:
"""Returns a generator for all concrete specs"""
@@ -2092,7 +2255,11 @@ def write(self, regenerate: bool = True) -> None:
self.regenerate_views()
spack.hooks.post_env_write(self)
self.new_specs.clear()
self._reset_new_specs_and_installs()
def _reset_new_specs_and_installs(self) -> None:
self.new_specs = []
self.new_installs = []
def update_lockfile(self) -> None:
with fs.write_tmp_and_move(self.lock_path) as f:
@@ -2290,6 +2457,18 @@ def make_repo_path(root):
return path
def prepare_config_scope(env):
"""Add env's scope to the global configuration search path."""
for scope in env.config_scopes():
spack.config.CONFIG.push_scope(scope)
def deactivate_config_scope(env):
"""Remove the environment's scopes from the global config path."""
for scope in env.config_scopes():
spack.config.CONFIG.remove_scope(scope.name)
def manifest_file(env_name_or_dir):
"""Return the absolute path to a manifest file given the environment
name or directory.
@@ -2468,9 +2647,8 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
already existing in the directory.
Args:
manifest_dir: directory containing the manifest and lockfile
manifest_dir: directory where the lockfile is
"""
# TBD: Should this be the abspath?
manifest_dir = pathlib.Path(manifest_dir)
lockfile = manifest_dir / lockfile_name
with lockfile.open("r") as f:
@@ -2488,8 +2666,6 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
self.manifest_dir = pathlib.Path(manifest_dir)
self.manifest_file = self.manifest_dir / manifest_name
self.scope_name = f"env:{environment_name(self.manifest_dir)}"
self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")
if not self.manifest_file.exists():
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
@@ -2726,145 +2902,6 @@ def __iter__(self):
def __str__(self):
return str(self.manifest_file)
@property
def included_config_scopes(self) -> List[spack.config.ConfigScope]:
"""List of included configuration scopes from the manifest.
Scopes are listed in the YAML file in order from highest to
lowest precedence, so configuration from earlier scope will take
precedence over later ones.
This routine returns them in the order they should be pushed onto
the internal scope stack (so, in reverse, from lowest to highest).
Returns: Configuration scopes associated with the environment manifest
Raises:
SpackEnvironmentError: if the manifest includes a remote file but
no configuration stage directory has been identified
"""
scopes = []
# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
includes = self[TOP_LEVEL_KEY].get("include", [])
env_name = environment_name(self.manifest_dir)
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
config_path = substitute_path_variables(config_path)
include_url = urllib.parse.urlparse(config_path)
# Transform file:// URLs to direct includes.
if include_url.scheme == "file":
config_path = urllib.request.url2pathname(include_url.path)
# Any other URL should be fetched.
elif include_url.scheme in ("http", "https", "ftp"):
# Stage any remote configuration file(s)
staged_configs = (
os.listdir(self.config_stage_dir)
if os.path.exists(self.config_stage_dir)
else []
)
remote_path = urllib.request.url2pathname(include_url.path)
basename = os.path.basename(remote_path)
if basename in staged_configs:
# Do NOT re-stage configuration files over existing
# ones with the same name since there is a risk of
# losing changes (e.g., from 'spack config update').
tty.warn(
"Will not re-stage configuration from {0} to avoid "
"losing changes to the already staged file of the "
"same name.".format(remote_path)
)
# Recognize the configuration stage directory
# is flattened to ensure a single copy of each
# configuration file.
config_path = self.config_stage_dir
if basename.endswith(".yaml"):
config_path = os.path.join(config_path, basename)
else:
staged_path = spack.config.fetch_remote_configs(
config_path, str(self.config_stage_dir), skip_existing=True
)
if not staged_path:
raise SpackEnvironmentError(
"Unable to fetch remote configuration {0}".format(config_path)
)
config_path = staged_path
elif include_url.scheme:
raise ValueError(
f"Unsupported URL scheme ({include_url.scheme}) for "
f"environment include: {config_path}"
)
# treat relative paths as relative to the environment
if not os.path.isabs(config_path):
config_path = os.path.join(self.manifest_dir, config_path)
config_path = os.path.normpath(os.path.realpath(config_path))
if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
config_name = "env:%s:%s" % (env_name, os.path.basename(config_path))
tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
scope = spack.config.ConfigScope(config_name, config_path)
elif os.path.exists(config_path):
# files are assumed to be SingleFileScopes
config_name = "env:%s:%s" % (env_name, config_path)
tty.debug(
"Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
)
scope = spack.config.SingleFileScope(
config_name, config_path, spack.schema.merged.schema
)
else:
missing.append(config_path)
continue
scopes.append(scope)
if missing:
msg = "Detected {0} missing include path(s):".format(len(missing))
msg += "\n {0}".format("\n ".join(missing))
raise spack.config.ConfigFileError(msg)
return scopes
@property
def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""A list of all configuration scopes for the environment manifest.
Returns: All configuration scopes associated with the environment
"""
config_name = self.scope_name
env_scope = spack.config.SingleFileScope(
config_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
)
return check_disallowed_env_config_mods(self.included_config_scopes + [env_scope])
def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope)
def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
@contextlib.contextmanager
def use_config(self):
"""Ensure only the manifest's configuration scopes are global."""
with no_active_environment():
self.prepare_config_scope()
yield
self.deactivate_config_scope()
class SpackEnvironmentError(spack.error.SpackError):
"""Superclass for all errors to do with Spack environments."""


@@ -30,7 +30,6 @@
import shutil
import urllib.error
import urllib.parse
from pathlib import PurePath
from typing import List, Optional
import llnl.url
@@ -38,14 +37,13 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.string import comma_and, quote
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, working_dir
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
from llnl.util.symlink import symlink
import spack.config
import spack.error
import spack.oci.opener
import spack.url
import spack.util.archive
import spack.util.crypto as crypto
import spack.util.git
import spack.util.url as url_util
@@ -602,21 +600,29 @@ def expand(self):
tty.debug("Source fetched with %s is already expanded." % self.url_attr)
@_needs_stage
def archive(self, destination, *, exclude: Optional[str] = None):
def archive(self, destination, **kwargs):
assert llnl.url.extension_from_path(destination) == "tar.gz"
assert self.stage.source_path.startswith(self.stage.path)
# We need to prepend this dir name to every entry of the tarfile
top_level_dir = PurePath(self.stage.srcdir or os.path.basename(self.stage.source_path))
with working_dir(self.stage.source_path), spack.util.archive.gzip_compressed_tarfile(
destination
) as (tar, _, _):
spack.util.archive.reproducible_tarfile_from_prefix(
tar=tar,
prefix=".",
skip=lambda entry: entry.name == exclude,
path_to_name=lambda path: (top_level_dir / PurePath(path)).as_posix(),
)
tar = which("tar", required=True)
patterns = kwargs.get("exclude", None)
if patterns is not None:
if isinstance(patterns, str):
patterns = [patterns]
for p in patterns:
tar.add_default_arg("--exclude=%s" % p)
with working_dir(self.stage.path):
if self.stage.srcdir:
# Here we create an archive with the default repository name.
# The 'tar' command has options for changing the name of a
# directory that is included in the archive, but they differ
# based on OS, so we temporarily rename the repo
with temp_rename(self.stage.source_path, self.stage.srcdir):
tar("-czf", destination, self.stage.srcdir)
else:
tar("-czf", destination, os.path.basename(self.stage.source_path))
def __str__(self):
return "VCS: %s" % self.url
@@ -697,6 +703,7 @@ def __str__(self):
@fetcher
class GitFetchStrategy(VCSFetchStrategy):
"""
Fetch strategy that gets source code from a git repository.
Use like this in a package:
@@ -1088,6 +1095,7 @@ def __str__(self):
@fetcher
class SvnFetchStrategy(VCSFetchStrategy):
"""Fetch strategy that gets source code from a subversion repository.
Use like this in a package:
@@ -1182,6 +1190,7 @@ def __str__(self):
@fetcher
class HgFetchStrategy(VCSFetchStrategy):
"""
Fetch strategy that gets source code from a Mercurial repository.
Use like this in a package:


@@ -32,7 +32,6 @@
from llnl.util.tty.color import colorize
import spack.config
import spack.paths
import spack.projections
import spack.relocate
import spack.schema.projections
@@ -92,16 +91,16 @@ def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
prefix_to_projection[spack.store.STORE.layout.root] = view._root
# This is vestigial code for the *old* location of sbang.
prefix_to_projection[f"#!/bin/bash {spack.paths.spack_root}/bin/sbang"] = (
sbang.sbang_shebang_line()
)
prefix_to_projection[
"#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)
] = sbang.sbang_shebang_line()
spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
try:
os.chown(dst, src_stat.st_uid, src_stat.st_gid)
except OSError:
tty.debug(f"Can't change the permissions for {dst}")
tty.debug("Can't change the permissions for %s" % dst)
def view_func_parser(parsed_name):
@@ -113,7 +112,7 @@ def view_func_parser(parsed_name):
elif parsed_name in ("add", "symlink", "soft"):
return view_symlink
else:
raise ValueError(f"invalid link type for view: '{parsed_name}'")
raise ValueError("invalid link type for view: '%s'" % parsed_name)
def inverse_view_func_parser(view_type):
@@ -271,10 +270,9 @@ def __init__(self, root, layout, **kwargs):
# Ensure projections are the same from each source
# Read projections file from view
if self.projections != self.read_projections():
raise ConflictingProjectionsError(
f"View at {self._root} has projections file"
" which does not match projections passed manually."
)
msg = "View at %s has projections file" % self._root
msg += " which does not match projections passed manually."
raise ConflictingProjectionsError(msg)
self._croot = colorize_root(self._root) + " "
@@ -315,11 +313,11 @@ def add_specs(self, *specs, **kwargs):
def add_standalone(self, spec):
if spec.external:
tty.warn(f"{self._croot}Skipping external package: {colorize_spec(spec)}")
tty.warn(self._croot + "Skipping external package: %s" % colorize_spec(spec))
return True
if self.check_added(spec):
tty.warn(f"{self._croot}Skipping already linked package: {colorize_spec(spec)}")
tty.warn(self._croot + "Skipping already linked package: %s" % colorize_spec(spec))
return True
self.merge(spec)
@@ -327,7 +325,7 @@ def add_standalone(self, spec):
self.link_meta_folder(spec)
if self.verbose:
tty.info(f"{self._croot}Linked package: {colorize_spec(spec)}")
tty.info(self._croot + "Linked package: %s" % colorize_spec(spec))
return True
def merge(self, spec, ignore=None):
@@ -395,7 +393,7 @@ def needs_file(spec, file):
for file in files:
if not os.path.lexists(file):
tty.warn(f"Tried to remove {file} which does not exist")
tty.warn("Tried to remove %s which does not exist" % file)
continue
# remove if file is not owned by any other package in the view
@@ -406,7 +404,7 @@ def needs_file(spec, file):
# we are currently removing, as we remove files before unlinking the
# metadata directory.
if len([s for s in specs if needs_file(s, file)]) <= 1:
tty.debug(f"Removing file {file}")
tty.debug("Removing file " + file)
os.remove(file)
def check_added(self, spec):
@@ -479,14 +477,14 @@ def remove_standalone(self, spec):
Remove (unlink) a standalone package from this view.
"""
if not self.check_added(spec):
tty.warn(f"{self._croot}Skipping package not linked in view: {spec.name}")
tty.warn(self._croot + "Skipping package not linked in view: %s" % spec.name)
return
self.unmerge(spec)
self.unlink_meta_folder(spec)
if self.verbose:
tty.info(f"{self._croot}Removed package: {colorize_spec(spec)}")
tty.info(self._croot + "Removed package: %s" % colorize_spec(spec))
def get_projection_for_spec(self, spec):
"""
@@ -560,9 +558,9 @@ def print_conflict(self, spec_active, spec_specified, level="error"):
linked = tty.color.colorize(" (@gLinked@.)", color=color)
specified = tty.color.colorize("(@rSpecified@.)", color=color)
cprint(
f"{self._croot}Package conflict detected:\n"
f"{linked} {colorize_spec(spec_active)}\n"
f"{specified} {colorize_spec(spec_specified)}"
self._croot + "Package conflict detected:\n"
"%s %s\n" % (linked, colorize_spec(spec_active))
+ "%s %s" % (specified, colorize_spec(spec_specified))
)
def print_status(self, *specs, **kwargs):
@@ -574,14 +572,14 @@ def print_status(self, *specs, **kwargs):
for s, v in zip(specs, in_view):
if not v:
tty.error(f"{self._croot}Package not linked: {s.name}")
tty.error(self._croot + "Package not linked: %s" % s.name)
elif s != v:
self.print_conflict(v, s, level="warn")
in_view = list(filter(None, in_view))
if len(specs) > 0:
tty.msg(f"Packages linked in {self._croot[:-1]}:")
tty.msg("Packages linked in %s:" % self._croot[:-1])
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ("architecture", "compiler"))
@@ -591,19 +589,20 @@ def print_status(self, *specs, **kwargs):
if i > 0:
print()
header = (
f"{spack.spec.ARCHITECTURE_COLOR}{{{architecture}}} "
f"/ {spack.spec.COMPILER_COLOR}{{{compiler}}}"
header = "%s{%s} / %s{%s}" % (
spack.spec.ARCHITECTURE_COLOR,
architecture,
spack.spec.COMPILER_COLOR,
compiler,
)
tty.hline(colorize(header), char="-")
specs = index[(architecture, compiler)]
specs.sort()
abbreviated = [
s.cformat("{name}{@version}{%compiler}{compiler_flags}{variants}")
for s in specs
]
format_string = "{name}{@version}"
format_string += "{%compiler}{compiler_flags}{variants}"
abbreviated = [s.cformat(format_string) for s in specs]
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
@@ -635,19 +634,22 @@ def unlink_meta_folder(self, spec):
class SimpleFilesystemView(FilesystemView):
"""A simple and partial implementation of FilesystemView focused on performance and immutable
views, where specs cannot be removed after they were added."""
"""A simple and partial implementation of FilesystemView focused on
performance and immutable views, where specs cannot be removed after they
were added."""
def __init__(self, root, layout, **kwargs):
super().__init__(root, layout, **kwargs)
def _sanity_check_view_projection(self, specs):
"""A very common issue is that we end up with two specs of the same package, that project
to the same prefix. We want to catch that as early as possible and give a sensible error to
the user. Here we use the metadata dir (.spack) projection as a quick test to see whether
two specs in the view are going to clash. The metadata dir is used because it's always
added by Spack with identical files, so a guaranteed clash that's easily verified."""
seen = {}
"""A very common issue is that we end up with two specs of the same
package, that project to the same prefix. We want to catch that as
early as possible and give a sensible error to the user. Here we use
the metadata dir (.spack) projection as a quick test to see whether
two specs in the view are going to clash. The metadata dir is used
because it's always added by Spack with identical files, so a
guaranteed clash that's easily verified."""
seen = dict()
for current_spec in specs:
metadata_dir = self.relative_metadata_dir_for_spec(current_spec)
conflicting_spec = seen.get(metadata_dir)
@@ -655,8 +657,7 @@ def _sanity_check_view_projection(self, specs):
raise ConflictingSpecsError(current_spec, conflicting_spec)
seen[metadata_dir] = current_spec
def add_specs(self, *specs: spack.spec.Spec) -> None:
"""Link a root-to-leaf topologically ordered list of specs into the view."""
def add_specs(self, *specs, **kwargs):
assert all((s.concrete for s in specs))
if len(specs) == 0:
return
@@ -667,6 +668,9 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
tty.warn("Skipping external package: " + s.short_spec)
specs = [s for s in specs if not s.external]
if kwargs.get("exclude", None):
specs = set(filter_exclude(specs, kwargs["exclude"]))
self._sanity_check_view_projection(specs)
# Ignore spack meta data folder.
@@ -691,11 +695,13 @@ def skip_list(file):
# Inform about file-file conflicts.
if visitor.file_conflicts:
if self.ignore_conflicts:
tty.debug(f"{len(visitor.file_conflicts)} file conflicts")
tty.debug("{0} file conflicts".format(len(visitor.file_conflicts)))
else:
raise MergeConflictSummary(visitor.file_conflicts)
tty.debug(f"Creating {len(visitor.directories)} dirs and {len(visitor.files)} links")
tty.debug(
"Creating {0} dirs and {1} links".format(len(visitor.directories), len(visitor.files))
)
# Make the directory structure
for dst in visitor.directories:


@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from typing import BinaryIO, Optional, Tuple
from typing import IO, Optional, Tuple
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
@@ -18,7 +18,7 @@ def should_keep(path: bytes) -> bool:
return path.startswith(b"$") or (os.path.isabs(path) and os.path.lexists(path))
def _drop_redundant_rpaths(f: BinaryIO) -> Optional[Tuple[bytes, bytes]]:
def _drop_redundant_rpaths(f: IO) -> Optional[Tuple[bytes, bytes]]:
"""Drop redundant entries from rpath.
Args:


@@ -34,8 +34,21 @@ def _for_each_enabled(
def post_install(spec, explicit: bool):
import spack.environment as ev # break import cycle
if ev.active_environment():
# If installed through an environment, we skip post_install
# module generation and generate the modules on env_write so Spack
# can manage interactions between env views and modules
return
_for_each_enabled(spec, "write", explicit)
def post_uninstall(spec):
_for_each_enabled(spec, "remove")
def post_env_write(env):
for spec in env.new_installs:
_for_each_enabled(spec, "write")


@@ -229,8 +229,6 @@ def post_install(spec, explicit=None):
$spack_prefix/bin/sbang instead of something longer than the
shebang limit.
"""
if sys.platform == "win32":
return
if spec.external:
tty.debug("SKIP: shebang filtering [external package]")
return


@@ -36,7 +36,6 @@
import sys
import time
from collections import defaultdict
from gzip import GzipFile
from typing import Dict, Iterator, List, Optional, Set, Tuple
import llnl.util.filesystem as fs
@@ -639,12 +638,13 @@ def archive_install_logs(pkg: "spack.package_base.PackageBase", phase_log_dir: s
pkg: the package that was built and installed
phase_log_dir: path to the archive directory
"""
# Copy a compressed version of the install log
with open(pkg.log_path, "rb") as f, open(pkg.install_log_path, "wb") as g:
# Use GzipFile directly so we can omit filename / mtime in header
gzip_file = GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=g)
shutil.copyfileobj(f, gzip_file)
gzip_file.close()
# Archive the whole stdout + stderr for the package
fs.install(pkg.log_path, pkg.install_log_path)
# Archive all phase log paths
for phase_log in pkg.phase_log_files:
log_file = os.path.basename(phase_log)
fs.install(phase_log, os.path.join(phase_log_dir, log_file))
# Archive the install-phase test log, if present
pkg.archive_install_test_log()
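The GzipFile branch of this hunk compresses the log reproducibly: passing filename="" and mtime=0 keeps per-build metadata out of the gzip header, so identical logs yield identical archives. A minimal standalone sketch of that technique (the function name and paths are illustrative):

import gzip
import shutil


def gzip_reproducibly(src_path, dst_path):
    # GzipFile is used directly so the embedded filename and mtime can be
    # blanked out; gzip.open() on a path would record them implicitly.
    with open(src_path, "rb") as src, open(dst_path, "wb") as dst:
        gz = gzip.GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=dst)
        shutil.copyfileobj(src, gz)
        gz.close()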


@@ -43,7 +43,6 @@
import spack.build_environment
import spack.config
import spack.deptypes as dt
import spack.environment
import spack.error
import spack.modules.common
@@ -54,7 +53,6 @@
import spack.spec
import spack.store
import spack.tengine as tengine
import spack.user_environment
import spack.util.environment
import spack.util.file_permissions as fp
import spack.util.path
@@ -697,33 +695,28 @@ def environment_modifications(self):
)
spack.config.merge_yaml(
prefix_inspections,
spack.config.get(f"modules:{self.conf.name}:prefix_inspections", {}),
spack.config.get("modules:%s:prefix_inspections" % self.conf.name, {}),
)
use_view = spack.config.get(f"modules:{self.conf.name}:use_view", False)
assert isinstance(use_view, (bool, str))
use_view = spack.config.get("modules:%s:use_view" % self.conf.name, False)
spec = self.spec.copy() # defensive copy before setting prefix
if use_view:
if use_view is True:
use_view = spack.environment.default_view_name
env = spack.environment.active_environment()
if not env:
raise spack.environment.SpackEnvironmentViewError(
"Module generation with views requires active environment"
)
view_name = spack.environment.default_view_name if use_view is True else use_view
view = env.views[use_view]
if not env.has_view(view_name):
raise spack.environment.SpackEnvironmentViewError(
f"View {view_name} not found in environment {env.name} when generating modules"
)
view = env.views[view_name]
else:
view = None
spec.prefix = view.get_projection_for_spec(spec)
env = spack.util.environment.inspect_path(
self.spec.prefix, prefix_inspections, exclude=spack.util.environment.is_system_path
spec.prefix, prefix_inspections, exclude=spack.util.environment.is_system_path
)
# Let the extendee/dependency modify their extensions/dependencies
@@ -733,19 +726,13 @@ def environment_modifications(self):
# whole chain of setup_dependent_package has to be followed from leaf to spec.
# So: just run it here, but don't collect env mods.
spack.build_environment.SetupContext(
self.spec, context=Context.RUN
spec, context=Context.RUN
).set_all_package_py_globals()
# Then run setup_dependent_run_environment before setup_run_environment.
for dep in self.spec.dependencies(deptype=("link", "run")):
dep.package.setup_dependent_run_environment(env, self.spec)
self.spec.package.setup_run_environment(env)
# Project the environment variables from prefix to view if needed
if view and self.spec in view:
spack.user_environment.project_env_mods(
*self.spec.traverse(deptype=dt.LINK | dt.RUN), view=view, env=env
)
for dep in spec.dependencies(deptype=("link", "run")):
dep.package.setup_dependent_run_environment(env, spec)
spec.package.setup_run_environment(env)
# Modifications required from modules.yaml
env.extend(self.conf.env)
@@ -767,11 +754,11 @@ def environment_modifications(self):
msg = "some tokens cannot be expanded in an environment variable name"
_check_tokens_are_valid(x.name, message=msg)
# Transform them
x.name = self.spec.format(x.name, transform=transform)
x.name = spec.format(x.name, transform=transform)
if self.modification_needs_formatting(x):
try:
# Not every command has a value
x.value = self.spec.format(x.value)
x.value = spec.format(x.value)
except AttributeError:
pass
x.name = str(x.name).replace("-", "_")


@@ -134,7 +134,7 @@ def upload_blob(
return True
# Otherwise, do another PUT request.
spack.oci.opener.ensure_status(request, response, 202)
spack.oci.opener.ensure_status(response, 202)
assert "Location" in response.headers
# Can be absolute or relative, joining handles both
@@ -143,16 +143,19 @@ def upload_blob(
)
f.seek(0)
request = Request(
url=upload_url,
method="PUT",
data=f,
headers={"Content-Type": "application/octet-stream", "Content-Length": str(file_size)},
response = _urlopen(
Request(
url=upload_url,
method="PUT",
data=f,
headers={
"Content-Type": "application/octet-stream",
"Content-Length": str(file_size),
},
)
)
response = _urlopen(request)
spack.oci.opener.ensure_status(request, response, 201)
spack.oci.opener.ensure_status(response, 201)
# print elapsed time and # MB/s
_log_upload_progress(digest, file_size, time.time() - start)
@@ -186,16 +189,16 @@ def upload_manifest(
if not tag:
ref = ref.with_digest(digest)
request = Request(
url=ref.manifest_url(),
method="PUT",
data=data,
headers={"Content-Type": oci_manifest["mediaType"]},
response = _urlopen(
Request(
url=ref.manifest_url(),
method="PUT",
data=data,
headers={"Content-Type": oci_manifest["mediaType"]},
)
)
response = _urlopen(request)
spack.oci.opener.ensure_status(request, response, 201)
spack.oci.opener.ensure_status(response, 201)
return digest, size
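Both uploads above follow the same standard-library pattern: build a urllib Request with method="PUT", a payload, and explicit headers, then open it and check the status. A self-contained sketch of that pattern (the URL, payload, and function name are placeholders):

import urllib.request


def put_bytes(url, data, content_type="application/octet-stream"):
    # urllib performs a PUT when the Request is created with method="PUT";
    # Content-Length is set explicitly because the payload is raw bytes.
    request = urllib.request.Request(
        url=url,
        method="PUT",
        data=data,
        headers={"Content-Type": content_type, "Content-Length": str(len(data))},
    )
    with urllib.request.urlopen(request) as response:
        return response.status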


@@ -310,15 +310,19 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
# Login failed, avoid infinite recursion where we go back and
# forth between auth server and registry
if hasattr(req, "login_attempted"):
raise spack.util.web.DetailedHTTPError(
req, code, f"Failed to login: {msg}", headers, fp
raise urllib.error.HTTPError(
req.full_url, code, f"Failed to login to {req.full_url}: {msg}", headers, fp
)
# On 401 Unauthorized, parse the WWW-Authenticate header
# to determine what authentication is required
if "WWW-Authenticate" not in headers:
raise spack.util.web.DetailedHTTPError(
req, code, "Cannot login to registry, missing WWW-Authenticate header", headers, fp
raise urllib.error.HTTPError(
req.full_url,
code,
"Cannot login to registry, missing WWW-Authenticate header",
headers,
fp,
)
header_value = headers["WWW-Authenticate"]
@@ -326,8 +330,8 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
try:
challenge = get_bearer_challenge(parse_www_authenticate(header_value))
except ValueError as e:
raise spack.util.web.DetailedHTTPError(
req,
raise urllib.error.HTTPError(
req.full_url,
code,
f"Cannot login to registry, malformed WWW-Authenticate header: {header_value}",
headers,
@@ -336,8 +340,8 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
# If there is no bearer challenge, we can't handle it
if not challenge:
raise spack.util.web.DetailedHTTPError(
req,
raise urllib.error.HTTPError(
req.full_url,
code,
f"Cannot login to registry, unsupported authentication scheme: {header_value}",
headers,
@@ -352,8 +356,8 @@ def http_error_401(self, req: Request, fp, code, msg, headers):
timeout=req.timeout,
)
except ValueError as e:
raise spack.util.web.DetailedHTTPError(
req,
raise urllib.error.HTTPError(
req.full_url,
code,
f"Cannot login to registry, failed to obtain bearer token: {e}",
headers,
@@ -408,13 +412,13 @@ def create_opener():
return opener
def ensure_status(request: urllib.request.Request, response: HTTPResponse, status: int):
def ensure_status(response: HTTPResponse, status: int):
"""Raise an error if the response status is not the expected one."""
if response.status == status:
return
raise spack.util.web.DetailedHTTPError(
request, response.status, response.reason, response.info(), None
raise urllib.error.HTTPError(
response.geturl(), response.status, response.reason, response.info(), None
)
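The 401 handler above follows the usual registry token flow: the failed response carries a WWW-Authenticate header naming a realm, service, and scope, and the client fetches a bearer token from that realm before retrying. A rough sketch of parsing such a header (a hypothetical helper, not the parser used here):

import re


def parse_bearer_challenge(header_value):
    # Example input:
    #   Bearer realm="https://auth.example.com/token",service="registry",scope="repository:img:pull"
    # Returns {"realm": ..., "service": ..., "scope": ...}, or None if the
    # scheme is not Bearer.
    if not header_value.startswith("Bearer "):
        return None
    return dict(re.findall(r'(\w+)="([^"]*)"', header_value))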


@@ -24,7 +24,6 @@
import textwrap
import time
import traceback
import typing
import warnings
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
@@ -67,7 +66,7 @@
from spack.stage import DIYStage, ResourceStage, Stage, StageComposite, compute_stage_name
from spack.util.executable import ProcessError, which
from spack.util.package_hash import package_hash
from spack.version import GitVersion, StandardVersion
from spack.version import GitVersion, StandardVersion, Version
FLAG_HANDLER_RETURN_TYPE = Tuple[
Optional[Iterable[str]], Optional[Iterable[str]], Optional[Iterable[str]]
@@ -94,26 +93,29 @@
spack_times_log = "install_times.json"
def deprecated_version(pkg: "PackageBase", version: Union[str, StandardVersion]) -> bool:
"""Return True iff the version is deprecated.
def deprecated_version(pkg, version):
"""Return True if the version is deprecated, False otherwise.
Arguments:
pkg: The package whose version is to be checked.
version: The version being checked
pkg (PackageBase): The package whose version is to be checked.
version (str or spack.version.StandardVersion): The version being checked
"""
if not isinstance(version, StandardVersion):
version = StandardVersion.from_string(version)
version = Version(version)
details = pkg.versions.get(version)
return details is not None and details.get("deprecated", False)
for k, v in pkg.versions.items():
if version == k and v.get("deprecated", False):
return True
return False
def preferred_version(pkg: "PackageBase"):
def preferred_version(pkg):
"""
Returns a sorted list of the preferred versions of the package.
Arguments:
pkg: The package whose versions are to be assessed.
pkg (PackageBase): The package whose versions are to be assessed.
"""
# Here we sort first on the fact that a version is marked
# as preferred in the package, then on the fact that the
@@ -730,13 +732,13 @@ def dependencies_by_name(cls, when: bool = False):
@classmethod
def possible_dependencies(
cls,
transitive: bool = True,
expand_virtuals: bool = True,
transitive=True,
expand_virtuals=True,
depflag: dt.DepFlag = dt.ALL,
visited: Optional[dict] = None,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
visited=None,
missing=None,
virtuals=None,
):
"""Return dict of possible dependencies of this package.
Args:
@@ -900,16 +902,22 @@ def version(self):
@classmethod
@memoized
def version_urls(cls) -> Dict[StandardVersion, str]:
"""Dict of explicitly defined URLs for versions of this package.
def version_urls(cls):
"""OrderedDict of explicitly defined URLs for versions of this package.
Return:
A dict mapping version to url, ordered by version.
An OrderedDict (version -> URL) of different versions of this
package, sorted by version.
A version's URL only appears in the result if it has an explicitly defined ``url``
argument. So, this list may be empty if a package only defines ``url`` at the top level.
A version's URL only appears in the result if it has an
explicitly defined ``url`` argument. So, this list may be empty
if a package only defines ``url`` at the top level.
"""
return {v: args["url"] for v, args in sorted(cls.versions.items()) if "url" in args}
version_urls = collections.OrderedDict()
for v, args in sorted(cls.versions.items()):
if "url" in args:
version_urls[v] = args["url"]
return version_urls
def nearest_url(self, version):
"""Finds the URL with the "closest" version to ``version``.
@@ -952,39 +960,36 @@ def update_external_dependencies(self, extendee_spec=None):
"""
pass
def all_urls_for_version(self, version: StandardVersion) -> List[str]:
def all_urls_for_version(self, version):
"""Return all URLs derived from version_urls(), url, urls, and
list_url (if it contains a version) in a package in that order.
Args:
version: the version for which a URL is sought
version (spack.version.Version): the version for which a URL is sought
"""
uf = None
if type(self).url_for_version != PackageBase.url_for_version:
uf = self.url_for_version
return self._implement_all_urls_for_version(version, uf)
def _implement_all_urls_for_version(
self,
version: Union[str, StandardVersion],
custom_url_for_version: Optional[Callable[[StandardVersion], Optional[str]]] = None,
) -> List[str]:
version = StandardVersion.from_string(version) if isinstance(version, str) else version
def _implement_all_urls_for_version(self, version, custom_url_for_version=None):
if not isinstance(version, StandardVersion):
version = Version(version)
urls: List[str] = []
urls = []
# If we have a specific URL for this version, don't extrapolate.
url = self.version_urls().get(version)
if url:
urls.append(url)
version_urls = self.version_urls()
if version in version_urls:
urls.append(version_urls[version])
# if there is a custom url_for_version, use it
if custom_url_for_version is not None:
u = custom_url_for_version(version)
if u is not None and u not in urls:
if u not in urls and u is not None:
urls.append(u)
def sub_and_add(u: Optional[str]) -> None:
def sub_and_add(u):
if u is None:
return
# skip the url if there is no version to replace
@@ -992,7 +997,9 @@ def sub_and_add(u: Optional[str]) -> None:
spack.url.parse_version(u)
except spack.url.UndetectableVersionError:
return
urls.append(spack.url.substitute_version(u, self.url_version(version)))
nu = spack.url.substitute_version(u, self.url_version(version))
urls.append(nu)
# If no specific URL, use the default, class-level URL
sub_and_add(getattr(self, "url", None))
@@ -1123,7 +1130,13 @@ def stage(self, stage):
@property
def env_path(self):
"""Return the build environment file path associated with staging."""
return os.path.join(self.stage.path, _spack_build_envfile)
# Backward compatibility: Return the name of an existing log path;
# otherwise, return the current install env path name.
old_filename = os.path.join(self.stage.path, "spack-build.env")
if os.path.exists(old_filename):
return old_filename
else:
return os.path.join(self.stage.path, _spack_build_envfile)
@property
def env_mods_path(self):
@@ -1154,6 +1167,13 @@ def install_env_path(self):
@property
def log_path(self):
"""Return the build log file path associated with staging."""
# Backward compatibility: Return the name of an existing log path.
for filename in ["spack-build.out", "spack-build.txt"]:
old_log = os.path.join(self.stage.path, filename)
if os.path.exists(old_log):
return old_log
# Otherwise, return the current log path name.
return os.path.join(self.stage.path, _spack_build_logfile)
@property
@@ -1166,15 +1186,15 @@ def phase_log_files(self):
@property
def install_log_path(self):
"""Return the (compressed) build log file path on successful installation"""
"""Return the build log file path on successful installation."""
# Backward compatibility: Return the name of an existing install log.
for filename in [_spack_build_logfile, "build.out", "build.txt"]:
for filename in ["build.out", "build.txt"]:
old_log = os.path.join(self.metadata_dir, filename)
if os.path.exists(old_log):
return old_log
# Otherwise, return the current install log path name.
return os.path.join(self.metadata_dir, _spack_build_logfile + ".gz")
return os.path.join(self.metadata_dir, _spack_build_logfile)
@property
def configure_args_path(self):
@@ -1392,9 +1412,13 @@ def download_instr(self):
(str): default manual download instructions
"""
required = (
f"Manual download is required for {self.spec.name}. " if self.manual_download else ""
"Manual download is required for {0}. ".format(self.spec.name)
if self.manual_download
else ""
)
return "{0}Refer to {1} for download instructions.".format(
required, self.spec.package.homepage
)
return f"{required}Refer to {self.homepage} for download instructions."
def do_fetch(self, mirror_only=False):
"""
@@ -2067,6 +2091,15 @@ def unit_test_check(self):
"""
return True
@property
def build_log_path(self):
"""
Return the expected (or current) build log file path. The path points
to the staging build file until the software is successfully installed,
after which it points to the file in the installation directory.
"""
return self.install_log_path if self.spec.installed else self.log_path
@classmethod
def inject_flags(cls: Type[Pb], name: str, flags: Iterable[str]) -> FLAG_HANDLER_RETURN_TYPE:
"""
@@ -2350,14 +2383,15 @@ def format_doc(cls, **kwargs):
return results.getvalue()
@property
def all_urls(self) -> List[str]:
def all_urls(self):
"""A list of all URLs in a package.
Check both class-level and version-specific URLs.
Returns a list of URLs
Returns:
list: a list of URLs
"""
urls: List[str] = []
urls = []
if hasattr(self, "url") and self.url:
urls.append(self.url)
@@ -2370,9 +2404,7 @@ def all_urls(self) -> List[str]:
urls.append(args["url"])
return urls
def fetch_remote_versions(
self, concurrency: Optional[int] = None
) -> Dict[StandardVersion, str]:
def fetch_remote_versions(self, concurrency=None):
"""Find remote versions of this package.
Uses ``list_url`` and any other URLs listed in the package file.
@@ -2461,21 +2493,14 @@ def flatten_dependencies(spec, flat_dir):
dep_files.merge(flat_dir + "/" + name)
def possible_dependencies(
*pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
def possible_dependencies(*pkg_or_spec, **kwargs):
"""Get the possible dependencies of a number of packages.
See ``PackageBase.possible_dependencies`` for details.
"""
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
if isinstance(pos, PackageMeta):
packages.append(pos)
continue
@@ -2488,16 +2513,9 @@ def possible_dependencies(
else:
packages.append(pos.package_class)
visited: Dict[str, Set[str]] = {}
visited = {}
for pkg in packages:
pkg.possible_dependencies(
visited=visited,
transitive=transitive,
expand_virtuals=expand_virtuals,
depflag=depflag,
missing=missing,
virtuals=virtuals,
)
pkg.possible_dependencies(visited=visited, **kwargs)
return visited
@@ -2545,7 +2563,3 @@ class DependencyConflictError(spack.error.SpackError):
def __init__(self, conflict):
super().__init__("%s conflicts with another file in the flattened directory." % (conflict))
class ManualDownloadRequiredError(InvalidPackageOpError):
"""Raised when attempting an invalid operation on a package that requires a manual download."""


@@ -7,7 +7,6 @@
import os
import re
from collections import OrderedDict
from typing import List, Optional
import macholib.mach_o
import macholib.MachO
@@ -48,7 +47,7 @@ def __init__(self, file_path, root_path):
@memoized
def _patchelf() -> Optional[executable.Executable]:
def _patchelf():
"""Return the full path to the patchelf binary, if available, else None."""
import spack.bootstrap
@@ -56,7 +55,9 @@ def _patchelf() -> Optional[executable.Executable]:
return None
with spack.bootstrap.ensure_bootstrap_configuration():
return spack.bootstrap.ensure_patchelf_in_path_or_raise()
patchelf = spack.bootstrap.ensure_patchelf_in_path_or_raise()
return patchelf.path
def _elf_rpaths_for(path):
@@ -339,34 +340,31 @@ def macholib_get_paths(cur_path):
return (rpaths, deps, ident)
def _set_elf_rpaths_and_interpreter(
target: str, rpaths: List[str], interpreter: Optional[str] = None
) -> Optional[str]:
"""Replace the original RPATH of the target with the paths passed as arguments.
def _set_elf_rpaths(target, rpaths):
"""Replace the original RPATH of the target with the paths passed
as arguments.
Args:
target: target executable. Must be an ELF object.
rpaths: paths to be set in the RPATH
interpreter: optionally set the interpreter
Returns:
A string concatenating the stdout and stderr of the call to ``patchelf`` if it was invoked
A string concatenating the stdout and stderr of the call
to ``patchelf`` if it was invoked
"""
# Join the paths using ':' as a separator
rpaths_str = ":".join(rpaths)
patchelf, output = executable.Executable(_patchelf()), None
try:
# TODO: error handling is not great here?
# TODO: revisit the use of --force-rpath as it might be conditional
# TODO: if we want to support setting RUNPATH from binary packages
args = ["--force-rpath", "--set-rpath", rpaths_str]
if interpreter:
args.extend(["--set-interpreter", interpreter])
args.append(target)
return _patchelf()(*args, output=str, error=str)
patchelf_args = ["--force-rpath", "--set-rpath", rpaths_str, target]
output = patchelf(*patchelf_args, output=str, error=str)
except executable.ProcessError as e:
tty.warn(str(e))
return None
msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
tty.warn(msg.format(target, e))
return output
def needs_binary_relocation(m_type, m_subtype):
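Outside of Spack's Executable wrapper, the patchelf call shown in this hunk can be reproduced with the standard library. A minimal illustration (the function name and error handling are assumptions):

import subprocess


def set_rpath_with_patchelf(target, rpaths, patchelf="patchelf"):
    # Join the paths with ':' and rewrite the RPATH of the ELF object in
    # place, mirroring the --force-rpath --set-rpath invocation above.
    subprocess.run(
        [patchelf, "--force-rpath", "--set-rpath", ":".join(rpaths), target],
        check=True,
        capture_output=True,
        text=True,
    )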
@@ -503,12 +501,10 @@ def new_relocate_elf_binaries(binaries, prefix_to_prefix):
for path in binaries:
try:
elf.substitute_rpath_and_pt_interp_in_place_or_raise(path, prefix_to_prefix)
except elf.ElfCStringUpdatesFailed as e:
# Fall back to `patchelf --set-rpath ... --set-interpreter ...`
rpaths = e.rpath.new_value.decode("utf-8").split(":") if e.rpath else []
interpreter = e.pt_interp.new_value.decode("utf-8") if e.pt_interp else None
_set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
elf.replace_rpath_in_place_or_raise(path, prefix_to_prefix)
except elf.ElfDynamicSectionUpdateFailed as e:
# Fall back to the old `patchelf --set-rpath` method.
_set_elf_rpaths(path, e.new.decode("utf-8").split(":"))
def relocate_elf_binaries(
@@ -550,10 +546,10 @@ def relocate_elf_binaries(
new_rpaths = _make_relative(new_binary, new_root, new_norm_rpaths)
# check to see if relative rpaths are changed before rewriting
if sorted(new_rpaths) != sorted(orig_rpaths):
_set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
_set_elf_rpaths(new_binary, new_rpaths)
else:
new_rpaths = _transform_rpaths(orig_rpaths, orig_root, new_prefixes)
_set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
_set_elf_rpaths(new_binary, new_rpaths)
def make_link_relative(new_links, orig_links):
@@ -600,7 +596,7 @@ def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root):
orig_rpaths = _elf_rpaths_for(new_binary)
if orig_rpaths:
new_rpaths = _make_relative(orig_binary, orig_layout_root, orig_rpaths)
_set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
_set_elf_rpaths(new_binary, new_rpaths)
def warn_if_link_cant_be_relocated(link, target):


@@ -490,7 +490,7 @@ def read(self, stream):
self.index = spack.tag.TagIndex.from_json(stream, self.repository)
def update(self, pkg_fullname):
self.index.update_package(pkg_fullname.split(".")[-1])
self.index.update_package(pkg_fullname)
def write(self, stream):
self.index.to_json(stream)


@@ -6,7 +6,6 @@
import collections
import contextlib
import functools
import gzip
import os
import time
import traceback
@@ -191,13 +190,9 @@ def on_success(self, pkg, kwargs, package_record):
def fetch_log(self, pkg):
try:
if os.path.exists(pkg.install_log_path):
stream = gzip.open(pkg.install_log_path, "rt")
else:
stream = open(pkg.log_path)
with stream as f:
return f.read()
except OSError:
with open(pkg.build_log_path, "r", encoding="utf-8") as stream:
return "".join(stream.readlines())
except Exception:
return f"Cannot open log for {pkg.spec.cshort_spec}"
def extract_package_from_signature(self, instance, *args, **kwargs):


@@ -3,17 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for bootstrap.yaml configuration file."""
from typing import Any, Dict
#: Schema of a single source
_source_schema: Dict[str, Any] = {
_source_schema = {
"type": "object",
"properties": {"name": {"type": "string"}, "metadata": {"type": "string"}},
"additionalProperties": False,
"required": ["name", "metadata"],
}
properties: Dict[str, Any] = {
properties = {
"bootstrap": {
"type": "object",
"properties": {


@@ -6,31 +6,27 @@
"""Schema for a buildcache spec.yaml file
.. literalinclude:: _spack_root/lib/spack/spack/schema/buildcache_spec.py
:lines: 15-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema.spec
properties: Dict[str, Any] = {
# `buildinfo` is no longer needed as of Spack 0.21
"buildinfo": {"type": "object"},
"spec": {
"type": "object",
"additionalProperties": True,
"items": spack.schema.spec.properties,
},
"binary_cache_checksum": {
"type": "object",
"properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
},
"buildcache_layout_version": {"type": "number"},
}
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack buildcache specfile schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
"properties": {
# `buildinfo` is no longer needed as of Spack 0.21
"buildinfo": {"type": "object"},
"spec": {
"type": "object",
"additionalProperties": True,
"items": spack.schema.spec.properties,
},
"binary_cache_checksum": {
"type": "object",
"properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
},
"buildcache_layout_version": {"type": "number"},
},
}
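Since these schema modules are plain jsonschema documents, one quick way to exercise them is to validate a hand-written document. A small sketch, assuming the jsonschema package is importable and using a made-up minimal spec file:

import jsonschema

import spack.schema.buildcache_spec

# Minimal, made-up document; validate() raises ValidationError on mismatch.
document = {
    "binary_cache_checksum": {"hash_algorithm": "sha256", "hash": "0" * 64},
    "buildcache_layout_version": 1,
}
jsonschema.validate(document, spack.schema.buildcache_spec.schema)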


@@ -2,15 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for cdash.yaml configuration file.
.. literalinclude:: ../spack/schema/cdash.py
:lines: 13-
"""
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"cdash": {
"type": "object",
"additionalProperties": False,


@@ -2,12 +2,12 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for gitlab-ci.yaml configuration file.
.. literalinclude:: ../spack/schema/ci.py
:lines: 16-
:lines: 13-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
@@ -164,7 +164,7 @@
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"ci": {
"oneOf": [
# TODO: Replace with core-shared-properties in Spack 0.23


@@ -2,17 +2,16 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for compilers.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/compilers.py
:lines: 15-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema.environment
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"compilers": {
"type": "array",
"items": {


@@ -2,14 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for concretizer.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/concretizer.py
:lines: 12-
:lines: 13-
"""
from typing import Any, Dict
properties: Dict[str, Any] = {
properties = {
"concretizer": {
"type": "object",
"additionalProperties": False,


@@ -5,16 +5,15 @@
"""Schema for config.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/config.py
:lines: 17-
:lines: 13-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
import spack.schema.projections
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"config": {
"type": "object",
"default": {},


@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for the 'container' subsection of Spack environments."""
from typing import Any, Dict
_stages_from_dockerhub = {
"type": "object",
@@ -86,4 +85,4 @@
},
}
properties: Dict[str, Any] = {"container": container_schema}
properties = {"container": container_schema}


@@ -11,115 +11,112 @@
This does not specify a configuration - it is an input format
that is consumed and transformed into Spack DB records.
"""
from typing import Any, Dict
properties: Dict[str, Any] = {
"_meta": {
"type": "object",
"additionalProperties": False,
"properties": {
"file-type": {"type": "string", "minLength": 1},
"cpe-version": {"type": "string", "minLength": 1},
"system-type": {"type": "string", "minLength": 1},
"schema-version": {"type": "string", "minLength": 1},
# Older schemas did not have "cpe-version", just the
# schema version; in that case it was just called "version"
"version": {"type": "string", "minLength": 1},
},
},
"compilers": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
"prefix": {"type": "string", "minLength": 1},
"executables": {
"type": "object",
"additionalProperties": False,
"properties": {
"cc": {"type": "string", "minLength": 1},
"cxx": {"type": "string", "minLength": 1},
"fc": {"type": "string", "minLength": 1},
},
},
"arch": {
"type": "object",
"required": ["os", "target"],
"additionalProperties": False,
"properties": {
"os": {"type": "string", "minLength": 1},
"target": {"type": "string", "minLength": 1},
},
},
},
},
},
"specs": {
"type": "array",
"items": {
"type": "object",
"required": ["name", "version", "arch", "compiler", "prefix", "hash"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
"arch": {
"type": "object",
"required": ["platform", "platform_os", "target"],
"additionalProperties": False,
"properties": {
"platform": {"type": "string", "minLength": 1},
"platform_os": {"type": "string", "minLength": 1},
"target": {
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {"name": {"type": "string", "minLength": 1}},
},
},
},
"compiler": {
"type": "object",
"required": ["name", "version"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
},
},
"dependencies": {
"type": "object",
"patternProperties": {
"\\w[\\w-]*": {
"type": "object",
"required": ["hash"],
"additionalProperties": False,
"properties": {
"hash": {"type": "string", "minLength": 1},
"type": {
"type": "array",
"items": {"type": "string", "minLength": 1},
},
},
}
},
},
"prefix": {"type": "string", "minLength": 1},
"rpm": {"type": "string", "minLength": 1},
"hash": {"type": "string", "minLength": 1},
"parameters": {"type": "object"},
},
},
},
}
schema = {
"$schema": "http://json-schema.org/schema#",
"title": "CPE manifest schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
"properties": {
"_meta": {
"type": "object",
"additionalProperties": False,
"properties": {
"file-type": {"type": "string", "minLength": 1},
"cpe-version": {"type": "string", "minLength": 1},
"system-type": {"type": "string", "minLength": 1},
"schema-version": {"type": "string", "minLength": 1},
# Older schemas did not have "cpe-version", just the
# schema version; in that case it was just called "version"
"version": {"type": "string", "minLength": 1},
},
},
"compilers": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
"prefix": {"type": "string", "minLength": 1},
"executables": {
"type": "object",
"additionalProperties": False,
"properties": {
"cc": {"type": "string", "minLength": 1},
"cxx": {"type": "string", "minLength": 1},
"fc": {"type": "string", "minLength": 1},
},
},
"arch": {
"type": "object",
"required": ["os", "target"],
"additionalProperties": False,
"properties": {
"os": {"type": "string", "minLength": 1},
"target": {"type": "string", "minLength": 1},
},
},
},
},
},
"specs": {
"type": "array",
"items": {
"type": "object",
"required": ["name", "version", "arch", "compiler", "prefix", "hash"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
"arch": {
"type": "object",
"required": ["platform", "platform_os", "target"],
"additioanlProperties": False,
"properties": {
"platform": {"type": "string", "minLength": 1},
"platform_os": {"type": "string", "minLength": 1},
"target": {
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {"name": {"type": "string", "minLength": 1}},
},
},
},
"compiler": {
"type": "object",
"required": ["name", "version"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
"version": {"type": "string", "minLength": 1},
},
},
"dependencies": {
"type": "object",
"patternProperties": {
"\\w[\\w-]*": {
"type": "object",
"required": ["hash"],
"additionalProperties": False,
"properties": {
"hash": {"type": "string", "minLength": 1},
"type": {
"type": "array",
"items": {"type": "string", "minLength": 1},
},
},
}
},
},
"prefix": {"type": "string", "minLength": 1},
"rpm": {"type": "string", "minLength": 1},
"hash": {"type": "string", "minLength": 1},
"parameters": {"type": "object"},
},
},
},
},
}


@@ -6,41 +6,12 @@
"""Schema for database index.json file
.. literalinclude:: _spack_root/lib/spack/spack/schema/database_index.py
:lines: 17-
:lines: 36-
"""
from typing import Any, Dict
import spack.schema.spec
# spack.schema.spec.properties
properties: Dict[str, Any] = {
"database": {
"type": "object",
"required": ["installs", "version"],
"additionalProperties": False,
"properties": {
"installs": {
"type": "object",
"patternProperties": {
r"^[\w\d]{32}$": {
"type": "object",
"properties": {
"spec": spack.schema.spec.properties,
"path": {"oneOf": [{"type": "string"}, {"type": "null"}]},
"installed": {"type": "boolean"},
"ref_count": {"type": "integer", "minimum": 0},
"explicit": {"type": "boolean"},
"installation_time": {"type": "number"},
},
}
},
},
"version": {"type": "string"},
},
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
@@ -48,5 +19,30 @@
"type": "object",
"required": ["database"],
"additionalProperties": False,
"properties": properties,
"properties": {
"database": {
"type": "object",
"required": ["installs", "version"],
"additionalProperties": False,
"properties": {
"installs": {
"type": "object",
"patternProperties": {
r"^[\w\d]{32}$": {
"type": "object",
"properties": {
"spec": spack.schema.spec.properties,
"path": {"oneOf": [{"type": "string"}, {"type": "null"}]},
"installed": {"type": "boolean"},
"ref_count": {"type": "integer", "minimum": 0},
"explicit": {"type": "boolean"},
"installation_time": {"type": "number"},
},
}
},
},
"version": {"type": "string"},
},
}
},
}


@@ -6,14 +6,13 @@
"""Schema for definitions
.. literalinclude:: _spack_root/lib/spack/spack/schema/definitions.py
:lines: 16-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"definitions": {
"type": "array",
"default": [],


@@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Any, Dict
properties: Dict[str, Any] = {
properties = {
"develop": {
"type": "object",
"default": {},


@@ -6,10 +6,8 @@
"""Schema for env.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/env.py
:lines: 19-
:lines: 36-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
import spack.schema.gitlab_ci # DEPRECATED
@@ -21,31 +19,61 @@
projections_scheme = spack.schema.projections.properties["projections"]
properties: Dict[str, Any] = {
"spack": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": union_dicts(
# Include deprecated "gitlab-ci" section
spack.schema.gitlab_ci.properties,
# merged configuration scope schemas
spack.schema.merged.properties,
# extra environment schema properties
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spack.schema.spec_list_schema,
},
),
}
}
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack environment file schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
"properties": {
"spack": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": union_dicts(
# Include deprecated "gitlab-ci" section
spack.schema.gitlab_ci.properties,
# merged configuration scope schemas
spack.schema.merged.properties,
# extra environment schema properties
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spack.schema.spec_list_schema,
"view": {
"anyOf": [
{"type": "boolean"},
{"type": "string"},
{
"type": "object",
"patternProperties": {
r"\w+": {
"required": ["root"],
"additionalProperties": False,
"properties": {
"root": {"type": "string"},
"link": {
"type": "string",
"pattern": "(roots|all|run)",
},
"link_type": {"type": "string"},
"select": {
"type": "array",
"items": {"type": "string"},
},
"exclude": {
"type": "array",
"items": {"type": "string"},
},
"projections": projections_scheme,
},
}
},
},
]
},
},
),
}
},
}
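The "view" subschema above accepts a boolean, a string, or a mapping of named views. A hand-written (hypothetical) value satisfying the mapping form could look like:

# Hypothetical example of a value matching the "view" schema shown above.
view_setting = {
    "default": {
        "root": ".spack-env/view",
        "link": "run",  # must match the (roots|all|run) pattern
        "link_type": "symlink",
        "select": ["^mpi"],
        "exclude": ["+debug"],
        "projections": {"all": "{name}/{version}"},
    }
}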


@@ -6,7 +6,6 @@
schemas.
"""
import collections.abc
from typing import Any, Dict
array_of_strings_or_num = {
"type": "array",
@@ -19,7 +18,7 @@
"patternProperties": {r"\w[\w-]*": {"anyOf": [{"type": "string"}, {"type": "number"}]}},
}
definition: Dict[str, Any] = {
definition = {
"type": "object",
"default": {},
"additionalProperties": False,


@@ -6,9 +6,8 @@
"""Schema for gitlab-ci.yaml configuration file.
.. literalinclude:: ../spack/schema/gitlab_ci.py
:lines: 15-
:lines: 13-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
@@ -36,7 +35,7 @@
runner_selector_schema = {
"type": "object",
"additionalProperties": True,
"additionalProperties": False,
"required": ["tags"],
"properties": runner_attributes_schema_items,
}
@@ -113,7 +112,7 @@
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {"gitlab-ci": gitlab_ci_properties}
properties = {"gitlab-ci": gitlab_ci_properties}
#: Full schema with metadata
schema = {


@@ -6,10 +6,8 @@
"""Schema for configuration merged into one file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/merged.py
:lines: 32-
:lines: 39-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
import spack.schema.bootstrap
@@ -26,10 +24,9 @@
import spack.schema.packages
import spack.schema.repos
import spack.schema.upstreams
import spack.schema.view
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = union_dicts(
properties = union_dicts(
spack.schema.bootstrap.properties,
spack.schema.cdash.properties,
spack.schema.compilers.properties,
@@ -44,7 +41,6 @@
spack.schema.packages.properties,
spack.schema.repos.properties,
spack.schema.upstreams.properties,
spack.schema.view.properties,
)


@@ -6,9 +6,8 @@
"""Schema for mirrors.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/mirrors.py
:lines: 13-
:lines: 12-69
"""
from typing import Any, Dict
#: Common properties for connection specification
connection = {
@@ -51,7 +50,7 @@
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"mirrors": {
"type": "object",
"default": {},


@@ -6,10 +6,8 @@
"""Schema for modules.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/modules.py
:lines: 16-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema.environment
import spack.schema.projections
@@ -143,7 +141,7 @@
# Properties for inclusion into other schemas (requires definitions)
properties: Dict[str, Any] = {
properties = {
"modules": {
"type": "object",
"additionalProperties": False,


@@ -5,10 +5,8 @@
"""Schema for packages.yaml configuration files.
.. literalinclude:: _spack_root/lib/spack/spack/schema/packages.py
:lines: 14-
:lines: 13-
"""
from typing import Any, Dict
import spack.schema.environment
permissions = {
@@ -56,24 +54,6 @@
]
}
prefer_and_conflict = {
"type": "array",
"items": {
"oneOf": [
{
"type": "object",
"additionalProperties": False,
"properties": {
"spec": {"type": "string"},
"message": {"type": "string"},
"when": {"type": "string"},
},
},
{"type": "string"},
]
},
}
permissions = {
"type": "object",
"additionalProperties": False,
@@ -93,7 +73,7 @@
REQUIREMENT_URL = "https://spack.readthedocs.io/en/latest/packages_yaml.html#package-requirements"
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"packages": {
"type": "object",
"default": {},
@@ -105,8 +85,6 @@
"additionalProperties": False,
"properties": {
"require": requirements,
"prefer": prefer_and_conflict,
"conflict": prefer_and_conflict,
"version": {}, # Here only to warn users on ignored properties
"target": {
"type": "array",
@@ -155,8 +133,6 @@
"additionalProperties": False,
"properties": {
"require": requirements,
"prefer": prefer_and_conflict,
"conflict": prefer_and_conflict,
"version": {
"type": "array",
"default": [],
@@ -210,6 +186,7 @@
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",


@@ -6,12 +6,12 @@
"""Schema for projections.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/projections.py
:lines: 14-
:lines: 13-
"""
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"projections": {"type": "object", "patternProperties": {r"all|\w[\w-]*": {"type": "string"}}}
}


@@ -6,14 +6,12 @@
"""Schema for repos.yaml configuration file.
.. literalinclude:: _spack_root/lib/spack/spack/schema/repos.py
:lines: 14-
:lines: 13-
"""
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"repos": {"type": "array", "default": [], "items": {"type": "string"}}
}
properties = {"repos": {"type": "array", "default": [], "items": {"type": "string"}}}
#: Full schema with metadata


@@ -1,46 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for spack environment
.. literalinclude:: _spack_root/lib/spack/spack/schema/spack.py
:lines: 20-
"""
from typing import Any, Dict
from llnl.util.lang import union_dicts
import spack.schema
import spack.schema.gitlab_ci as ci_schema # DEPRECATED
import spack.schema.merged as merged_schema
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"spack": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": union_dicts(
# Include deprecated "gitlab-ci" section
ci_schema.properties,
# merged configuration scope schemas
merged_schema.properties,
# extra environment schema properties
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spack.schema.spec_list_schema,
},
),
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack environment file schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
}

View File

@@ -8,9 +8,9 @@
TODO: This needs to be updated? Especially the hashes under properties.
.. literalinclude:: _spack_root/lib/spack/spack/schema/spec.py
:lines: 15-
:lines: 13-
"""
from typing import Any, Dict
target = {
"oneOf": [
@@ -57,7 +57,7 @@
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"spec": {
"type": "object",
"additionalProperties": False,

View File

@@ -2,10 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Any, Dict
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
properties = {
"upstreams": {
"type": "object",
"default": {},

View File

@@ -1,49 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for view
.. literalinclude:: _spack_root/lib/spack/spack/schema/view.py
:lines: 15-
"""
from typing import Any, Dict
import spack.schema
projections_scheme = spack.schema.projections.properties["projections"]
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"view": {
"anyOf": [
{"type": "boolean"},
{"type": "string"},
{
"type": "object",
"patternProperties": {
r"\w+": {
"required": ["root"],
"additionalProperties": False,
"properties": {
"root": {"type": "string"},
"link": {"type": "string", "pattern": "(roots|all|run)"},
"link_type": {"type": "string"},
"select": {"type": "array", "items": {"type": "string"}},
"exclude": {"type": "array", "items": {"type": "string"}},
"projections": projections_scheme,
},
}
},
},
]
}
}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack view configuration file schema",
"properties": properties,
}

File diff suppressed because it is too large

View File

@@ -45,9 +45,6 @@
:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "link"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("link dependency out of the root unification set").
:- attr("depends_on", node(min_dupe_id, Package), node(ID, _), "run"), ID != min_dupe_id, unification_set("root", node(min_dupe_id, Package)), internal_error("run dependency out of the root unification set").
% Namespaces are statically assigned by a package fact
attr("namespace", node(ID, Package), Namespace) :- attr("node", node(ID, Package)), pkg_fact(Package, namespace(Namespace)).
% Rules on "unification sets", i.e. on sets of nodes allowing a single configuration of any given package
unify(SetID, PackageName) :- unification_set(SetID, node(_, PackageName)).
:- 2 { unification_set(SetID, node(_, PackageName)) }, unify(SetID, PackageName).
@@ -698,18 +695,6 @@ requirement_group_satisfied(node(ID, Package), X) :-
activate_requirement(node(ID, Package), X),
requirement_group(Package, X).
% When we have a required provider, we need to ensure that the provider/2 facts respect
% the requirement. This is particularly important for packages that could provide multiple
% virtuals independently
required_provider(Provider, Virtual)
:- requirement_group_member(ConditionID, Virtual, RequirementID),
condition_holds(ConditionID, _),
virtual(Virtual),
pkg_fact(Virtual, condition_effect(ConditionID, EffectID)),
imposed_constraint(EffectID, "node", Provider).
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider.
% TODO: the following two choice rules allow the solver to add compiler
% flags if their only source is from a requirement. This is overly-specific
% and should use a more-generic approach like in https://github.com/spack/spack/pull/37180

View File

@@ -1,272 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Low-level wrappers around clingo API."""
import importlib
import pathlib
from types import ModuleType
from typing import Any, Callable, NamedTuple, Optional, Tuple, Union
from llnl.util import lang
def _ast_getter(*names: str) -> Callable[[Any], Any]:
"""Helper to retrieve AST attributes from different versions of the clingo API"""
def getter(node):
for name in names:
result = getattr(node, name, None)
if result:
return result
raise KeyError(f"node has no such keys: {names}")
return getter
ast_type = _ast_getter("ast_type", "type")
ast_sym = _ast_getter("symbol", "term")
class AspObject:
"""Object representing a piece of ASP code."""
def _id(thing: Any) -> Union[str, AspObject]:
"""Quote string if needed for it to be a valid identifier."""
if isinstance(thing, AspObject):
return thing
elif isinstance(thing, bool):
return f'"{str(thing)}"'
elif isinstance(thing, int):
return str(thing)
else:
return f'"{str(thing)}"'
@lang.key_ordering
class AspFunction(AspObject):
"""A term in the ASP logic program"""
__slots__ = ["name", "args"]
def __init__(self, name: str, args: Optional[Tuple[Any, ...]] = None) -> None:
self.name = name
self.args = () if args is None else tuple(args)
def _cmp_key(self) -> Tuple[str, Optional[Tuple[Any, ...]]]:
return self.name, self.args
def __call__(self, *args: Any) -> "AspFunction":
"""Return a new instance of this function with added arguments.
Note that calls are additive, so you can do things like::
>>> attr = AspFunction("attr")
attr()
>>> attr("version")
attr("version")
>>> attr("version")("foo")
attr("version", "foo")
>>> v = AspFunction("attr", ("version",))
attr("version")
>>> v("foo", "bar")
attr("version", "foo", "bar")
"""
return AspFunction(self.name, self.args + args)
def _argify(self, arg: Any) -> Any:
"""Turn the argument into an appropriate clingo symbol"""
if isinstance(arg, bool):
return clingo().String(str(arg))
elif isinstance(arg, int):
return clingo().Number(arg)
elif isinstance(arg, AspFunction):
return clingo().Function(arg.name, [self._argify(x) for x in arg.args], positive=True)
return clingo().String(str(arg))
def symbol(self):
"""Return a clingo symbol for this function"""
return clingo().Function(
self.name, [self._argify(arg) for arg in self.args], positive=True
)
def __str__(self) -> str:
return f"{self.name}({', '.join(str(_id(arg)) for arg in self.args)})"
def __repr__(self) -> str:
return str(self)
class _AspFunctionBuilder:
def __getattr__(self, name):
return AspFunction(name)
#: Global AspFunction builder
fn = _AspFunctionBuilder()
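
A small usage sketch of the builder, assuming the AspFunction and fn definitions above are in scope (the package name and version are illustrative only): calls compose additively into the string form of an ASP fact.

# Hypothetical example; "zlib" and "1.2.13" are made-up values.
f = fn.attr("version", fn.node(0, "zlib"), "1.2.13")
assert str(f) == 'attr("version", node(0, "zlib"), "1.2.13")'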
_CLINGO_MODULE: Optional[ModuleType] = None
def clingo() -> ModuleType:
"""Lazy imports the Python module for clingo, and returns it."""
if _CLINGO_MODULE is not None:
return _CLINGO_MODULE
try:
clingo_mod = importlib.import_module("clingo")
# Make sure we didn't import an empty module
_ensure_clingo_or_raise(clingo_mod)
except ImportError:
clingo_mod = None
if clingo_mod is not None:
return _set_clingo_module_cache(clingo_mod)
clingo_mod = _bootstrap_clingo()
return _set_clingo_module_cache(clingo_mod)
def _set_clingo_module_cache(clingo_mod: ModuleType) -> ModuleType:
"""Sets the global cache to the lazy imported clingo module"""
global _CLINGO_MODULE
importlib.import_module("clingo.ast")
_CLINGO_MODULE = clingo_mod
return clingo_mod
def _ensure_clingo_or_raise(clingo_mod: ModuleType) -> None:
"""Ensures the clingo module can access expected attributes, otherwise raises an error."""
# These are imports that may be problematic at top level (circular imports). They are used
# only to provide exhaustive details when erroring due to a broken clingo module.
import spack.config
import spack.paths as sp
import spack.util.path as sup
try:
clingo_mod.Symbol
except AttributeError:
assert clingo_mod.__file__ is not None, "clingo installation is incomplete or invalid"
# Reaching this point indicates a broken clingo installation
# If Spack derived clingo, suggest user re-run bootstrap
# if non-spack, suggest user investigate installation
# assume Spack is not responsible for broken clingo
msg = (
f"Clingo installation at {clingo_mod.__file__} is incomplete or invalid."
"Please repair installation or re-install. "
"Alternatively, consider installing clingo via Spack."
)
# check whether Spack is responsible
if (
pathlib.Path(
sup.canonicalize_path(
spack.config.CONFIG.get("bootstrap:root", sp.default_user_bootstrap_path)
)
)
in pathlib.Path(clingo_mod.__file__).parents
):
# Spack is responsible for the broken clingo
msg = (
"Spack bootstrapped copy of Clingo is broken, "
"please re-run the bootstrapping process via command `spack bootstrap now`."
" If this issue persists, please file a bug at: github.com/spack/spack"
)
raise RuntimeError(
"Clingo installation may be broken or incomplete, "
"please verify clingo has been installed correctly"
"\n\nClingo does not provide symbol clingo.Symbol"
f"{msg}"
)
def clingo_cffi() -> bool:
"""Returns True if clingo uses the CFFI interface"""
return hasattr(clingo().Symbol, "_rep")
def _bootstrap_clingo() -> ModuleType:
"""Bootstraps the clingo module and returns it"""
import spack.bootstrap
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
clingo_mod = importlib.import_module("clingo")
return clingo_mod
def parse_files(*args, **kwargs):
"""Wrapper around clingo parse_files, that dispatches the function according
to clingo API version.
"""
clingo()
try:
return importlib.import_module("clingo.ast").parse_files(*args, **kwargs)
except (ImportError, AttributeError):
return clingo().parse_files(*args, **kwargs)
def parse_term(*args, **kwargs):
"""Wrapper around clingo parse_term, that dispatches the function according
to clingo API version.
"""
clingo()
try:
return importlib.import_module("clingo.symbol").parse_term(*args, **kwargs)
except (ImportError, AttributeError):
return clingo().parse_term(*args, **kwargs)
class NodeArgument(NamedTuple):
"""Represents a node in the DAG"""
id: str
pkg: str
def intermediate_repr(sym):
"""Returns an intermediate representation of clingo models for Spack's spec builder.
Currently, transforms symbols from clingo models either to strings or to NodeArgument objects.
Returns:
This will turn a ``clingo.Symbol`` into a string or NodeArgument, or a sequence of
``clingo.Symbol`` objects into a tuple of those objects.
"""
# TODO: simplify this when we no longer have to support older clingo versions.
if isinstance(sym, (list, tuple)):
return tuple(intermediate_repr(a) for a in sym)
try:
if sym.name == "node":
return NodeArgument(
id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
)
except RuntimeError:
# This happens when using clingo w/ CFFI and trying to access ".name" for symbols
# that are not functions
pass
if clingo_cffi():
# Clingo w/ CFFI will throw an exception on failure
try:
return sym.string
except RuntimeError:
return str(sym)
else:
return sym.string or str(sym)
def extract_args(model, predicate_name):
"""Extract the arguments to predicates with the provided name from a model.
Pull out all the predicates with name ``predicate_name`` from the model, and
return their intermediate representation.
"""
return [intermediate_repr(sym.arguments) for sym in model if sym.name == predicate_name]

View File

@@ -1501,9 +1501,7 @@ def edge_attributes(self) -> str:
result = f"{deptypes_str} {virtuals_str}".strip()
return f"[{result}]"
def dependencies(
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
) -> List["Spec"]:
def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
"""Return a list of direct dependencies (nodes in the DAG).
Args:
@@ -1514,9 +1512,7 @@ def dependencies(
deptype = dt.canonicalize(deptype)
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
def dependents(
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
) -> List["Spec"]:
def dependents(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
"""Return a list of direct dependents (nodes in the DAG).
Args:
@@ -1640,23 +1636,23 @@ def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
return
# Keep the intersection of constraints when a dependency is added multiple times.
# The only restriction, currently, is keeping the same dependency type
# Keep the intersection of constraints when a dependency is added
# multiple times. Currently, we only allow identical edge types.
orig = self._dependencies[spec.name]
try:
dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
except StopIteration:
edge_attrs = f"deptypes={dt.flag_to_chars(depflag).strip()}"
required_dep_str = f"^[{edge_attrs}] {str(spec)}"
current_deps = ", ".join(
dt.flag_to_chars(x.depflag) + " " + x.spec.short_spec for x in orig
)
raise DuplicateDependencyError(
f"{spec.name} is a duplicate dependency, with conflicting dependency types\n"
f"\t'{str(self)}' cannot depend on '{required_dep_str}'"
f"{self.short_spec} cannot depend on '{spec.short_spec}' multiple times.\n"
f"\tRequired: {dt.flag_to_chars(depflag)}\n"
f"\tDependency: {current_deps}"
)
try:
dspec.spec.constrain(spec)
dspec.update_virtuals(virtuals=virtuals)
except spack.error.UnsatisfiableSpecError:
raise DuplicateDependencyError(
f"Cannot depend on incompatible specs '{dspec.spec}' and '{spec}'"

View File

@@ -107,20 +107,6 @@ def remove(self, spec):
self._constraints = None
self._specs = None
def replace(self, idx: int, spec: str):
"""Replace the existing spec at the index with the new one.
Args:
idx: index of the spec to replace in the speclist
spec: new spec
"""
self.yaml_list[idx] = spec
# invalidate cache variables when we change the list
self._expanded_list = None
self._constraints = None
self._specs = None
def extend(self, other, copy_reference=True):
self.yaml_list.extend(other.yaml_list)
self._expanded_list = None
@@ -162,7 +148,6 @@ def _expand_references(self, yaml):
if isinstance(item, str) and item.startswith("$"):
# replace the reference and apply the sigil if needed
name, sigil = self._parse_reference(item)
referent = [
_sigilify(item, sigil) for item in self._reference[name].specs_as_yaml_list
]

View File

@@ -199,11 +199,9 @@ def get_stage_root():
def _mirror_roots():
mirrors = spack.config.get("mirrors")
return [
(
sup.substitute_path_variables(root)
if root.endswith(os.sep)
else sup.substitute_path_variables(root) + os.sep
)
sup.substitute_path_variables(root)
if root.endswith(os.sep)
else sup.substitute_path_variables(root) + os.sep
for root in mirrors.values()
]
@@ -1068,14 +1066,14 @@ def interactive_version_filter(
def get_checksums_for_versions(
url_by_version: Dict[StandardVersion, str],
url_by_version: Dict[str, str],
package_name: str,
*,
first_stage_function: Optional[Callable[[Stage, str], None]] = None,
keep_stage: bool = False,
concurrency: Optional[int] = None,
fetch_options: Optional[Dict[str, str]] = None,
) -> Dict[StandardVersion, str]:
) -> Dict[str, str]:
"""Computes the checksums for each version passed in input, and returns the results.
Archives are fetched according to the url dictionary passed as input.

View File

@@ -457,10 +457,10 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
# b (parallel =True)
s = default_mock_concretization("a foobar=bar")
spack.build_environment.set_package_py_globals(s.package, context=Context.BUILD)
spack.build_environment.set_package_py_globals(s.package)
assert s["a"].package.module.make_jobs == 1
spack.build_environment.set_package_py_globals(s["b"].package, context=Context.BUILD)
spack.build_environment.set_package_py_globals(s["b"].package)
assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
parallel=s["b"].package.parallel
)
@@ -685,33 +685,3 @@ def test_clear_compiler_related_runtime_variables_of_build_deps(default_mock_con
assert "FC" not in result
assert "F77" not in result
assert result["ANOTHER_VAR"] == "this-should-be-present"
@pytest.mark.parametrize("context", [Context.BUILD, Context.RUN])
def test_build_system_globals_only_set_on_root_during_build(default_mock_concretization, context):
"""Test whether when setting up a build environment, the build related globals are set only
in the top level spec.
TODO: Since module instances are globals themselves, and Spack defines properties on them, they
persist across tests. In principle this is not terrible, cause the variables are mostly static.
But obviously it can lead to very hard to find bugs... We should get rid of those globals and
define them instead as a property on the package instance.
"""
root = spack.spec.Spec("mpileaks").concretized()
build_variables = ("std_cmake_args", "std_meson_args", "std_pip_args")
# See todo above, we clear out any properties that may have been set by the previous test.
# Commenting this loop will make the test fail. I'm leaving it here as a reminder that those
# globals were always a bad idea, and we should pass them to the package instance.
for spec in root.traverse():
for variable in build_variables:
spec.package.module.__dict__.pop(variable, None)
spack.build_environment.SetupContext(root, context=context).set_all_package_py_globals()
# Expect the globals to be set at the root in a build context only.
should_be_set = lambda depth: context == Context.BUILD and depth == 0
for depth, spec in root.traverse(depth=True, root=True):
for variable in build_variables:
assert hasattr(spec.package.module, variable) == should_be_set(depth)

View File

@@ -8,63 +8,16 @@
import pytest
import spack.cmd.checksum
import spack.package_base
import spack.parser
import spack.repo
import spack.spec
import spack.stage
import spack.util.web
from spack.main import SpackCommand
from spack.package_base import ManualDownloadRequiredError
from spack.stage import interactive_version_filter
from spack.version import Version
spack_checksum = SpackCommand("checksum")
@pytest.fixture
def can_fetch_versions(monkeypatch):
"""Fake successful version detection."""
def fetch_remote_versions(pkg, concurrency):
return {Version("1.0"): "https://www.example.com/pkg-1.0.tar.gz"}
def get_checksums_for_versions(url_by_version, package_name, **kwargs):
return {
v: "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
for v in url_by_version
}
def url_exists(url, curl=None):
return True
monkeypatch.setattr(
spack.package_base.PackageBase, "fetch_remote_versions", fetch_remote_versions
)
monkeypatch.setattr(spack.stage, "get_checksums_for_versions", get_checksums_for_versions)
monkeypatch.setattr(spack.util.web, "url_exists", url_exists)
@pytest.fixture
def cannot_fetch_versions(monkeypatch):
"""Fake unsuccessful version detection."""
def fetch_remote_versions(pkg, concurrency):
return {}
def get_checksums_for_versions(url_by_version, package_name, **kwargs):
return {}
def url_exists(url, curl=None):
return False
monkeypatch.setattr(
spack.package_base.PackageBase, "fetch_remote_versions", fetch_remote_versions
)
monkeypatch.setattr(spack.stage, "get_checksums_for_versions", get_checksums_for_versions)
monkeypatch.setattr(spack.util.web, "url_exists", url_exists)
@pytest.mark.parametrize(
"arguments,expected",
[
@@ -95,7 +48,7 @@ def test_checksum_args(arguments, expected):
(["--verify", "zlib", "1.2.13"], "1.2.13 [-] No previous checksum"),
],
)
def test_checksum(arguments, expected, mock_packages, can_fetch_versions):
def test_checksum(arguments, expected, mock_packages, mock_clone_repo, mock_stage):
output = spack_checksum(*arguments)
assert expected in output
@@ -273,7 +226,7 @@ def test_checksum_interactive_unrecognized_command():
assert interactive_version_filter(v.copy(), input=input) == v
def test_checksum_versions(mock_packages, can_fetch_versions):
def test_checksum_versions(mock_packages, mock_clone_repo, mock_fetch, mock_stage):
pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
versions = [str(v) for v in pkg_cls.versions]
output = spack_checksum("zlib", *versions)
@@ -284,7 +237,7 @@ def test_checksum_versions(mock_packages, can_fetch_versions):
assert "Added 0 new versions to" in output
def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
def test_checksum_missing_version(mock_packages, mock_clone_repo, mock_fetch, mock_stage):
output = spack_checksum("preferred-test", "99.99.99", fail_on_error=False)
assert "Could not find any remote versions" in output
output = spack_checksum("--add-to-package", "preferred-test", "99.99.99", fail_on_error=False)
@@ -292,50 +245,30 @@ def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
assert "Added 1 new versions to" not in output
def test_checksum_deprecated_version(mock_packages, can_fetch_versions):
def test_checksum_deprecated_version(mock_packages, mock_clone_repo, mock_fetch, mock_stage):
output = spack_checksum("deprecated-versions", "1.1.0", fail_on_error=False)
assert "Version 1.1.0 is deprecated" in output
output = spack_checksum(
"--add-to-package", "deprecated-versions", "1.1.0", fail_on_error=False
)
assert "Version 1.1.0 is deprecated" in output
# TODO alecbcs: broken assertion.
# assert "Added 0 new versions to" not in output
assert "Added 0 new versions to" not in output
def test_checksum_url(mock_packages, config):
def test_checksum_url(mock_packages):
pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
with pytest.raises(spack.parser.SpecSyntaxError):
spack_checksum(f"{pkg_cls.url}")
def test_checksum_verification_fails(default_mock_concretization, capsys, can_fetch_versions):
def test_checksum_verification_fails(install_mockery, capsys):
spec = spack.spec.Spec("zlib").concretized()
pkg = spec.package
versions = list(pkg.versions.keys())
version_hashes = {versions[0]: "abadhash", Version("0.1"): "123456789"}
version_hashes = {versions[0]: "abadhash", spack.version.Version("0.1"): "123456789"}
with pytest.raises(SystemExit):
spack.cmd.checksum.print_checksum_status(pkg, version_hashes)
out = str(capsys.readouterr())
assert out.count("Correct") == 0
assert "No previous checksum" in out
assert "Invalid checksum" in out
def test_checksum_manual_download_fails(mock_packages, monkeypatch):
"""Confirm that checksumming a manually downloadable package fails."""
name = "zlib"
pkg_cls = spack.repo.PATH.get_pkg_class(name)
versions = [str(v) for v in pkg_cls.versions]
monkeypatch.setattr(spack.package_base.PackageBase, "manual_download", True)
# First check that the exception is raised with the default download
# instructions.
with pytest.raises(ManualDownloadRequiredError, match=f"required for {name}"):
spack_checksum(name, *versions)
# Now check that the exception is raised with custom download instructions.
error = "Cannot calculate the checksum for a manually downloaded package."
monkeypatch.setattr(spack.package_base.PackageBase, "download_instr", error)
with pytest.raises(ManualDownloadRequiredError, match=error):
spack_checksum(name, *versions)

View File

@@ -16,7 +16,6 @@
import spack
import spack.binary_distribution
import spack.ci as ci
import spack.cmd.ci
import spack.config
import spack.environment as ev
import spack.hash_types as ht
@@ -2029,43 +2028,6 @@ def fake_download_and_extract_artifacts(url, work_dir):
assert expect_out in rep_out
@pytest.mark.parametrize(
"url_in,url_out",
[
(
"https://example.com/api/v4/projects/1/jobs/2/artifacts",
"https://example.com/api/v4/projects/1/jobs/2/artifacts",
),
(
"https://example.com/spack/spack/-/jobs/123456/artifacts/download",
"https://example.com/spack/spack/-/jobs/123456/artifacts/download",
),
(
"https://example.com/spack/spack/-/jobs/123456",
"https://example.com/spack/spack/-/jobs/123456/artifacts/download",
),
(
"https://example.com/spack/spack/-/jobs/////123456////?x=y#z",
"https://example.com/spack/spack/-/jobs/123456/artifacts/download",
),
],
)
def test_reproduce_build_url_validation(url_in, url_out):
assert spack.cmd.ci._gitlab_artifacts_url(url_in) == url_out
def test_reproduce_build_url_validation_fails():
"""Wrong URLs should cause an exception"""
with pytest.raises(SystemExit):
ci_cmd("reproduce-build", "example.com/spack/spack/-/jobs/123456/artifacts/download")
with pytest.raises(SystemExit):
ci_cmd("reproduce-build", "https://example.com/spack/spack/-/issues")
with pytest.raises(SystemExit):
ci_cmd("reproduce-build", "https://example.com/spack/spack/-")
@pytest.mark.parametrize(
"subcmd", [(""), ("generate"), ("rebuild-index"), ("rebuild"), ("reproduce-build")]
)

View File

@@ -27,11 +27,7 @@
[r"TestNamedPackage(Package)", r"def install(self"],
),
(["file://example.tar.gz"], "example", [r"Example(Package)", r"def install(self"]),
(
["-n", "test-license"],
"test-license",
[r'license("UNKNOWN", checked_by="github_user1")'],
),
(["-n", "test-license"], "test-license", [r'license("UNKNOWN")']),
# Template-specific cases
(
["-t", "autoreconf", "/test-autoreconf"],

View File

@@ -215,44 +215,6 @@ def test_dev_build_env(tmpdir, install_mockery, mutable_mock_env_path):
assert f.read() == spec.package.replacement_string
def test_dev_build_env_with_vars(tmpdir, install_mockery, mutable_mock_env_path, monkeypatch):
"""Test Spack does dev builds for packages in develop section of env (path with variables)."""
# setup dev-build-test-install package for dev build
build_dir = tmpdir.mkdir("build")
spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={build_dir}")
spec.concretize()
# store the build path in an environment variable that will be used in the environment
monkeypatch.setenv("CUSTOM_BUILD_PATH", build_dir)
with build_dir.as_cwd(), open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
# setup environment
envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
with open("spack.yaml", "w") as f:
f.write(
"""\
spack:
specs:
- dev-build-test-install@0.0.0
develop:
dev-build-test-install:
spec: dev-build-test-install@0.0.0
path: $CUSTOM_BUILD_PATH
"""
)
env("create", "test", "./spack.yaml")
with ev.read("test"):
install()
assert spec.package.filename in os.listdir(spec.prefix)
with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
assert f.read() == spec.package.replacement_string
def test_dev_build_env_version_mismatch(tmpdir, install_mockery, mutable_mock_env_path):
"""Test Spack constraints concretization by develop specs."""
# setup dev-build-test-install package for dev build

View File

@@ -48,7 +48,6 @@
install = SpackCommand("install")
add = SpackCommand("add")
change = SpackCommand("change")
config = SpackCommand("config")
remove = SpackCommand("remove")
concretize = SpackCommand("concretize")
stage = SpackCommand("stage")
@@ -303,6 +302,20 @@ def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch,
assert env_variables["DEPENDENCY_ENV_VAR"] == "1"
def test_env_install_same_spec_twice(install_mockery, mock_fetch):
env("create", "test")
e = ev.read("test")
with e:
# The first installation outputs the package prefix, updates the view
out = install("--add", "cmake-client")
assert "Updating view at" in out
# The second installation reports all packages already installed
out = install("cmake-client")
assert "already installed" in out
def test_env_definition_symlink(install_mockery, mock_fetch, tmpdir):
filepath = str(tmpdir.join("spack.yaml"))
filepath_mid = str(tmpdir.join("spack_mid.yaml"))
@@ -856,114 +869,6 @@ def test_env_with_included_config_file(mutable_mock_env_path, packages_file):
assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
"""Test ``config change`` with config in the ``spack.yaml`` as well as an
included file scope.
"""
included_file = "included-packages.yaml"
included_path = tmp_path / included_file
with open(included_path, "w") as f:
f.write(
"""\
packages:
mpich:
require:
- spec: "@3.0.2"
libelf:
require: "@0.8.10"
bowtie:
require:
- one_of: ["@1.3.0", "@1.2.0"]
"""
)
spack_yaml = tmp_path / ev.manifest_name
spack_yaml.write_text(
f"""\
spack:
packages:
mpich:
require:
- spec: "+debug"
include:
- {os.path.join(".", included_file)}
specs: []
"""
)
e = ev.Environment(tmp_path)
with e:
# List of requirements, flip a variant
config("change", "packages:mpich:require:~debug")
test_spec = spack.spec.Spec("mpich").concretized()
assert test_spec.satisfies("@3.0.2~debug")
# List of requirements, change the version (in a different scope)
config("change", "packages:mpich:require:@3.0.3")
test_spec = spack.spec.Spec("mpich").concretized()
assert test_spec.satisfies("@3.0.3")
# "require:" as a single string, also try specifying
# a spec string that requires enclosing in quotes as
# part of the config path
config("change", 'packages:libelf:require:"@0.8.12:"')
spack.spec.Spec("libelf@0.8.12").concretized()
# No need for assert, if there wasn't a failure, we
# changed the requirement successfully.
# Use change to add a requirement for a package that
# has no requirements defined
config("change", "packages:fftw:require:+mpi")
test_spec = spack.spec.Spec("fftw").concretized()
assert test_spec.satisfies("+mpi")
config("change", "packages:fftw:require:~mpi")
test_spec = spack.spec.Spec("fftw").concretized()
assert test_spec.satisfies("~mpi")
config("change", "packages:fftw:require:@1.0")
test_spec = spack.spec.Spec("fftw").concretized()
assert test_spec.satisfies("@1.0~mpi")
# Use "--match-spec" to change one spec in a "one_of"
# list
config("change", "packages:bowtie:require:@1.2.2", "--match-spec", "@1.2.0")
spack.spec.Spec("bowtie@1.3.0").concretize()
spack.spec.Spec("bowtie@1.2.2").concretized()
def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
spack_yaml = tmp_path / ev.manifest_name
spack_yaml.write_text(
"""\
spack:
specs: []
"""
)
e = ev.Environment(tmp_path)
with e:
config("change", "packages:mpich:require:~debug")
with pytest.raises(spack.solver.asp.UnsatisfiableSpecError):
spack.spec.Spec("mpich+debug").concretized()
spack.spec.Spec("mpich~debug").concretized()
# Now check that we raise an error if we need to add a require: constraint
# when preexisting config manually specified it as a singular spec
spack_yaml.write_text(
"""\
spack:
specs: []
packages:
mpich:
require: "@3.0.3"
"""
)
with e:
assert spack.spec.Spec("mpich").concretized().satisfies("@3.0.3")
with pytest.raises(spack.config.ConfigError, match="not a list"):
config("change", "packages:mpich:require:~debug")
def test_env_with_included_config_file_url(tmpdir, mutable_empty_config, packages_file):
"""Test configuration inclusion of a file whose path is a URL before
the environment is concretized."""
@@ -974,6 +879,8 @@ def test_env_with_included_config_file_url(tmpdir, mutable_empty_config, package
env = ev.Environment(tmpdir.strpath)
ev.activate(env)
scopes = env.included_config_scopes()
assert len(scopes) == 1
cfg = spack.config.get("packages")
assert cfg["mpileaks"]["version"] == ["2.2"]
@@ -1471,8 +1378,8 @@ def test_env_view_fails_dir_file(tmpdir, mock_packages, mock_stage, mock_fetch,
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
add("view-file")
add("view-dir")
add("view-dir-file")
add("view-dir-dir")
with pytest.raises(
llnl.util.link_tree.MergeConflictSummary, match=os.path.join("bin", "x")
):
@@ -1486,8 +1393,8 @@ def test_env_view_succeeds_symlinked_dir_file(
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
add("view-symlinked-dir")
add("view-dir")
add("view-dir-symlinked-dir")
add("view-dir-dir")
install()
x_dir = os.path.join(str(view_dir), "bin", "x")
assert os.path.exists(os.path.join(x_dir, "file_in_dir"))
@@ -2537,88 +2444,58 @@ def test_stack_view_no_activate_without_default(
assert viewdir not in shell
@pytest.mark.parametrize("include_views", [True, False, "split"])
def test_stack_view_multiple_views(
tmp_path,
mock_fetch,
mock_packages,
mock_archive,
install_mockery,
mutable_config,
include_views,
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
):
"""Test multiple views as both included views (True), as both environment
views (False), or as one included and the other in the environment."""
# Write the view configuration and or manifest file
view_filename = tmp_path / "view.yaml"
base_content = """\
filename = str(tmpdir.join("spack.yaml"))
default_viewdir = str(tmpdir.join("default-view"))
combin_viewdir = str(tmpdir.join("combinatorial-view"))
with open(filename, "w") as f:
f.write(
"""\
spack:
definitions:
- packages: [mpileaks, cmake]
- compilers: ['%gcc', '%clang']
- compilers: ['%%gcc', '%%clang']
specs:
- matrix:
- [$packages]
- [$compilers]
"""
include_content = f" include:\n - {view_filename}\n"
view_line = " view:\n"
view:
default:
root: %s
select: ['%%gcc']
combinatorial:
root: %s
exclude: [callpath %%gcc]
projections:
'all': '{name}/{version}-{compiler.name}'"""
% (default_viewdir, combin_viewdir)
)
with tmpdir.as_cwd():
env("create", "test", "./spack.yaml")
with ev.read("test"):
install()
comb_dir = tmp_path / "combinatorial-view"
comb_view = """\
{0}combinatorial:
{0} root: {1}
{0} exclude: [callpath%gcc]
{0} projections:
"""
shell = env("activate", "--sh", "test")
assert "PATH" in shell
assert os.path.join(default_viewdir, "bin") in shell
projection = " 'all': '{name}/{version}-{compiler.name}'"
default_dir = tmp_path / "default-view"
default_view = """\
{0}default:
{0} root: {1}
{0} select: ['%gcc']
"""
content = "spack:\n"
indent = " "
if include_views is True:
# Include both the gcc and combinatorial views
view = "view:\n" + default_view.format(indent, str(default_dir))
view += comb_view.format(indent, str(comb_dir)) + indent + projection
view_filename.write_text(view)
content += include_content + base_content
elif include_views == "split":
# Include the gcc view and inline the combinatorial view
view = "view:\n" + default_view.format(indent, str(default_dir))
view_filename.write_text(view)
content += include_content + base_content + view_line
indent += " "
content += comb_view.format(indent, str(comb_dir)) + indent + projection
else:
# Inline both the gcc and combinatorial views in the environment.
indent += " "
content += base_content + view_line
content += default_view.format(indent, str(default_dir))
content += comb_view.format(indent, str(comb_dir)) + indent + projection
filename = tmp_path / ev.manifest_name
filename.write_text(content)
env("create", "test", str(filename))
with ev.read("test"):
install()
with ev.read("test") as e:
assert os.path.exists(str(default_dir / "bin"))
for spec in e._get_environment_specs():
spec_subdir = f"{spec.version}-{spec.compiler.name}"
comb_spec_dir = str(comb_dir / spec.name / spec_subdir)
test = ev.read("test")
for spec in test._get_environment_specs():
if not spec.satisfies("callpath%gcc"):
assert os.path.exists(comb_spec_dir)
assert os.path.exists(
os.path.join(
combin_viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name)
)
)
else:
assert not os.path.exists(comb_spec_dir)
assert not os.path.exists(
os.path.join(
combin_viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name)
)
)
def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, install_mockery):
@@ -3000,51 +2877,51 @@ def test_modules_relative_to_views(environment_from_manifest, install_mockery, m
assert spec.prefix not in contents
def test_modules_exist_after_env_install(
environment_from_manifest, install_mockery, mock_fetch, monkeypatch
):
# Some caching issue
monkeypatch.setattr(spack.modules.tcl, "configuration_registry", {})
def test_multiple_modules_post_env_hook(environment_from_manifest, install_mockery, mock_fetch):
environment_from_manifest(
"""
spack:
specs:
- mpileaks
- trivial-install-test-package
modules:
default:
enable:: [tcl]
use_view: true
roots:
tcl: uses_view
tcl: modules
full:
enable:: [tcl]
roots:
tcl: without_view
tcl: full_modules
"""
)
with ev.read("test") as e:
install()
specs = e.all_specs()
for module_set in ("uses_view", "without_view"):
modules = glob.glob(f"{e.path}/{module_set}/**/*/*")
assert len(modules) == len(specs), "Not all modules were generated"
for spec in specs:
module = next((m for m in modules if os.path.dirname(m).endswith(spec.name)), None)
assert module, f"Module for {spec} not found"
spec = e.specs_by_hash[e.concretized_order[0]]
view_prefix = e.default_view.get_projection_for_spec(spec)
modules_glob = "%s/modules/**/*/*" % e.path
modules = glob.glob(modules_glob)
assert len(modules) == 1
module = modules[0]
# Now verify that modules have paths pointing into the view instead of the package
# prefix if and only if they set use_view to true.
with open(module, "r") as f:
contents = f.read()
full_modules_glob = "%s/full_modules/**/*/*" % e.path
full_modules = glob.glob(full_modules_glob)
assert len(full_modules) == 1
full_module = full_modules[0]
if module_set == "uses_view":
assert e.default_view.get_projection_for_spec(spec) in contents
assert spec.prefix not in contents
else:
assert e.default_view.get_projection_for_spec(spec) not in contents
assert spec.prefix in contents
with open(module, "r") as f:
contents = f.read()
with open(full_module, "r") as f:
full_contents = f.read()
assert view_prefix in contents
assert spec.prefix not in contents
assert view_prefix not in full_contents
assert spec.prefix in full_contents
@pytest.mark.regression("24148")
@@ -3079,9 +2956,7 @@ def test_query_develop_specs(tmpdir):
)
def test_activation_and_deactiviation_ambiguities(method, env, no_env, env_dir, capsys):
"""spack [-e x | -E | -D x/] env [activate | deactivate] y are ambiguous"""
args = Namespace(
shell="sh", env_name="a", env=env, no_env=no_env, env_dir=env_dir, keep_relative=False
)
args = Namespace(shell="sh", activate_env="a", env=env, no_env=no_env, env_dir=env_dir)
with pytest.raises(SystemExit):
method(args)
_, err = capsys.readouterr()
@@ -3122,34 +2997,6 @@ def test_activate_temp(monkeypatch, tmpdir):
assert ev.is_env_dir(str(tmpdir))
@pytest.mark.parametrize(
"conflict_arg", [["--dir"], ["--keep-relative"], ["--with-view", "foo"], ["env"]]
)
def test_activate_parser_conflicts_with_temp(conflict_arg):
with pytest.raises(SpackCommandError):
env("activate", "--sh", "--temp", *conflict_arg)
def test_create_and_activate_managed(tmp_path):
with fs.working_dir(str(tmp_path)):
shell = env("activate", "--without-view", "--create", "--sh", "foo")
active_env_var = next(line for line in shell.splitlines() if ev.spack_env_var in line)
assert str(tmp_path) in active_env_var
active_ev = ev.active_environment()
assert "foo" == active_ev.name
env("deactivate")
def test_create_and_activate_unmanaged(tmp_path):
with fs.working_dir(str(tmp_path)):
env_dir = os.path.join(str(tmp_path), "foo")
shell = env("activate", "--without-view", "--create", "--sh", "-d", env_dir)
active_env_var = next(line for line in shell.splitlines() if ev.spack_env_var in line)
assert str(env_dir) in active_env_var
assert ev.is_env_dir(env_dir)
env("deactivate")
def test_activate_default(monkeypatch):
"""Tests whether `spack env activate` creates / activates the default
environment"""
@@ -3698,6 +3545,8 @@ def test_env_include_packages_url(
with spack.config.override("config:url_fetch_method", "curl"):
env = ev.Environment(tmpdir.strpath)
ev.activate(env)
scopes = env.included_config_scopes()
assert len(scopes) == 1
cfg = spack.config.get("packages")
assert "openmpi" in cfg["all"]["providers"]["mpi"]
@@ -3744,191 +3593,3 @@ def test_environment_created_from_lockfile_has_view(mock_packages, temporary_sto
# Make sure the view was created
with ev.Environment(env_b) as e:
assert os.path.isdir(e.view_path_default)
def test_env_view_disabled(tmp_path, mutable_mock_env_path):
"""Ensure an inlined view being disabled means not even the default view
is created (since the case doesn't appear to be covered in this module)."""
spack_yaml = tmp_path / ev.manifest_name
spack_yaml.write_text(
"""\
spack:
specs:
- mpileaks
view: false
"""
)
env("create", "disabled", str(spack_yaml))
with ev.read("disabled") as e:
e.concretize()
assert len(e.views) == 0
assert not os.path.exists(e.view_path_default)
@pytest.mark.parametrize("first", ["false", "true", "custom"])
def test_env_include_mixed_views(tmp_path, mutable_mock_env_path, mutable_config, first):
"""Ensure including path and boolean views in different combinations result
in the creation of only the first view if it is not disabled."""
false_yaml = tmp_path / "false-view.yaml"
false_yaml.write_text("view: false\n")
true_yaml = tmp_path / "true-view.yaml"
true_yaml.write_text("view: true\n")
custom_name = "my-test-view"
custom_view = tmp_path / custom_name
custom_yaml = tmp_path / "custom-view.yaml"
custom_yaml.write_text(
f"""
view:
{custom_name}:
root: {custom_view}
"""
)
if first == "false":
order = [false_yaml, true_yaml, custom_yaml]
elif first == "true":
order = [true_yaml, custom_yaml, false_yaml]
else:
order = [custom_yaml, false_yaml, true_yaml]
includes = [f" - {yaml}\n" for yaml in order]
spack_yaml = tmp_path / ev.manifest_name
spack_yaml.write_text(
f"""\
spack:
include:
{''.join(includes)}
specs:
- mpileaks
packages:
mpileaks:
compiler: [gcc]
"""
)
env("create", "test", str(spack_yaml))
with ev.read("test") as e:
concretize()
# Only the first included view should be created if view not disabled by it
assert len(e.views) == 0 if first == "false" else 1
if first == "true":
assert os.path.exists(e.view_path_default)
else:
assert not os.path.exists(e.view_path_default)
if first == "custom":
assert os.path.exists(custom_view)
else:
assert not os.path.exists(custom_view)
def test_stack_view_multiple_views_same_name(
tmp_path, mock_fetch, mock_packages, mock_archive, install_mockery, mutable_config
):
"""Test multiple views with the same name combine settings with precedence
given to the options in spack.yaml."""
# Write the view configuration and or manifest file
view_filename = tmp_path / "view.yaml"
default_dir = tmp_path / "default-view"
default_view = f"""\
view:
default:
root: {default_dir}
select: ['%gcc']
projections:
all: '{{name}}/{{version}}-{{compiler.name}}'
"""
view_filename.write_text(default_view)
view_dir = tmp_path / "view"
content = f"""\
spack:
include:
- {view_filename}
definitions:
- packages: [mpileaks, cmake]
- compilers: ['%gcc', '%clang']
specs:
- matrix:
- [$packages]
- [$compilers]
view:
default:
root: {view_dir}
exclude: ['cmake']
projections:
all: '{{name}}/{{compiler.name}}-{{version}}'
"""
filename = tmp_path / ev.manifest_name
filename.write_text(content)
env("create", "test", str(filename))
with ev.read("test"):
install()
with ev.read("test") as e:
# the view root in the included view should NOT exist
assert not os.path.exists(str(default_dir))
for spec in e._get_environment_specs():
# no specs will exist in the included view projection
included_spec_subdir = f"{spec.version}-{spec.compiler.name}"
included_spec_dir = str(view_dir / spec.name / included_spec_subdir)
assert not os.path.exists(included_spec_dir)
# only specs compiled with %gcc (selected in the included view) that
# are also not cmake (excluded in the environment view) should exist
env_spec_subdir = f"{spec.compiler.name}-{spec.version}"
env_spec_dir = str(view_dir / spec.name / env_spec_subdir)
if spec.satisfies("cmake") or spec.satisfies("%clang"):
assert not os.path.exists(env_spec_dir)
else:
assert os.path.exists(env_spec_dir)
def test_env_view_resolves_identical_file_conflicts(tmp_path, install_mockery, mock_fetch):
"""When files clash in a view, but refer to the same file on disk (for example, the dependent
symlinks to a file in the dependency at the same relative path), Spack links the first regular
file instead of symlinks. This is important for copy type views where we need the underlying
file to be copied instead of the symlink (when a symlink would be copied, it would become a
self-referencing symlink after relocation). The test uses a symlink type view though, since
that keeps track of the original file path."""
with ev.create("env", with_view=tmp_path / "view") as e:
add("view-resolve-conflict-top")
install()
top = e.matching_spec("view-resolve-conflict-top").prefix
bottom = e.matching_spec("view-file").prefix
# In this example we have `./bin/x` in 3 prefixes, two links, one regular file. We expect the
# regular file to be linked into the view. There are also 2 links at `./bin/y`, but no regular
# file, so we expect standard behavior: first entry is linked into the view.
# view-resolve-conflict-top/bin/
# x -> view-file/bin/x
# y -> view-resolve-conflict-middle/bin/y # expect this y to be linked
# view-resolve-conflict-middle/bin/
# x -> view-file/bin/x
# y -> view-file/bin/x
# view-file/bin/
# x # expect this x to be linked
assert os.readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
assert os.readlink(tmp_path / "view" / "bin" / "y") == top.bin.y
def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mock_fetch):
"""Test that file-file conflicts for two unique files in environment views are ignored, and
that the dependent's file is linked into the view, not the dependency's file."""
with ev.create("env", with_view=tmp_path / "view") as e:
add("view-ignore-conflict")
install()
prefix_dependent = e.matching_spec("view-ignore-conflict").prefix
# The dependent's file is linked into the view
assert os.readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x

View File

@@ -5,7 +5,6 @@
import argparse
import builtins
import filecmp
import gzip
import itertools
import os
import pathlib
@@ -138,7 +137,8 @@ def test_package_output(tmpdir, capsys, install_mockery, mock_fetch):
pkg = spec.package
pkg.do_install(verbose=True)
with gzip.open(pkg.install_log_path, "rt") as f:
log_file = pkg.build_log_path
with open(log_file) as f:
out = f.read()
# make sure that output from the actual package file appears in the

View File

@@ -1,119 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import gzip
import os
import sys
import tempfile
from contextlib import contextmanager
from io import BytesIO, TextIOWrapper
import pytest
import spack
from spack.main import SpackCommand
logs = SpackCommand("logs")
install = SpackCommand("install")
@contextmanager
def stdout_as_buffered_text_stream():
"""Attempt to simulate "typical" interface for stdout when user is
running Spack/Python from terminal. "spack log" should not be run
for all possible cases of what stdout might look like, in
particular some programmatic redirections of stdout like StringIO
are not meant to be supported by this command; more generally,
mechanisms that depend on decoding binary output prior to write
are not supported for "spack log".
"""
original_stdout = sys.stdout
with tempfile.TemporaryFile(mode="w+b") as tf:
sys.stdout = TextIOWrapper(tf)
try:
yield tf
finally:
sys.stdout = original_stdout
def _rewind_collect_and_decode(rw_stream):
rw_stream.seek(0)
return rw_stream.read().decode("utf-8")
@pytest.fixture
def disable_capture(capfd):
with capfd.disabled():
yield
def test_logs_cmd_errors(install_mockery, mock_fetch, mock_archive, mock_packages):
spec = spack.spec.Spec("libelf").concretized()
assert not spec.installed
with pytest.raises(spack.main.SpackCommandError, match="is not installed or staged"):
logs("libelf")
with pytest.raises(spack.main.SpackCommandError, match="Too many specs"):
logs("libelf mpi")
install("libelf")
os.remove(spec.package.install_log_path)
with pytest.raises(spack.main.SpackCommandError, match="No logs are available"):
logs("libelf")
def _write_string_to_path(string, path):
"""Write a string to a file, preserving newline format in the string."""
with open(path, "wb") as f:
f.write(string.encode("utf-8"))
def test_dump_logs(install_mockery, mock_fetch, mock_archive, mock_packages, disable_capture):
"""Test that ``spack log`` can find (and print) the logs for partial
builds and completed installs.
Also make sure that for compressed logs, that we automatically
decompress them.
"""
cmdline_spec = spack.spec.Spec("libelf")
concrete_spec = cmdline_spec.concretized()
# Sanity check, make sure this test is checking what we want: to
# start with
assert not concrete_spec.installed
stage_log_content = "test_log stage output\nanother line"
installed_log_content = "test_log install output\nhere to test multiple lines"
with concrete_spec.package.stage:
_write_string_to_path(stage_log_content, concrete_spec.package.log_path)
with stdout_as_buffered_text_stream() as redirected_stdout:
spack.cmd.logs._logs(cmdline_spec, concrete_spec)
assert _rewind_collect_and_decode(redirected_stdout) == stage_log_content
install("libelf")
# Sanity check: make sure a path is recorded, regardless of whether
# it exists (if it does exist, we will overwrite it with content
# in this test)
assert concrete_spec.package.install_log_path
with gzip.open(concrete_spec.package.install_log_path, "wb") as compressed_file:
bstream = BytesIO(installed_log_content.encode("utf-8"))
compressed_file.writelines(bstream)
with stdout_as_buffered_text_stream() as redirected_stdout:
spack.cmd.logs._logs(cmdline_spec, concrete_spec)
assert _rewind_collect_and_decode(redirected_stdout) == installed_log_content
with concrete_spec.package.stage:
_write_string_to_path(stage_log_content, concrete_spec.package.log_path)
# We re-create the stage, but "spack log" should ignore that
# if the package is installed
with stdout_as_buffered_text_stream() as redirected_stdout:
spack.cmd.logs._logs(cmdline_spec, concrete_spec)
assert _rewind_collect_and_decode(redirected_stdout) == installed_log_content

View File

@@ -98,9 +98,13 @@ def test_url_list(mock_packages):
def test_url_summary(mock_packages):
"""Test the URL summary command."""
# test url_summary, the internal function that does the work
(total_urls, correct_names, correct_versions, name_count_dict, version_count_dict) = (
url_summary(None)
)
(
total_urls,
correct_names,
correct_versions,
name_count_dict,
version_count_dict,
) = url_summary(None)
assert 0 < correct_names <= sum(name_count_dict.values()) <= total_urls
assert 0 < correct_versions <= sum(version_count_dict.values()) <= total_urls

View File

@@ -191,7 +191,7 @@ def test_view_files_not_ignored(
pkg.do_install()
pkg.assert_installed(spec.prefix)
install("view-file") # Arbitrary package to add noise
install("view-dir-file") # Arbitrary package to add noise
viewpath = str(tmpdir.mkdir("view_{0}".format(cmd)))
@@ -205,7 +205,7 @@ def test_view_files_not_ignored(
prefix_in_view = viewpath
args = []
view(cmd, *(args + [viewpath, "view-not-ignored", "view-file"]))
view(cmd, *(args + [viewpath, "view-not-ignored", "view-dir-file"]))
pkg.assert_installed(prefix_in_view)
view("remove", viewpath, "view-not-ignored")

Some files were not shown because too many files have changed in this diff