Compare commits


1 Commit

Author: Harmen Stoppels
SHA1: 8b020067f4
Message: spec.py: further arch related fixes
Date: 2025-01-28 16:59:53 +01:00
469 changed files with 3987 additions and 4587 deletions


@@ -40,17 +40,17 @@ jobs:
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:


@@ -81,10 +81,6 @@ jobs:
with:
with_coverage: ${{ needs.changes.outputs.core }}
import-check:
needs: [ changes ]
uses: ./.github/workflows/import-check.yaml
all-prechecks:
needs: [ prechecks ]
if: ${{ always() }}


@@ -33,4 +33,3 @@ jobs:
with:
verbose: true
fail_ci_if_error: false
token: ${{ secrets.CODECOV_TOKEN }}


@@ -1,49 +0,0 @@
name: import-check
on:
workflow_call:
jobs:
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Circular import check
working-directory: circular-import-fighter
run: make -j compare "SPACK_ROOT=../old ../new"


@@ -86,6 +86,66 @@ jobs:
spack -d bootstrap now --dev
spack -d style -t black
spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Problematic imports before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old SUFFIX=.old
- name: Problematic imports after
working-directory: circular-import-fighter
run: make SPACK_ROOT=../new SUFFIX=.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(head -n1 solution.old)"
edges_after="$(head -n1 solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi
# Further style checks from pylint
pylint:

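The shell step above compares the first line of solution.old and solution.new. Expressed as a small Python sketch for clarity (file names as in the workflow; the logic is illustrative, not part of this changeset):

def read_edge_count(path: str) -> int:
    # per the workflow above, the first line of each solution file holds the edge count
    with open(path, encoding="utf-8") as f:
        return int(f.readline().strip())

before = read_edge_count("solution.old")
after = read_edge_count("solution.new")
if after > before:
    raise SystemExit(f"Import check failed: {after} imports need to be deleted (was {before})")
print(f"Import check passed: {after} <= {before}")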

@@ -63,7 +63,3 @@ concretizer:
# Setting this to false yields unreproducible results, so we advise to use that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true
# Static analysis may reduce the concretization time by generating smaller ASP problems, in
# cases where there are requirements that prevent part of the search space to be explored.
static_analysis: false


@@ -1,5 +1,5 @@
config:
locks: false
build_stage::
- '$user_cache_path/stage'
- '$spack/.staging'
stage_name: '{name}-{version}-{hash:7}'


@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
"environment variables" section lists environment variables that the
build system uses to pass flags to the compiler and linker.
^^^^^^^^^^^^^^^^^^^^^^^^^
Adding flags to configure
^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^
Addings flags to configure
^^^^^^^^^^^^^^^^^^^^^^^^^^
For most of the flags you encounter, you will want a variant to
optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:
def configure_args(self):
args = []
...
if self.spec.satisfies("+mpi"):
args.append("--enable-mpi")
else:
@@ -299,10 +299,7 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
.. code-block:: python
def configure_args(self):
args = []
...
args.extend(self.enable_or_disable("mpi"))
return args
return [self.enable_or_disable("mpi")]
Note that we are explicitly disabling MPI support if it is not
@@ -347,14 +344,7 @@ typically used to enable or disable some feature within the package.
default=False,
description="Memchecker support for debugging [degrades performance]"
)
...
def configure_args(self):
args = []
...
args.extend(self.enable_or_disable("memchecker"))
return args
config_args.extend(self.enable_or_disable("memchecker"))
In this example, specifying the variant ``+memchecker`` will generate
the following configuration options:

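As a standalone illustration of the variant plus enable_or_disable pattern in the documentation hunks above, here is a hypothetical AutotoolsPackage; the package name, URL, and checksum are placeholders, not taken from this changeset:

from spack.package import *


class Foo(AutotoolsPackage):
    """Hypothetical package illustrating the variant + enable_or_disable pattern."""

    homepage = "https://example.com/foo"
    url = "https://example.com/foo-1.0.tar.gz"

    version("1.0", sha256="0" * 64)

    variant("mpi", default=False, description="Build with MPI support")

    def configure_args(self):
        args = []
        # expands to --enable-mpi or --disable-mpi depending on the variant value
        args.extend(self.enable_or_disable("mpi"))
        return args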

@@ -361,6 +361,7 @@ and the tags associated with the class of runners to build on.
* ``.linux_neoverse_n1``
* ``.linux_neoverse_v1``
* ``.linux_neoverse_v2``
* ``.linux_power``
* ``.linux_skylake``
* ``.linux_x86_64``
* ``.linux_x86_64_v4``


@@ -112,19 +112,6 @@ the original but may concretize differently in the presence of different
explicit or default configuration settings (e.g., a different version of
Spack or for a different user account).
Environments created from a manifest will copy any included configs
from relative paths inside the environment. Relative paths from
outside the environment will cause errors, and absolute paths will be
kept absolute. For example, if ``spack.yaml`` includes:
.. code-block:: yaml
spack:
include: [./config.yaml]
then the created environment will have its own copy of the file
``config.yaml`` copied from the location in the original environment.
Create an environment from a ``spack.lock`` file using:
.. code-block:: console
@@ -173,7 +160,7 @@ accepts. If an environment already exists then spack will simply activate it
and ignore the create-specific flags.
.. code-block:: console
$ spack env activate --create -p myenv
# ...
# [creates if myenv does not exist yet]
@@ -437,8 +424,8 @@ Developing Packages in a Spack Environment
The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will automatically pick the highest version the package has defined.
This means any infinity versions (``develop``, ``main``, ``stable``) will be
preferred in this selection process.
@@ -448,9 +435,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native implementation to check for modifications
is to check if ``mtime`` is newer than the installation.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects using custom spack repo's
to drive development and want to optimize performance.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects using custom spack repo's
to drive development and want to optimize performance.
Spack ensures that all instances of a
developed package in the environment are concretized to match the
@@ -466,7 +453,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
and eventually committed and pushed to the upstream git repo.
If the package being developed supports out-of-source builds then users can use the
``--build_directory`` flag to control the location and name of the build directory.
``--build_directory`` flag to control the location and name of the build directory.
This is a shortcut to set the ``package_attributes:build_directory`` in the
``packages`` configuration (see :ref:`assigning-package-attributes`).
The supplied location will become the build-directory for that package in all future builds.


@@ -668,7 +668,7 @@ def copy(src, dest, _permissions=False):
_permissions (bool): for internal use only
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* matches multiple files but *dest* is
not a directory
"""
@@ -679,7 +679,7 @@ def copy(src, dest, _permissions=False):
files = glob.glob(src)
if not files:
raise OSError("No such file or directory: '{0}'".format(src))
raise IOError("No such file or directory: '{0}'".format(src))
if len(files) > 1 and not os.path.isdir(dest):
raise ValueError(
"'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
@@ -710,7 +710,7 @@ def install(src, dest):
dest (str): the destination file or directory
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* matches multiple files but *dest* is
not a directory
"""
@@ -748,7 +748,7 @@ def copy_tree(
_permissions (bool): for internal use only
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
if _permissions:
@@ -762,7 +762,7 @@ def copy_tree(
files = glob.glob(src)
if not files:
raise OSError("No such file or directory: '{0}'".format(src))
raise IOError("No such file or directory: '{0}'".format(src))
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
# all symlinks to this list while traversing the tree, then when finished, make all
@@ -843,7 +843,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
ignore (typing.Callable): function indicating which files to ignore
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)

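Background for the OSError/IOError variants above: on Python 3, IOError is an alias of OSError, so either spelling catches the same exceptions. A minimal illustrative check:

# On Python 3 (3.3+), IOError is simply an alias of OSError
assert IOError is OSError

try:
    open("/definitely/not/a/real/path")
except OSError as exc:  # also catches anything raised as IOError
    print(type(exc).__name__, exc.errno)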

@@ -308,7 +308,7 @@ class LinkTree:
def __init__(self, source_root):
if not os.path.exists(source_root):
raise OSError("No such file or directory: '%s'", source_root)
raise IOError("No such file or directory: '%s'", source_root)
self._root = source_root


@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:
return True
except OSError as e:
except IOError as e:
# EAGAIN and EACCES == locked by another process (so try again)
if e.errno not in (errno.EAGAIN, errno.EACCES):
raise


@@ -344,6 +344,26 @@ def close(self):
self.file.close()
@contextmanager
def replace_environment(env):
"""Replace the current environment (`os.environ`) with `env`.
If `env` is empty (or None), this unsets all current environment
variables.
"""
env = env or {}
old_env = os.environ.copy()
try:
os.environ.clear()
for name, val in env.items():
os.environ[name] = val
yield
finally:
os.environ.clear()
for name, val in old_env.items():
os.environ[name] = val
def log_output(*args, **kwargs):
"""Context manager that logs its output to a file.
@@ -427,6 +447,7 @@ def __init__(
self.echo = echo
self.debug = debug
self.buffer = buffer
self.env = env # the environment to use for _writer_daemon
self.filter_fn = filter_fn
self._active = False # used to prevent re-entry
@@ -498,20 +519,21 @@ def __enter__(self):
# just don't forward input if this fails
pass
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
with replace_environment(self.env):
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
finally:
if input_fd:
@@ -707,7 +729,10 @@ class winlog:
Does not support the use of 'v' toggling as nixlog does.
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
def __init__(
self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
):
self.env = env
self.debug = debug
self.echo = echo
self.logfile = file_like
@@ -764,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
reader.close()
self._active = True
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
with replace_environment(self.env):
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
@@ -892,7 +918,7 @@ def _writer_daemon(
try:
if stdin_file.read(1) == "v":
echo = not echo
except OSError as e:
except IOError as e:
# If SIGTTIN is ignored, the system gives EIO
# to let the caller know the read failed b/c it
# was in the bg. Ignore that too.
@@ -987,7 +1013,7 @@ def wrapped(*args, **kwargs):
while True:
try:
return function(*args, **kwargs)
except OSError as e:
except IOError as e:
if e.errno == errno.EINTR:
continue
raise

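An illustrative use of the replace_environment context manager from the hunk above, assuming it is importable from llnl.util.tty.log; the variable name and values are arbitrary:

import os

from llnl.util.tty.log import replace_environment

os.environ["SPACK_DEMO"] = "outer"
with replace_environment({"SPACK_DEMO": "inner"}):
    # inside the context only the provided variables are visible
    assert os.environ["SPACK_DEMO"] == "inner"
assert os.environ["SPACK_DEMO"] == "outer"  # original environment restored on exit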

@@ -10,7 +10,7 @@
import spack.util.git
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0.dev0"
__version__ = "0.24.0.dev0"
spack_version = __version__


@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
for dep_name, dep in deps_by_name.items():
def check_virtual_with_variants(spec, msg):
if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
if not spec.virtual or not spec.variants:
return
error = error_cls(
f"{pkg_name}: {msg}",


@@ -2529,10 +2529,10 @@ def install_root_node(
allow_missing: when true, allows installing a node with missing dependencies
"""
# Early termination
if spec.external or not spec.concrete:
warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
if spec.external or spec.virtual:
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
return
elif spec.installed and not force:
elif spec.concrete and spec.installed and not force:
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
return


@@ -27,9 +27,9 @@
class ClingoBootstrapConcretizer:
def __init__(self, configuration):
self.host_platform = spack.platforms.host()
self.host_os = self.host_platform.default_operating_system()
self.host_os = self.host_platform.operating_system("frontend")
self.host_target = archspec.cpu.host().family
self.host_architecture = spack.spec.ArchSpec.default_arch()
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
self.host_architecture.target = str(self.host_target)
self.host_compiler = self._valid_compiler_or_raise()
self.host_python = self.python_external_spec()


@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.default_arch()
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()


@@ -11,7 +11,6 @@
from typing import Any, List, Optional, Tuple
import llnl.util.filesystem as fs
from llnl.util import tty
from llnl.util.lang import stable_partition
import spack.builder
@@ -459,23 +458,11 @@ def cmake(
) -> None:
"""Runs ``cmake`` in the build directory"""
if spec.is_develop:
# skip cmake phase if it is an incremental develop build
# Determine the files that will re-run CMake that are generated from a successful
# configure step based on state
primary_generator = _extract_primary_generator(self.generator)
configure_artifact = "Makefile"
if primary_generator == "Ninja":
configure_artifact = "ninja.build"
if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
tty.msg(
"Incremental build criteria satisfied."
"Skipping CMake configure step. To force configuration run"
f" `spack clean {pkg.name}`"
)
return
# skip cmake phase if it is an incremental develop build
if spec.is_develop and os.path.isfile(
os.path.join(self.build_directory, "CMakeCache.txt")
):
return
options = self.std_cmake_args
options += self.cmake_args()


@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage.
Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
Maintainers: ax3l, Rombur, davidbeckingsale
"""
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,12 +47,6 @@ class CudaPackage(PackageBase):
"89",
"90",
"90a",
"100",
"100a",
"101",
"101a",
"120",
"120a",
)
# FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -105,56 +99,39 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
# CUDA version vs Architecture
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
# Tesla support:
depends_on("cuda@:6.0", when="cuda_arch=10")
depends_on("cuda@:6.5", when="cuda_arch=11")
depends_on("cuda@2.1:6.5", when="cuda_arch=12")
depends_on("cuda@2.1:6.5", when="cuda_arch=13")
# Fermi support:
depends_on("cuda@3.0:8.0", when="cuda_arch=20")
depends_on("cuda@3.2:8.0", when="cuda_arch=21")
# Kepler support:
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
# Maxwell support:
depends_on("cuda@6.0:", when="cuda_arch=50")
depends_on("cuda@6.5:", when="cuda_arch=52")
depends_on("cuda@6.5:", when="cuda_arch=53")
# Pascal support:
depends_on("cuda@8.0:", when="cuda_arch=60")
depends_on("cuda@8.0:", when="cuda_arch=61")
depends_on("cuda@8.0:", when="cuda_arch=62")
# Volta support:
depends_on("cuda@9.0:", when="cuda_arch=70")
# Turing support:
depends_on("cuda@9.0:", when="cuda_arch=72")
depends_on("cuda@10.0:", when="cuda_arch=75")
# Ampere support:
depends_on("cuda@11.0:", when="cuda_arch=80")
depends_on("cuda@11.1:", when="cuda_arch=86")
depends_on("cuda@11.4:", when="cuda_arch=87")
# Ada support:
depends_on("cuda@11.8:", when="cuda_arch=89")
# Hopper support:
depends_on("cuda@12.0:", when="cuda_arch=90")
depends_on("cuda@12.0:", when="cuda_arch=90a")
# Blackwell support:
depends_on("cuda@12.8:", when="cuda_arch=100")
depends_on("cuda@12.8:", when="cuda_arch=100a")
depends_on("cuda@12.8:", when="cuda_arch=101")
depends_on("cuda@12.8:", when="cuda_arch=101a")
depends_on("cuda@12.8:", when="cuda_arch=120")
depends_on("cuda@12.8:", when="cuda_arch=120a")
# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -186,7 +163,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -195,7 +171,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

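For context, a hypothetical package using the CudaPackage mixin shown above; the class name, version, and CMake flag handling are illustrative and not part of this changeset:

from spack.package import *


class Bar(CMakePackage, CudaPackage):
    """Hypothetical package showing how the cuda and cuda_arch variants are consumed."""

    version("1.0", sha256="0" * 64)

    def cmake_args(self):
        args = []
        if self.spec.satisfies("+cuda"):
            archs = self.spec.variants["cuda_arch"].value
            # e.g. cuda_arch=80,90 becomes -DCMAKE_CUDA_ARCHITECTURES=80;90
            args.append("-DCMAKE_CUDA_ARCHITECTURES=" + ";".join(archs))
        return args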

@@ -264,17 +264,16 @@ def update_external_dependencies(self, extendee_spec=None):
# Ensure architecture information is present
if not python.architecture:
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
host_target = host_platform.default_target()
host_os = host_platform.operating_system("default_os")
host_target = host_platform.target("default_target")
python.architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
else:
if not python.architecture.platform:
python.architecture.platform = spack.platforms.host()
platform = spack.platforms.by_name(python.architecture.platform)
if not python.architecture.os:
python.architecture.os = platform.default_operating_system()
python.architecture.os = "default_os"
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name


@@ -14,9 +14,8 @@
import zipfile
from collections import namedtuple
from typing import Callable, Dict, List, Set
from urllib.request import Request
from urllib.request import HTTPHandler, Request, build_opener
import llnl.path
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize
@@ -63,8 +62,6 @@
PushResult = namedtuple("PushResult", "success url")
urlopen = web_util.urlopen # alias for mocking in tests
def get_change_revisions():
"""If this is a git repo get the revisions to use when checking
@@ -84,9 +81,6 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
whether or not the stack was changed. Returns True if the environment
manifest changed between the provided revisions (or additionally if the
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
# git returns posix paths always, normalize input to be comptaible
# with that
env_path = llnl.path.convert_to_posix_path(env_path)
git = spack.util.git.git()
if git:
with fs.working_dir(spack.paths.prefix):
@@ -633,19 +627,29 @@ def download_and_extract_artifacts(url, work_dir):
if token:
headers["PRIVATE-TOKEN"] = token
request = Request(url, headers=headers, method="GET")
opener = build_opener(HTTPHandler)
request = Request(url, headers=headers)
request.get_method = lambda: "GET"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code != 200:
msg = f"Error response code ({response_code}) in reproduce_ci_job"
raise SpackError(msg)
artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
os.makedirs(work_dir, exist_ok=True)
try:
response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
except OSError as e:
raise SpackError(f"Error fetching artifacts: {e}")
if not os.path.exists(work_dir):
os.makedirs(work_dir)
with zipfile.ZipFile(artifacts_zip_path) as zip_file:
zip_file.extractall(work_dir)
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
zip_file = zipfile.ZipFile(artifacts_zip_path)
zip_file.extractall(work_dir)
zip_file.close()
os.remove(artifacts_zip_path)


@@ -3,7 +3,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import warnings
import archspec.cpu
@@ -52,10 +51,10 @@ def setup_parser(subparser):
"-t", "--target", action="store_true", default=False, help="print only the target"
)
parts2.add_argument(
"-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
"-f", "--frontend", action="store_true", default=False, help="print frontend"
)
parts2.add_argument(
"-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
"-b", "--backend", action="store_true", default=False, help="print backend"
)
@@ -99,14 +98,15 @@ def arch(parser, args):
display_targets(archspec.cpu.TARGETS)
return
os_args, target_args = "default_os", "default_target"
if args.frontend:
warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
os_args, target_args = "frontend", "frontend"
elif args.backend:
warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")
os_args, target_args = "backend", "backend"
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
host_target = host_platform.default_target()
host_os = host_platform.operating_system(os_args)
host_target = host_platform.target(target_args)
if args.family:
host_target = host_target.family
elif args.generic:


@@ -86,8 +86,8 @@ def create_db_tarball(args):
def report(args):
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
host_target = host_platform.default_target()
host_os = host_platform.operating_system("frontend")
host_target = host_platform.target("frontend")
architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
print("* **Spack:**", spack.get_version())
print("* **Python:**", platform.python_version())


@@ -9,9 +9,9 @@
import spack.cmd
import spack.environment as ev
import spack.package_base
import spack.store
from spack.cmd.common import arguments
from spack.solver.input_analysis import create_graph_analyzer
description = "show dependencies of a package"
section = "basic"
@@ -68,17 +68,15 @@ def dependencies(parser, args):
else:
spec = specs[0]
dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies(
dependencies = spack.package_base.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,
allowed_deps=args.deptype,
depflag=args.deptype,
)
if not args.expand_virtuals:
dependencies.update(virtuals)
if spec.name in dependencies:
dependencies.remove(spec.name)
del dependencies[spec.name]
if dependencies:
colify(sorted(dependencies))


@@ -125,7 +125,7 @@ def develop(parser, args):
version = spec.versions.concrete_range_as_version
if not version:
# look up the maximum version so infintiy versions are preferred for develop
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])


@@ -110,7 +110,10 @@ def external_find(args):
# Note that KeyboardInterrupt does not subclass Exception
# (so CTRL-C will terminate the program as expected).
skip_msg = "Skipping manifest and continuing with other external checks"
if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
errno.EPERM,
errno.EACCES,
]:
# The manifest file does not have sufficient permissions enabled:
# print a warning and keep going
tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)


@@ -54,6 +54,10 @@
@m{target=target} specific <target> processor
@m{arch=platform-os-target} shortcut for all three above
cross-compiling:
@m{os=backend} or @m{os=be} build for compute node (backend)
@m{os=frontend} or @m{os=fe} build for login node (frontend)
dependencies:
^dependency [constraints] specify constraints on dependencies
^@K{/hash} build with a specific installed


@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
package does not explicitly forbid redistributing source."""
if self.private:
return True
elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x):
elif x.package_class.redistribute_source(x):
return True
else:
tty.debug(


@@ -41,11 +41,7 @@ def providers(parser, args):
specs = spack.cmd.parse_specs(args.virtual_package)
# Check prerequisites
non_virtual = [
str(s)
for s in specs
if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
]
non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
if non_virtual:
msg = "non-virtual specs cannot be part of the query "
msg += "[{0}]\n".format(", ".join(non_virtual))


@@ -6,7 +6,7 @@
import os
import re
import sys
from itertools import islice, zip_longest
from itertools import zip_longest
from typing import Dict, List, Optional
import llnl.util.tty as tty
@@ -423,8 +423,7 @@ def _run_import_check(
continue
for m in is_abs_import.finditer(contents):
# Find at most two occurences: the first is the import itself, the second is its usage.
if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
if contents.count(m.group(1)) == 1:
to_remove.append(m.group(0))
exit_code = 1
print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
@@ -439,7 +438,7 @@ def _run_import_check(
module = _module_part(root, m.group(0))
if not module or module in to_add:
continue
if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents):
if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
continue
to_add.add(module)
exit_code = 1


@@ -177,15 +177,16 @@ def test_run(args):
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
if spec and not matching:
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
"""
TODO: Need to write out a log message and/or CDASH Testing
output that package not installed IF continue to process
these issues here.
# TODO: Need to write out a log message and/or CDASH Testing
# output that package not installed IF continue to process
# these issues here.
# if args.log_format:
# # Proceed with the spec assuming the test process
# # to ensure report package as skipped (e.g., for CI)
# specs_to_test.append(spec)
if args.log_format:
# Proceed with the spec assuming the test process
# to ensure report package as skipped (e.g., for CI)
specs_to_test.append(spec)
"""
specs_to_test.extend(matching)
@@ -252,9 +253,7 @@ def has_test_and_tags(pkg_class):
hashes = env.all_hashes() if env else None
specs = spack.store.STORE.db.query(hashes=hashes)
specs = list(
filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
)
specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
spack.cmd.display_specs(specs, long=True)


@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
def _extract_os_and_target(spec: "spack.spec.Spec"):
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.default_operating_system()
target = host_platform.default_target()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target")
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().default_target()
target = spack.platforms.host().target("default_target")
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.default_operating_system()
operating_system = host_platform.operating_system("default_os")
return operating_system, target


@@ -220,7 +220,7 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
opt, i, answer = min(result.answers)
name = spec.name
# TODO: Consolidate this code with similar code in solve.py
if spack.repo.PATH.is_virtual(spec.name):
if spec.virtual:
providers = [s.name for s in answer.values() if s.package.provides(name)]
name = providers[0]


@@ -57,7 +57,7 @@ def validate(configuration_file):
# Set the default value of the concretization strategy to unify and
# warn if the user explicitly set another value
env_dict.setdefault("concretizer", {"unify": True})
if env_dict["concretizer"]["unify"] is not True:
if not env_dict["concretizer"]["unify"] is True:
warnings.warn(
'"concretizer:unify" is not set to "true", which means the '
"generated image may contain different variants of the same "


@@ -41,8 +41,6 @@
Union,
)
import spack.repo
try:
import uuid
@@ -1558,12 +1556,7 @@ def _query(
# If we did fine something, the query spec can't be virtual b/c we matched an actual
# package installation, so skip the virtual check entirely. If we *didn't* find anything,
# check all the deferred specs *if* the query is virtual.
if (
not results
and query_spec is not None
and deferred
and spack.repo.PATH.is_virtual(query_spec.name)
):
if not results and query_spec is not None and deferred and query_spec.virtual:
results = [spec for spec in deferred if spec.satisfies(query_spec)]
return results


@@ -259,8 +259,6 @@ def detect_specs(
)
return []
from spack.repo import PATH as repo_path
result = []
for candidate_path, items_in_prefix in _group_by_prefix(
llnl.util.lang.dedupe(paths)
@@ -307,10 +305,7 @@ def detect_specs(
resolved_specs[spec] = candidate_path
try:
# Validate the spec calling a package specific method
pkg_cls = repo_path.get_pkg_class(spec.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(spec, spec.extra_attributes)
spec.validate_detection()
except Exception as e:
msg = (
f'"{spec}" has been detected on the system but will '


@@ -581,7 +581,7 @@ def _error_on_nonempty_view_dir(new_root):
# Check if the target path lexists
try:
st = os.lstat(new_root)
except OSError:
except (IOError, OSError):
return
# Empty directories are fine
@@ -861,7 +861,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
):
try:
shutil.rmtree(old_root)
except OSError as e:
except (IOError, OSError) as e:
msg = "Failed to remove old view at %s\n" % old_root
msg += str(e)
tty.warn(msg)
@@ -2554,7 +2554,7 @@ def is_latest_format(manifest):
try:
with open(manifest, encoding="utf-8") as f:
data = syaml.load(f)
except OSError:
except (OSError, IOError):
return True
top_level_key = _top_level_key(data)
changed = spack.schema.env.update(data[top_level_key])
@@ -2634,32 +2634,6 @@ def _ensure_env_dir():
shutil.copy(envfile, target_manifest)
# Copy relative path includes that live inside the environment dir
try:
manifest = EnvironmentManifestFile(environment_dir)
except Exception:
# error handling for bad manifests is handled on other code paths
return
includes = manifest[TOP_LEVEL_KEY].get("include", [])
for include in includes:
if os.path.isabs(include):
continue
abspath = pathlib.Path(os.path.normpath(environment_dir / include))
common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
if common_path != environment_dir:
tty.debug(f"Will not copy relative include from outside environment: {include}")
continue
orig_abspath = os.path.normpath(envfile.parent / include)
if not os.path.exists(orig_abspath):
tty.warn(f"Included file does not exist; will not copy: '{include}'")
continue
fs.touchp(abspath)
shutil.copy(orig_abspath, abspath)
class EnvironmentManifestFile(collections.abc.Mapping):
"""Manages the in-memory representation of a manifest file, and its synchronization


@@ -187,7 +187,7 @@ def path_for_extension(target_name: str, *, paths: List[str]) -> str:
if name == target_name:
return path
else:
raise OSError('extension "{0}" not found'.format(target_name))
raise IOError('extension "{0}" not found'.format(target_name))
def get_module(cmd_name):


@@ -427,7 +427,7 @@ def needs_file(spec, file):
try:
with open(manifest_file, "r", encoding="utf-8") as f:
manifest = s_json.load(f)
except OSError:
except (OSError, IOError):
# if we can't load it, assume it doesn't know about the file.
manifest = {}
return test_path in manifest
@@ -831,7 +831,7 @@ def get_spec_from_file(filename):
try:
with open(filename, "r", encoding="utf-8") as f:
return spack.spec.Spec.from_yaml(f)
except OSError:
except IOError:
return None


@@ -42,10 +42,10 @@
import llnl.util.tty.color
import spack.deptypes as dt
import spack.repo
import spack.spec
import spack.tengine
import spack.traverse
from spack.solver.input_analysis import create_graph_analyzer
def find(seq, predicate):
@@ -537,11 +537,10 @@ def edge_entry(self, edge):
def _static_edges(specs, depflag):
for spec in specs:
*_, edges = create_graph_analyzer().possible_dependencies(
spec.name, expand_virtuals=True, allowed_deps=depflag
)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
for parent_name, dependencies in edges.items():
for parent_name, dependencies in possible.items():
for dependency_name in dependencies:
yield spack.spec.DependencySpec(
spack.spec.Spec(parent_name),


@@ -26,7 +26,7 @@ def is_shared_library_elf(filepath):
with open(filepath, "rb") as f:
elf = parse_elf(f, interpreter=True, dynamic_section=True)
return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp)
except (OSError, ElfParsingError):
except (IOError, OSError, ElfParsingError):
return False


@@ -166,7 +166,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
# Only look at executable, non-symlink files.
try:
st = os.lstat(path)
except OSError:
except (IOError, OSError):
continue
if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:


@@ -566,7 +566,7 @@ def copy_test_files(pkg: Pb, test_spec: spack.spec.Spec):
# copy test data into test stage data dir
try:
pkg_cls = spack.repo.PATH.get_pkg_class(test_spec.fullname)
pkg_cls = test_spec.package_class
except spack.repo.UnknownPackageError:
tty.debug(f"{test_spec.name}: skipping test data copy since no package class found")
return
@@ -623,7 +623,7 @@ def test_functions(
vpkgs = virtuals(pkg)
for vname in vpkgs:
try:
classes.append(spack.repo.PATH.get_pkg_class(vname))
classes.append((Spec(vname)).package_class)
except spack.repo.UnknownPackageError:
tty.debug(f"{vname}: virtual does not appear to have a package file")
@@ -668,7 +668,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
# grab test functions associated with the spec, which may be virtual
try:
tests = test_functions(spack.repo.PATH.get_pkg_class(spec.fullname))
tests = test_functions(spec.package_class)
except spack.repo.UnknownPackageError:
# Some virtuals don't have a package so we don't want to report
# them as not having tests when that isn't appropriate.


@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
# Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested.
if include_build_deps or not (
cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
):
depflag |= dt.BUILD
if self.run_tests(pkg):
@@ -2436,7 +2436,11 @@ def _real_install(self) -> None:
# DEBUGGING TIP - to debug this section, insert an IPython
# embed here, and run the sections below without log capture
log_contextmanager = log_output(
log_file, self.echo, True, filter_fn=self.filter_fn
log_file,
self.echo,
True,
env=self.unmodified_env,
filter_fn=self.filter_fn,
)
with log_contextmanager as logger:


@@ -163,7 +163,7 @@ def format_help_sections(self, level):
# lazily add all commands to the parser when needed.
add_all_commands(self)
# Print help on subcommands in neatly formatted sections.
"""Print help on subcommands in neatly formatted sections."""
formatter = self._get_formatter()
# Create a list of subcommand actions. Argparse internals are nasty!
@@ -728,7 +728,7 @@ def _compatible_sys_types():
with the current host.
"""
host_platform = spack.platforms.host()
host_os = str(host_platform.default_operating_system())
host_os = str(host_platform.operating_system("default_os"))
host_target = archspec.cpu.host()
compatible_targets = [host_target] + host_target.ancestors


@@ -64,7 +64,7 @@ def from_local_path(path: str):
@staticmethod
def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL."""
if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError(
f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}."


@@ -209,7 +209,7 @@ def provides(self):
# All the other tokens in the hierarchy must be virtual dependencies
for x in self.hierarchy_tokens:
if self.spec.package.provides(x):
provides[x] = self.spec
provides[x] = self.spec[x]
return provides
@property


@@ -383,7 +383,6 @@ def create_opener():
"""Create an opener that can handle OCI authentication."""
opener = urllib.request.OpenerDirector()
for handler in [
urllib.request.ProxyHandler(),
urllib.request.UnknownHandler(),
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
spack.util.web.SpackHTTPDefaultErrorHandler(),


@@ -2,64 +2,31 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# flake8: noqa: F401, E402
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
other modules. Packages should import this module, instead of importing from spack.* directly
to ensure forward compatibility with future versions of Spack."""
# flake8: noqa: F401
"""spack.util.package is a set of useful build tools and directives for packages.
Everything in this module is automatically imported into Spack package files.
"""
from os import chdir, environ, getcwd, makedirs, mkdir, remove, removedirs
from shutil import move, rmtree
from spack.error import InstallError, NoHeadersError, NoLibrariesError
# Emulate some shell commands for convenience
env = environ
cd = chdir
pwd = getcwd
# import most common types used in packages
from typing import Dict, List, Optional
class tty:
import llnl.util.tty as _tty
debug = _tty.debug
error = _tty.error
info = _tty.info
msg = _tty.msg
warn = _tty.warn
from llnl.util.filesystem import (
FileFilter,
FileList,
HeaderList,
LibraryList,
ancestor,
can_access,
change_sed_delimiter,
copy,
copy_tree,
filter_file,
find,
find_all_headers,
find_first,
find_headers,
find_libraries,
find_system_libraries,
force_remove,
force_symlink,
install,
install_tree,
is_exe,
join_path,
keep_modification_time,
library_extensions,
mkdirp,
remove_directory_contents,
remove_linked_tree,
rename,
set_executable,
set_install_permissions,
touch,
working_dir,
)
import llnl.util.filesystem
from llnl.util.filesystem import *
from llnl.util.symlink import symlink
import spack.util.executable
# These props will be overridden when the build env is set up.
from spack.build_environment import MakeExecutable
from spack.build_systems.aspell_dict import AspellDictPackage
from spack.build_systems.autotools import AutotoolsPackage
@@ -109,24 +76,7 @@ class tty:
from spack.builder import BaseBuilder
from spack.config import determine_number_of_jobs
from spack.deptypes import ALL_TYPES as all_deptypes
from spack.directives import (
build_system,
can_splice,
conditional,
conflicts,
depends_on,
extends,
license,
maintainers,
patch,
provides,
redistribute,
requires,
resource,
variant,
version,
)
from spack.error import InstallError, NoHeadersError, NoLibrariesError
from spack.directives import *
from spack.install_test import (
SkipTest,
cache_extra_test_sources,
@@ -136,28 +86,26 @@ class tty:
install_test_root,
test_part,
)
from spack.installer import ExternalPackageError, InstallLockError, UpstreamPackageError
from spack.mixins import filter_compiler_wrappers
from spack.multimethod import default_args, when
from spack.package_base import build_system_flags, env_flags, inject_flags, on_package_attributes
from spack.package_completions import (
bash_completion_path,
fish_completion_path,
zsh_completion_path,
from spack.package_base import (
DependencyConflictError,
build_system_flags,
env_flags,
flatten_dependencies,
inject_flags,
install_dependency_symlinks,
on_package_attributes,
)
from spack.package_completions import *
from spack.phase_callbacks import run_after, run_before
from spack.spec import Spec
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable, ProcessError, which, which_string
from spack.spec import InvalidSpecDetected, Spec
from spack.util.executable import *
from spack.util.filesystem import fix_darwin_install_name
from spack.util.prefix import Prefix
from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
from spack.version import Version, ver
# Emulate some shell commands for convenience
env = environ
cd = chdir
pwd = getcwd
# These are just here for editor support; they may be set when the build env is set up.
configure: Executable
make_jobs: int

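A hedged sketch of the import pattern this module is meant to support: a package file pulling everything it needs from spack.package; the package name, URL, and checksum are placeholders:

from spack.package import *


class Qux(Package):
    """Hypothetical package relying only on names re-exported by spack.package."""

    homepage = "https://example.com/qux"
    url = "https://example.com/qux-1.0.tar.gz"

    version("1.0", sha256="0" * 64)

    def install(self, spec, prefix):
        make = which("make")  # re-exported from spack.util.executable
        make("install", "PREFIX=" + prefix)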

@@ -22,6 +22,7 @@
import textwrap
import time
import traceback
import typing
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
from typing_extensions import Literal
@@ -29,6 +30,7 @@
import llnl.util.filesystem as fsys
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
from llnl.util.link_tree import LinkTree
import spack.compilers
import spack.config
@@ -65,6 +67,10 @@
]
FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
#: Filename for the Spack build/install log.
_spack_build_logfile = "spack-build-out.txt"
@@ -696,6 +702,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Verbosity level, preserved across installs.
_verbose = None
#: index of patches by sha256 sum, built lazily
_patches_by_hash = None
#: Package homepage where users can find more information about the package
homepage: Optional[str] = None
@@ -709,6 +718,19 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Do not include @ here in order not to unnecessarily ping the users.
maintainers: List[str] = []
#: List of attributes to be excluded from a package's hash.
metadata_attrs = [
"homepage",
"url",
"urls",
"list_url",
"extendable",
"parallel",
"make_jobs",
"maintainers",
"tags",
]
#: Set to ``True`` to indicate the stand-alone test requires a compiler.
#: It is used to ensure a compiler and build dependencies like 'cmake'
#: are available to build a custom test code.
@@ -808,6 +830,104 @@ def get_variant(self, name: str) -> spack.variant.Variant:
except StopIteration:
raise ValueError(f"No variant '{name}' on spec: {self.spec}")
@classmethod
def possible_dependencies(
cls,
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
visited: Optional[dict] = None,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Return dict of possible dependencies of this package.
Args:
transitive (bool or None): return all transitive dependencies if
True, only direct dependencies if False (default True)..
expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True)
depflag: dependency types to consider
visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict or None): dict to populate with packages and their
*missing* dependencies.
virtuals (set): if provided, populate with virtuals seen so far.
Returns:
(dict): dictionary mapping dependency names to *their*
immediate dependencies
Each item in the returned dictionary maps a (potentially
transitive) dependency of this package to its possible
*immediate* dependencies. If ``expand_virtuals`` is ``False``,
virtual package names wil be inserted as keys mapped to empty
sets of dependencies. Virtuals, if not expanded, are treated as
though they have no immediate dependencies.
Missing dependencies by default are ignored, but if a
missing dict is provided, it will be populated with package names
mapped to any dependencies they have that are in no
repositories. This is only populated if transitive is True.
Note: the returned dict *includes* the package itself.
"""
visited = {} if visited is None else visited
missing = {} if missing is None else missing
visited.setdefault(cls.name, set())
for name, conditions in cls.dependencies_by_name(when=True).items():
# check whether this dependency could be of the type asked for
depflag_union = 0
for deplist in conditions.values():
for dep in deplist:
depflag_union |= dep.depflag
if not (depflag & depflag_union):
continue
# expand virtuals if enabled, otherwise just stop at virtuals
if spack.repo.PATH.is_virtual(name):
if virtuals is not None:
virtuals.add(name)
if expand_virtuals:
providers = spack.repo.PATH.providers_for(name)
dep_names = [spec.name for spec in providers]
else:
visited.setdefault(cls.name, set()).add(name)
visited.setdefault(name, set())
continue
else:
dep_names = [name]
# add the dependency names to the visited dict
visited.setdefault(cls.name, set()).update(set(dep_names))
# recursively traverse dependencies
for dep_name in dep_names:
if dep_name in visited:
continue
visited.setdefault(dep_name, set())
# skip the rest if not transitive
if not transitive:
continue
try:
dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
except spack.repo.UnknownPackageError:
# log unknown packages
missing.setdefault(cls.name, set()).add(dep_name)
continue
dep_cls.possible_dependencies(
transitive, expand_virtuals, depflag, visited, missing, virtuals
)
return visited
@classproperty
def package_dir(cls):
"""Directory where the package.py file lives."""
@@ -2172,6 +2292,85 @@ def rpath_args(self):
build_system_flags = PackageBase.build_system_flags
def install_dependency_symlinks(pkg, spec, prefix):
"""
Execute a dummy install and flatten dependencies.
This routine can be used in a ``package.py`` definition by setting
``install = install_dependency_symlinks``.
This feature comes in handy for creating a common location for the
the installation of third-party libraries.
"""
flatten_dependencies(spec, prefix)
def use_cray_compiler_names():
"""Compiler names for builds that rely on cray compiler names."""
os.environ["CC"] = "cc"
os.environ["CXX"] = "CC"
os.environ["FC"] = "ftn"
os.environ["F77"] = "ftn"
def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False):
name = dep.name
dep_path = spack.store.STORE.layout.path_for_spec(dep)
dep_files = LinkTree(dep_path)
os.mkdir(flat_dir + "/" + name)
conflict = dep_files.find_conflict(flat_dir + "/" + name)
if conflict:
raise DependencyConflictError(conflict)
dep_files.merge(flat_dir + "/" + name)
def possible_dependencies(
*pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Get the possible dependencies of a number of packages.
See ``PackageBase.possible_dependencies`` for details.
"""
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
packages.append(pos)
continue
if not isinstance(pos, spack.spec.Spec):
pos = spack.spec.Spec(pos)
if spack.repo.PATH.is_virtual(pos.name):
packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
continue
else:
packages.append(pos.package_class)
visited: Dict[str, Set[str]] = {}
for pkg in packages:
pkg.possible_dependencies(
visited=visited,
transitive=transitive,
expand_virtuals=expand_virtuals,
depflag=depflag,
missing=missing,
virtuals=virtuals,
)
return visited
def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
"""Return True iff the version is deprecated.

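An illustrative call to the module-level possible_dependencies helper shown in the hunk above; "zlib" is just an example package name:

import spack.package_base

deps = spack.package_base.possible_dependencies("zlib", transitive=True, expand_virtuals=True)
for name, immediate in sorted(deps.items()):
    # each key maps a (possibly transitive) dependency to its immediate dependencies
    print(name, "->", ", ".join(sorted(immediate)))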

@@ -52,7 +52,8 @@ def use_platform(new_platform):
import spack.config
assert isinstance(new_platform, Platform), f'"{new_platform}" must be an instance of Platform'
msg = '"{0}" must be an instance of Platform'
assert isinstance(new_platform, Platform), msg.format(new_platform)
original_host_fn = host


@@ -1,22 +1,42 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import warnings
from typing import Optional
import archspec.cpu
import llnl.util.lang
import spack.error
class NoPlatformError(spack.error.SpackError):
def __init__(self):
msg = "Could not determine a platform for this machine"
super().__init__(msg)
@llnl.util.lang.lazy_lexicographic_ordering
class Platform:
"""Platform is an abstract class extended by subclasses.
To add a new type of platform (such as cray_xe), create a subclass and set all the
class attributes such as priority, front_target, back_target, front_os, back_os.
Platform also contain a priority class attribute. A lower number signifies higher
priority. These numbers are arbitrarily set and can be changed though often there
isn't much need unless a new platform is added and the user wants that to be
detected first.
Targets are created inside the platform subclasses. Most architecture (like linux,
and darwin) will have only one target family (x86_64) but in the case of Cray
machines, there is both a frontend and backend processor. The user can specify
which targets are present on front-end and back-end architecture.
Depending on the platform, operating systems are either autodetected or are
set. The user can set the frontend and backend operating setting by the class
attributes front_os and back_os. The operating system will be responsible for
compiler detection.
"""
# Subclass sets number. Controls detection order
@@ -25,72 +45,82 @@ class Platform:
#: binary formats used on this platform; used by relocation logic
binary_formats = ["elf"]
default: str
default_os: str
front_end: Optional[str] = None
back_end: Optional[str] = None
default: Optional[str] = None # The default back end target.
front_os: Optional[str] = None
back_os: Optional[str] = None
default_os: Optional[str] = None
reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
deprecated_names = ["frontend", "fe", "backend", "be"]
def __init__(self, name):
self.targets = {}
self.operating_sys = {}
self.name = name
self._init_targets()
def add_target(self, name: str, target: archspec.cpu.Microarchitecture) -> None:
"""Used by the platform specific subclass to list available targets.
Raises an error if the platform specifies a name
that is reserved by spack as an alias.
"""
if name in Platform.reserved_targets:
msg = f"{name} is a spack reserved alias and cannot be the name of a target"
raise ValueError(msg)
msg = "{0} is a spack reserved alias and cannot be the name of a target"
raise ValueError(msg.format(name))
self.targets[name] = target
def _init_targets(self):
self.default = archspec.cpu.host().name
def _add_archspec_targets(self):
for name, microarchitecture in archspec.cpu.TARGETS.items():
self.add_target(name, microarchitecture)
def target(self, name):
"""This is a getter method for the target dictionary
that handles defaulting based on the values provided by default,
front-end, and back-end. This can be overridden
by a subclass for which we want to provide further aliasing options.
"""
# TODO: Check if we can avoid using strings here
name = str(name)
if name in Platform.deprecated_names:
warnings.warn(f"target={name} is deprecated, use target={self.default} instead")
if name in Platform.reserved_targets:
if name == "default_target":
name = self.default
elif name == "frontend" or name == "fe":
name = self.front_end
elif name == "backend" or name == "be":
name = self.back_end
return self.targets.get(name, None)
def add_operating_system(self, name, os_class):
if name in Platform.reserved_oss + Platform.deprecated_names:
msg = f"{name} is a spack reserved alias and cannot be the name of an OS"
raise ValueError(msg)
"""Add the operating_system class object into the
platform.operating_sys dictionary.
"""
if name in Platform.reserved_oss:
msg = "{0} is a spack reserved alias and cannot be the name of an OS"
raise ValueError(msg.format(name))
self.operating_sys[name] = os_class
def default_target(self):
return self.target(self.default)
def default_operating_system(self):
return self.operating_system(self.default_os)
def operating_system(self, name):
if name in Platform.deprecated_names:
warnings.warn(f"os={name} is deprecated, use os={self.default_os} instead")
if name in Platform.reserved_oss:
if name == "default_os":
name = self.default_os
if name == "frontend" or name == "fe":
name = self.front_os
if name == "backend" or name == "be":
name = self.back_os
return self.operating_sys.get(name, None)
def setup_platform_environment(self, pkg, env):
"""Platform-specific build environment modifications.
This method is meant to be overridden by subclasses, when needed.
"""Subclass can override this method if it requires any
platform-specific build environment modifications.
"""
pass
@classmethod
def detect(cls):
"""Returns True if the host platform is detected to be the current Platform class,
False otherwise.
"""Return True if the the host platform is detected to be the current
Platform class, False otherwise.
Derived classes are responsible for implementing this method.
"""
@@ -105,7 +135,11 @@ def __str__(self):
def _cmp_iter(self):
yield self.name
yield self.default
yield self.front_end
yield self.back_end
yield self.default_os
yield self.front_os
yield self.back_os
def targets():
for t in sorted(self.targets.values()):

View File

@@ -4,6 +4,8 @@
import platform as py_platform
import archspec.cpu
from spack.operating_systems.mac_os import MacOs
from spack.version import Version
@@ -17,8 +19,18 @@ class Darwin(Platform):
def __init__(self):
super().__init__("darwin")
self._add_archspec_targets()
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default
mac_os = MacOs()
self.default_os = str(mac_os)
self.front_os = str(mac_os)
self.back_os = str(mac_os)
self.add_operating_system(str(mac_os), mac_os)
@classmethod

View File

@@ -3,6 +3,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform
import archspec.cpu
from spack.operating_systems.freebsd import FreeBSDOs
from ._platform import Platform
@@ -13,8 +15,18 @@ class FreeBSD(Platform):
def __init__(self):
super().__init__("freebsd")
self._add_archspec_targets()
# Get specific default
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default
os = FreeBSDOs()
self.default_os = str(os)
self.front_os = self.default_os
self.back_os = self.default_os
self.add_operating_system(str(os), os)
@classmethod

View File

@@ -3,6 +3,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform
import archspec.cpu
from spack.operating_systems.linux_distro import LinuxDistro
from ._platform import Platform
@@ -13,8 +15,18 @@ class Linux(Platform):
def __init__(self):
super().__init__("linux")
self._add_archspec_targets()
# Get specific default
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default
linux_dist = LinuxDistro()
self.default_os = str(linux_dist)
self.front_os = self.default_os
self.back_os = self.default_os
self.add_operating_system(str(linux_dist), linux_dist)
@classmethod

View File

@@ -16,19 +16,31 @@ class Test(Platform):
if platform.system().lower() == "darwin":
binary_formats = ["macho"]
if platform.machine() == "arm64":
front_end = "aarch64"
back_end = "m1"
default = "m1"
else:
front_end = "x86_64"
back_end = "core2"
default = "core2"
front_os = "redhat6"
back_os = "debian6"
default_os = "debian6"
default = "m1" if platform.machine() == "arm64" else "core2"
def __init__(self, name=None):
name = name or "test"
super().__init__(name)
self.add_operating_system("debian6", spack.operating_systems.OperatingSystem("debian", 6))
self.add_operating_system("redhat6", spack.operating_systems.OperatingSystem("redhat", 6))
self.add_target(self.default, archspec.cpu.TARGETS[self.default])
self.add_target(self.front_end, archspec.cpu.TARGETS[self.front_end])
def _init_targets(self):
targets = ("aarch64", "m1") if platform.machine() == "arm64" else ("x86_64", "core2")
for t in targets:
self.add_target(t, archspec.cpu.TARGETS[t])
self.add_operating_system(
self.default_os, spack.operating_systems.OperatingSystem("debian", 6)
)
self.add_operating_system(
self.front_os, spack.operating_systems.OperatingSystem("redhat", 6)
)
@classmethod
def detect(cls):

View File

@@ -4,6 +4,8 @@
import platform
import archspec.cpu
from spack.operating_systems.windows_os import WindowsOs
from ._platform import Platform
@@ -14,8 +16,18 @@ class Windows(Platform):
def __init__(self):
super().__init__("windows")
self._add_archspec_targets()
self.default = archspec.cpu.host().name
self.front_end = self.default
self.back_end = self.default
windows_os = WindowsOs()
self.default_os = str(windows_os)
self.front_os = str(windows_os)
self.back_os = str(windows_os)
self.add_operating_system(str(windows_os), windows_os)
@classmethod

View File

@@ -236,15 +236,22 @@ def relocate_elf_binaries(binaries: Iterable[str], prefix_to_prefix: Dict[str, s
_set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
def _warn_if_link_cant_be_relocated(link: str, target: str):
if not os.path.isabs(target):
return
tty.warn(f'Symbolic link at "{link}" to "{target}" cannot be relocated')
def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
"""Relocate links to a new install prefix."""
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
for link in links:
old_target = readlink(link)
if not os.path.isabs(old_target):
continue
match = regex.match(old_target)
# No match.
if match is None:
_warn_if_link_cant_be_relocated(link, old_target)
continue
new_target = prefix_to_prefix[match.group()] + old_target[match.end() :]
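
The prefix matching above can be illustrated with a tiny standalone example (paths invented):

import re

prefix_to_prefix = {"/old/store/gcc-12": "/new/store/gcc-12"}
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix))
old_target = "/old/store/gcc-12/lib/libstdc++.so"
match = regex.match(old_target)
if match:
    new_target = prefix_to_prefix[match.group()] + old_target[match.end():]
    # new_target == "/new/store/gcc-12/lib/libstdc++.so"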
@@ -406,8 +413,8 @@ def fixup_macos_rpaths(spec):
entries which makes it harder to adjust with ``install_name_tool
-delete_rpath``.
"""
if spec.external or not spec.concrete:
tty.warn("external/abstract spec cannot be fixed up: {0!s}".format(spec))
if spec.external or spec.virtual:
tty.warn("external or virtual package cannot be fixed up: {0!s}".format(spec))
return False
if "platform=darwin" not in spec:

View File

@@ -1041,7 +1041,7 @@ def _read_config(self) -> Dict[str, str]:
return yaml_data["repo"]
except OSError:
except IOError:
tty.die(f"Error reading {self.config_file} when opening {self.root}")
def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
@@ -1369,7 +1369,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
if subdir != packages_dir_name:
config.write(f" subdirectory: '{subdir}'\n")
except OSError as e:
except (IOError, OSError) as e:
# try to clean up.
if existed:
shutil.rmtree(config_path, ignore_errors=True)

View File

@@ -1,7 +1,6 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import collections
import hashlib
import os
@@ -14,7 +13,7 @@
import xml.sax.saxutils
from typing import Dict, Optional
from urllib.parse import urlencode
from urllib.request import Request
from urllib.request import HTTPSHandler, Request, build_opener
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
@@ -25,10 +24,10 @@
import spack.spec
import spack.tengine
import spack.util.git
import spack.util.web as web_util
from spack.error import SpackError
from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events
from spack.util.web import ssl_create_default_context
from .base import Reporter
from .extract import extract_test_parts
@@ -107,7 +106,7 @@ def __init__(self, configuration: CDashConfiguration):
self.site = configuration.site or socket.gethostname()
self.osname = platform.system()
self.osrelease = platform.release()
self.target = spack.platforms.host().default_target()
self.target = spack.platforms.host().target("default_target")
self.starttime = int(time.time())
self.endtime = self.starttime
self.buildstamp = (
@@ -434,6 +433,7 @@ def upload(self, filename):
# Compute md5 checksum for the contents of this file.
md5sum = checksum(hashlib.md5, filename, block_size=8192)
opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
with open(filename, "rb") as f:
params_dict = {
"build": self.buildname,
@@ -443,21 +443,26 @@ def upload(self, filename):
}
encoded_params = urlencode(params_dict)
url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
request = Request(url, data=f, method="PUT")
request = Request(url, data=f)
request.add_header("Content-Type", "text/xml")
request.add_header("Content-Length", os.path.getsize(filename))
if self.authtoken:
request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
try:
response = web_util.urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
# By default, urllib2 only support GET and POST.
# CDash expects this file to be uploaded via PUT.
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
if self.current_package_name not in self.buildIds:
resp_value = codecs.getreader("utf-8")(response).read()
resp_value = response.read()
if isinstance(resp_value, bytes):
resp_value = resp_value.decode("utf-8")
match = self.buildid_regexp.search(resp_value)
if match:
buildid = match.group(1)
self.buildIds[self.current_package_name] = buildid
except Exception as e:
print(f"Upload to CDash failed: {e}")
print("Upload to CDash failed: {0}".format(e))
def finalize_report(self):
if self.buildIds:

View File

@@ -87,7 +87,6 @@
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
"static_analysis": {"type": "boolean"},
"timeout": {"type": "integer", "minimum": 0},
"error_on_timeout": {"type": "boolean"},
"os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},

View File

@@ -6,7 +6,6 @@
import copy
import enum
import functools
import io
import itertools
import os
import pathlib
@@ -63,7 +62,7 @@
parse_files,
parse_term,
)
from .input_analysis import create_counter, create_graph_analyzer
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
from .requirements import RequirementKind, RequirementParser, RequirementRule
from .version_order import concretization_version_order
@@ -74,19 +73,17 @@
#: Enable the addition of a runtime node
WITH_RUNTIME = sys.platform != "win32"
class OutputConfiguration(NamedTuple):
"""Data class that contains configuration on what a clingo solve should output."""
#: Print out coarse timers for different solve phases
timers: bool
#: Whether to output Clingo's internal solver statistics
stats: bool
#: Optional output stream for the generated ASP program
out: Optional[io.IOBase]
#: If True, stop after setup and don't solve
setup_only: bool
#: Data class that contains configuration on what a
#: clingo solve should output.
#:
#: Args:
#: timers (bool): Print out coarse timers for different solve phases.
#: stats (bool): Whether to output Clingo's internal solver statistics.
#: out: Optional output stream for the generated ASP program.
#: setup_only (bool): if True, stop after setup and don't solve (default False).
OutputConfiguration = collections.namedtuple(
"OutputConfiguration", ["timers", "stats", "out", "setup_only"]
)
#: Default output configuration for a solve
DEFAULT_OUTPUT_CONFIGURATION = OutputConfiguration(
@@ -274,6 +271,15 @@ def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunc
return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
def _create_counter(specs: List[spack.spec.Spec], tests: bool):
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
if strategy == "full":
return FullDuplicatesCounter(specs, tests=tests)
if strategy == "minimal":
return MinimalDuplicatesCounter(specs, tests=tests)
return NoDuplicatesCounter(specs, tests=tests)
def all_libcs() -> Set[spack.spec.Spec]:
"""Return a set of all libc specs targeted by any configured compiler. If none, fall back to
libc determined from the current Python process if dynamically linked."""
@@ -501,7 +507,7 @@ def _compute_specs_from_answer_set(self):
# The specs must be unified to get here, so it is safe to associate any satisfying spec
# with the input. Multiple inputs may be matched to the same concrete spec
node = SpecBuilder.make_node(pkg=input_spec.name)
if spack.repo.PATH.is_virtual(input_spec.name):
if input_spec.virtual:
providers = [
spec.name for spec in answer.values() if spec.package.provides(input_spec.name)
]
@@ -1115,8 +1121,6 @@ class SpackSolverSetup:
"""Class to set up and run a Spack concretization solve."""
def __init__(self, tests: bool = False):
self.possible_graph = create_graph_analyzer()
# these are all initialized in setup()
self.gen: "ProblemInstanceBuilder" = ProblemInstanceBuilder()
self.requirement_parser = RequirementParser(spack.config.CONFIG)
@@ -2083,11 +2087,7 @@ def _spec_clauses(
f: Union[Type[_Head], Type[_Body]] = _Body if body else _Head
if spec.name:
clauses.append(
f.node(spec.name)
if not spack.repo.PATH.is_virtual(spec.name)
else f.virtual_node(spec.name)
)
clauses.append(f.node(spec.name) if not spec.virtual else f.virtual_node(spec.name))
if spec.namespace:
clauses.append(f.namespace(spec.name, spec.namespace))
@@ -2114,7 +2114,7 @@ def _spec_clauses(
for value in variant.value_as_tuple:
# ensure that the value *can* be valid for the spec
if spec.name and not spec.concrete and not spack.repo.PATH.is_virtual(spec.name):
if spec.name and not spec.concrete and not spec.virtual:
variant_defs = vt.prevalidate_variant_value(
self.pkg_class(spec.name), variant, spec
)
@@ -2397,20 +2397,38 @@ def keyfun(os):
def target_defaults(self, specs):
"""Add facts about targets and target compatibility."""
self.gen.h2("Default target")
platform = spack.platforms.host()
uarch = archspec.cpu.TARGETS.get(platform.default)
self.gen.h2("Target compatibility")
# Add targets explicitly requested from specs
candidate_targets = []
for x in self.possible_graph.candidate_targets():
if all(
self.possible_graph.unreachable(pkg_name=pkg_name, when_spec=f"target={x}")
for pkg_name in self.pkgs
):
tty.debug(f"[{__name__}] excluding target={x}, cause no package can use it")
continue
candidate_targets.append(x)
# Construct the list of targets which are compatible with the host
candidate_targets = [uarch] + uarch.ancestors
host_compatible = spack.config.CONFIG.get("concretizer:targets:host_compatible")
# Get configuration options
granularity = spack.config.get("concretizer:targets:granularity")
host_compatible = spack.config.get("concretizer:targets:host_compatible")
# Add targets which are not compatible with the current host
if not host_compatible:
additional_targets_in_family = sorted(
[
t
for t in archspec.cpu.TARGETS.values()
if (t.family.name == uarch.family.name and t not in candidate_targets)
],
key=lambda x: len(x.ancestors),
reverse=True,
)
candidate_targets += additional_targets_in_family
# Check if we want only generic architecture
if granularity == "generic":
candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]
# Add targets explicitly requested from specs
for spec in specs:
if not spec.architecture or not spec.architecture.target:
continue
@@ -2426,8 +2444,6 @@ def target_defaults(self, specs):
if ancestor not in candidate_targets:
candidate_targets.append(ancestor)
platform = spack.platforms.host()
uarch = archspec.cpu.TARGETS.get(platform.default)
best_targets = {uarch.family.name}
for compiler_id, known_compiler in enumerate(self.possible_compilers):
if not known_compiler.available:
@@ -2485,6 +2501,7 @@ def target_defaults(self, specs):
self.gen.newline()
self.default_targets = list(sorted(set(self.default_targets)))
self.target_preferences()
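
The target bookkeeping above is easier to follow with a small archspec sketch; the concrete names depend on the machine the snippet runs on:

import archspec.cpu

uarch = archspec.cpu.host()                    # host microarchitecture, e.g. zen2
candidates = [uarch] + uarch.ancestors         # host target plus everything it can run
generic_only = [t for t in candidates if t.vendor == "generic"]   # e.g. x86_64_v3 ... x86_64
same_family = [
    t for t in archspec.cpu.TARGETS.values() if t.family.name == uarch.family.name
]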
def virtual_providers(self):
@@ -2588,14 +2605,7 @@ def define_variant_values(self):
# Tell the concretizer about possible values from specs seen in spec_clauses().
# We might want to order these facts by pkg and name if we are debugging.
for pkg_name, variant_def_id, value in self.variant_values_from_specs:
try:
vid = self.variant_ids_by_def_id[variant_def_id]
except KeyError:
tty.debug(
f"[{__name__}] cannot retrieve id of the {value} variant from {pkg_name}"
)
continue
vid = self.variant_ids_by_def_id[variant_def_id]
self.gen.fact(fn.pkg_fact(pkg_name, fn.variant_possible_value(vid, value)))
def register_concrete_spec(self, spec, possible):
@@ -2666,7 +2676,7 @@ def setup(
"""
check_packages_exist(specs)
node_counter = create_counter(specs, tests=self.tests, possible_graph=self.possible_graph)
node_counter = _create_counter(specs, tests=self.tests)
self.possible_virtuals = node_counter.possible_virtuals()
self.pkgs = node_counter.possible_dependencies()
self.libcs = sorted(all_libcs()) # type: ignore[type-var]
@@ -2674,9 +2684,7 @@ def setup(
# Fail if we already know an unreachable node is requested
for spec in specs:
missing_deps = [
str(d)
for d in spec.traverse()
if d.name not in self.pkgs and not spack.repo.PATH.is_virtual(d.name)
str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
]
if missing_deps:
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
@@ -2893,11 +2901,7 @@ def literal_specs(self, specs):
pkg_name = clause.args[1]
self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
requirements.append(
fn.attr(
"virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
)
)
requirements.append(fn.attr("virtual_root" if spec.virtual else "root", spec.name))
cache[imposed_spec_key] = (effect_id, requirements)
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))
@@ -3485,7 +3489,7 @@ def external_spec_selected(self, node, idx):
self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})
# If this is an extension, update the dependencies to include the extendee
package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
package = self._specs[node].package_class(self._specs[node])
extendee_spec = package.extendee_spec
if extendee_spec:
@@ -4098,10 +4102,10 @@ def _check_input_and_extract_concrete_specs(specs):
reusable = []
for root in specs:
for s in root.traverse():
if s.virtual:
continue
if s.concrete:
reusable.append(s)
elif spack.repo.PATH.is_virtual(s.name):
continue
spack.spec.Spec.ensure_valid_variants(s)
return reusable

View File

@@ -265,7 +265,6 @@ error(100, "Cannot select a single version for virtual '{0}'", Virtual)
% If we select a deprecated version, mark the package as deprecated
attr("deprecated", node(ID, Package), Version) :-
attr("version", node(ID, Package), Version),
not external(node(ID, Package)),
pkg_fact(Package, deprecated_version(Version)).
error(100, "Package '{0}' needs the deprecated version '{1}', and this is not allowed", Package, Version)

View File

@@ -0,0 +1,179 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set
from llnl.util import lang
import spack.deptypes as dt
import spack.package_base
import spack.repo
import spack.spec
PossibleDependencies = Set[str]
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
runtime_virtuals = set()
for x in runtime_pkgs:
pkg_class = spack.repo.PATH.get_pkg_class(x)
runtime_virtuals.update(pkg_class.provided_virtual_names())
self.specs = specs + [spack.spec.Spec(x) for x in runtime_pkgs]
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: PossibleDependencies = set()
self._possible_virtuals: Set[str] = (
set(x.name for x in specs if x.virtual) | runtime_virtuals
)
def possible_dependencies(self) -> PossibleDependencies:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self):
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self):
result = spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.all_types
)
self._possible_dependencies = set(result)
def possible_packages_facts(self, gen, fn):
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(self, specs, tests):
super().__init__(specs, tests)
self._link_run: PossibleDependencies = set()
self._direct_build: PossibleDependencies = set()
self._total_build: PossibleDependencies = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self):
self._link_run = set(
spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.link_run_types
)
)
self._link_run_virtuals.update(self._possible_virtuals)
for x in self._link_run:
build_dependencies = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD)
virtuals, reals = lang.stable_partition(
build_dependencies, spack.repo.PATH.is_virtual_safe
)
self._possible_virtuals.update(virtuals)
for virtual_dep in virtuals:
providers = spack.repo.PATH.providers_for(virtual_dep)
self._direct_build.update(str(x) for x in providers)
self._direct_build.update(reals)
self._total_build = set(
spack.package_base.possible_dependencies(
*self._direct_build, virtuals=self._possible_virtuals, depflag=self.all_types
)
)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with at multiple possible nodes (build-tools)")
for package_name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.max_dupes(package_name, 2))
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
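
As a rough sketch of how these counter classes are driven during solver setup (the spec is invented, and the module path is inferred from the relative import in the asp changes above):

import spack.spec
from spack.solver.counter import MinimalDuplicatesCounter

counter = MinimalDuplicatesCounter([spack.spec.Spec("hdf5+mpi")], tests=False)
packages = counter.possible_dependencies()   # all package names the DAG may contain
virtuals = counter.possible_virtuals()       # virtuals (e.g. "mpi") that still need providers
# counter.possible_packages_facts(gen, fn) later emits max_dupes/possible_in_link_run facts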

View File

@@ -1,526 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes to analyze the input of a solve, and provide information to set up the ASP problem"""
import collections
from typing import Dict, List, NamedTuple, Set, Tuple, Union
import archspec.cpu
from llnl.util import lang, tty
import spack.binary_distribution
import spack.config
import spack.deptypes as dt
import spack.platforms
import spack.repo
import spack.spec
import spack.store
from spack.error import SpackError
RUNTIME_TAG = "runtime"
class PossibleGraph(NamedTuple):
real_pkgs: Set[str]
virtuals: Set[str]
edges: Dict[str, Set[str]]
class PossibleDependencyGraph:
"""Returns information needed to set up an ASP problem"""
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
raise NotImplementedError
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
raise NotImplementedError
def possible_dependencies(
self,
*specs: Union[spack.spec.Spec, str],
allowed_deps: dt.DepFlag,
transitive: bool = True,
strict_depflag: bool = False,
expand_virtuals: bool = True,
) -> PossibleGraph:
"""Returns the set of possible dependencies, and the set of possible virtuals.
Both sets always include runtime packages, which may be injected by compilers.
Args:
transitive: return transitive dependencies if True, only direct dependencies if False
allowed_deps: dependency types to consider
strict_depflag: if True, only the specific dep type is considered, if False any
deptype that intersects with allowed deptype is considered
expand_virtuals: expand virtual dependencies into all possible implementations
"""
raise NotImplementedError
class NoStaticAnalysis(PossibleDependencyGraph):
"""Implementation that tries to minimize the setup time (i.e. defaults to give fast
answers), rather than trying to reduce the ASP problem size with more complex analysis.
"""
def __init__(self, *, configuration: spack.config.Configuration, repo: spack.repo.RepoPath):
self.configuration = configuration
self.repo = repo
self.runtime_pkgs = set(self.repo.packages_with_tags(RUNTIME_TAG))
self.runtime_virtuals = set()
self._platform_condition = spack.spec.Spec(
f"platform={spack.platforms.host()} target={archspec.cpu.host().family}:"
)
for x in self.runtime_pkgs:
pkg_class = self.repo.get_pkg_class(x)
self.runtime_virtuals.update(pkg_class.provided_virtual_names())
try:
self.libc_pkgs = [x.name for x in self.providers_for("libc")]
except spack.repo.UnknownPackageError:
self.libc_pkgs = []
def is_virtual(self, name: str) -> bool:
return self.repo.is_virtual(name)
@lang.memoized
def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
"""Returns true if a package is allowed on the current host"""
pkg_cls = self.repo.get_pkg_class(pkg_name)
for when_spec, conditions in pkg_cls.requirements.items():
if not when_spec.intersects(self._platform_condition):
continue
for requirements, _, _ in conditions:
if not any(x.intersects(self._platform_condition) for x in requirements):
tty.debug(f"[{__name__}] {pkg_name} is not for this platform")
return False
return True
def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
"""Returns a list of possible providers for the virtual string in input."""
return self.repo.providers_for(virtual_str)
def can_be_installed(self, *, pkg_name) -> bool:
"""Returns True if a package can be installed, False otherwise."""
return True
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
return False
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
platform = spack.platforms.host()
default_target = archspec.cpu.TARGETS[platform.default]
# Construct the list of targets which are compatible with the host
candidate_targets = [default_target] + default_target.ancestors
granularity = self.configuration.get("concretizer:targets:granularity")
host_compatible = self.configuration.get("concretizer:targets:host_compatible")
# Add targets which are not compatible with the current host
if not host_compatible:
additional_targets_in_family = sorted(
[
t
for t in archspec.cpu.TARGETS.values()
if (t.family.name == default_target.family.name and t not in candidate_targets)
],
key=lambda x: len(x.ancestors),
reverse=True,
)
candidate_targets += additional_targets_in_family
# Check if we want only generic architecture
if granularity == "generic":
candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]
return candidate_targets
def possible_dependencies(
self,
*specs: Union[spack.spec.Spec, str],
allowed_deps: dt.DepFlag,
transitive: bool = True,
strict_depflag: bool = False,
expand_virtuals: bool = True,
) -> PossibleGraph:
stack = [x for x in self._package_list(specs)]
virtuals: Set[str] = set()
edges: Dict[str, Set[str]] = {}
while stack:
pkg_name = stack.pop()
if pkg_name in edges:
continue
edges[pkg_name] = set()
# Since libc is not buildable, there is no need to extend the
# search space with libc dependencies.
if pkg_name in self.libc_pkgs:
continue
pkg_cls = self.repo.get_pkg_class(pkg_name=pkg_name)
for name, conditions in pkg_cls.dependencies_by_name(when=True).items():
if all(self.unreachable(pkg_name=pkg_name, when_spec=x) for x in conditions):
tty.debug(
f"[{__name__}] Not adding {name} as a dep of {pkg_name}, because "
f"conditions cannot be met"
)
continue
if not self._has_deptypes(
conditions, allowed_deps=allowed_deps, strict=strict_depflag
):
continue
if name in virtuals:
continue
dep_names = set()
if self.is_virtual(name):
virtuals.add(name)
if expand_virtuals:
providers = self.providers_for(name)
dep_names = {spec.name for spec in providers}
else:
dep_names = {name}
edges[pkg_name].update(dep_names)
if not transitive:
continue
for dep_name in dep_names:
if dep_name in edges:
continue
if not self._is_possible(pkg_name=dep_name):
continue
stack.append(dep_name)
real_packages = set(edges)
if not transitive:
# We exit early, so add children from the edges information
for root, children in edges.items():
real_packages.update(x for x in children if self._is_possible(pkg_name=x))
virtuals.update(self.runtime_virtuals)
real_packages = real_packages | self.runtime_pkgs
return PossibleGraph(real_pkgs=real_packages, virtuals=virtuals, edges=edges)
def _package_list(self, specs: Tuple[Union[spack.spec.Spec, str], ...]) -> List[str]:
stack = []
for current_spec in specs:
if isinstance(current_spec, str):
current_spec = spack.spec.Spec(current_spec)
if self.repo.is_virtual(current_spec.name):
stack.extend([p.name for p in self.providers_for(current_spec.name)])
continue
stack.append(current_spec.name)
return sorted(set(stack))
def _has_deptypes(self, dependencies, *, allowed_deps: dt.DepFlag, strict: bool) -> bool:
if strict is True:
return any(
dep.depflag == allowed_deps for deplist in dependencies.values() for dep in deplist
)
return any(
dep.depflag & allowed_deps for deplist in dependencies.values() for dep in deplist
)
def _is_possible(self, *, pkg_name):
try:
return self.is_allowed_on_this_platform(pkg_name=pkg_name) and self.can_be_installed(
pkg_name=pkg_name
)
except spack.repo.UnknownPackageError:
return False
class StaticAnalysis(NoStaticAnalysis):
"""Performs some static analysis of the configuration, store, etc. to provide more precise
answers on whether some packages can be installed, or used as a provider.
It increases the setup time, but might decrease the grounding and solve time considerably,
especially when requirements restrict the possible choices for providers.
"""
def __init__(
self,
*,
configuration: spack.config.Configuration,
repo: spack.repo.RepoPath,
store: spack.store.Store,
binary_index: spack.binary_distribution.BinaryCacheIndex,
):
super().__init__(configuration=configuration, repo=repo)
self.store = store
self.binary_index = binary_index
@lang.memoized
def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
candidates = super().providers_for(virtual_str)
result = []
for spec in candidates:
if not self._is_provider_candidate(pkg_name=spec.name, virtual=virtual_str):
continue
result.append(spec)
return result
@lang.memoized
def buildcache_specs(self) -> List[spack.spec.Spec]:
self.binary_index.update()
return self.binary_index.get_all_built_specs()
@lang.memoized
def can_be_installed(self, *, pkg_name) -> bool:
if self.configuration.get(f"packages:{pkg_name}:buildable", True):
return True
if self.configuration.get(f"packages:{pkg_name}:externals", []):
return True
reuse = self.configuration.get("concretizer:reuse")
if reuse is not False and self.store.db.query(pkg_name):
return True
if reuse is not False and any(x.name == pkg_name for x in self.buildcache_specs()):
return True
tty.debug(f"[{__name__}] {pkg_name} cannot be installed")
return False
@lang.memoized
def _is_provider_candidate(self, *, pkg_name: str, virtual: str) -> bool:
if not self.is_allowed_on_this_platform(pkg_name=pkg_name):
return False
if not self.can_be_installed(pkg_name=pkg_name):
return False
virtual_spec = spack.spec.Spec(virtual)
if self.unreachable(pkg_name=virtual_spec.name, when_spec=pkg_name):
tty.debug(f"[{__name__}] {pkg_name} cannot be a provider for {virtual}")
return False
return True
@lang.memoized
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
candidates = self.configuration.get(f"packages:{pkg_name}:require", [])
if not candidates and pkg_name != "all":
return self.unreachable(pkg_name="all", when_spec=when_spec)
if not candidates:
return False
if isinstance(candidates, str):
candidates = [candidates]
union_requirement = spack.spec.Spec()
for c in candidates:
if not isinstance(c, str):
continue
try:
union_requirement.constrain(c)
except SpackError:
# Less optimized, but shouldn't fail
pass
if not union_requirement.intersects(when_spec):
return True
return False
def create_graph_analyzer() -> PossibleDependencyGraph:
static_analysis = spack.config.CONFIG.get("concretizer:static_analysis", False)
if static_analysis:
return StaticAnalysis(
configuration=spack.config.CONFIG,
repo=spack.repo.PATH,
store=spack.store.STORE,
binary_index=spack.binary_distribution.BINARY_INDEX,
)
return NoStaticAnalysis(configuration=spack.config.CONFIG, repo=spack.repo.PATH)
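
A short sketch of how the analyzers defined above are queried; the package name is invented and the results depend on the active configuration:

import spack.deptypes as dt

graph = create_graph_analyzer()
pkgs, virtuals, edges = graph.possible_dependencies(
    "hdf5", allowed_deps=dt.LINK | dt.RUN, transitive=True
)
# pkgs:     every real package that may show up in the solve
# virtuals: virtual packages (e.g. "mpi") encountered along the way
# edges:    mapping from each package to the names of its possible dependencies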
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(
self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
) -> None:
self.possible_graph = possible_graph
self.specs = specs
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: Set[str] = set()
self._possible_virtuals: Set[str] = {
x.name for x in specs if spack.repo.PATH.is_virtual(x.name)
}
def possible_dependencies(self) -> Set[str]:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self) -> None:
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self) -> None:
self._possible_dependencies, virtuals, _ = self.possible_graph.possible_dependencies(
*self.specs, allowed_deps=self.all_types
)
self._possible_virtuals.update(virtuals)
def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(
self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
) -> None:
super().__init__(specs, tests, possible_graph)
self._link_run: Set[str] = set()
self._direct_build: Set[str] = set()
self._total_build: Set[str] = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self) -> None:
self._link_run, virtuals, _ = self.possible_graph.possible_dependencies(
*self.specs, allowed_deps=self.link_run_types
)
self._possible_virtuals.update(virtuals)
self._link_run_virtuals.update(virtuals)
for x in self._link_run:
reals, virtuals, _ = self.possible_graph.possible_dependencies(
x, allowed_deps=dt.BUILD, transitive=False, strict_depflag=True
)
self._possible_virtuals.update(virtuals)
self._direct_build.update(reals)
self._total_build, virtuals, _ = self.possible_graph.possible_dependencies(
*self._direct_build, allowed_deps=self.all_types
)
self._possible_virtuals.update(virtuals)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with at multiple possible nodes (build-tools)")
for package_name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.max_dupes(package_name, 2))
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
def create_counter(
specs: List[spack.spec.Spec], tests: bool, possible_graph: PossibleDependencyGraph
) -> Counter:
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
if strategy == "full":
return FullDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
if strategy == "minimal":
return MinimalDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
return NoDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
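
Putting the two factories in this file together, solver setup roughly does the following (spec invented):

import spack.spec

specs = [spack.spec.Spec("hdf5+mpi")]
counter = create_counter(specs, tests=False, possible_graph=create_graph_analyzer())
counter.possible_dependencies()   # package names allowed in the ASP problem
counter.possible_virtuals()       # virtuals that must be assigned a provider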

View File

@@ -230,7 +230,7 @@ def ensure_modern_format_string(fmt: str) -> None:
def _make_microarchitecture(name: str) -> archspec.cpu.Microarchitecture:
if isinstance(name, archspec.cpu.Microarchitecture):
return name
return archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name))
return archspec.cpu.TARGETS.get(name) or archspec.cpu.generic_microarchitecture(name)
@lang.lazy_lexicographic_ordering
@@ -238,14 +238,23 @@ class ArchSpec:
"""Aggregate the target platform, the operating system and the target microarchitecture."""
@staticmethod
def default_arch():
"""Return the default architecture"""
def _return_arch(os_tag, target_tag):
platform = spack.platforms.host()
default_os = platform.default_operating_system()
default_target = platform.default_target()
default_os = platform.operating_system(os_tag)
default_target = platform.target(target_tag)
arch_tuple = str(platform), str(default_os), str(default_target)
return ArchSpec(arch_tuple)
@staticmethod
def default_arch():
"""Return the default architecture"""
return ArchSpec._return_arch("default_os", "default_target")
@staticmethod
def frontend_arch():
"""Return the frontend architecture"""
return ArchSpec._return_arch("frontend", "frontend")
__slots__ = "_platform", "_os", "_target"
def __init__(self, spec_or_platform_tuple=(None, None, None)):
@@ -458,10 +467,10 @@ def _target_constrain(self, other: "ArchSpec") -> bool:
if not other._target_satisfies(self, strict=False):
raise UnsatisfiableArchitectureSpecError(self, other)
if self.target_concrete:
if self._target_concrete:
return False
elif other.target_concrete:
elif other._target_concrete:
self.target = other.target
return True
@@ -476,8 +485,8 @@ def _target_constrain(self, other: "ArchSpec") -> bool:
self.target = intersection_target
return True
def _target_intersection(self, other):
results = []
def _target_intersection(self, other: "ArchSpec") -> List[str]:
results: List[str] = []
if not self.target or not other.target:
return results
@@ -584,23 +593,23 @@ def constrain(self, other: "ArchSpec") -> bool:
return constrained
def copy(self):
def copy(self) -> "ArchSpec":
"""Copy the current instance and returns the clone."""
return ArchSpec(self)
@property
def concrete(self):
"""True if the spec is concrete, False otherwise"""
return self.platform and self.os and self.target and self.target_concrete
return self.platform and self.os and self.target and self._target_concrete
@property
def target_concrete(self):
def _target_concrete(self) -> bool:
"""True if the target is not a range or list."""
return (
self.target is not None and ":" not in str(self.target) and "," not in str(self.target)
)
def to_dict(self):
def to_dict(self) -> dict:
# Generic targets represent either an architecture family (like x86_64)
# or a custom micro-architecture
if self.target.vendor == "generic":
@@ -612,7 +621,7 @@ def to_dict(self):
return {"arch": {"platform": self.platform, "platform_os": self.os, "target": target_data}}
@staticmethod
def from_dict(d):
def from_dict(d: dict) -> "ArchSpec":
"""Import an ArchSpec from raw YAML/JSON data"""
arch = d["arch"]
target_name = arch["target"]
@@ -622,13 +631,12 @@ def from_dict(d):
return ArchSpec((arch["platform"], arch["platform_os"], target))
def __str__(self):
return "%s-%s-%s" % (self.platform, self.os, self.target)
return f"{self.platform}-{self.os}-{self.target}"
def __repr__(self):
fmt = "ArchSpec(({0.platform!r}, {0.os!r}, {1!r}))"
return fmt.format(self, str(self.target))
return f"ArchSpec(({self.platform!r}, {self.os!r}, {str(self.target)!r}))"
def __contains__(self, string):
def __contains__(self, string) -> bool:
return string in str(self) or string in self.target
@@ -1337,20 +1345,14 @@ class SpecBuildInterface(lang.ObjectWrapper):
"command", default_handler=_command_default_handler, _indirect=True
)
def __init__(
self,
spec: "Spec",
name: str,
query_parameters: List[str],
_parent: "Spec",
is_virtual: bool,
):
def __init__(self, spec: "Spec", name: str, query_parameters: List[str], _parent: "Spec"):
super().__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object
original_spec = getattr(spec, "wrapped_obj", spec)
self.wrapped_obj = original_spec
self.token = original_spec, name, query_parameters, _parent, is_virtual
self.token = original_spec, name, query_parameters, _parent
is_virtual = spack.repo.PATH.is_virtual(name)
self.last_query = QueryState(
name=name, extra_parameters=query_parameters, isvirtual=is_virtual
)
@@ -1533,8 +1535,9 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
self._external_path = external_path
self.external_modules = Spec._format_module_list(external_modules)
# This attribute is used to store custom information for external specs.
self.extra_attributes: dict = {}
# This attribute is used to store custom information for
# external specs. None signals that it was not set yet.
self.extra_attributes = None
# This attribute holds the original build copy of the spec if it is
# deployed differently than it was built. None signals that the spec
@@ -1911,22 +1914,10 @@ def package_class(self):
"""Internal package call gets only the class object for a package.
Use this to just get package metadata.
"""
warnings.warn(
"`Spec.package_class` is deprecated and will be removed in version 1.0.0. Use "
"`spack.repo.PATH.get_pkg_class(spec.fullname) instead.",
category=spack.error.SpackAPIWarning,
stacklevel=2,
)
return spack.repo.PATH.get_pkg_class(self.fullname)
@property
def virtual(self):
warnings.warn(
"`Spec.virtual` is deprecated and will be removed in version 1.0.0. Use "
"`spack.repo.PATH.is_virtual(spec.name)` instead.",
category=spack.error.SpackAPIWarning,
stacklevel=2,
)
return spack.repo.PATH.is_virtual(self.name)
@property
@@ -2368,10 +2359,15 @@ def to_node_dict(self, hash=ht.dag_hash):
)
if self.external:
if self.extra_attributes:
extra_attributes = syaml.sorted_dict(self.extra_attributes)
else:
extra_attributes = None
d["external"] = {
"path": self.external_path,
"module": self.external_modules or None,
"extra_attributes": syaml.sorted_dict(self.extra_attributes),
"module": self.external_modules,
"extra_attributes": extra_attributes,
}
if not self._concrete:
@@ -2826,6 +2822,24 @@ def from_detection(
s.extra_attributes = extra_attributes
return s
def validate_detection(self):
"""Validate the detection of an external spec.
This method is used as part of Spack's detection protocol, and is
not meant for client code use.
"""
# Assert that _extra_attributes is a Mapping and not None,
# which likely means the spec was created with Spec.from_detection
msg = 'cannot validate "{0}" since it was not created ' "using Spec.from_detection".format(
self
)
assert isinstance(self.extra_attributes, collections.abc.Mapping), msg
# Validate the spec calling a package specific method
pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(self, self.extra_attributes)
def _patches_assigned(self):
"""Whether patches have been assigned to this spec by the concretizer."""
# FIXME: _patches_in_order_of_appearance is attached after concretization
@@ -2864,7 +2878,7 @@ def inject_patches_variant(root):
# Add any patches from the package to the spec.
patches = set()
for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items():
for cond, patch_list in s.package_class.patches.items():
if s.satisfies(cond):
for patch in patch_list:
patches.add(patch)
@@ -2877,7 +2891,7 @@ def inject_patches_variant(root):
if dspec.spec.concrete:
continue
pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
pkg_deps = dspec.parent.package_class.dependencies
patches = []
for cond, deps_by_name in pkg_deps.items():
@@ -3084,7 +3098,7 @@ def validate_or_raise(self):
# FIXME: raise just the first one encountered
for spec in self.traverse():
# raise an UnknownPackageError if the spec's package isn't real.
if spec.name and not spack.repo.PATH.is_virtual(spec.name):
if (not spec.virtual) and spec.name:
spack.repo.PATH.get_pkg_class(spec.fullname)
# validate compiler in addition to the package name.
@@ -3093,7 +3107,7 @@ def validate_or_raise(self):
raise UnsupportedCompilerError(spec.compiler.name)
# Ensure correctness of variants (if the spec is not virtual)
if not spack.repo.PATH.is_virtual(spec.name):
if not spec.virtual:
Spec.ensure_valid_variants(spec)
substitute_abstract_variants(spec)
@@ -3111,7 +3125,7 @@ def ensure_valid_variants(spec):
if spec.concrete:
return
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg_cls = spec.package_class
pkg_variants = pkg_cls.variant_names()
# reserved names are variants that may be set on any package
# but are not necessarily recorded by the package's class
@@ -3328,9 +3342,7 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
self_virtual = spack.repo.PATH.is_virtual(self.name)
other_virtual = spack.repo.PATH.is_virtual(other.name)
if self_virtual and other_virtual:
if self.virtual and other.virtual:
# Two virtual specs intersect only if there are providers for both
lhs = spack.repo.PATH.providers_for(str(self))
rhs = spack.repo.PATH.providers_for(str(other))
@@ -3338,8 +3350,8 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
return bool(intersection)
# A provider can satisfy a virtual dependency.
elif self_virtual or other_virtual:
virtual_spec, non_virtual_spec = (self, other) if self_virtual else (other, self)
elif self.virtual or other.virtual:
virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
try:
# Here we might get an abstract spec
pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname)
@@ -3444,9 +3456,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
# A concrete provider can satisfy a virtual dependency.
if not spack.repo.PATH.is_virtual(self.name) and spack.repo.PATH.is_virtual(
other.name
):
if not self.virtual and other.virtual:
try:
# Here we might get an abstract spec
pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname)
@@ -3514,7 +3524,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
# If we are checking for ^mpi we need to verify if there is any edge
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
if rhs_edge.spec.virtual:
rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
if not rhs_edge.virtuals:
@@ -3560,7 +3570,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
def virtual_dependencies(self):
"""Return list of any virtual deps in this spec."""
return [spec for spec in self.traverse() if spack.repo.PATH.is_virtual(spec.name)]
return [spec for spec in self.traverse() if spec.virtual]
@property # type: ignore[misc] # decorated prop not supported in mypy
def patches(self):
@@ -3760,16 +3770,21 @@ def __getitem__(self, name: str):
# Consider runtime dependencies and direct build/test deps before transitive dependencies,
# and prefer matches closest to the root.
try:
edge = next((e for e in order() if e.spec.name == name or name in e.virtuals))
child: Spec = next(
e.spec
for e in itertools.chain(
(e for e in order() if e.spec.name == name or name in e.virtuals),
# for historical reasons
(e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
)
)
except StopIteration:
raise KeyError(f"No spec with name {name} in {self}")
if self._concrete:
return SpecBuildInterface(
edge.spec, name, query_parameters, _parent=self, is_virtual=name in edge.virtuals
)
return SpecBuildInterface(child, name, query_parameters, _parent=self)
return edge.spec
return child
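
The lookup above is what backs the bracket syntax on specs; roughly (names invented, assuming `spec` is already concrete):

# Given a concrete spec, e.g. one read back from the database:
mpi_provider = spec["mpi"]     # dependency reached through an edge providing "mpi"
hdf5_dep = spec["hdf5"]        # direct or transitive dependency looked up by name
# On concrete specs the result is wrapped in SpecBuildInterface, so package queries
# such as .command are forwarded to the provider's package.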
def __contains__(self, spec):
"""True if this spec or some dependency satisfies the spec.
@@ -3854,13 +3869,6 @@ def _cmp_iter(self):
for item in self._cmp_node():
yield item
# If there is ever a breaking change to hash computation, whether accidental or purposeful,
# two specs can be identical modulo DAG hash, depending on what time they were concretized.
# From the perspective of many operations in Spack (database, build cache, etc.) a different
# DAG hash means a different spec. Here we ensure that two otherwise identical specs, one
# serialized before the hash change and one after, are considered different.
yield self.dag_hash() if self.concrete else None
# This needs to be in _cmp_iter so that no specs with different process hashes
# are considered the same by `__hash__` or `__eq__`.
#
@@ -4704,7 +4712,7 @@ def concrete(self):
bool: True or False
"""
return self.spec._concrete or all(
v in self for v in spack.repo.PATH.get_pkg_class(self.spec.fullname).variant_names()
v in self for v in self.spec.package_class.variant_names()
)
def copy(self) -> "VariantMap":
@@ -4726,10 +4734,7 @@ def __str__(self):
bool_keys = []
kv_keys = []
for key in sorted_keys:
if isinstance(self[key].value, bool):
bool_keys.append(key)
else:
kv_keys.append(key)
bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key)
# add spaces before and after key/value variants.
string = io.StringIO()
@@ -4764,14 +4769,14 @@ def substitute_abstract_variants(spec: Spec):
elif name in vt.reserved_names:
continue
variant_defs = spack.repo.PATH.get_pkg_class(spec.fullname).variant_definitions(name)
variant_defs = spec.package_class.variant_definitions(name)
valid_defs = []
for when, vdef in variant_defs:
if when.intersects(spec):
valid_defs.append(vdef)
if not valid_defs:
if name not in spack.repo.PATH.get_pkg_class(spec.fullname).variant_names():
if name not in spec.package_class.variant_names():
unknown.append(name)
else:
whens = [str(when) for when, _ in variant_defs]
@@ -4843,9 +4848,7 @@ def reconstruct_virtuals_on_edges(spec):
possible_virtuals = set()
for node in spec.traverse():
try:
possible_virtuals.update(
{x for x in node.package.dependencies if spack.repo.PATH.is_virtual(x)}
)
possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
except Exception as e:
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
continue
@@ -4910,7 +4913,7 @@ def from_node_dict(cls, node):
spec.external_modules = node["external"]["module"]
if spec.external_modules is False:
spec.external_modules = None
spec.extra_attributes = node["external"].get("extra_attributes") or {}
spec.extra_attributes = node["external"].get("extra_attributes", {})
# specs read in are concrete unless marked abstract
if node.get("concrete", True):
@@ -5187,10 +5190,12 @@ def get_host_environment_metadata() -> Dict[str, str]:
def get_host_environment() -> Dict[str, Any]:
"""Returns a dictionary with host information (not including the os.environ)."""
"""Return a dictionary (lookup) with host information (not including the
os.environ).
"""
host_platform = spack.platforms.host()
host_target = host_platform.default_target()
host_os = host_platform.default_operating_system()
host_target = host_platform.target("default_target")
host_os = host_platform.operating_system("default_os")
arch_fmt = "platform={0} os={1} target={2}"
arch_spec = Spec(arch_fmt.format(host_platform, host_os, host_target))
return {

View File

@@ -60,7 +60,8 @@ def test_user_input_combination(config, target_str, os_str):
"""Test for all the valid user input combinations that both the target and
the operating system match.
"""
spec = Spec(f"libelf os={os_str} target={target_str}")
spec_str = "libelf os={} target={}".format(os_str, target_str)
spec = Spec(spec_str)
assert spec.architecture.os == str(TEST_PLATFORM.operating_system(os_str))
assert spec.architecture.target == TEST_PLATFORM.target(target_str)
@@ -70,8 +71,8 @@ def test_default_os_and_target(default_mock_concretization):
after concretization.
"""
spec = default_mock_concretization("libelf")
assert spec.architecture.os == str(TEST_PLATFORM.default_operating_system())
assert spec.architecture.target == TEST_PLATFORM.default_target()
assert spec.architecture.os == str(TEST_PLATFORM.operating_system("default_os"))
assert spec.architecture.target == TEST_PLATFORM.target("default_target")
def test_operating_system_conversion_to_dict():

View File

@@ -1,10 +1,8 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os
import subprocess
from urllib.error import HTTPError
import pytest
@@ -17,7 +15,6 @@
import spack.paths as spack_paths
import spack.repo as repo
import spack.util.git
from spack.test.conftest import MockHTTPResponse
pytestmark = [pytest.mark.usefixtures("mock_packages")]
@@ -165,8 +162,38 @@ def test_import_signing_key(mock_gnupghome):
ci.import_signing_key(signing_key)
def test_download_and_extract_artifacts(tmpdir, monkeypatch):
monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", "faketoken")
class FakeWebResponder:
def __init__(self, response_code=200, content_to_read=[]):
self._resp_code = response_code
self._content = content_to_read
self._read = [False for c in content_to_read]
def open(self, request, data=None, timeout=object()):
return self
def getcode(self):
return self._resp_code
def read(self, length=None):
if len(self._content) <= 0:
return None
if not self._read[-1]:
return_content = self._content[-1]
if length:
self._read[-1] = True
else:
self._read.pop()
self._content.pop()
return return_content
self._read.pop()
self._content.pop()
return None
def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})
url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -174,13 +201,10 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch):
spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
)
def _urlopen_OK(*args, **kwargs):
with open(test_artifacts_path, "rb") as f:
return MockHTTPResponse(
"200", "OK", {"Content-Type": "application/zip"}, io.BytesIO(f.read())
)
with open(test_artifacts_path, "rb") as fd:
fake_responder = FakeWebResponder(content_to_read=[fd.read()])
monkeypatch.setattr(ci, "urlopen", _urlopen_OK)
monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
ci.download_and_extract_artifacts(url, working_dir)
@@ -190,11 +214,7 @@ def _urlopen_OK(*args, **kwargs):
found_install = fs.find(working_dir, "install.sh")
assert len(found_install) == 1
def _urlopen_500(*args, **kwargs):
raise HTTPError(url, 500, "Internal Server Error", {}, None)
monkeypatch.setattr(ci, "urlopen", _urlopen_500)
fake_responder._resp_code = 400
with pytest.raises(spack.error.SpackError):
ci.download_and_extract_artifacts(url, working_dir)
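Both variants of this test hand the CI code an in-memory stand-in for an HTTP response instead of hitting the network. A minimal sketch of that stubbing idea, kept independent of Spack's `ci` module (the `download` helper and `FakeResponse` class here are hypothetical):

```python
import io

class FakeResponse(io.BytesIO):
    """File-like object posing as an HTTP response."""

    def __init__(self, payload: bytes, code: int = 200):
        super().__init__(payload)
        self._code = code

    def getcode(self):
        return self._code

def download(url, opener):
    # Real code would call urllib's urlopen; the test swaps in `opener`.
    response = opener(url)
    if response.getcode() != 200:
        raise RuntimeError(f"failed to fetch {url}: HTTP {response.getcode()}")
    return response.read()

fake_opener = lambda url: FakeResponse(b"zip-bytes")
assert download("https://example.invalid/artifacts.zip", fake_opener) == b"zip-bytes"
```

In a pytest setting the swap is usually done with `monkeypatch.setattr(...)`, as the hunk above does, rather than by passing the opener in explicitly.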
@@ -308,14 +328,16 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
e1.add("hypre")
e1.concretize()
# Concretizing the above environment results in the following graphs:
"""
Concretizing the above environment results in the following graphs:
# mpileaks -> mpich (provides mpi virtual dep of mpileaks)
# -> callpath -> dyninst -> libelf
# -> libdwarf -> libelf
# -> mpich (provides mpi dep of callpath)
mpileaks -> mpich (provides mpi virtual dep of mpileaks)
-> callpath -> dyninst -> libelf
-> libdwarf -> libelf
-> mpich (provides mpi dep of callpath)
# hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)
hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)
"""
touched = ["libdwarf"]

View File

@@ -329,14 +329,14 @@ def test_ci_generate_pkg_with_deps(ci_generate_test, tmp_path, ci_base_environme
f"""\
spack:
specs:
- dependent-install
- flatten-deps
mirrors:
buildcache-destination: {tmp_path / 'ci-mirror'}
ci:
pipeline-gen:
- submapping:
- match:
- dependent-install
- flatten-deps
build-job:
tags:
- donotcare
@@ -355,12 +355,12 @@ def test_ci_generate_pkg_with_deps(ci_generate_test, tmp_path, ci_base_environme
assert "stage" in ci_obj
assert ci_obj["stage"] == "stage-0"
found.append("dependency-install")
if "dependent-install" in ci_key:
if "flatten-deps" in ci_key:
assert "stage" in ci_obj
assert ci_obj["stage"] == "stage-1"
found.append("dependent-install")
found.append("flatten-deps")
assert "dependent-install" in found
assert "flatten-deps" in found
assert "dependency-install" in found
@@ -372,14 +372,14 @@ def test_ci_generate_for_pr_pipeline(ci_generate_test, tmp_path, monkeypatch):
f"""\
spack:
specs:
- dependent-install
- flatten-deps
mirrors:
buildcache-destination: {tmp_path / 'ci-mirror'}
ci:
pipeline-gen:
- submapping:
- match:
- dependent-install
- flatten-deps
build-job:
tags:
- donotcare
@@ -899,7 +899,7 @@ def test_ci_generate_override_runner_attrs(
f"""\
spack:
specs:
- dependent-install
- flatten-deps
- pkg-a
mirrors:
buildcache-destination: {tmp_path / "ci-mirror"}
@@ -908,7 +908,7 @@ def test_ci_generate_override_runner_attrs(
- match_behavior: {match_behavior}
submapping:
- match:
- dependent-install
- flatten-deps
build-job:
tags:
- specific-one
@@ -1006,8 +1006,8 @@ def test_ci_generate_override_runner_attrs(
assert the_elt["script"][0] == "main step"
assert len(the_elt["after_script"]) == 1
assert the_elt["after_script"][0] == "post step one"
if "dependent-install" in ci_key:
# The dependent-install match specifies that we keep the two
if "flatten-deps" in ci_key:
# The flatten-deps match specifies that we keep the two
# top level variables, but add a third specifc one. It
# also adds a custom tag which should be combined with
# the top-level tag.
@@ -1182,12 +1182,12 @@ def test_ci_generate_read_broken_specs_url(
spec_a = spack.concretize.concretize_one("pkg-a")
a_dag_hash = spec_a.dag_hash()
spec_flattendeps = spack.concretize.concretize_one("dependent-install")
spec_flattendeps = spack.concretize.concretize_one("flatten-deps")
flattendeps_dag_hash = spec_flattendeps.dag_hash()
broken_specs_url = tmp_path.as_uri()
# Mark 'a' as broken (but not 'dependent-install')
# Mark 'a' as broken (but not 'flatten-deps')
broken_spec_a_url = "{0}/{1}".format(broken_specs_url, a_dag_hash)
job_stack = "job_stack"
a_job_url = "a_job_url"
@@ -1201,7 +1201,7 @@ def test_ci_generate_read_broken_specs_url(
f"""\
spack:
specs:
- dependent-install
- flatten-deps
- pkg-a
mirrors:
buildcache-destination: {(tmp_path / "ci-mirror").as_uri()}
@@ -1211,7 +1211,7 @@ def test_ci_generate_read_broken_specs_url(
- submapping:
- match:
- pkg-a
- dependent-install
- flatten-deps
- pkg-b
- dependency-install
build-job:
@@ -1234,7 +1234,7 @@ def test_ci_generate_read_broken_specs_url(
)
assert expected in output
not_expected = f"dependent-install/{flattendeps_dag_hash[:7]} (in stack"
not_expected = f"flatten-deps/{flattendeps_dag_hash[:7]} (in stack"
assert not_expected not in output
@@ -1447,7 +1447,7 @@ def test_gitlab_config_scopes(ci_generate_test, tmp_path):
include: [{configs_path}]
view: false
specs:
- dependent-install
- flatten-deps
mirrors:
buildcache-destination: {tmp_path / "ci-mirror"}
ci:

View File

@@ -51,8 +51,8 @@ def test_create_db_tarball(tmpdir, database):
def test_report():
out = debug("report")
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
host_target = host_platform.default_target()
host_os = host_platform.operating_system("frontend")
host_target = host_platform.target("frontend")
architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
assert spack.get_version() in out

View File

@@ -24,24 +24,32 @@
mpi_deps = ["fake"]
@pytest.mark.parametrize(
"cli_args,expected",
[
(["mpileaks"], set(["callpath"] + mpis)),
(
["--transitive", "mpileaks"],
set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps),
),
(["--transitive", "--deptype=link,run", "dtbuild1"], {"dtlink2", "dtrun2"}),
(["--transitive", "--deptype=build", "dtbuild1"], {"dtbuild2", "dtlink2"}),
(["--transitive", "--deptype=link", "dtbuild1"], {"dtlink2"}),
],
)
def test_direct_dependencies(cli_args, expected, mock_runtimes):
out = dependencies(*cli_args)
result = set(re.split(r"\s+", out.strip()))
expected.update(mock_runtimes)
assert expected == result
def test_direct_dependencies(mock_packages):
out = dependencies("mpileaks")
actual = set(re.split(r"\s+", out.strip()))
expected = set(["callpath"] + mpis)
assert expected == actual
def test_transitive_dependencies(mock_packages):
out = dependencies("--transitive", "mpileaks")
actual = set(re.split(r"\s+", out.strip()))
expected = set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps)
assert expected == actual
def test_transitive_dependencies_with_deptypes(mock_packages):
out = dependencies("--transitive", "--deptype=link,run", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtlink2", "dtrun2"]) == deps
out = dependencies("--transitive", "--deptype=build", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtbuild2", "dtlink2"]) == deps
out = dependencies("--transitive", "--deptype=link", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtlink2"]) == deps
@pytest.mark.db

View File

@@ -1038,58 +1038,6 @@ def test_init_from_yaml(environment_from_manifest):
assert not e2.specs_by_hash
def test_init_from_yaml_relative_includes(tmp_path):
files = [
"relative_copied/packages.yaml",
"./relative_copied/compilers.yaml",
"repos.yaml",
"./config.yaml",
]
manifest = f"""
spack:
specs: []
include: {files}
"""
e1_path = tmp_path / "e1"
e1_manifest = e1_path / "spack.yaml"
fs.mkdirp(e1_path)
with open(e1_manifest, "w", encoding="utf-8") as f:
f.write(manifest)
for f in files:
fs.touchp(e1_path / f)
e2 = _env_create("test2", init_file=e1_manifest)
for f in files:
assert os.path.exists(os.path.join(e2.path, f))
def test_init_from_yaml_relative_includes_outside_env(tmp_path):
files = ["../outside_env_not_copied/repos.yaml"]
manifest = f"""
spack:
specs: []
include: {files}
"""
# subdir to ensure parent of environment dir is not shared
e1_path = tmp_path / "e1_subdir" / "e1"
e1_manifest = e1_path / "spack.yaml"
fs.mkdirp(e1_path)
with open(e1_manifest, "w", encoding="utf-8") as f:
f.write(manifest)
for f in files:
fs.touchp(e1_path / f)
with pytest.raises(spack.config.ConfigFileError, match="Detected 1 missing include"):
_ = _env_create("test2", init_file=e1_manifest)
def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
fake_prefix = tmp_path / "a-prefix"
fake_bin = fake_prefix / "bin"
@@ -2666,7 +2614,7 @@ def test_stack_yaml_remove_from_matrix_no_effect(tmpdir):
- packages:
- matrix:
- [mpileaks, callpath]
- [target=default_target]
- [target=be]
specs:
- $packages
"""
@@ -2691,7 +2639,7 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):
- packages:
- matrix:
- [mpileaks, callpath]
- [target=default_target]
- [target=be]
specs:
- $packages
"""
@@ -2711,7 +2659,7 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):
assert before_user == after_user
mpileaks_spec = Spec("mpileaks target=default_target")
mpileaks_spec = Spec("mpileaks target=be")
assert mpileaks_spec in before_conc
assert mpileaks_spec not in after_conc

View File

@@ -139,7 +139,7 @@ def test_gc_except_specific_environments(mutable_database, mutable_mock_env_path
def test_gc_except_nonexisting_dir_env(mutable_database, mutable_mock_env_path, tmpdir):
output = gc("-ye", tmpdir.strpath, fail_on_error=False)
assert "No such environment" in output
assert gc.returncode == 1
gc.returncode == 1
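A side note on lines like the bare `gc.returncode == 1` above (the same pattern appears later in this diff in the reporters, sbang, modules-load, and stage tests): a comparison on its own line evaluates to a boolean that is immediately discarded, so it never fails the test. A short illustration of the difference:

```python
def broken_check(value):
    value == 1          # result is computed, then thrown away: checks nothing

def real_check(value):
    assert value == 1   # raises AssertionError when the check fails

broken_check(0)         # passes silently
try:
    real_check(0)
except AssertionError:
    print("the assert version actually catches the failure")
```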
@pytest.mark.db

View File

@@ -26,9 +26,9 @@ def test_manpath_trailing_colon(
else ("--sh", "export %s=%s", ";")
)
# Test that the commands generated by load add the MANPATH prefix
# inspections. Also test that Spack correctly preserves the default/existing
# manpath search path via a trailing colon
"""Test that the commands generated by load add the MANPATH prefix
inspections. Also test that Spack correctly preserves the default/existing
manpath search path via a trailing colon"""
install("mpileaks")
sh_out = load(shell, "mpileaks")
@@ -81,9 +81,7 @@ def extract_value(output, variable):
# Finally, do we list them in topo order?
for i, pkg in enumerate(pkgs):
assert {s.name for s in mpileaks_spec[pkg].traverse(direction="parents")}.issubset(
pkgs[: i + 1]
)
set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i])
# Lastly, do we keep track that mpileaks was loaded?
assert (

View File

@@ -304,8 +304,6 @@ def test_run_import_check(tmp_path: pathlib.Path):
contents = '''
import spack.cmd
import spack.config # do not drop this import because of this comment
import spack.repo
import spack.repo_utils
# this comment about spack.error should not be removed
class Example(spack.build_systems.autotools.AutotoolsPackage):
@@ -316,7 +314,6 @@ def foo(config: "spack.error.SpackError"):
# the type hint is quoted, so it should not be removed
spack.util.executable.Executable("example")
print(spack.__version__)
print(spack.repo_utils.__file__)
'''
file.write_text(contents)
root = str(tmp_path)
@@ -332,7 +329,6 @@ def foo(config: "spack.error.SpackError"):
output = output_buf.getvalue()
assert "issues.py: redundant import: spack.cmd" in output
assert "issues.py: redundant import: spack.repo" in output
assert "issues.py: redundant import: spack.config" not in output # comment prevents removal
assert "issues.py: missing import: spack" in output # used by spack.__version__
assert "issues.py: missing import: spack.build_systems.autotools" in output

View File

@@ -148,6 +148,7 @@ def current_host(request, monkeypatch):
cpu, _, is_preference = request.param.partition("-")
monkeypatch.setattr(spack.platforms.Test, "default", cpu)
monkeypatch.setattr(spack.platforms.Test, "front_end", cpu)
if not is_preference:
target = archspec.cpu.TARGETS[cpu]
monkeypatch.setattr(archspec.cpu, "host", lambda: target)
@@ -384,11 +385,10 @@ def test_different_compilers_get_different_flags(
):
"""Tests that nodes get the flags of the associated compiler."""
mutable_config.set("compilers", [clang12_with_flags, gcc11_with_flags])
t = archspec.cpu.host().family
client = spack.concretize.concretize_one(
Spec(
f"cmake-client %gcc@11.1.0 platform=test os=redhat6 target={t}"
f" ^cmake %clang@12.2.0 platform=test os=redhat6 target={t}"
"cmake-client %gcc@11.1.0 platform=test os=fe target=fe"
" ^cmake %clang@12.2.0 platform=test os=fe target=fe"
)
)
cmake = client["cmake"]
@@ -413,8 +413,7 @@ def test_spec_flags_maintain_order(self, mutable_config, gcc11_with_flags):
def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12_with_flags):
mutable_config.set("compilers", [clang12_with_flags])
# Correct arch to use test compiler that has flags
t = archspec.cpu.host().family
spec = Spec(f"pkg-a %clang@12.2.0 platform=test os=redhat6 target={t}")
spec = Spec("pkg-a %clang@12.2.0 platform=test os=fe target=fe")
# Get the compiler that matches the spec (
compiler = spack.compilers.compiler_for_spec("clang@=12.2.0", spec.architecture)
@@ -1243,7 +1242,7 @@ def test_transitive_conditional_virtual_dependency(self, mutable_config):
def test_conditional_provides_or_depends_on(self):
# Check that we can concretize correctly a spec that can either
# provide a virtual or depend on it based on the value of a variant
s = spack.concretize.concretize_one("v1-consumer ^conditional-provider +disable-v1")
s = spack.concretize.concretize_one("conditional-provider +disable-v1")
assert "v1-provider" in s
assert s["v1"].name == "v1-provider"
assert s["v2"].name == "conditional-provider"
@@ -2112,15 +2111,14 @@ def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch
def test_require_targets_are_allowed(self, mutable_database):
"""Test that users can set target constraints under the require attribute."""
# Configuration to be added to packages.yaml
required_target = archspec.cpu.TARGETS[spack.platforms.test.Test.default].family
external_conf = {"all": {"require": f"target={required_target}"}}
external_conf = {"all": {"require": "target=%s" % spack.platforms.test.Test.front_end}}
spack.config.set("packages", external_conf)
with spack.config.override("concretizer:reuse", False):
spec = spack.concretize.concretize_one("mpich")
for s in spec.traverse():
assert s.satisfies(f"target={required_target}")
assert s.satisfies("target=%s" % spack.platforms.test.Test.front_end)
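One side of this hunk pins the requirement to a target *family* looked up through archspec. As a small illustration of that lookup, with a concrete microarchitecture name chosen only for the example:

```python
import archspec.cpu

# Map a specific microarchitecture to its generic family, e.g. haswell -> x86_64.
target = archspec.cpu.TARGETS["haswell"]
print(target.name, "->", str(target.family))
```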
def test_external_python_extensions_have_dependency(self):
"""Test that python extensions have access to a python dependency

View File

@@ -1,6 +1,17 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pathlib
import pytest
import spack.concretize
import spack.config
import spack.environment as ev
import spack.paths
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml
"""
These tests include the following package DAGs:
@@ -31,18 +42,6 @@
y
"""
import pathlib
import pytest
import spack.concretize
import spack.config
import spack.environment as ev
import spack.paths
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml
@pytest.fixture
def test_repo(mutable_config, monkeypatch, mock_stage):

View File

@@ -259,7 +259,7 @@ def test_develop(self):
def test_external_mpi(self):
# make sure this doesn't give us an external first.
spec = spack.concretize.concretize_one("mpi")
assert not spec.external and spec.package.provides("mpi")
assert not spec["mpi"].external
# load config
conf = syaml.load_config(
@@ -293,7 +293,7 @@ def mock_module(cmd, module):
monkeypatch.setattr(spack.util.module_cmd, "module", mock_module)
spec = spack.concretize.concretize_one("mpi")
assert not spec.external and spec.package.provides("mpi")
assert not spec["mpi"].external
# load config
conf = syaml.load_config(

View File

@@ -1,6 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib
import pytest
@@ -181,7 +182,7 @@ def test_requirement_adds_version_satisfies(
# Sanity check: early version of T does not include U
s0 = spack.concretize.concretize_one("t@2.0")
assert "u" not in s0
assert not ("u" in s0)
conf_str = """\
packages:
@@ -199,11 +200,11 @@ def test_requirement_adds_version_satisfies(
@pytest.mark.parametrize("require_checksum", (True, False))
def test_requirement_adds_git_hash_version(
require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch
require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch, working_env
):
# A full commit sha is a checksummed version, so this test should pass in both cases
if require_checksum:
monkeypatch.setenv("SPACK_CONCRETIZER_REQUIRE_CHECKSUM", "yes")
os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
repo_path, filename, commits = mock_git_version_info
monkeypatch.setattr(

View File

@@ -408,7 +408,7 @@ def test_substitute_config_variables(mock_low_high_config, monkeypatch):
os.path.join("foo", "$platform", "bar")
) == os.path.abspath(os.path.join("foo", "test", "bar"))
host_target = spack.platforms.host().default_target()
host_target = spack.platforms.host().target("default_target")
host_target_family = str(host_target.family)
assert spack_path.canonicalize_path(
os.path.join("foo", "$target_family", "bar")

View File

@@ -621,7 +621,7 @@ def linux_os():
platform = spack.platforms.host()
name, version = "debian", "6"
if platform.name == "linux":
current_os = platform.default_operating_system()
current_os = platform.operating_system("default_os")
name, version = current_os.name, current_os.version
LinuxOS = collections.namedtuple("LinuxOS", ["name", "version"])
return LinuxOS(name=name, version=version)
@@ -680,6 +680,7 @@ def load_json():
def mock_targets(mock_uarch_configuration, monkeypatch):
"""Use this fixture to enable mock uarch targets for testing."""
targets_json, targets = mock_uarch_configuration
monkeypatch.setattr(archspec.cpu.schema, "TARGETS_JSON", targets_json)
monkeypatch.setattr(archspec.cpu.microarchitecture, "TARGETS", targets)
@@ -2171,8 +2172,3 @@ def getcode(self):
def info(self):
return self.headers
@pytest.fixture()
def mock_runtimes(config, mock_packages):
return mock_packages.packages_with_tags("runtime")

View File

@@ -13,8 +13,6 @@
import pytest
import archspec.cpu
import spack
import spack.cmd
import spack.cmd.external
@@ -100,8 +98,11 @@ def spec_json(self):
@pytest.fixture
def _common_arch(test_platform):
generic = archspec.cpu.TARGETS[test_platform.default].family
return JsonArchEntry(platform=test_platform.name, os="redhat6", target=generic.name)
return JsonArchEntry(
platform=test_platform.name,
os=test_platform.front_os,
target=test_platform.target("fe").name,
)
@pytest.fixture

View File

@@ -206,7 +206,7 @@ def test_repo(_create_test_repo, monkeypatch, mock_stage):
)
def test_redistribute_directive(test_repo, spec_str, distribute_src, distribute_bin):
spec = spack.spec.Spec(spec_str)
assert spack.repo.PATH.get_pkg_class(spec.fullname).redistribute_source(spec) == distribute_src
assert spec.package_class.redistribute_source(spec) == distribute_src
concretized_spec = spack.concretize.concretize_one(spec)
assert concretized_spec.package.redistribute_binary == distribute_bin

View File

@@ -198,6 +198,17 @@ def test_installed_dependency_request_conflicts(install_mockery, mock_fetch, mut
spack.concretize.concretize_one(dependent)
def test_install_dependency_symlinks_pkg(install_mockery, mock_fetch, mutable_mock_repo):
"""Test dependency flattening/symlinks mock package."""
spec = spack.concretize.concretize_one("flatten-deps")
pkg = spec.package
PackageInstaller([pkg], explicit=True).install()
# Ensure dependency directory exists after the installation.
dependency_dir = os.path.join(pkg.prefix, "dependency-install")
assert os.path.isdir(dependency_dir)
def test_install_times(install_mockery, mock_fetch, mutable_mock_repo):
"""Test install times added."""
spec = spack.concretize.concretize_one("dev-build-test-install-phases")
@@ -217,6 +228,26 @@ def test_install_times(install_mockery, mock_fetch, mutable_mock_repo):
assert all(isinstance(x["seconds"], float) for x in times["phases"])
def test_flatten_deps(install_mockery, mock_fetch, mutable_mock_repo):
"""Explicitly test the flattening code for coverage purposes."""
# Unfortunately, executing the 'flatten-deps' spec's installation does
# not affect code coverage results, so be explicit here.
spec = spack.concretize.concretize_one("dependent-install")
pkg = spec.package
PackageInstaller([pkg], explicit=True).install()
# Demonstrate that the directory does not appear under the spec
# prior to the flatten operation.
dependency_name = "dependency-install"
assert dependency_name not in os.listdir(pkg.prefix)
# Flatten the dependencies and ensure the dependency directory is there.
spack.package_base.flatten_dependencies(spec, pkg.prefix)
dependency_dir = os.path.join(pkg.prefix, dependency_name)
assert os.path.isdir(dependency_dir)
@pytest.fixture()
def install_upstream(tmpdir_factory, gen_mock_layout, install_mockery):
"""Provides a function that installs a specified set of specs to an
@@ -458,7 +489,7 @@ def test_log_install_without_build_files(install_mockery):
spec = spack.concretize.concretize_one("trivial-install-test-package")
# Attempt installing log without the build log file
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
spack.installer.log(spec.package)
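The `OSError`/`IOError` swap here (and the similar `IOError` and `EnvironmentError` swaps later in the diff) is cosmetic on Python 3, where all three names refer to the same class, so `pytest.raises` matches the same exceptions either way:

```python
# All aliases of the same built-in exception class on Python 3:
assert IOError is OSError
assert EnvironmentError is OSError
```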

View File

@@ -470,7 +470,7 @@ def _repoerr(repo, name):
# The call to install_tree will raise the exception since not mocking
# creation of dependency package files within *install* directories.
with pytest.raises(OSError, match=path if sys.platform != "win32" else ""):
with pytest.raises(IOError, match=path if sys.platform != "win32" else ""):
inst.dump_packages(spec, path)
# Now try the error path, which requires the mock directory structure

View File

@@ -82,7 +82,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
fs.copy("source/none", "dest")
def test_multiple_src_file_dest(self, stage):
@@ -139,7 +139,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
fs.install("source/none", "dest")
def test_multiple_src_file_dest(self, stage):
@@ -220,7 +220,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
fs.copy_tree("source/none", "dest")
def test_parent_dir(self, stage):
@@ -301,7 +301,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
fs.install_tree("source/none", "dest")
def test_parent_dir(self, stage):

View File

@@ -93,26 +93,28 @@
pass
#: This is a list of filesystem locations to test locks in. Paths are
#: expanded so that %u is replaced with the current username. '~' is also
#: legal and will be expanded to the user's home directory.
#:
#: Tests are skipped for directories that don't exist, so you'll need to
#: update this with the locations of NFS, Lustre, and other mounts on your
#: system.
"""This is a list of filesystem locations to test locks in. Paths are
expanded so that %u is replaced with the current username. '~' is also
legal and will be expanded to the user's home directory.
Tests are skipped for directories that don't exist, so you'll need to
update this with the locations of NFS, Lustre, and other mounts on your
system.
"""
locations = [
tempfile.gettempdir(),
os.path.join("/nfs/tmp2/", getpass.getuser()),
os.path.join("/p/lscratch*/", getpass.getuser()),
]
#: This is the longest a failed multiproc test will take.
#: Barriers will time out and raise an exception after this interval.
#: In MPI mode, barriers don't time out (they hang). See mpi_multiproc_test.
"""This is the longest a failed multiproc test will take.
Barriers will time out and raise an exception after this interval.
In MPI mode, barriers don't time out (they hang). See mpi_multiproc_test.
"""
barrier_timeout = 5
#: This is the lock timeout for expected failures.
#: This may need to be higher for some filesystems.
"""This is the lock timeout for expected failures.
This may need to be higher for some filesystems."""
lock_fail_timeout = 0.1
@@ -284,8 +286,9 @@ def wait(self):
comm.Barrier() # barrier after each MPI test.
#: ``multiproc_test()`` should be called by tests below.
#: ``multiproc_test()`` will work for either MPI runs or for local runs.
"""``multiproc_test()`` should be called by tests below.
``multiproc_test()`` will work for either MPI runs or for local runs.
"""
multiproc_test = mpi_multiproc_test if mpi else local_multiproc_test
@@ -1336,7 +1339,7 @@ def test_poll_lock_exception(tmpdir, monkeypatch, err_num, err_msg):
"""Test poll lock exception handling."""
def _lockf(fd, cmd, len, start, whence):
raise OSError(err_num, err_msg)
raise IOError(err_num, err_msg)
with tmpdir.as_cwd():
lockfile = "lockfile"
@@ -1348,7 +1351,7 @@ def _lockf(fd, cmd, len, start, whence):
if err_num in [errno.EAGAIN, errno.EACCES]:
assert not lock._poll_lock(fcntl.LOCK_EX)
else:
with pytest.raises(OSError, match=err_msg):
with pytest.raises(IOError, match=err_msg):
lock._poll_lock(fcntl.LOCK_EX)
monkeypatch.undo()

View File

@@ -238,7 +238,10 @@ def test_exclude(self, modulefile_content, module_configuration, host_architectu
assert len([x for x in content if "module load " in x]) == 1
with pytest.raises(FileNotFoundError):
# Catch "Exception" to avoid using FileNotFoundError on Python 3
# and IOError on Python 2 or common bases like EnvironmentError
# which are not officially documented
with pytest.raises(Exception):
modulefile_content(f"callpath target={host_architecture_str}")
content = modulefile_content(f"zmpi target={host_architecture_str}")

View File

@@ -1,6 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test class methods on Package objects.
This doesn't include methods on package *instances* (like do_patch(),
@@ -15,7 +16,6 @@
import llnl.util.filesystem as fs
import spack.binary_distribution
import spack.compilers
import spack.concretize
import spack.deptypes as dt
@@ -23,11 +23,15 @@
import spack.install_test
import spack.package
import spack.package_base
import spack.repo
import spack.spec
import spack.store
from spack.build_systems.generic import Package
from spack.error import InstallError
from spack.solver.input_analysis import NoStaticAnalysis, StaticAnalysis
@pytest.fixture(scope="module")
def mpi_names(mock_repo_path):
return [spec.name for spec in mock_repo_path.providers_for("mpi")]
@pytest.fixture()
@@ -49,94 +53,78 @@ def mpileaks_possible_deps(mock_packages, mpi_names):
return possible
@pytest.fixture(params=[NoStaticAnalysis, StaticAnalysis])
def mock_inspector(config, mock_packages, request):
inspector_cls = request.param
if inspector_cls is NoStaticAnalysis:
return inspector_cls(configuration=config, repo=mock_packages)
return inspector_cls(
configuration=config,
repo=mock_packages,
store=spack.store.STORE,
binary_index=spack.binary_distribution.BINARY_INDEX,
def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
expanded_possible_deps = pkg_cls.possible_dependencies(expand_virtuals=True)
assert mpileaks_possible_deps == expanded_possible_deps
assert {
"callpath": {"dyninst", "mpi"},
"dyninst": {"libdwarf", "libelf"},
"libdwarf": {"libelf"},
"libelf": set(),
"mpi": set(),
"mpileaks": {"callpath", "mpi"},
} == pkg_cls.possible_dependencies(expand_virtuals=False)
def test_possible_direct_dependencies(mock_packages, mpileaks_possible_deps):
pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
deps = pkg_cls.possible_dependencies(transitive=False, expand_virtuals=False)
assert {"callpath": set(), "mpi": set(), "mpileaks": {"callpath", "mpi"}} == deps
def test_possible_dependencies_virtual(mock_packages, mpi_names):
expected = dict(
(name, set(dep for dep in spack.repo.PATH.get_pkg_class(name).dependencies_by_name()))
for name in mpi_names
)
# only one mock MPI has a dependency
expected["fake"] = set()
assert expected == spack.package_base.possible_dependencies("mpi", transitive=False)
def test_possible_dependencies_missing(mock_packages):
pkg_cls = spack.repo.PATH.get_pkg_class("missing-dependency")
missing = {}
pkg_cls.possible_dependencies(transitive=True, missing=missing)
assert {"this-is-a-missing-dependency"} == missing["missing-dependency"]
def test_possible_dependencies_with_deptypes(mock_packages):
dtbuild1 = spack.repo.PATH.get_pkg_class("dtbuild1")
assert {
"dtbuild1": {"dtrun2", "dtlink2"},
"dtlink2": set(),
"dtrun2": set(),
} == dtbuild1.possible_dependencies(depflag=dt.LINK | dt.RUN)
assert {
"dtbuild1": {"dtbuild2", "dtlink2"},
"dtbuild2": set(),
"dtlink2": set(),
} == dtbuild1.possible_dependencies(depflag=dt.BUILD)
assert {"dtbuild1": {"dtlink2"}, "dtlink2": set()} == dtbuild1.possible_dependencies(
depflag=dt.LINK
)
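The deptype-filtered expectations above (link/run vs. build vs. link-only) can be pictured as filtering labeled edges. A tiny, self-contained sketch using the same `spack.deptypes` flags, with an edge list invented to loosely mirror the `dtbuild1` mock package:

```python
import spack.deptypes as dt

# (parent, child, depflag) edges
edges = [
    ("dtbuild1", "dtbuild2", dt.BUILD),
    ("dtbuild1", "dtlink2", dt.BUILD | dt.LINK),
    ("dtbuild1", "dtrun2", dt.RUN),
]

# Children reachable through link or run dependencies.
print([c for _, c, flag in edges if flag & (dt.LINK | dt.RUN)])  # ['dtlink2', 'dtrun2']
# Children reachable through build dependencies.
print([c for _, c, flag in edges if flag & dt.BUILD])            # ['dtbuild2', 'dtlink2']
```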
@pytest.fixture
def mpi_names(mock_inspector):
return [spec.name for spec in mock_inspector.providers_for("mpi")]
@pytest.mark.parametrize(
"pkg_name,fn_kwargs,expected",
[
(
"mpileaks",
{"expand_virtuals": True, "allowed_deps": dt.ALL},
{
"fake",
"mpileaks",
"multi-provider-mpi",
"callpath",
"dyninst",
"mpich2",
"libdwarf",
"zmpi",
"low-priority-provider",
"intel-parallel-studio",
"mpich",
"libelf",
},
),
(
"mpileaks",
{"expand_virtuals": False, "allowed_deps": dt.ALL},
{"callpath", "dyninst", "libdwarf", "libelf", "mpileaks"},
),
(
"mpileaks",
{"expand_virtuals": False, "allowed_deps": dt.ALL, "transitive": False},
{"callpath", "mpileaks"},
),
("dtbuild1", {"allowed_deps": dt.LINK | dt.RUN}, {"dtbuild1", "dtrun2", "dtlink2"}),
("dtbuild1", {"allowed_deps": dt.BUILD}, {"dtbuild1", "dtbuild2", "dtlink2"}),
("dtbuild1", {"allowed_deps": dt.LINK}, {"dtbuild1", "dtlink2"}),
],
)
def test_possible_dependencies(pkg_name, fn_kwargs, expected, mock_runtimes, mock_inspector):
"""Tests possible nodes of mpileaks, under different scenarios."""
expected.update(mock_runtimes)
result, *_ = mock_inspector.possible_dependencies(pkg_name, **fn_kwargs)
assert expected == result
def test_possible_dependencies_virtual(mock_inspector, mock_packages, mock_runtimes, mpi_names):
expected = set(mpi_names)
for name in mpi_names:
expected.update(dep for dep in mock_packages.get_pkg_class(name).dependencies_by_name())
expected.update(mock_runtimes)
real_pkgs, *_ = mock_inspector.possible_dependencies(
"mpi", transitive=False, allowed_deps=dt.ALL
)
assert expected == real_pkgs
def test_possible_dependencies_missing(mock_inspector):
result, *_ = mock_inspector.possible_dependencies("missing-dependency", allowed_deps=dt.ALL)
assert "this-is-a-missing-dependency" not in result
def test_possible_dependencies_with_multiple_classes(
mock_inspector, mock_packages, mpileaks_possible_deps
):
def test_possible_dependencies_with_multiple_classes(mock_packages, mpileaks_possible_deps):
pkgs = ["dt-diamond", "mpileaks"]
expected = set(mpileaks_possible_deps)
expected.update({"dt-diamond", "dt-diamond-left", "dt-diamond-right", "dt-diamond-bottom"})
expected.update(mock_packages.packages_with_tags("runtime"))
expected = mpileaks_possible_deps.copy()
expected.update(
{
"dt-diamond": set(["dt-diamond-left", "dt-diamond-right"]),
"dt-diamond-left": set(["dt-diamond-bottom"]),
"dt-diamond-right": set(["dt-diamond-bottom"]),
"dt-diamond-bottom": set(),
}
)
real_pkgs, *_ = mock_inspector.possible_dependencies(*pkgs, allowed_deps=dt.ALL)
assert set(expected) == real_pkgs
assert expected == spack.package_base.possible_dependencies(*pkgs)
def setup_install_test(source_paths, test_root):

View File

@@ -132,8 +132,7 @@ def test_reporters_extract_skipped(state):
parts = spack.reporters.extract.extract_test_parts("fake", outputs)
assert len(parts) == 1
assert parts[0]["completed"] == spack.reporters.extract.completed["skipped"]
parts[0]["completed"] == expected
def test_reporters_skip_new():

View File

@@ -198,7 +198,7 @@ def script_dir(sbang_line):
],
)
def test_shebang_interpreter_regex(shebang, interpreter):
assert sbang.get_interpreter(shebang) == interpreter
sbang.get_interpreter(shebang) == interpreter
def test_shebang_handling(script_dir, sbang_line):

View File

@@ -319,6 +319,7 @@ def test_dependents_and_dependencies_are_correct(self):
("mpich@1.0", "mpileaks ^mpich@2.0"),
("mpich%gcc", "mpileaks ^mpich%intel"),
("mpich%gcc@4.6", "mpileaks ^mpich%gcc@4.5"),
("mpich platform=test target=be", "mpileaks ^mpich platform=test target=fe"),
],
)
def test_unsatisfiable_cases(self, set_dependency, constraint_str, spec_str):
@@ -428,29 +429,31 @@ def test_copy_through_spec_build_interface(self):
c2 = s["mpileaks"]["mpileaks"].copy()
assert c0 == c1 == c2 == s
# Here is the graph with deptypes labeled (assume all packages have a 'dt'
# prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
# 'link', 'r' for 'run').
"""
Here is the graph with deptypes labeled (assume all packages have a 'dt'
prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
'link', 'r' for 'run').
# use -bl-> top
use -bl-> top
# top -b-> build1
# top -bl-> link1
# top -r-> run1
top -b-> build1
top -bl-> link1
top -r-> run1
# build1 -b-> build2
# build1 -bl-> link2
# build1 -r-> run2
build1 -b-> build2
build1 -bl-> link2
build1 -r-> run2
# link1 -bl-> link3
link1 -bl-> link3
# run1 -bl-> link5
# run1 -r-> run3
run1 -bl-> link5
run1 -r-> run3
# link3 -b-> build2
# link3 -bl-> link4
link3 -b-> build2
link3 -bl-> link4
# run3 -b-> build3
run3 -b-> build3
"""
@pytest.mark.parametrize(
"spec_str,deptypes,expected",

View File

@@ -460,6 +460,7 @@ def test_concrete_specs_which_satisfies_abstract(self, lhs, rhs, default_mock_co
("foo platform=linux", "platform=test os=redhat6 target=x86"),
("foo os=redhat6", "platform=test os=debian6 target=x86_64"),
("foo target=x86_64", "platform=test os=redhat6 target=x86"),
("foo arch=test-frontend-frontend", "platform=test os=frontend target=backend"),
("foo%intel", "%gcc"),
("foo%intel", "%gcc"),
("foo%gcc@4.3", "%gcc@4.4:4.6"),
@@ -486,6 +487,7 @@ def test_concrete_specs_which_satisfies_abstract(self, lhs, rhs, default_mock_co
("libelf+debug", "libelf~debug"),
("libelf+debug~foo", "libelf+debug+foo"),
("libelf debug=True", "libelf debug=False"),
("libelf platform=test target=be os=be", "libelf target=fe os=fe"),
("namespace=builtin.mock", "namespace=builtin"),
],
)
@@ -1989,26 +1991,3 @@ def test_equality_discriminate_on_propagation(lhs, rhs):
def test_comparison_multivalued_variants():
assert Spec("x=a") < Spec("x=a,b") < Spec("x==a,b") < Spec("x==a,b,c")
def test_comparison_after_breaking_hash_change():
# We simulate a breaking change in DAG hash computation in Spack. We have two specs that are
# entirely equal modulo DAG hash. When deserializing these specs, we don't want them to compare
# as equal, because DAG hash is used throughout in Spack to distinguish between specs
# (e.g. database, build caches, install dir).
s = Spec("example@=1.0")
s._mark_concrete(True)
# compute the dag hash and a change to it
dag_hash = s.dag_hash()
new_dag_hash = f"{'b' if dag_hash[0] == 'a' else 'a'}{dag_hash[1:]}"
before_breakage = s.to_dict()
after_breakage = s.to_dict()
after_breakage["spec"]["nodes"][0]["hash"] = new_dag_hash
assert before_breakage != after_breakage
x = Spec.from_dict(before_breakage)
y = Spec.from_dict(after_breakage)
assert x != y
assert len({x, y}) == 2

View File

@@ -306,7 +306,7 @@ def _specfile_for(spec_str, filename):
(
r"os=fe", # Various translations associated with the architecture
[Token(SpecTokens.KEY_VALUE_PAIR, value="os=fe")],
"arch=test-debian6-None",
"arch=test-redhat6-None",
),
(
r"os=default_os",
@@ -999,14 +999,14 @@ def test_disambiguate_hash_by_spec(spec1, spec2, constraint, mock_packages, monk
("x arch=linux-rhel7-ppc64le arch=linux-rhel7-x86_64", "two architectures"),
("y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64", "two architectures"),
("y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le", "two architectures"),
("x os=redhat6 os=debian6", "'os'"),
("x os=debian6 os=redhat6", "'os'"),
("x target=core2 target=x86_64", "'target'"),
("x target=x86_64 target=core2", "'target'"),
("x os=fe os=fe", "'os'"),
("x os=fe os=be", "'os'"),
("x target=fe target=fe", "'target'"),
("x target=fe target=be", "'target'"),
("x platform=test platform=test", "'platform'"),
# TODO: these two seem wrong: need to change how arch is initialized (should fail on os)
("x os=debian6 platform=test target=default_target os=redhat6", "two architectures"),
("x target=default_target platform=test os=redhat6 os=debian6", "'platform'"),
("x os=fe platform=test target=fe os=fe", "'platform'"),
("x target=be platform=test os=be os=fe", "'platform'"),
# Dependencies
("^[@foo] zlib", "edge attributes"),
("x ^[deptypes=link]foo ^[deptypes=run]foo", "conflicting dependency types"),

View File

@@ -125,7 +125,7 @@ def check_expand_archive(stage, stage_name, expected_file_list):
assert os.path.isfile(fn)
with open(fn, encoding="utf-8") as _file:
assert _file.read() == contents
_file.read() == contents
def check_fetch(stage, stage_name):

View File

@@ -20,7 +20,12 @@
datadir = os.path.join(spack_root, "lib", "spack", "spack", "test", "data", "compression")
ext_archive = {ext: f"Foo.{ext}" for ext in llnl.url.ALLOWED_ARCHIVE_TYPES if "TAR" not in ext}
ext_archive = {}
[
ext_archive.update({ext: ".".join(["Foo", ext])})
for ext in llnl.url.ALLOWED_ARCHIVE_TYPES
if "TAR" not in ext
]
# Spack does not use Python native handling for tarballs or zip
# Don't test tarballs or zip in native test
native_archive_list = [

View File

@@ -204,13 +204,13 @@ def test_no_editor():
def assert_exec(exe, args):
assert False
with pytest.raises(OSError, match=r"No text editor found.*"):
with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
ed.editor("/path/to/file", exec_fn=assert_exec)
def assert_exec(exe, args):
return False
with pytest.raises(OSError, match=r"No text editor found.*"):
with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
ed.editor("/path/to/file", exec_fn=assert_exec)
@@ -220,5 +220,5 @@ def test_exec_fn_executable(editor_var, good_exe, bad_exe):
assert ed.editor(exec_fn=ed.executable)
os.environ[editor_var] = bad_exe
with pytest.raises(OSError, match=r"No text editor found.*"):
with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
ed.editor(exec_fn=ed.executable)

View File

@@ -201,15 +201,3 @@ def test_drop_redundant_rpath(tmpdir, binary_with_rpaths):
new_rpaths = elf.get_rpaths(binary)
assert set(existing_dirs).issubset(new_rpaths)
assert set(non_existing_dirs).isdisjoint(new_rpaths)
def test_elf_invalid_e_shnum(tmp_path):
# from llvm/test/Object/Inputs/invalid-e_shnum.elf
path = tmp_path / "invalid-e_shnum.elf"
with open(path, "wb") as file:
file.write(
b"\x7fELF\x02\x010000000000\x03\x00>\x0000000000000000000000"
b"\x00\x00\x00\x00\x00\x00\x00\x000000000000@\x000000"
)
with open(path, "rb") as file, pytest.raises(elf.ElfParsingError):
elf.parse_elf(file)

Some files were not shown because too many files have changed in this diff.