Compare commits

..

5 Commits

Author            SHA1        Message                                                  Date
Wouter Deconinck  474cca3005  python: urls return [None] if not pypi                   2024-08-11 09:54:18 -05:00
Wouter Deconinck  868cb442e9  python: urls return [] if not pypi                       2024-08-10 17:13:07 -05:00
wdconinc          c0e64718c7  [@spackbot] updating style on behalf of wdconinc         2024-08-10 21:28:42 +00:00
Wouter Deconinck  8b2749f95c  python: return urls with also dash -> underscore         2024-08-10 14:16:32 -07:00
Wouter Deconinck  fbfbb9710d  package_base: all_urls includes not only first of urls   2024-08-10 14:08:32 -07:00
566 changed files with 8331 additions and 8532 deletions

View File

@@ -113,7 +113,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85
uses: docker/build-push-action@16ebe778df0e7752d2cfcbd924afdbbd89c1a755
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}

View File

@@ -16,27 +16,38 @@ jobs:
matrix:
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
concretizer: ['clingo']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
include:
- python-version: '3.11'
os: ubuntu-latest
concretizer: original
on_develop: ${{ github.ref == 'refs/heads/develop' }}
- python-version: '3.6'
os: ubuntu-20.04
concretizer: clingo
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.7'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.8'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.9'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.10'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.11'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
steps:
@@ -74,6 +85,7 @@ jobs:
- name: Run unit tests
env:
SPACK_PYTHON: python
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
SPACK_TEST_PARALLEL: 2
COVERAGE: true
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
@@ -170,6 +182,7 @@ jobs:
- name: Run unit tests (full suite with coverage)
env:
COVERAGE: true
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
@@ -200,6 +213,7 @@ jobs:
brew install dash fish gcc gnupg2 kcov
- name: Run unit tests
env:
SPACK_TEST_SOLVER: clingo
SPACK_TEST_PARALLEL: 4
run: |
git --version

View File

@@ -170,6 +170,23 @@ config:
# If set to true, Spack will use ccache to cache C compiles.
ccache: false
# The concretization algorithm to use in Spack. Options are:
#
# 'clingo': Uses a logic solver under the hood to solve DAGs with full
# backtracking and optimization for user preferences. Spack will
# try to bootstrap the logic solver, if not already available.
#
# 'original': Spack's original greedy, fixed-point concretizer. This
# algorithm can make decisions too early and will not backtrack
# sufficiently for many specs. This will soon be deprecated in
# favor of clingo.
#
# See `concretizer.yaml` for more settings you can fine-tune when
# using clingo.
concretizer: clingo
# How long to wait to lock the Spack installation database. This lock is used
# when Spack needs to manage its own package metadata and all operations are
# expected to complete within the default time limit. The timeout should

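For reference, this option is read like the other ``config:`` flags that appear later in this comparison (for example the ``config:ccache`` lookup in ``set_wrapper_variables``). A minimal, hypothetical sketch of checking the setting from Python, assuming the ``spack.config.get`` call used elsewhere in this diff:

.. code-block:: python

   import spack.config

   # Hypothetical check; mirrors the config:ccache lookup shown further below.
   concretizer = spack.config.get("config:concretizer", "clingo")
   if concretizer == "original":
       print("greedy fixed-point concretizer (to be deprecated)")
   else:
       print("clingo-based concretizer")
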
View File

@@ -20,14 +20,12 @@ packages:
awk: [gawk]
armci: [armcimpi]
blas: [openblas, amdblis]
c: [gcc]
cxx: [gcc]
D: [ldc]
daal: [intel-oneapi-daal]
elf: [elfutils]
fftw-api: [fftw, amdfftw]
flame: [libflame, amdlibflame]
fortran: [gcc]
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
fuse: [libfuse]
gl: [glx, osmesa]

View File

@@ -1,5 +1,6 @@
config:
locks: false
concretizer: clingo
build_stage::
- '$spack/.staging'
stage_name: '{name}-{version}-{hash:7}'

View File

@@ -206,7 +206,6 @@ def setup(sphinx):
("py:class", "six.moves.urllib.parse.ParseResult"),
("py:class", "TextIO"),
("py:class", "hashlib._Hash"),
("py:class", "concurrent.futures._base.Executor"),
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),

View File

@@ -1263,11 +1263,6 @@ Git fetching supports the following parameters to ``version``:
option ``--depth 1`` will be used if the version of git and the specified
transport protocol support it, and ``--single-branch`` will be used if the
version of git supports it.
* ``git_sparse_paths``: Use ``sparse-checkout`` to only clone these relative paths.
This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
large repositories that have separate portions that can be built independently.
  If any of the provided paths are directories, all of their subdirectories and files
  will also be cloned.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1366,41 +1361,6 @@ Submodules
For more information about git submodules see the manpage of git: ``man
git-submodule``.
Sparse-Checkout
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
sparse-checkout feature. This will only clone the paths that are specified in the
``git_sparse_paths`` attribute for the package along with the files in the top level directory.
This feature allows you to only clone what you need from a large repository.
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
If ``git_sparse_paths`` is supplied and the git version is too old,
a warning will be issued and that package will fall back to the standard cloning operations instead.
``git_sparse_paths`` can be supplied as a list of paths, as a callable used by individual versions,
or as a more complex package attribute using the ``@property`` decorator. A callable implementation
of ``git_sparse_paths`` must return a list of paths.
.. code-block:: python

   def sparse_path_function(package):
       """A callable function that can be used inside a version."""
       # paths can be directories or files; all subdirectories and files are included
       paths = ["doe", "rae", "me/file.cpp"]
       if package.spec.version > Version("1.2.0"):
           paths.extend(["fae"])
       return paths

   class MyPackage(Package):
       # can also be a package attribute that will be used if not specified in versions
       git_sparse_paths = ["doe", "rae"]

       # use the package attribute
       version("1.0.0")
       version("1.1.0")

       # use the function
       version("1.1.5", git_sparse_paths=sparse_path_function)
       version("1.2.0", git_sparse_paths=sparse_path_function)
       version("1.2.5", git_sparse_paths=sparse_path_function)
.. _github-fetch:
GitHub
^^^^^^

View File

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
astunparse
----------------

View File

@@ -47,11 +47,7 @@ def decorator(factory):
def partial_uarch(
name: str = "",
vendor: str = "",
features: Optional[Set[str]] = None,
generation: int = 0,
cpu_part: str = "",
name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
) -> Microarchitecture:
"""Construct a partial microarchitecture, from information gathered during system scan."""
return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
features=features or set(),
compilers={},
generation=generation,
cpu_part=cpu_part,
)
@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
return partial_uarch(
vendor=_canonicalize_aarch64_vendor(data),
features=_feature_set(data, key="Features"),
cpu_part=data.get("CPU part", ""),
)
if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
generic_candidates = [c for c in candidates if c.vendor == "generic"]
best_generic = max(generic_candidates, key=sorting_fn)
# Relevant for AArch64. Filter on "cpu_part" if we have any match
if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
# Filter the candidates to be descendant of the best generic candidate.
# This is to avoid that the lack of a niche feature that can be disabled
# from e.g. BIOS prevents detection of a reasonably performant architecture

View File

@@ -2,7 +2,9 @@
# Archspec Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Types and functions to manage information on CPU microarchitectures."""
"""Types and functions to manage information
on CPU microarchitectures.
"""
import functools
import platform
import re
@@ -63,24 +65,21 @@ class Microarchitecture:
passed in as argument above.
* versions: versions that support this micro-architecture.
generation (int): generation of the micro-architecture, if relevant.
cpu_part (str): cpu part of the architecture, if relevant.
generation (int): generation of the micro-architecture, if
relevant.
"""
# pylint: disable=too-many-arguments,too-many-instance-attributes
# pylint: disable=too-many-arguments
#: Aliases for micro-architecture's features
feature_aliases = FEATURE_ALIASES
def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.name = name
self.parents = parents
self.vendor = vendor
self.features = features
self.compilers = compilers
# Only relevant for PowerPC
self.generation = generation
# Only relevant for AArch64
self.cpu_part = cpu_part
# Cache the ancestor computation
self._ancestors = None
@@ -112,7 +111,6 @@ def __eq__(self, other):
and self.parents == other.parents # avoid ancestors here
and self.compilers == other.compilers
and self.generation == other.generation
and self.cpu_part == other.cpu_part
)
@coerce_target_names
@@ -145,8 +143,7 @@ def __repr__(self):
cls_name = self.__class__.__name__
fmt = (
cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
"{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
"cpu_part={0.cpu_part!r})"
"{0.features!r}, {0.compilers!r}, {0.generation!r})"
)
return fmt.format(self)
@@ -193,7 +190,6 @@ def to_dict(self):
"generation": self.generation,
"parents": [str(x) for x in self.parents],
"compilers": self.compilers,
"cpupart": self.cpu_part,
}
@staticmethod
@@ -206,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
features=set(data["features"]),
compilers=data.get("compilers", {}),
generation=data.get("generation", 0),
cpu_part=data.get("cpupart", ""),
)
def optimization_flags(self, compiler, version):
@@ -365,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
features = set(values["features"])
compilers = values.get("compilers", {})
generation = values.get("generation", 0)
cpu_part = values.get("cpupart", "")
targets[name] = Microarchitecture(
name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
)
targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
known_targets = {}
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]

View File

@@ -2225,14 +2225,10 @@
],
"nvhpc": [
{
"versions": "21.11:23.8",
"versions": "21.11:",
"name": "zen3",
"flags": "-tp {name}",
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
},
{
"versions": "23.9:",
"flags": "-tp {name}"
"warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
}
]
}
@@ -2715,8 +2711,7 @@
"flags": "-mcpu=thunderx2t99"
}
]
},
"cpupart": "0x0af"
}
},
"a64fx": {
"from": ["armv8.2a"],
@@ -2784,8 +2779,7 @@
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
}
]
},
"cpupart": "0x001"
}
},
"cortex_a72": {
"from": ["aarch64"],
@@ -2822,8 +2816,7 @@
"flags" : "-mcpu=cortex-a72"
}
]
},
"cpupart": "0xd08"
}
},
"neoverse_n1": {
"from": ["cortex_a72", "armv8.2a"],
@@ -2909,8 +2902,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd0c"
}
},
"neoverse_v1": {
"from": ["neoverse_n1", "armv8.4a"],
@@ -2934,6 +2926,8 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
@@ -3034,8 +3028,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd40"
}
},
"neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"],
@@ -3059,10 +3052,13 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
@@ -3070,12 +3066,18 @@
"sb",
"dcpodp",
"sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
"bf16",
"dgh"
],
"compilers" : {
"gcc": [
@@ -3100,19 +3102,15 @@
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:11.3.99",
"versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.4:11.99",
"flags" : "-mcpu=neoverse-v2"
},
{
"versions": "12.0:12.2.99",
"versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
},
{
"versions": "12.3:",
"versions": "13.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
@@ -3147,113 +3145,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd4f"
},
"neoverse_n2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"dcpodp",
"sve2",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:10.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-n1",
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd49"
}
},
"m1": {
"from": ["armv8.4a"],
@@ -3319,8 +3211,7 @@
"flags" : "-mcpu=apple-m1"
}
]
},
"cpupart": "0x022"
}
},
"m2": {
"from": ["m1", "armv8.5a"],
@@ -3398,8 +3289,7 @@
"flags" : "-mcpu=apple-m2"
}
]
},
"cpupart": "0x032"
}
},
"arm": {
"from": [],

View File

@@ -52,9 +52,6 @@
}
}
}
},
"cpupart": {
"type": "string"
}
},
"required": [
@@ -110,4 +107,4 @@
"additionalProperties": false
}
}
}
}

View File

@@ -1624,12 +1624,6 @@ def remove_linked_tree(path):
shutil.rmtree(os.path.realpath(path), **kwargs)
os.unlink(path)
else:
if sys.platform == "win32":
# Adding this prefix allows shutil to remove long paths on windows
# https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
long_path_pfx = "\\\\?\\"
if not path.startswith(long_path_pfx):
path = long_path_pfx + path
shutil.rmtree(path, **kwargs)

View File

@@ -84,6 +84,20 @@ def index_by(objects, *funcs):
return result
def caller_locals():
"""This will return the locals of the *parent* of the caller.
This allows a function to insert variables into its caller's
scope. Yes, this is some black magic, and yes it's useful
for implementing things like depends_on and provides.
"""
# Passing zero here skips line context for speed.
stack = inspect.stack(0)
try:
return stack[2][0].f_locals
finally:
del stack
def attr_setdefault(obj, name, value):
"""Like dict.setdefault, but for objects."""
if not hasattr(obj, name):

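The ``caller_locals`` helper above is easiest to see with a tiny, hypothetical example (not actual Spack directive code): when a helper that calls ``caller_locals()`` is itself invoked from a class body, it hands back that class body's namespace, which is how directive-like functions such as ``depends_on`` can inject names into the calling scope.

.. code-block:: python

   from llnl.util.lang import caller_locals  # the helper shown above

   def remember(name, value):
       # caller_locals() returns the namespace of whoever called remember(),
       # here the body of the Demo class below
       caller_locals()[name] = value

   class Demo:
       remember("greeting", "hello")  # injects 'greeting' into the class namespace

   print(Demo.greeting)  # -> "hello"
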
View File

@@ -10,7 +10,6 @@
import errno
import io
import multiprocessing
import multiprocessing.connection
import os
import re
import select

View File

@@ -42,7 +42,6 @@ def _search_duplicate_compilers(error_cls):
import inspect
import io
import itertools
import os
import pathlib
import pickle
import re
@@ -211,11 +210,6 @@ def _search_duplicate_compilers(error_cls):
group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
)
#: Sanity checks on packages.yaml
config_repos = AuditClass(
group="configs", tag="CFG-REPOS", description="Sanity checks on repositories", kwargs=()
)
@config_packages
def _search_duplicate_specs_in_externals(error_cls):
@@ -373,27 +367,6 @@ def _ensure_all_virtual_packages_have_default_providers(error_cls):
]
@config_repos
def _ensure_no_folders_without_package_py(error_cls):
"""Check that we don't leave any folder without a package.py in repos"""
errors = []
for repository in spack.repo.PATH.repos:
missing = []
for entry in os.scandir(repository.packages_path):
if not entry.is_dir():
continue
package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
if not package_py.exists():
missing.append(entry.path)
if missing:
summary = (
f"The '{repository.namespace}' repository misses a package.py file"
f" in the following folders"
)
errors.append(error_cls(summary=summary, details=[f"{x}" for x in missing]))
return errors
def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
@@ -554,7 +527,7 @@ def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
badname_regex, errors = re.compile(r"[_A-Z]"), []
for pkg_name in pkgs:
if badname_regex.search(pkg_name):
error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
error_msg = "Package name '{}' is either lowercase or conatine '_'".format(pkg_name)
errors.append(error_cls(error_msg, []))
return errors

File diff suppressed because it is too large

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import importlib
import os.path
import re
import sys
@@ -29,7 +28,7 @@
def _python_import(module: str) -> bool:
try:
importlib.import_module(module)
__import__(module)
except ImportError:
return False
return True

View File

@@ -1,154 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap concrete specs for clingo
Spack uses clingo to concretize specs. When clingo itself needs to be bootstrapped from sources,
we need to rely on another mechanism to get a concrete spec that fits the current host.
This module contains the logic to get a concrete spec for clingo, starting from a prototype
JSON file for a similar platform.
"""
import pathlib
import sys
from typing import Dict, Optional, Tuple
import archspec.cpu
import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse
from .config import spec_for_current_python
class ClingoBootstrapConcretizer:
def __init__(self, configuration):
self.host_platform = spack.platforms.host()
self.host_os = self.host_platform.operating_system("frontend")
self.host_target = archspec.cpu.host().family
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
self.host_architecture.target = str(self.host_target)
self.host_compiler = self._valid_compiler_or_raise()
self.host_python = self.python_external_spec()
if str(self.host_platform) == "linux":
self.host_libc = self.libc_external_spec()
self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
if str(self.host_platform) == "linux":
compiler_name = "gcc"
elif str(self.host_platform) == "darwin":
compiler_name = "apple-clang"
elif str(self.host_platform) == "windows":
compiler_name = "msvc"
elif str(self.host_platform) == "freebsd":
compiler_name = "clang"
else:
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
candidates = spack.compilers.compilers_for_spec(
compiler_name, arch_spec=self.host_architecture
)
if not candidates:
raise RuntimeError(
f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
)
candidates.sort(key=lambda x: x.spec.version, reverse=True)
return candidates[0]
def _externals_from_yaml(
self, configuration: "spack.config.Configuration"
) -> Tuple[Optional["spack.spec.Spec"], Optional["spack.spec.Spec"]]:
packages_yaml = configuration.get("packages")
requirements = {"cmake": "@3.20:", "bison": "@2.5:"}
selected: Dict[str, Optional["spack.spec.Spec"]] = {"cmake": None, "bison": None}
for pkg_name in ["cmake", "bison"]:
if pkg_name not in packages_yaml:
continue
candidates = packages_yaml[pkg_name].get("externals", [])
for candidate in candidates:
s = spack.spec.Spec(candidate["spec"], external_path=candidate["prefix"])
if not s.satisfies(requirements[pkg_name]):
continue
if not s.intersects(f"%{self.host_compiler.spec}"):
continue
if not s.intersects(f"arch={self.host_architecture}"):
continue
selected[pkg_name] = self._external_spec(s)
break
return selected["cmake"], selected["bison"]
def prototype_path(self) -> pathlib.Path:
"""Path to a prototype concrete specfile for clingo"""
parent_dir = pathlib.Path(__file__).parent
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-{self.host_target}.json"
if str(self.host_platform) == "linux":
# Using aarch64 as a fallback, since it has gnuconfig (x86_64 doesn't have it)
if not result.exists():
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-aarch64.json"
elif str(self.host_platform) == "freebsd":
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-amd64.json"
elif not result.exists():
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
return result
def concretize(self) -> "spack.spec.Spec":
# Read the prototype and mark it NOT concrete
s = spack.spec.Spec.from_specfile(str(self.prototype_path()))
s._mark_concrete(False)
# Tweak it to conform to the host architecture
for node in s.traverse():
node.architecture.os = str(self.host_os)
node.compiler = self.host_compiler.spec
node.architecture = self.host_architecture
if node.name == "gcc-runtime":
node.versions = self.host_compiler.spec.versions
for edge in spack.traverse.traverse_edges([s], cover="edges"):
if edge.spec.name == "python":
edge.spec = self.host_python
if edge.spec.name == "bison" and self.external_bison:
edge.spec = self.external_bison
if edge.spec.name == "cmake" and self.external_cmake:
edge.spec = self.external_cmake
if "libc" in edge.virtuals:
edge.spec = self.host_libc
s._finalize_concretization()
# Work around the fact that the installer calls Spec.dependents() and
# we modified edges inconsistently
return s.copy()
def python_external_spec(self) -> "spack.spec.Spec":
"""Python external spec corresponding to the current running interpreter"""
result = spack.spec.Spec(spec_for_current_python(), external_path=sys.exec_prefix)
return self._external_spec(result)
def libc_external_spec(self) -> "spack.spec.Spec":
result = self.host_compiler.default_libc
return self._external_spec(result)
def _external_spec(self, initial_spec) -> "spack.spec.Spec":
initial_spec.namespace = "builtin"
initial_spec.compiler = self.host_compiler.spec
initial_spec.architecture = self.host_architecture
for flag_type in spack.spec.FlagMap.valid_compiler_flags():
initial_spec.compiler_flags[flag_type] = []
return spack.spec.parse_with_version_concrete(initial_spec)

View File

@@ -143,7 +143,11 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()
new_compilers = spack.compilers.find_new_compilers(
mixed_toolchain=sys.platform == "darwin"
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers)
@contextlib.contextmanager

View File

@@ -54,7 +54,6 @@
import spack.version
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python
#: Name of the file containing metadata about the bootstrapping source
@@ -269,13 +268,15 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Try to build and install from sources
with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
if module == "clingo":
bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
concrete_spec = bootstrapper.concretize()
else:
concrete_spec = spack.spec.Spec(
abstract_spec_str + " ^" + spec_for_current_python()
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
concrete_spec._old_concretize( # pylint: disable=protected-access
deprecation_warning=False
)
else:
concrete_spec.concretize()
msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
@@ -302,7 +303,14 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
# might reduce compilation time by a fair amount
_add_externals_if_missing()
concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
concrete_spec = spack.spec.Spec(abstract_spec_str)
if concrete_spec.name == "patchelf":
concrete_spec._old_concretize( # pylint: disable=protected-access
deprecation_warning=False
)
else:
concrete_spec.concretize()
msg = "[BOOTSTRAP] Try installing '{0}' from sources"
tty.debug(msg.format(abstract_spec_str))
with spack.config.override(self.mirror_scope):

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -124,7 +124,7 @@ def _development_requirements() -> List[RequiredResponseType]:
# Ensure we trigger environment modifications if we have an environment
if BootstrapEnvironment.spack_yaml().exists():
with BootstrapEnvironment() as env:
env.load()
env.update_syspath_and_environ()
return [
_required_executable(

View File

@@ -457,12 +457,9 @@ def set_wrapper_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
# Find ccache binary and hand it to build environment
if spack.config.get("config:ccache"):
# Enable ccache in the compiler wrapper
env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
else:
# Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
env.set("CCACHE_DISABLE", "1")
# Gather information about various types of dependencies
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common utilities for managing intel oneapi packages."""
import getpass
import os
import platform
import shutil
@@ -12,7 +13,6 @@
from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
from llnl.util.link_tree import LinkTree
import spack.util.path
from spack.build_environment import dso_suffix
from spack.directives import conflicts, license, redistribute, variant
from spack.package_base import InstallError
@@ -99,7 +99,7 @@ def install_component(self, installer_path):
# with other install depends on the userid. For root, we
# delete the installercache before and after install. For
# non root we redefine the HOME environment variable.
if spack.util.path.get_user() == "root":
if getpass.getuser() == "root":
shutil.rmtree("/var/intel/installercache", ignore_errors=True)
bash = Executable("bash")
@@ -122,7 +122,7 @@ def install_component(self, installer_path):
self.prefix,
)
if spack.util.path.get_user() == "root":
if getpass.getuser() == "root":
shutil.rmtree("/var/intel/installercache", ignore_errors=True)
# Some installers have a bug and do not return an error code when failing

View File

@@ -354,10 +354,18 @@ def homepage(cls) -> Optional[str]: # type: ignore[override]
return None
@lang.classproperty
def url(cls) -> Optional[str]:
def urls(cls) -> Optional[List[str]]:
if cls.pypi:
return f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"
return None
urls = [f"https://files.pythonhosted.org/packages/source/{cls.pypi[0]}/{cls.pypi}"]
assert cls.pypi.count("/") == 1, "PyPI class attribute must include a single slash"
name, file = cls.pypi.split("/")
name_dash_count = name.count("-")
if name_dash_count > 0:
# replace all but last dash with underscores for pypi.org listing changes
pypi = "/".join([name, file.replace("-", "_", name_dash_count)])
urls.append(f"https://files.pythonhosted.org/packages/source/{pypi[0]}/{pypi}")
return urls
return [None]
@lang.classproperty
def list_url(cls) -> Optional[str]: # type: ignore[override]

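The effect of the ``urls`` change above (the subject of the commits in this comparison) is easiest to see with a small standalone sketch using a hypothetical ``pypi`` value; the second URL replaces the dashes that belong to the package name with underscores, matching newer pypi.org sdist filenames:

.. code-block:: python

   # Hypothetical pypi string in the usual "<name>/<sdist filename>" form
   pypi = "my-pkg/my-pkg-1.0.tar.gz"

   urls = [f"https://files.pythonhosted.org/packages/source/{pypi[0]}/{pypi}"]
   name, file = pypi.split("/")
   dashes = name.count("-")
   if dashes > 0:
       # replace only the dashes that come from the package name, keeping
       # the dash that separates the name from the version
       alt = "/".join([name, file.replace("-", "_", dashes)])
       urls.append(f"https://files.pythonhosted.org/packages/source/{alt[0]}/{alt}")

   # urls[1] ends in .../m/my-pkg/my_pkg-1.0.tar.gz
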
View File

@@ -12,7 +12,6 @@
from llnl.util import lang
import spack.build_environment
import spack.multimethod
#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS = {}
@@ -296,11 +295,7 @@ def _decorator(fn):
return _decorator
class BuilderMeta(
PhaseCallbacksMeta,
spack.multimethod.MultiMethodMeta,
type(collections.abc.Sequence), # type: ignore
):
class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)): # type: ignore
pass

View File

@@ -9,11 +9,11 @@
import llnl.util.lang
from llnl.util.filesystem import mkdirp
from llnl.util.symlink import symlink
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.paths
import spack.util.file_cache
import spack.util.path
@@ -74,6 +74,23 @@ def store(self, fetcher, relative_dest):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def symlink(self, mirror_ref):
"""Symlink a human readible path in our mirror to the actual
storage location."""
cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
storage_path = os.path.join(self.root, mirror_ref.storage_path)
relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
if not os.path.exists(cosmetic_path):
if os.path.lexists(cosmetic_path):
# In this case the link itself exists but it is broken: remove
# it and recreate it (in order to fix any symlinks broken prior
# to https://github.com/spack/spack/pull/13908)
os.unlink(cosmetic_path)
mkdirp(os.path.dirname(cosmetic_path))
symlink(relative_dst, cosmetic_path)
#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (

View File

@@ -38,7 +38,6 @@
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
@@ -1108,7 +1107,7 @@ def main_script_replacements(cmd):
if cdash_handler and cdash_handler.auth_token:
try:
cdash_handler.populate_buildgroup(all_job_names)
except (SpackError, HTTPError, URLError, TimeoutError) as err:
except (SpackError, HTTPError, URLError) as err:
tty.warn(f"Problem populating buildgroup: {err}")
else:
tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1371,6 +1370,15 @@ def can_verify_binaries():
return len(gpg_util.public_keys()) >= 1
def _push_to_build_cache(spec: spack.spec.Spec, sign_binaries: bool, mirror_url: str) -> None:
"""Unchecked version of the public API, for easier mocking"""
bindist.push_or_raise(
spec,
spack.mirror.Mirror.from_url(mirror_url).push_url,
bindist.PushOptions(force=True, unsigned=not sign_binaries),
)
def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
"""Push one or more binary packages to the mirror.
@@ -1381,15 +1389,20 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
sign_binaries: If True, spack will attempt to sign binary package before pushing.
"""
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
signing_key = bindist.select_signing_key() if sign_binaries else None
mirror = spack.mirror.Mirror.from_url(mirror_url)
try:
with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
_push_to_build_cache(spec, sign_binaries, mirror_url)
return True
except bindist.PushToBuildCacheError as e:
tty.error(f"Problem writing to {mirror_url}: {e}")
tty.error(str(e))
return False
except Exception as e:
# TODO (zackgalbreath): write an adapter for boto3 exceptions so we can catch a specific
# exception instead of parsing str(e)...
msg = str(e)
if any(x in msg for x in ["Access Denied", "InvalidAccessKeyId"]):
tty.error(f"Permission problem writing to {mirror_url}: {msg}")
return False
raise
def remove_other_mirrors(mirrors_to_keep, scope=None):
@@ -1435,6 +1448,10 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
job_log_dir: path into which build log should be copied
"""
tty.debug(f"job spec: {job_spec}")
if not job_spec:
msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
tty.error(msg)
return
try:
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
@@ -2066,7 +2083,7 @@ def read_broken_spec(broken_spec_url):
"""
try:
_, _, fs = web_util.read_from_url(broken_spec_url)
except web_util.SpackWebError:
except (URLError, web_util.SpackWebError, HTTPError):
tty.warn(f"Unable to read broken spec from {broken_spec_url}")
return None

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import importlib
import os
import re
import sys
@@ -115,8 +114,8 @@ def get_module(cmd_name):
try:
# Try to import the command from the built-in directory
module_name = f"{__name__}.{pname}"
module = importlib.import_module(module_name)
module_name = "%s.%s" % (__name__, pname)
module = __import__(module_name, fromlist=[pname, SETUP_PARSER, DESCRIPTION], level=0)
tty.debug("Imported {0} from built-in commands".format(pname))
except ImportError:
module = spack.extensions.get_module(cmd_name)

View File

@@ -3,24 +3,28 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import copy
import glob
import hashlib
import json
import multiprocessing
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
from typing import List, Tuple
from typing import Dict, List, Optional, Tuple, Union
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.lang import elide_list, stable_partition
from llnl.util.lang import elide_list
import spack.binary_distribution as bindist
import spack.cmd
import spack.config
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.mirror
import spack.oci.oci
import spack.oci.opener
@@ -31,12 +35,28 @@
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import (
Digest,
ImageReference,
default_config,
default_index_tag,
default_manifest,
default_tag,
tag_is_spec,
)
from spack.oci.oci import (
copy_missing_layers_with_retry,
get_manifest_and_config_with_retry,
list_tags,
upload_blob_with_retry,
upload_manifest_with_retry,
)
from spack.spec import Spec, save_dependency_specfiles
description = "create, download and install binary packages"
@@ -92,17 +112,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
"Alternatively, one can decide to build a cache for only the package or only the "
"dependencies",
)
with_or_without_build_deps = push.add_mutually_exclusive_group()
with_or_without_build_deps.add_argument(
"--with-build-dependencies",
action="store_true",
help="include build dependencies in the buildcache",
)
with_or_without_build_deps.add_argument(
"--without-build-dependencies",
action="store_true",
help="exclude build dependencies from the buildcache",
)
push.add_argument(
"--fail-fast",
action="store_true",
@@ -320,6 +329,39 @@ def _format_spec(spec: Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
def _progress(i: int, total: int):
if total > 1:
digits = len(str(total))
return f"[{i+1:{digits}}/{total}] "
return ""
class NoPool:
def map(self, func, args):
return [func(a) for a in args]
def starmap(self, func, args):
return [func(*a) for a in args]
def __enter__(self):
return self
def __exit__(self, *args):
pass
MaybePool = Union[multiprocessing.pool.Pool, NoPool]
def _make_pool() -> MaybePool:
"""Can't use threading because it's unsafe, and can't use spawned processes because of globals.
That leaves only forking"""
if multiprocessing.get_start_method() == "fork":
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
else:
return NoPool()
def _skip_no_redistribute_for_public(specs):
remaining_specs = list()
removed_specs = list()
@@ -339,45 +381,6 @@ def _skip_no_redistribute_for_public(specs):
return remaining_specs
class PackagesAreNotInstalledError(spack.error.SpackError):
"""Raised when a list of specs is not installed but picked to be packaged."""
def __init__(self, specs: List[Spec]):
super().__init__(
"Cannot push non-installed packages",
", ".join(elide_list([_format_spec(s) for s in specs], 5)),
)
class PackageNotInstalledError(spack.error.SpackError):
"""Raised when a spec is not installed but picked to be packaged."""
def _specs_to_be_packaged(
requested: List[Spec], things_to_install: str, build_deps: bool
) -> List[Spec]:
"""Collect all non-external with or without roots and dependencies"""
if "dependencies" not in things_to_install:
deptype = dt.NONE
elif build_deps:
deptype = dt.ALL
else:
deptype = dt.RUN | dt.LINK | dt.TEST
specs = [
s
for s in traverse.traverse_nodes(
requested,
root="package" in things_to_install,
deptype=deptype,
order="breadth",
key=traverse.by_dag_hash,
)
if not s.external
]
specs.reverse()
return specs
def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
@@ -391,8 +394,13 @@ def push_fn(args):
else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
mirror = args.mirror
assert isinstance(mirror, spack.mirror.Mirror)
mirror: spack.mirror.Mirror = args.mirror
# Check if this is an OCI image.
try:
target_image = spack.oci.oci.image_from_mirror(mirror)
except ValueError:
target_image = None
push_url = mirror.push_url
@@ -403,52 +411,92 @@ def push_fn(args):
unsigned = not (args.key or args.signed)
# For OCI images, we require dependencies to be pushed for now.
if mirror.push_url.startswith("oci://") and not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
unsigned = True
if target_image:
if "dependencies" not in args.things_to_install:
tty.die("Dependencies must be pushed for OCI images.")
if not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
# Select a signing key, or None if unsigned.
signing_key = None if unsigned else (args.key or bindist.select_signing_key())
specs = _specs_to_be_packaged(
# This is a list of installed, non-external specs.
specs = bindist.specs_to_be_packaged(
roots,
things_to_install=args.things_to_install,
build_deps=args.with_build_dependencies or not args.without_build_dependencies,
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
if not args.private:
specs = _skip_no_redistribute_for_public(specs)
# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.
if len(specs) > 1:
tty.info(f"Selected {len(specs)} specs to push to {push_url}")
# Pushing not installed specs is an error. Either fail fast or populate the error list and
# push installed package in best effort mode.
failed: List[Tuple[Spec, BaseException]] = []
with spack.store.STORE.db.read_transaction():
if any(not s.installed for s in specs):
specs, not_installed = stable_partition(specs, lambda s: s.installed)
if args.fail_fast:
raise PackagesAreNotInstalledError(not_installed)
else:
failed.extend(
(s, PackageNotInstalledError("package not installed")) for s in not_installed
failed = []
# TODO: unify this logic in the future.
if target_image:
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
skipped, base_images, checksums = _push_oci(
target_image=target_image,
base_image=base_image,
installed_specs_with_deps=specs,
force=args.force,
tmpdir=tmpdir,
pool=pool,
)
# Apart from creating manifests for each individual spec, we allow users to create a
# separate image tag for all root specs and their runtime dependencies.
if args.tag:
tagged_image = target_image.with_tag(args.tag)
# _push_oci may not populate base_images if binaries were already in the registry
for spec in roots:
_update_base_images(
base_image=base_image,
target_image=target_image,
spec=spec,
base_image_cache=base_images,
)
_put_manifest(base_images, checksums, tagged_image, tmpdir, None, None, *roots)
tty.info(f"Tagged {tagged_image}")
else:
skipped = []
for i, spec in enumerate(specs):
try:
bindist.push_or_raise(
spec,
push_url,
bindist.PushOptions(
force=args.force,
unsigned=unsigned,
key=args.key,
regenerate_index=args.update_index,
),
)
with bindist.make_uploader(
mirror=mirror,
force=args.force,
update_index=args.update_index,
signing_key=signing_key,
base_image=args.base_image,
) as uploader:
skipped, upload_errors = uploader.push(specs=specs)
failed.extend(upload_errors)
if not upload_errors and args.tag:
uploader.tag(args.tag, roots)
msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
if len(specs) == 1:
msg += f" to {push_url}"
tty.info(msg)
except bindist.NoOverwriteException:
skipped.append(_format_spec(spec))
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(
e, (bindist.PickKeyException, bindist.NoKeyException)
):
raise
failed.append((_format_spec(spec), e))
if skipped:
if len(specs) == 1:
@@ -471,16 +519,390 @@ def push_fn(args):
raise spack.error.SpackError(
f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
"\n".join(
elide_list(
[
f" {_format_spec(spec)}: {e.__class__.__name__}: {e}"
for spec, e in failed
],
5,
)
elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
),
)
# Update the index if requested
# TODO: remove update index logic out of bindist; should be once after all specs are pushed
# not once per spec.
if target_image and len(skipped) < len(specs) and args.update_index:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
_update_index_oci(target_image, tmpdir, pool)
def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
"""Get the spack tarball layer digests and size if it exists"""
try:
manifest, config = get_manifest_and_config_with_retry(image_ref)
return spack.oci.oci.Blob(
compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
size=manifest["layers"][-1]["size"],
)
except Exception:
return None
def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
# Create an oci.image.layer aka tarball of the package
compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
blob = spack.oci.oci.Blob(
Digest.from_sha256(compressed_tarfile_checksum),
Digest.from_sha256(tarfile_checksum),
os.path.getsize(filename),
)
# Upload the blob
upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)
# delete the file
os.unlink(filename)
return blob
def _retrieve_env_dict_from_config(config: dict) -> dict:
"""Retrieve the environment variables from the image config file.
Sets a default value for PATH if it is not present.
Args:
config (dict): The image config file.
Returns:
dict: The environment variables.
"""
env = {"PATH": "/bin:/usr/bin"}
if "Env" in config.get("config", {}):
for entry in config["config"]["Env"]:
key, value = entry.split("=", 1)
env[key] = value
return env
def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
name = spec.target.family.name
name_map = {"aarch64": "arm64", "x86_64": "amd64"}
return name_map.get(name, name)
def _put_manifest(
base_images: Dict[str, Tuple[dict, dict]],
checksums: Dict[str, spack.oci.oci.Blob],
image_ref: ImageReference,
tmpdir: str,
extra_config: Optional[dict],
annotations: Optional[dict],
*specs: spack.spec.Spec,
):
architecture = _archspec_to_gooarch(specs[0])
dependencies = list(
reversed(
list(
s
for s in traverse.traverse_nodes(
specs, order="topo", deptype=("link", "run"), root=True
)
if not s.external
)
)
)
base_manifest, base_config = base_images[architecture]
env = _retrieve_env_dict_from_config(base_config)
# If the base image uses `vnd.docker.distribution.manifest.v2+json`, then we use that too.
# This is because Singularity / Apptainer is very strict about not mixing them.
base_manifest_mediaType = base_manifest.get(
"mediaType", "application/vnd.oci.image.manifest.v1+json"
)
use_docker_format = (
base_manifest_mediaType == "application/vnd.docker.distribution.manifest.v2+json"
)
spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)
# Create an oci.image.config file
config = copy.deepcopy(base_config)
# Add the diff ids of the dependencies
for s in dependencies:
config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
# Set the environment variables
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
if extra_config:
# From the OCI v1.0 spec:
# > Any extra fields in the Image JSON struct are considered implementation
# > specific and MUST be ignored by any implementations which are unable to
# > interpret them.
config.update(extra_config)
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
with open(config_file, "w") as f:
json.dump(config, f, separators=(",", ":"))
config_file_checksum = Digest.from_sha256(
spack.util.crypto.checksum(hashlib.sha256, config_file)
)
# Upload the config file
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
manifest = {
"mediaType": base_manifest_mediaType,
"schemaVersion": 2,
"config": {
"mediaType": base_manifest["config"]["mediaType"],
"digest": str(config_file_checksum),
"size": os.path.getsize(config_file),
},
"layers": [
*(layer for layer in base_manifest["layers"]),
*(
{
"mediaType": (
"application/vnd.docker.image.rootfs.diff.tar.gzip"
if use_docker_format
else "application/vnd.oci.image.layer.v1.tar+gzip"
),
"digest": str(checksums[s.dag_hash()].compressed_digest),
"size": checksums[s.dag_hash()].size,
}
for s in dependencies
),
],
}
if not use_docker_format and annotations:
manifest["annotations"] = annotations
# Finally upload the manifest
upload_manifest_with_retry(image_ref, manifest=manifest)
# delete the config file
os.unlink(config_file)
def _update_base_images(
*,
base_image: Optional[ImageReference],
target_image: ImageReference,
spec: spack.spec.Spec,
base_image_cache: Dict[str, Tuple[dict, dict]],
):
"""For a given spec and base image, copy the missing layers of the base image with matching
arch to the registry of the target image. If no base image is specified, create a dummy
manifest and config file."""
architecture = _archspec_to_gooarch(spec)
if architecture in base_image_cache:
return
if base_image is None:
base_image_cache[architecture] = (
default_manifest(),
default_config(architecture, "linux"),
)
else:
base_image_cache[architecture] = copy_missing_layers_with_retry(
base_image, target_image, architecture
)
def _push_oci(
*,
target_image: ImageReference,
base_image: Optional[ImageReference],
installed_specs_with_deps: List[Spec],
tmpdir: str,
pool: MaybePool,
force: bool = False,
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
"""Push specs to an OCI registry
Args:
image_ref: The target OCI image
base_image: Optional base image, which will be copied to the target registry.
installed_specs_with_deps: The installed specs to push, excluding externals,
including deps, ordered from roots to leaves.
force: Whether to overwrite existing layers and manifests in the buildcache.
Returns:
A tuple consisting of the list of skipped specs already in the build cache,
a dictionary mapping architectures to base image manifests and configs,
and a dictionary mapping each spec's dag hash to a blob.
"""
# Reverse the order
installed_specs_with_deps = list(reversed(installed_specs_with_deps))
# Spec dag hash -> blob
checksums: Dict[str, spack.oci.oci.Blob] = {}
# arch -> (manifest, config)
base_images: Dict[str, Tuple[dict, dict]] = {}
# Specs not uploaded because they already exist
skipped = []
if not force:
tty.info("Checking for existing specs in the buildcache")
to_be_uploaded = []
tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
if maybe_blob is not None:
checksums[spec.dag_hash()] = maybe_blob
skipped.append(_format_spec(spec))
else:
to_be_uploaded.append(spec)
else:
to_be_uploaded = installed_specs_with_deps
if not to_be_uploaded:
return skipped, base_images, checksums
tty.info(
f"{len(to_be_uploaded)} specs need to be pushed to "
f"{target_image.domain}/{target_image.name}"
)
# Upload blobs
new_blobs = pool.starmap(
_push_single_spack_binary_blob, ((target_image, spec, tmpdir) for spec in to_be_uploaded)
)
# And update the spec to blob mapping
for spec, blob in zip(to_be_uploaded, new_blobs):
checksums[spec.dag_hash()] = blob
# Copy base images if necessary
for spec in to_be_uploaded:
_update_base_images(
base_image=base_image,
target_image=target_image,
spec=spec,
base_image_cache=base_images,
)
def extra_config(spec: Spec):
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = 1
spec_dict["binary_cache_checksum"] = {
"hash_algorithm": "sha256",
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
}
return spec_dict
# Upload manifests
tty.info("Uploading manifests")
pool.starmap(
_put_manifest,
(
(
base_images,
checksums,
target_image.with_tag(default_tag(spec)),
tmpdir,
extra_config(spec),
{"org.opencontainers.image.description": spec.format()},
spec,
)
for spec in to_be_uploaded
),
)
# Print the image names of the top-level specs
for spec in to_be_uploaded:
tty.info(f"Pushed {_format_spec(spec)} to {target_image.with_tag(default_tag(spec))}")
return skipped, base_images, checksums
def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
# Don't allow recursion here, since Spack itself always uploads
# vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
_, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)
# Do very basic validation: if "spec" is a key in the config, it
# must be a Spec object too.
return config if "spec" in config else None
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
tags = list_tags(image_ref)
# Fetch all image config files in parallel
spec_dicts = pool.starmap(
_config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
)
# Populate the database
db_root_dir = os.path.join(tmpdir, "db_root")
db = bindist.BuildCacheDatabase(db_root_dir)
for spec_dict in spec_dicts:
spec = Spec.from_dict(spec_dict)
db.add(spec, directory_layout=None)
db.mark(spec, "in_buildcache", True)
# Create the index.json file
index_json_path = os.path.join(tmpdir, "index.json")
with open(index_json_path, "w") as f:
db._write_to_file(f)
# Create an empty config.json file
empty_config_json_path = os.path.join(tmpdir, "config.json")
with open(empty_config_json_path, "wb") as f:
f.write(b"{}")
# Upload the index.json file
index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)
# Upload the config.json file
empty_config_digest = Digest.from_sha256(
spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
)
upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)
# Push a manifest file that references the index.json file as a layer
# Notice that we push this as if it is an image, which it of course is not.
# When the ORAS spec becomes official, we can use that instead of a fake image.
# For now we just use the OCI image spec, so that we don't run into issues with
# automatic garbage collection of blobs that are not referenced by any image manifest.
oci_manifest = {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
# Config is just an empty {} file for now, and irrelevant
"config": {
"mediaType": "application/vnd.oci.image.config.v1+json",
"digest": str(empty_config_digest),
"size": os.path.getsize(empty_config_json_path),
},
# The buildcache index is the only layer, and is not a tarball, we lie here.
"layers": [
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"digest": str(index_shasum),
"size": os.path.getsize(index_json_path),
}
],
}
upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
def install_fn(args):
"""install from a binary package"""
@@ -760,15 +1182,14 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
if image_ref:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
bindist._oci_update_index(image_ref, tmpdir, executor)
) as tmpdir, _make_pool() as pool:
_update_index_oci(image_ref, tmpdir, pool)
return
# Otherwise, assume a normal mirror.
url = mirror.push_url
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
bindist._url_generate_package_index(url, tmpdir)
bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))
if update_keys:
keys_url = url_util.join(
@@ -776,8 +1197,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
)
try:
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
bindist.generate_key_index(keys_url, tmpdir)
bindist.generate_key_index(keys_url)
except bindist.CannotListKeys as e:
# Do not error out if listing keys went wrong. This usually means that the _gpg path
# does not exist. TODO: distinguish between this and other errors.

View File

@@ -50,7 +50,6 @@ def setup_parser(subparser):
default=lambda: spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
arguments.add_common_arguments(find_parser, ["jobs"])
# Remove
remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove compiler by spec")
@@ -79,21 +78,25 @@ def setup_parser(subparser):
def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration.
"""
# None signals spack.compiler.find_compilers to use its default logic
paths = args.add_paths or None
new_compilers = spack.compilers.find_compilers(
path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,
# Below scope=None because we want new compilers that don't appear
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(
paths, scope=None, mixed_toolchain=args.mixed_toolchain
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
n = len(new_compilers)
s = "s" if n > 1 else ""
filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
tty.msg(f"Added {n:d} new compiler{s} to {filename}")
compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
colify(reversed(compiler_strs), indent=4)
config = spack.config.CONFIG
filename = config.get_config_filename(args.scope, "compilers")
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
else:
tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:")

View File

@@ -6,7 +6,6 @@
import re
import sys
import urllib.parse
from typing import List
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -15,15 +14,9 @@
import spack.stage
import spack.util.web
from spack.spec import Spec
from spack.url import (
UndetectableNameError,
UndetectableVersionError,
find_versions_of_archive,
parse_name,
parse_version,
)
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.executable import ProcessError, which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
@@ -96,20 +89,14 @@ class BundlePackageTemplate:
url_def = " # There is no URL since there is no code to download."
body_def = " # There is no need for install() since there is no code."
def __init__(self, name: str, versions, languages: List[str]):
def __init__(self, name, versions):
self.name = name
self.class_name = mod_to_class(name)
self.versions = versions
self.languages = languages
def write(self, pkg_path):
"""Writes the new package file."""
all_deps = [f' depends_on("{lang}", type="build")' for lang in self.languages]
if all_deps and self.dependencies:
all_deps.append("")
all_deps.append(self.dependencies)
# Write out a template for the file
with open(pkg_path, "w") as pkg_file:
pkg_file.write(
@@ -119,7 +106,7 @@ def write(self, pkg_path):
base_class_name=self.base_class_name,
url_def=self.url_def,
versions=self.versions,
dependencies="\n".join(all_deps),
dependencies=self.dependencies,
body_def=self.body_def,
)
)
@@ -138,8 +125,8 @@ def install(self, spec, prefix):
url_line = ' url = "{url}"'
def __init__(self, name, url, versions, languages: List[str]):
super().__init__(name, versions, languages)
def __init__(self, name, url, versions):
super().__init__(name, versions)
self.url_def = self.url_line.format(url=url)
@@ -227,13 +214,13 @@ def luarocks_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
if not name.startswith("lua-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to lua-{0}".format(name))
name = "lua-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class MesonPackageTemplate(PackageTemplate):
@@ -334,14 +321,14 @@ class RacketPackageTemplate(PackageTemplate):
# subdirectory = None
"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
if not name.startswith("rkt-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to rkt-{0}".format(name))
name = "rkt-{0}".format(name)
self.body_def = self.body_def.format(name[4:])
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class PythonPackageTemplate(PackageTemplate):
@@ -374,7 +361,7 @@ def config_settings(self, spec, prefix):
settings = {}
return settings"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
@@ -428,7 +415,7 @@ def __init__(self, name, url, versions, languages: List[str]):
+ self.url_line
)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class RPackageTemplate(PackageTemplate):
@@ -447,7 +434,7 @@ def configure_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
if not name.startswith("r-"):
# Make it more obvious that we are renaming the package
@@ -467,7 +454,7 @@ def __init__(self, name, url, versions, languages: List[str]):
if bioc:
self.url_line = ' url = "{0}"\n' ' bioc = "{1}"'.format(url, r_name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class PerlmakePackageTemplate(PackageTemplate):
@@ -487,14 +474,14 @@ def configure_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
if not name.startswith("perl-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to perl-{0}".format(name))
name = "perl-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class PerlbuildPackageTemplate(PerlmakePackageTemplate):
@@ -519,7 +506,7 @@ class OctavePackageTemplate(PackageTemplate):
# FIXME: Add additional dependencies if required.
# depends_on("octave-foo", type=("build", "run"))"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name octave-splines`, don't rename it
# octave-octave-splines
if not name.startswith("octave-"):
@@ -527,7 +514,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to octave-{0}".format(name))
name = "octave-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class RubyPackageTemplate(PackageTemplate):
@@ -547,7 +534,7 @@ def build(self, spec, prefix):
# FIXME: If not needed delete this function
pass"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name ruby-numpy`, don't rename it
# ruby-ruby-numpy
if not name.startswith("ruby-"):
@@ -555,7 +542,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to ruby-{0}".format(name))
name = "ruby-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class MakefilePackageTemplate(PackageTemplate):
@@ -593,14 +580,14 @@ def configure_args(self, spec, prefix):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
templates = {
@@ -671,48 +658,8 @@ def setup_parser(subparser):
)
#: C file extensions
C_EXT = {".c"}
#: C++ file extensions
CXX_EXT = {
".C",
".c++",
".cc",
".ccm",
".cpp",
".CPP",
".cxx",
".h++",
".hh",
".hpp",
".hxx",
".inl",
".ipp",
".ixx",
".tcc",
".tpp",
}
#: Fortran file extensions
FORTRAN_EXT = {
".f77",
".F77",
".f90",
".F90",
".f95",
".F95",
".f",
".F",
".for",
".FOR",
".ftn",
".FTN",
}
class BuildSystemAndLanguageGuesser:
"""An instance of BuildSystemAndLanguageGuesser provides a callable object to be used
class BuildSystemGuesser:
"""An instance of BuildSystemGuesser provides a callable object to be used
during ``spack create``. By passing this object to ``spack checksum``, we
can take a peek at the fetched tarball and discern the build system it uses
"""
@@ -720,119 +667,81 @@ class BuildSystemAndLanguageGuesser:
def __init__(self):
"""Sets the default build system."""
self.build_system = "generic"
self._c = False
self._cxx = False
self._fortran = False
# List of files in the archive ordered by their depth in the directory tree.
self._file_entries: List[str] = []
def __call__(self, archive: str, url: str) -> None:
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from."""
if url is not None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
return
if url.endswith(".gem"):
self.build_system = "ruby"
return
if url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
return
if url.endswith(".rock"):
self.build_system = "lua"
return
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
]
# Peek inside the compressed file.
if archive.endswith(".zip") or ".zip#" in archive:
if stage.archive_file.endswith(".zip") or ".zip#" in stage.archive_file:
try:
unzip = which("unzip")
assert unzip is not None
output = unzip("-lq", archive, output=str)
except Exception:
output = unzip("-lq", stage.archive_file, output=str)
except ProcessError:
output = ""
else:
try:
tar = which("tar")
assert tar is not None
output = tar("tf", archive, output=str)
except Exception:
output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
except ProcessError:
output = ""
self._file_entries[:] = output.splitlines()
lines = output.splitlines()
# Files closest to the root should be considered first when determining build system.
self._file_entries.sort(key=lambda p: p.count("/"))
self._determine_build_system(url)
self._determine_language()
def _determine_build_system(self, url: str) -> None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
elif url.endswith(".gem"):
self.build_system = "ruby"
elif url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
elif url.endswith(".rock"):
self.build_system = "lua"
elif self._file_entries:
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(re.compile(pattern), build_system)
for pattern, build_system in (
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
)
]
# Determine the build system based on the files contained in the archive.
for file in self._file_entries:
for pattern, build_system in clues:
if pattern.search(file):
self.build_system = build_system
return
def _determine_language(self):
for entry in self._file_entries:
_, ext = os.path.splitext(entry)
if not self._c and ext in C_EXT:
self._c = True
elif not self._cxx and ext in CXX_EXT:
self._cxx = True
elif not self._fortran and ext in FORTRAN_EXT:
self._fortran = True
if self._c and self._cxx and self._fortran:
return
@property
def languages(self) -> List[str]:
langs: List[str] = []
if self._c:
langs.append("c")
if self._cxx:
langs.append("cxx")
if self._fortran:
langs.append("fortran")
return langs
# Determine the build system based on the files contained
# in the archive.
for pattern, bs in clues:
if any(re.search(pattern, line) for line in lines):
self.build_system = bs
break
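A minimal, standalone sketch of the clue-matching technique shown above (the pattern table is trimmed and the archive listing is invented; Spack's full table is the one in the diff):

import re

# First matching pattern wins, so order encodes precedence between build systems.
CLUES = [
    (re.compile(r"/CMakeLists\.txt$"), "cmake"),
    (re.compile(r"/configure$"), "autotools"),
    (re.compile(r"/setup\.(py|cfg)$"), "python"),
    (re.compile(r"/(GNU)?[Mm]akefile$"), "makefile"),
]

def guess_build_system(entries):
    # Consider entries closest to the archive root first, as in the newer code above.
    for entry in sorted(entries, key=lambda p: p.count("/")):
        for pattern, build_system in CLUES:
            if pattern.search(entry):
                return build_system
    return "generic"

# Hypothetical `tar tf` output:
print(guess_build_system(["pkg-1.0/src/CMakeLists.txt", "pkg-1.0/configure"]))  # autotools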
def get_name(name, url):
@@ -902,7 +811,7 @@ def get_url(url):
def get_versions(args, name):
"""Returns a list of versions and hashes for a package.
Also returns a BuildSystemAndLanguageGuesser object.
Also returns a BuildSystemGuesser object.
Returns default values if no URL is provided.
@@ -911,7 +820,7 @@ def get_versions(args, name):
name (str): The name of the package
Returns:
tuple: versions and hashes, and a BuildSystemAndLanguageGuesser object
tuple: versions and hashes, and a BuildSystemGuesser object
"""
# Default version with hash
@@ -925,7 +834,7 @@ def get_versions(args, name):
# version("1.2.4")"""
# Default guesser
guesser = BuildSystemAndLanguageGuesser()
guesser = BuildSystemGuesser()
valid_url = True
try:
@@ -938,7 +847,7 @@ def get_versions(args, name):
if args.url is not None and args.template != "bundle" and valid_url:
# Find available versions
try:
url_dict = find_versions_of_archive(args.url)
url_dict = spack.url.find_versions_of_archive(args.url)
if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
if url_dict_filtered is None:
@@ -965,7 +874,7 @@ def get_versions(args, name):
return versions, guesser
def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGuesser) -> str:
def get_build_system(template, url, guesser):
"""Determine the build system template.
If a template is specified, always use that. Otherwise, if a URL
@@ -973,10 +882,11 @@ def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGue
build system it uses. Otherwise, use a generic template by default.
Args:
template: ``--template`` argument given to ``spack create``
url: ``url`` argument given to ``spack create``
guesser: The first_stage_function given to ``spack checksum`` which records the build
system it detects
template (str): ``--template`` argument given to ``spack create``
url (str): ``url`` argument given to ``spack create``
args (argparse.Namespace): The arguments given to ``spack create``
guesser (BuildSystemGuesser): The first_stage_function given to
``spack checksum`` which records the build system it detects
Returns:
str: The name of the build system template to use
@@ -1050,7 +960,7 @@ def create(parser, args):
build_system = get_build_system(args.template, url, guesser)
# Create the package template object
constr_args = {"name": name, "versions": versions, "languages": guesser.languages}
constr_args = {"name": name, "versions": versions}
package_class = templates[build_system]
if package_class != BundlePackageTemplate:
constr_args["url"] = url

View File

@@ -6,7 +6,6 @@
import os
import platform
import re
import sys
from datetime import datetime
from glob import glob
@@ -63,10 +62,9 @@ def create_db_tarball(args):
base = os.path.basename(str(spack.store.STORE.root))
transform_args = []
# Currently --transform and -s are not supported by Windows native tar
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
elif sys.platform != "win32":
else:
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(str(spack.store.STORE.root))
@@ -92,6 +90,7 @@ def report(args):
print("* **Spack:**", get_version())
print("* **Python:**", platform.python_version())
print("* **Platform:**", architecture)
print("* **Concretizer:**", spack.config.get("config:concretizer"))
def debug(parser, args):

View File

@@ -10,10 +10,8 @@
import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.stage
import spack.util.path
import spack.version
from spack.cmd.common import arguments
@@ -64,7 +62,7 @@ def change_fn(section):
spack.config.change_or_add("develop", find_fn, change_fn)
def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
def _retrieve_develop_source(spec, abspath):
# "steal" the source code via staging API. We ask for a stage
# to be created, then copy it afterwards somewhere else. It would be
# better if we can create the `source_path` directly into its final
@@ -73,13 +71,13 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
# We construct a package class ourselves, rather than asking for
# Spec.package, since Spec only allows this when it is concrete
package = pkg_cls(spec)
source_stage: spack.stage.Stage = package.stage[0]
source_stage = package.stage[0]
if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
source_stage.fetcher.get_full_repo = True
# If we retrieved this version before and cached it, we may have
# done so without cloning the full git repo; likewise, any
# mirror might store an instance with truncated history.
source_stage.default_fetcher_only = True
source_stage.disable_mirrors()
source_stage.fetcher.set_package(package)
package.stage.steal_source(abspath)

View File

@@ -468,30 +468,32 @@ def env_remove(args):
This removes an environment managed by Spack. Directory environments
and manifests embedded in repositories should be removed manually.
"""
remove_envs = []
read_envs = []
valid_envs = []
bad_envs = []
invalid_envs = []
for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
valid_envs.append(env)
valid_envs.append(env_name)
if env_name in args.rm_env:
remove_envs.append(env)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
invalid_envs.append(env_name)
if env_name in args.rm_env:
bad_envs.append(env_name)
# Check if remove_env is included from another env before trying to remove
for env in valid_envs:
for remove_env in remove_envs:
# Check if env is linked to another before trying to remove
for name in valid_envs:
# don't check if environment is included to itself
if env.name == remove_env.name:
if name == env_name:
continue
if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
environ = ev.Environment(ev.root(name))
if ev.root(env_name) in environ.included_concrete_envs:
msg = f'Environment "{env_name}" is being used by environment "{name}"'
if args.force:
tty.warn(msg)
else:
@@ -504,7 +506,7 @@ def env_remove(args):
if not answer:
tty.die("Will not remove any environments")
for env in remove_envs:
for env in read_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")

View File

@@ -5,12 +5,10 @@
import argparse
import os
import tempfile
import spack.binary_distribution
import spack.mirror
import spack.paths
import spack.stage
import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments
@@ -117,7 +115,6 @@ def setup_parser(subparser):
help="URL of the mirror where keys will be published",
)
publish.add_argument(
"--update-index",
"--rebuild-index",
action="store_true",
default=False,
@@ -223,10 +220,9 @@ def gpg_publish(args):
elif args.mirror_url:
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution._url_push_keys(
mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
)
spack.binary_distribution.push_keys(
mirror, keys=args.keys, regenerate_index=args.rebuild_index
)
def gpg(parser, args):

View File

@@ -502,7 +502,7 @@ def print_licenses(pkg, args):
def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
pkg = pkg_cls(spec)
# Output core package information

View File

@@ -23,6 +23,11 @@ def setup_parser(subparser):
output.add_argument(
"-s", "--safe", action="store_true", help="only list safe versions of the package"
)
output.add_argument(
"--safe-only",
action="store_true",
help="[deprecated] only list safe versions of the package",
)
output.add_argument(
"-r", "--remote", action="store_true", help="only list remote versions of the package"
)
@@ -42,13 +47,17 @@ def versions(parser, args):
safe_versions = pkg.versions
if args.safe_only:
tty.warn('"--safe-only" is deprecated. Use "--safe" instead.')
args.safe = args.safe_only
if not (args.remote or args.new):
if sys.stdout.isatty():
tty.msg("Safe versions (already checksummed):")
if not safe_versions:
if sys.stdout.isatty():
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug("Manually add versions to the package.")
else:
colify(sorted(safe_versions, reverse=True), indent=2)
@@ -74,12 +83,12 @@ def versions(parser, args):
if not remote_versions:
if sys.stdout.isatty():
if not fetched_versions:
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug(
"Check the list_url and list_depth attributes of "
"the package to help Spack find versions."
)
else:
tty.warn(f"Found no unchecksummed versions for {pkg.name}")
tty.warn("Found no unchecksummed versions for {0}".format(pkg.name))
else:
colify(sorted(remote_versions, reverse=True), indent=2)

View File

@@ -7,11 +7,11 @@
system and configuring Spack to use multiple compilers.
"""
import collections
import importlib
import itertools
import multiprocessing.pool
import os
import sys
import warnings
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Tuple
import archspec.cpu
@@ -22,12 +22,11 @@
import spack.compiler
import spack.config
import spack.error
import spack.operating_systems
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.version
from spack.operating_systems import windows_os
from spack.util.environment import get_path
from spack.util.naming import mod_to_class
@@ -64,10 +63,6 @@
}
#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"
def pkg_spec_for_compiler(cspec):
"""Return the spec of the package that provides the compiler."""
for spec, package in _compiler_to_pkg.items():
@@ -132,7 +127,7 @@ def get_compiler_config(
# Do not init config because there is a non-empty scope
return config
find_compilers(scope=scope)
_init_compiler_config(configuration, scope=scope)
config = configuration.get("compilers", scope=scope)
return config
@@ -141,8 +136,125 @@ def get_compiler_config_from_packages(
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
"""Return the compiler configuration from packages.yaml"""
packages_yaml = configuration.get("packages", scope=scope)
return CompilerConfigFactory.from_packages_yaml(packages_yaml)
config = configuration.get("packages", scope=scope)
if not config:
return []
packages = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in config.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
packages.extend(_compiler_config_from_package_config(externals_config))
return packages
def _compiler_config_from_package_config(config):
compilers = []
for entry in config:
compiler = _compiler_config_from_external(entry)
if compiler:
compilers.append(compiler)
return compilers
def _compiler_config_from_external(config):
extra_attributes_key = "extra_attributes"
compilers_key = "compilers"
c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
# Allow `@x.y.z` instead of `@=x.y.z`
spec = spack.spec.parse_with_version_concrete(config["spec"])
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
err_header = f"The external spec '{spec}' cannot be used as a compiler"
# If extra_attributes is not there I might not want to use this entry as a compiler,
# therefore just leave a debug message, but don't be loud with a warning.
if extra_attributes_key not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
return None
extra_attributes = config[extra_attributes_key]
# If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
if compilers_key not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
)
return None
attribute_compilers = extra_attributes[compilers_key]
if c_key not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{extra_attributes_key}:{compilers_key}'"
)
return None
c_compiler = attribute_compilers[c_key]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if cxx_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if fortran_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
paths = {
"cc": c_compiler,
"cxx": attribute_compilers.get(cxx_key, None),
"fc": attribute_compilers.get(fortran_key, None),
"f77": attribute_compilers.get(fortran_key, None),
}
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": config.get("modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
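For orientation, a hedged sketch (plain Python dicts, made-up paths and versions) of the kind of packages.yaml external entry this translation consumes and the compilers.yaml-style entry it roughly produces; the exact spec string and the operating_system/target values depend on the host:

# Hypothetical external entry, as parsed from packages.yaml.
external_entry = {
    "spec": "llvm@17.0.6",
    "prefix": "/opt/llvm/17.0.6",
    "extra_attributes": {
        "compilers": {
            "c": "/opt/llvm/17.0.6/bin/clang",
            "cxx": "/opt/llvm/17.0.6/bin/clang++",
            # no "fortran" entry: fc and f77 stay None in the result
        }
    },
}

# Roughly the compiler entry derived from it (cf. compiler_entry above).
derived_compiler_entry = {
    "compiler": {
        "spec": "clang@=17.0.6",  # approximate; the "llvm" package maps to the compiler name "clang"
        "paths": {
            "cc": "/opt/llvm/17.0.6/bin/clang",
            "cxx": "/opt/llvm/17.0.6/bin/clang++",
            "fc": None,
            "f77": None,
        },
        "flags": {},
        "operating_system": "ubuntu22.04",  # host-dependent
        "target": "x86_64",                 # host-dependent (target family)
        "modules": [],
        "environment": {},
        "extra_rpaths": [],
        "implicit_rpaths": None,
    }
}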
def _init_compiler_config(
configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
configuration.set("compilers", compilers_dict, scope=scope)
def compiler_config_files():
@@ -166,7 +278,9 @@ def add_compilers_to_config(compilers, scope=None):
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -215,7 +329,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
@@ -264,77 +380,79 @@ def all_compiler_specs(scope=None, init_config=True):
def find_compilers(
path_hints: Optional[List[str]] = None,
*,
scope: Optional[str] = None,
mixed_toolchain: bool = False,
max_workers: Optional[int] = None,
path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
) -> List["spack.compiler.Compiler"]:
"""Searches for compiler in the paths given as argument. If any new compiler is found, the
configuration is updated, and the list of new compiler objects is returned.
"""Return the list of compilers found in the paths given as arguments.
Args:
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: configuration scope to modify
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
max_workers: number of processes used to search for compilers
"""
import spack.detection
known_compilers = set(all_compilers(init_config=False))
if path_hints is None:
path_hints = get_path("PATH")
default_paths = fs.search_paths_for_executables(*path_hints)
if sys.platform == "win32":
default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)
detected_packages = spack.detection.by_path(
compiler_pkgs, path_hints=default_paths, max_workers=max_workers
# To detect the version of the compilers, we dispatch a certain number
# of function calls to different workers. Here we construct the list
# of arguments for each call.
arguments = []
for o in all_os_classes():
search_paths = getattr(o, "compiler_search_paths", default_paths)
arguments.extend(arguments_to_detect_version_fn(o, search_paths))
# Here we map the function arguments to the corresponding calls
tp = multiprocessing.pool.ThreadPool()
try:
detected_versions = tp.map(detect_version, arguments)
finally:
tp.close()
def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
value, error = item
if error is None:
return True
try:
# This will fail on Python 2.6 if a non ascii
# character is in the error
tty.debug(error)
except UnicodeEncodeError:
pass
return False
def remove_errors(
item: Tuple[Optional[DetectVersionArgs], Optional[str]]
) -> DetectVersionArgs:
value, _ = item
assert value is not None
return value
return make_compiler_list(
[remove_errors(detected) for detected in detected_versions if valid_version(detected)],
mixed_toolchain=mixed_toolchain,
)
valid_compilers = {}
for name, detected in detected_packages.items():
compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x.spec)]
if not compilers:
continue
valid_compilers[name] = compilers
def _has_fortran_compilers(x):
if "compilers" not in x.spec.extra_attributes:
return False
def find_new_compilers(
path_hints: Optional[List[str]] = None,
scope: Optional[str] = None,
*,
mixed_toolchain: bool = False,
):
"""Same as ``find_compilers`` but return only the compilers that are not
already in compilers.yaml.
return "fortran" in x.spec.extra_attributes["compilers"]
Args:
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: scope to look for a compiler. If None consider the merged configuration.
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
"""
compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)
if mixed_toolchain:
gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
if gccs:
best_gcc = sorted(
gccs, key=lambda x: spack.spec.parse_with_version_concrete(x.spec).version
)[-1]
gfortran = best_gcc.spec.extra_attributes["compilers"]["fortran"]
for name in ("llvm", "apple-clang"):
if name not in valid_compilers:
continue
candidates = valid_compilers[name]
for candidate in candidates:
if _has_fortran_compilers(candidate):
continue
candidate.spec.extra_attributes["compilers"]["fortran"] = gfortran
new_compilers = []
for name, detected in valid_compilers.items():
for config in CompilerConfigFactory.from_specs([x.spec for x in detected]):
c = _compiler_from_config_entry(config["compiler"])
if c in known_compilers:
continue
new_compilers.append(c)
add_compilers_to_config(new_compilers, scope=scope)
return new_compilers
return select_new_compilers(compilers, scope)
def select_new_compilers(compilers, scope=None):
@@ -344,9 +462,7 @@ def select_new_compilers(compilers, scope=None):
compilers_not_in_config = []
for c in compilers:
arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
same_specs = compilers_for_spec(
c.spec, arch_spec=arch_spec, scope=scope, init_config=False
)
same_specs = compilers_for_spec(c.spec, arch_spec, scope=scope, init_config=False)
if not same_specs:
compilers_not_in_config.append(c)
@@ -415,12 +531,7 @@ def find(compiler_spec, scope=None, init_config=True):
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
"""Return specs of available compilers that match the supplied
compiler spec. Return an empty list if nothing found."""
return [
c.spec
for c in compilers_for_spec(
compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
)
]
return [c.spec for c in compilers_for_spec(compiler_spec, arch_spec, scope, True, init_config)]
def all_compilers(scope=None, init_config=True):
@@ -442,11 +553,14 @@ def all_compilers_from(configuration, scope=None, init_config=True):
@_auto_compiler_spec
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
def compilers_for_spec(
compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
matches = set(find(compiler_spec, scope, init_config))
compilers = []
for cspec in matches:
@@ -455,7 +569,7 @@ def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config
def compilers_for_arch(arch_spec, scope=None):
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
config = all_compilers_config(spack.config.CONFIG, scope=scope)
return list(get_compilers(config, arch_spec=arch_spec))
@@ -652,7 +766,7 @@ def class_for_compiler_name(compiler_name):
submodule_name = compiler_name.replace("-", "_")
module_name = ".".join(["spack", "compilers", submodule_name])
module_obj = importlib.import_module(module_name)
module_obj = __import__(module_name, fromlist=[None])
cls = getattr(module_obj, mod_to_class(compiler_name))
# make a note of the name in the module so we can get to it easily.
@@ -705,6 +819,228 @@ def all_compiler_types():
)
def arguments_to_detect_version_fn(
operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
) -> List[DetectVersionArgs]:
"""Returns a list of DetectVersionArgs tuples to be used in a
corresponding function to detect compiler versions.
The ``operating_system`` instance can customize the behavior of this
function by providing a method called with the same name.
Args:
operating_system: the operating system on which we are looking for compilers
paths: paths to search for compilers
Returns:
List of DetectVersionArgs tuples. Each item in the list will be later
mapped to the corresponding function call to detect the version of the
compilers in this OS.
"""
def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
command_arguments: List[DetectVersionArgs] = []
files_to_be_tested = fs.files_in(*search_paths)
for compiler_name in supported_compilers_for_host_platform():
compiler_cls = class_for_compiler_name(compiler_name)
for language in ("cc", "cxx", "f77", "fc"):
# Select only the files matching a regexp
for (file, full_path), regexp in itertools.product(
files_to_be_tested, compiler_cls.search_regexps(language)
):
match = regexp.match(file)
if match:
compiler_id = CompilerID(operating_system, compiler_name, None)
detect_version_args = DetectVersionArgs(
id=compiler_id,
variation=NameVariation(*match.groups()),
language=language,
path=full_path,
)
command_arguments.append(detect_version_args)
return command_arguments
fn = getattr(operating_system, "arguments_to_detect_version_fn", _default)
return fn(paths)
def detect_version(
detect_version_args: DetectVersionArgs,
) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
"""Computes the version of a compiler and adds it to the information
passed as input.
As this function is meant to be executed by worker processes it won't
raise any exception but instead will return a (value, error) tuple that
needs to be checked by the code dispatching the calls.
Args:
detect_version_args: information on the compiler for which we should detect the version.
Returns:
A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
version of the compiler was computed correctly and the first argument
of the tuple will contain it. Otherwise ``error`` is a string
containing an explanation on why the version couldn't be computed.
"""
def _default(fn_args):
compiler_id = fn_args.id
language = fn_args.language
compiler_cls = class_for_compiler_name(compiler_id.compiler_name)
path = fn_args.path
# Get compiler names and the callback to detect their versions
callback = getattr(compiler_cls, f"{language}_version")
try:
version = callback(path)
if version and str(version).strip() and version != "unknown":
value = fn_args._replace(id=compiler_id._replace(version=version))
return value, None
error = f"Couldn't get version for compiler {path}".format(path)
except spack.util.executable.ProcessError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except spack.util.executable.ProcessTimeoutError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
path, e.__class__.__name__, str(e)
)
return None, error
operating_system = detect_version_args.id.os
fn = getattr(operating_system, "detect_version", _default)
return fn(detect_version_args)
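The (value, error) convention above is a common pattern for work dispatched to pools: workers never raise, and the dispatcher filters failures afterwards. A small self-contained sketch of that pattern (the probe function and paths are invented):

import concurrent.futures

def probe(path):
    # Worker-style function: return (result, error) instead of raising, so one
    # broken candidate does not abort the whole scan.
    try:
        if "bad" in path:
            raise RuntimeError("cannot be executed")
        return f"{path} reports version 1.0", None
    except Exception as e:
        return None, f"Couldn't probe {path}: {e}"

with concurrent.futures.ThreadPoolExecutor() as executor:
    results = list(executor.map(probe, ["/usr/bin/cc", "/opt/bad/cc"]))

good = [value for value, error in results if error is None]
print(good)  # ['/usr/bin/cc reports version 1.0']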
def make_compiler_list(
detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
) -> List["spack.compiler.Compiler"]:
"""Process a list of detected versions and turn them into a list of
compiler specs.
Args:
detected_versions: list of DetectVersionArgs containing a valid version
mixed_toolchain: allow mixing compilers from different toolchains if a language is missing
Returns:
list: list of Compiler objects
"""
group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn)
# Gather items in a dictionary by the id, name variation and language
compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
compiler_id, name_variation, language = sort_key
by_compiler_id = compilers_d.setdefault(compiler_id, {})
by_name_variation = by_compiler_id.setdefault(name_variation, {})
by_name_variation[language] = next(x.path for x in group)
def _default_make_compilers(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
compiler_cls = class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
# TODO: johnwparent - revisit the following line as per discussion at:
# https://github.com/spack/spack/pull/33385/files#r1040036318
target = archspec.cpu.host()
compiler = compiler_cls(spec, operating_system, str(target.family), paths)
return [compiler]
# For compilers with the same compiler id:
#
# - Prefer with C compiler to without
# - Prefer with C++ compiler to without
# - Prefer no variations to variations (e.g., clang to clang-gpu)
#
sort_fn = lambda variation: (
"cc" not in by_compiler_id[variation], # None last
"cxx" not in by_compiler_id[variation], # None last
getattr(variation, "prefix", None),
getattr(variation, "suffix", None),
)
# Flatten to a list of compiler id, primary variation and compiler dictionary
flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
for compiler_id, by_compiler_id in compilers_d.items():
ordered = sorted(by_compiler_id, key=sort_fn)
selected_variation = ordered[0]
selected = by_compiler_id[selected_variation]
# Fill any missing parts from subsequent entries (without mixing toolchains)
for lang in ["cxx", "f77", "fc"]:
if lang not in selected:
next_lang = next(
(by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None
)
if next_lang:
selected[lang] = next_lang
flat_compilers.append((compiler_id, selected_variation, selected))
# Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
if mixed_toolchain:
make_mixed_toolchain(flat_compilers)
# Finally, create the compiler list
compilers: List["spack.compiler.Compiler"] = []
for compiler_id, _, compiler in flat_compilers:
make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
candidates = make_compilers(compiler_id, compiler)
compilers.extend(x for x in candidates if x.cc is not None)
return compilers
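A toy sketch of the grouping-and-preference step above: detected executables are grouped by compiler id, and within each group the name variation that provides a C (then a C++) compiler and has the plainest name sorts first. Names, versions, and paths below are invented:

import collections
import itertools

Detected = collections.namedtuple("Detected", ["compiler", "version", "variation", "language", "path"])

found = [
    Detected("gcc", "12.3.0", "", "cxx", "/usr/bin/g++"),
    Detected("gcc", "12.3.0", "", "cc", "/usr/bin/gcc"),
    Detected("gcc", "12.3.0", "-12", "cc", "/usr/bin/gcc-12"),
]

group_key = lambda d: (d.compiler, d.version)
best_by_id = {}
for compiler_id, group in itertools.groupby(sorted(found, key=group_key), key=group_key):
    variations = {}
    for d in group:
        variations.setdefault(d.variation, {})[d.language] = d.path
    # Prefer variations with a C compiler, then a C++ compiler, then the plainest name.
    best = sorted(variations, key=lambda v: ("cc" not in variations[v], "cxx" not in variations[v], v))[0]
    best_by_id[compiler_id] = variations[best]

print(best_by_id)  # {('gcc', '12.3.0'): {'cxx': '/usr/bin/g++', 'cc': '/usr/bin/gcc'}}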
def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
"""Add missing compilers across toolchains when they are missing for a particular language.
This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
fortran compiler (no flang)."""
# First collect the clangs that are missing a fortran compiler
clangs_without_flang = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name in ("clang", "apple-clang")
and "f77" not in compiler
and "fc" not in compiler
]
if not clangs_without_flang:
return
# Filter on GCCs with fortran compiler
gccs_with_fortran = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
]
# Sort these GCCs by "best variation" (no prefix / suffix first)
gccs_with_fortran.sort(
key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
)
# Attach the optimal GCC fortran compiler to the clangs that don't have one
for clang_id, _, clang_compiler in clangs_without_flang:
gcc_compiler = next(
(gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
)
if not gcc_compiler:
continue
# Update the fc / f77 entries
clang_compiler["f77"] = gcc_compiler["f77"]
clang_compiler["fc"] = gcc_compiler["fc"]
def is_mixed_toolchain(compiler):
"""Returns True if the current compiler is a mixed toolchain,
False otherwise.
@@ -751,155 +1087,6 @@ def name_matches(name, name_list):
return False
_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"
class CompilerConfigFactory:
"""Class aggregating all ways of constructing a list of compiler config entries."""
@staticmethod
def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
result = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for s in specs:
if s.name not in compiler_package_names:
continue
candidate = CompilerConfigFactory.from_external_spec(s)
if candidate is None:
continue
result.append(candidate)
return result
@staticmethod
def from_packages_yaml(packages_yaml) -> List[dict]:
compiler_specs = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in packages_yaml.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
current_specs = []
for current_external in externals_config:
compiler = CompilerConfigFactory._spec_from_external_config(current_external)
if compiler:
current_specs.append(compiler)
compiler_specs.extend(current_specs)
return CompilerConfigFactory.from_specs(compiler_specs)
@staticmethod
def _spec_from_external_config(config):
# Allow `@x.y.z` instead of `@=x.y.z`
err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
# If extra_attributes is not there I might not want to use this entry as a compiler,
# therefore just leave a debug message, but don't be loud with a warning.
if _EXTRA_ATTRIBUTES_KEY not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
return None
extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
result = spack.spec.Spec(
str(spack.spec.parse_with_version_concrete(config["spec"])),
external_modules=config.get("modules"),
)
result.extra_attributes = extra_attributes
return result
@staticmethod
def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
spec = spack.spec.parse_with_version_concrete(spec)
extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
if extra_attributes is None:
return None
paths = CompilerConfigFactory._extract_compiler_paths(spec)
if paths is None:
return None
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)
compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": getattr(spec, "external_modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
@staticmethod
def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
err_header = f"The external spec '{spec}' cannot be used as a compiler"
extra_attributes = spec.extra_attributes
# If I have 'extra_attributes' warn if 'compilers' is missing,
# or we don't have a C compiler
if _COMPILERS_KEY not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
)
return None
attribute_compilers = extra_attributes[_COMPILERS_KEY]
if _C_KEY not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
)
return None
c_compiler = attribute_compilers[_C_KEY]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if _CXX_KEY not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if _FORTRAN_KEY not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
return {
"cc": c_compiler,
"cxx": attribute_compilers.get(_CXX_KEY, None),
"fc": attribute_compilers.get(_FORTRAN_KEY, None),
"f77": attribute_compilers.get(_FORTRAN_KEY, None),
}
@staticmethod
def _extract_os_and_target(spec: "spack.spec.Spec"):
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
return operating_system, target
class InvalidCompilerConfigurationError(spack.error.SpackError):
def __init__(self, compiler_spec):
super().__init__(

View File

@@ -223,30 +223,6 @@ def get_oneapi_root(pth: str):
)
self.msvc_compiler_environment = CmdCall(*env_cmds)
@property
def cxx11_flag(self):
return "/std:c++11"
@property
def cxx14_flag(self):
return "/std:c++14"
@property
def cxx17_flag(self):
return "/std:c++17"
@property
def cxx20_flag(self):
return "/std:c++20"
@property
def c11_flag(self):
return "/std:c11"
@property
def c17_flag(self):
return "/std:c17"
@property
def msvc_version(self):
"""This is the VCToolset version *NOT* the actual version of the cl compiler

View File

@@ -2,11 +2,29 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""
(DEPRECATED) Used to contain the code for the original concretizer
Functions here are used to take abstract specs and make them concrete.
For example, if a spec asks for a version between 1.8 and 1.9, these
functions will take the most recent 1.9 version of the
package available. Or, if the user didn't specify a compiler for a
spec, then this will assign a compiler to the spec based on defaults
or user preferences.
TODO: make this customizable and allow users to configure
concretization policies.
"""
import functools
import platform
import tempfile
from contextlib import contextmanager
from itertools import chain
from typing import Union
import archspec.cpu
import llnl.util.lang
import llnl.util.tty as tty
import spack.abi
import spack.compilers
@@ -19,20 +37,639 @@
import spack.target
import spack.tengine
import spack.util.path
import spack.variant as vt
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
from spack.version import ClosedOpenRange, VersionList, ver
#: implements rudimentary logic for ABI compatibility
_abi: Union[spack.abi.ABI, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
lambda: spack.abi.ABI()
)
@functools.total_ordering
class reverse_order:
"""Helper for creating key functions.
This is a wrapper that inverts the sense of the natural
comparisons on the object.
"""
def __init__(self, value):
self.value = value
def __eq__(self, other):
return other.value == self.value
def __lt__(self, other):
return other.value < self.value
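Assuming the reverse_order helper defined just above, one component of a sort key can run descending while the others run ascending, even for values that cannot simply be negated. A tiny usage sketch with invented data:

# Sort by string length ascending, then by string value descending.
versions = ["1.10", "1.9", "2.0"]
print(sorted(versions, key=lambda v: (len(v), reverse_order(v))))  # ['2.0', '1.9', '1.10']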
class Concretizer:
"""(DEPRECATED) Only contains logic to enable/disable compiler existence checks."""
"""You can subclass this class to override some of the default
concretization strategies, or you can override all of them.
"""
#: Controls whether we check that compiler versions actually exist
#: during concretization. Used for testing and for mirror creation
check_for_compiler_existence = None
def __init__(self):
#: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
#: Those will not be considered as providers for virtuals.
non_buildable_packages = {"glibc", "musl"}
def __init__(self, abstract_spec=None):
if Concretizer.check_for_compiler_existence is None:
Concretizer.check_for_compiler_existence = not spack.config.get(
"config:install_missing_compilers", False
)
self.abstract_spec = abstract_spec
self._adjust_target_answer_generator = None
def concretize_develop(self, spec):
"""
Add ``dev_path=*`` variant to packages built from local source.
"""
env = spack.environment.active_environment()
dev_info = env.dev_specs.get(spec.name, {}) if env else {}
if not dev_info:
return False
path = spack.util.path.canonicalize_path(dev_info["path"], default_wd=env.path)
if "dev_path" in spec.variants:
assert spec.variants["dev_path"].value == path
changed = False
else:
spec.variants.setdefault("dev_path", vt.SingleValuedVariant("dev_path", path))
changed = True
changed |= spec.constrain(dev_info["spec"])
return changed
def _valid_virtuals_and_externals(self, spec):
"""Returns a list of candidate virtual dep providers and external
packages that could be used to concretize a spec.
Preferred specs come first in the list.
"""
# First construct a list of concrete candidates to replace spec with.
candidates = [spec]
pref_key = lambda spec: 0 # no-op pref key
if spec.virtual:
candidates = [
s
for s in spack.repo.PATH.providers_for(spec)
if s.name not in self.non_buildable_packages
]
if not candidates:
raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)
# Find nearest spec in the DAG (up then down) that has prefs.
spec_w_prefs = find_spec(
spec, lambda p: PackagePrefs.has_preferred_providers(p.name, spec.name), spec
) # default to spec itself.
# Create a key to sort candidates by the prefs we found
pref_key = PackagePrefs(spec_w_prefs.name, "providers", spec.name)
# For each candidate package, if it has externals, add those
# to the usable list. if it's not buildable, then *only* add
# the externals.
usable = []
for cspec in candidates:
if is_spec_buildable(cspec):
usable.append(cspec)
externals = spec_externals(cspec)
for ext in externals:
if ext.intersects(spec):
usable.append(ext)
# If nothing is in the usable list now, it's because we aren't
# allowed to build anything.
if not usable:
raise NoBuildError(spec)
# Use a sort key to order the results
return sorted(
usable,
key=lambda spec: (
not spec.external, # prefer externals
pref_key(spec), # respect prefs
spec.name, # group by name
reverse_order(spec.versions), # latest version
spec, # natural order
),
)
def choose_virtual_or_external(self, spec: spack.spec.Spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
"""
candidates = self._valid_virtuals_and_externals(spec)
if not candidates:
return candidates
# Find the nearest spec in the dag that has a compiler. We'll
# use that spec to calibrate compiler compatibility.
abi_exemplar = find_spec(spec, lambda x: x.compiler)
if abi_exemplar is None:
abi_exemplar = spec.root
# Sort candidates from most to least compatibility.
# We reverse because True > False.
# Sort is stable, so candidates keep their order.
return sorted(
candidates,
reverse=True,
key=lambda spec: (
_abi.compatible(spec, abi_exemplar, loose=True),
_abi.compatible(spec, abi_exemplar),
),
)
def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
the preferred version from spackconfig, and default to the package's
version if there are no available versions.
TODO: In many cases we probably want to look for installed
versions of each package and use an installed version
if we can link to it. The policy implemented here will
tend to rebuild a lot of stuff because it will prefer
a compiler in the spec to any compiler already-
installed things were built with. There is likely
some better policy that finds some middle ground
between these two extremes.
"""
# return if already concrete.
if spec.versions.concrete:
return False
# List of versions we could consider, in sorted order
pkg_versions = spec.package_class.versions
usable = [v for v in pkg_versions if any(v.intersects(sv) for sv in spec.versions)]
yaml_prefs = PackagePrefs(spec.name, "version")
# The keys below show the order of precedence of factors used
# to select a version when concretizing. The item with
# the "largest" key will be selected.
#
# NOTE: When COMPARING VERSIONS, the '@develop' version is always
# larger than other versions. BUT when CONCRETIZING,
# the largest NON-develop version is selected by default.
keyfn = lambda v: (
# ------- Special direction from the user
# Respect order listed in packages.yaml
-yaml_prefs(v),
# The preferred=True flag (packages or packages.yaml or both?)
pkg_versions.get(v).get("preferred", False),
# ------- Regular case: use latest non-develop version by default.
# Avoid @develop version, which would otherwise be the "largest"
# in straight version comparisons
not v.isdevelop(),
# Compare the version itself
# This includes the logic:
# a) develop > everything (disabled by "not v.isdevelop()" above)
# b) numeric > non-numeric
# c) Numeric or string comparison
v,
)
usable.sort(key=keyfn, reverse=True)
if usable:
spec.versions = ver([usable[0]])
else:
# We don't know of any SAFE versions that match the given
# spec. Grab the spec's versions and grab the highest
# *non-open* part of the range of versions it specifies.
# Someone else can raise an error if this happens,
# e.g. when we go to fetch it and don't know how. But it
# *might* work.
if not spec.versions or spec.versions == VersionList([":"]):
raise NoValidVersionError(spec)
else:
last = spec.versions[-1]
if isinstance(last, ClosedOpenRange):
range_as_version = VersionList([last]).concrete_range_as_version
if range_as_version:
spec.versions = ver([range_as_version])
else:
raise NoValidVersionError(spec)
else:
spec.versions = ver([last])
return True # Things changed
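# Illustrative sketch (hypothetical data): the effect of the sort key above.
# Plain strings stand in for Spack versions; a "develop" version loses to any
# numbered release because the "not develop" term dominates the comparison.
def _demo_version_pick():
    versions = ["develop", "1.0", "2.0"]
    keyfn = lambda v: (v != "develop", v)
    assert sorted(versions, key=keyfn, reverse=True)[0] == "2.0"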
def concretize_architecture(self, spec):
"""If the spec is empty provide the defaults of the platform. If the
architecture is not a string type, then check if either the platform,
target or operating system are concretized. If any of the fields are
changed then return True. If everything is concretized (i.e the
architecture attribute is a namedtuple of classes) then return False.
If the target is a string type, then convert the string into a
concretized architecture. If it has no architecture and the root of the
DAG has an architecture, then use the root otherwise use the defaults
on the platform.
"""
# ensure type safety for the architecture
if spec.architecture is None:
spec.architecture = spack.spec.ArchSpec()
if spec.architecture.concrete:
return False
# Get platform of nearest spec with a platform, including spec
# If spec has a platform, easy
if spec.architecture.platform:
new_plat = spack.platforms.by_name(spec.architecture.platform)
else:
# Else if anyone else has a platform, take the closest one
# Search up, then down, along build/link deps first
# Then any nearest. Algorithm from compilerspec search
platform_spec = find_spec(spec, lambda x: x.architecture and x.architecture.platform)
if platform_spec:
new_plat = spack.platforms.by_name(platform_spec.architecture.platform)
else:
# If no platform anywhere in this spec, grab the default
new_plat = spack.platforms.host()
# Get nearest spec with relevant platform and an os
# Generally, same algorithm as finding platform, except we only
# consider specs that have a platform
if spec.architecture.os:
new_os = spec.architecture.os
else:
new_os_spec = find_spec(
spec,
lambda x: (
x.architecture
and x.architecture.platform == str(new_plat)
and x.architecture.os
),
)
if new_os_spec:
new_os = new_os_spec.architecture.os
else:
new_os = new_plat.operating_system("default_os")
# Get the nearest spec with relevant platform and a target
# Generally, same algorithm as finding os
curr_target = None
if spec.architecture.target:
curr_target = spec.architecture.target
if spec.architecture.target and spec.architecture.target_concrete:
new_target = spec.architecture.target
else:
new_target_spec = find_spec(
spec,
lambda x: (
x.architecture
and x.architecture.platform == str(new_plat)
and x.architecture.target
and x.architecture.target != curr_target
),
)
if new_target_spec:
if curr_target:
# constrain one target by the other
new_target_arch = spack.spec.ArchSpec(
(None, None, new_target_spec.architecture.target)
)
curr_target_arch = spack.spec.ArchSpec((None, None, curr_target))
curr_target_arch.constrain(new_target_arch)
new_target = curr_target_arch.target
else:
new_target = new_target_spec.architecture.target
else:
# To get the default target, consider package prefs
if PackagePrefs.has_preferred_targets(spec.name):
new_target = self.target_from_package_preferences(spec)
else:
new_target = new_plat.target("default_target")
if curr_target:
# convert to ArchSpec to compare satisfaction
new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))
if not new_target_arch.intersects(curr_target_arch):
# new_target is an incorrect guess based on preferences
# and/or default
valid_target_ranges = str(curr_target).split(",")
for target_range in valid_target_ranges:
t_min, t_sep, t_max = target_range.partition(":")
if not t_sep:
new_target = t_min
break
elif t_max:
new_target = t_max
break
elif t_min:
# TODO: something better than picking first
new_target = t_min
break
# Construct new architecture, compute whether spec changed
arch_spec = (str(new_plat), str(new_os), str(new_target))
new_arch = spack.spec.ArchSpec(arch_spec)
spec_changed = new_arch != spec.architecture
spec.architecture = new_arch
return spec_changed
def target_from_package_preferences(self, spec):
"""Returns the preferred target from the package preferences if
there's any.
Args:
spec: abstract spec to be concretized
"""
target_prefs = PackagePrefs(spec.name, "target")
target_specs = [spack.spec.Spec("target=%s" % tname) for tname in archspec.cpu.TARGETS]
def tspec_filter(s):
# Filter target specs by whether the architecture
# family is the current machine type. This ensures
# we only consider x86_64 targets when on an
# x86_64 machine, etc. This may need to change to
# enable setting cross compiling as a default
target = archspec.cpu.TARGETS[str(s.architecture.target)]
arch_family_name = target.family.name
return arch_family_name == platform.machine()
# Sort filtered targets by package prefs
target_specs = list(filter(tspec_filter, target_specs))
target_specs.sort(key=target_prefs)
new_target = target_specs[0].architecture.target
return new_target
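# Illustrative sketch (hypothetical names): the filter-then-sort pattern used
# above. Plain strings stand in for target specs, a set stands in for the
# family check, and a dict stands in for the PackagePrefs sort key.
def _demo_target_pick():
    targets = ["x86_64", "haswell", "aarch64", "skylake"]
    host_family = {"x86_64", "haswell", "skylake"}  # pretend family filter
    prefs = {"skylake": 0, "haswell": 1}            # lower key sorts first
    candidates = [t for t in targets if t in host_family]
    candidates.sort(key=lambda t: prefs.get(t, len(prefs)))
    assert candidates[0] == "skylake"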
def concretize_variants(self, spec):
"""If the spec already has variants filled in, return. Otherwise, add
the user preferences from packages.yaml or the default variants from
the package specification.
"""
changed = False
preferred_variants = PackagePrefs.preferred_variants(spec.name)
pkg_cls = spec.package_class
for name, entry in pkg_cls.variants.items():
variant, when = entry
var = spec.variants.get(name, None)
if var and "*" in var:
# remove variant wildcard before concretizing
# wildcard cannot be combined with other values in a
# multivalue variant, a concrete variant cannot have the value
# wildcard, and a wildcard does not constrain a variant
spec.variants.pop(name)
if name not in spec.variants and any(spec.satisfies(w) for w in when):
changed = True
if name in preferred_variants:
spec.variants[name] = preferred_variants.get(name)
else:
spec.variants[name] = variant.make_default()
if name in spec.variants and not any(spec.satisfies(w) for w in when):
raise vt.InvalidVariantForSpecError(name, when, spec)
return changed
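# Illustrative sketch (hypothetical data): the precedence used above when
# filling in a single variant -- a concrete value already on the spec wins,
# then a preference from packages.yaml, then the package default.
def _demo_pick_variant(current, preferred, default):
    if current is not None and current != "*":
        return current
    return preferred if preferred is not None else default

# _demo_pick_variant(None, "openmp", "serial")  -> "openmp"
# _demo_pick_variant("mpi", "openmp", "serial") -> "mpi"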
def concretize_compiler(self, spec):
"""If the spec already has a compiler, we're done. If not, then take
the compiler used for the nearest ancestor with a compiler
spec and use that. If the ancestor's compiler is not
concrete, then use the preferred compiler as specified in
spackconfig.
Intuition: Use the spackconfig default if no package that depends on
this one has a strict compiler requirement. Otherwise, try to
build with the compiler that will be used by libraries that
link to this one, to maximize compatibility.
"""
# Pass on concretizing the compiler if the target or operating system
# is not yet determined
if not spec.architecture.concrete:
# We haven't changed, but other changes need to happen before we
# continue. `return True` here to force concretization to keep
# running.
return True
# Only use a matching compiler if it is of the proper style
# Takes advantage of the proper logic already existing in
# compiler_for_spec. Should think about whether this can be made
# more efficient.
def _proper_compiler_style(cspec, aspec):
compilers = spack.compilers.compilers_for_spec(cspec, arch_spec=aspec)
# If the spec passed as argument is concrete we want to check
# the versions match exactly
if (
cspec.concrete
and compilers
and cspec.version not in [c.version for c in compilers]
):
return []
return compilers
if spec.compiler and spec.compiler.concrete:
if self.check_for_compiler_existence and not _proper_compiler_style(
spec.compiler, spec.architecture
):
_compiler_concretization_failure(spec.compiler, spec.architecture)
return False
# Find another spec that has a compiler, or the root if none do
other_spec = spec if spec.compiler else find_spec(spec, lambda x: x.compiler, spec.root)
other_compiler = other_spec.compiler
assert other_spec
# Check if the compiler is already fully specified
if other_compiler and other_compiler.concrete:
if self.check_for_compiler_existence and not _proper_compiler_style(
other_compiler, spec.architecture
):
_compiler_concretization_failure(other_compiler, spec.architecture)
spec.compiler = other_compiler
return True
if other_compiler: # Another node has abstract compiler information
compiler_list = spack.compilers.find_specs_by_arch(other_compiler, spec.architecture)
if not compiler_list:
# We don't have a matching compiler installed
if not self.check_for_compiler_existence:
# Concretize compiler spec versions as a package to build
cpkg_spec = spack.compilers.pkg_spec_for_compiler(other_compiler)
self.concretize_version(cpkg_spec)
spec.compiler = spack.spec.CompilerSpec(
other_compiler.name, cpkg_spec.versions
)
return True
else:
# No compiler with a satisfactory spec was found
raise UnavailableCompilerVersionError(other_compiler, spec.architecture)
else:
# We have no hints to go by, grab any compiler
compiler_list = spack.compilers.all_compiler_specs()
if not compiler_list:
# Spack has no compilers.
raise spack.compilers.NoCompilersError()
# By default, prefer later versions of compilers
compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
ppk = PackagePrefs(other_spec.name, "compiler")
matches = sorted(compiler_list, key=ppk)
# copy concrete version into other_compiler
try:
spec.compiler = next(
c for c in matches if _proper_compiler_style(c, spec.architecture)
).copy()
except StopIteration:
# No compiler with a satisfactory spec has a suitable arch
_compiler_concretization_failure(other_compiler, spec.architecture)
assert spec.compiler.concrete
return True # things changed.
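# Illustrative sketch (standalone): the selection pattern above -- sort the
# candidate compilers by a preference key, then take the first one that also
# passes an architecture check. The names and the check are made up.
def _demo_pick_compiler(candidates, pref_key, arch_ok):
    for cand in sorted(candidates, key=pref_key):
        if arch_ok(cand):
            return cand
    raise RuntimeError("no candidate passed the architecture check")

# _demo_pick_compiler(["gcc@12", "gcc@9"], pref_key=lambda c: c,
#                     arch_ok=lambda c: c != "gcc@12") -> "gcc@9"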
def concretize_compiler_flags(self, spec):
"""
The compiler flags are updated to match those of the spec whose
compiler is used, defaulting to no compiler flags in the spec.
Default specs set at the compiler level will still be added later.
"""
# Pass on concretizing the compiler flags if the target or operating
# system is not set.
if not spec.architecture.concrete:
# We haven't changed, but other changes need to happen before we
# continue. `return True` here to force concretization to keep
# running.
return True
compiler_match = lambda other: (
spec.compiler == other.compiler and spec.architecture == other.architecture
)
ret = False
for flag in spack.spec.FlagMap.valid_compiler_flags():
if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = list()
try:
nearest = next(
p
for p in spec.traverse(direction="parents")
if (compiler_match(p) and (p is not spec) and flag in p.compiler_flags)
)
nearest_flags = nearest.compiler_flags.get(flag, [])
flags = spec.compiler_flags.get(flag, [])
if set(nearest_flags) - set(flags):
spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(nearest_flags + flags))
ret = True
except StopIteration:
pass
# Include the compiler flag defaults from the config files
# This ensures that spack will detect conflicts that stem from a change
# in default compiler flags.
try:
compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture)
except spack.compilers.NoCompilerForSpecError:
if self.check_for_compiler_existence:
raise
return ret
for flag in compiler.flags:
config_flags = compiler.flags.get(flag, [])
flags = spec.compiler_flags.get(flag, [])
spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(config_flags + flags))
if set(config_flags) - set(flags):
ret = True
return ret
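# Illustrative sketch (standalone): the dedupe-merge used above for compiler
# flags, with a local stand-in for llnl.util.lang.dedupe. Incoming flags are
# prepended, duplicates are dropped while preserving order, and a change is
# only reported when the incoming list actually adds something new.
def _demo_flag_merge(incoming_flags, spec_flags):
    def dedupe(seq):
        seen = set()
        return [x for x in seq if not (x in seen or seen.add(x))]

    merged = dedupe(incoming_flags + spec_flags)
    changed = bool(set(incoming_flags) - set(spec_flags))
    return merged, changed

# _demo_flag_merge(["-O2"], ["-g", "-O2"]) -> (["-O2", "-g"], False)
# _demo_flag_merge(["-fPIC"], ["-g"])      -> (["-fPIC", "-g"], True)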
def adjust_target(self, spec):
"""Adjusts the target microarchitecture if the compiler is too old
to support the default one.
Args:
spec: spec to be concretized
Returns:
True if spec was modified, False otherwise
"""
# To minimize the impact on performance this function will attempt
# to adjust the target only at the very first call once necessary
# information is set. It will just return False on subsequent calls.
# The way this is achieved is by initializing a generator and making
# this function return the next answer.
if not (spec.architecture and spec.architecture.concrete):
# Not ready, but keep going because we have work to do later
return True
def _make_only_one_call(spec):
yield self._adjust_target(spec)
while True:
yield False
if self._adjust_target_answer_generator is None:
self._adjust_target_answer_generator = _make_only_one_call(spec)
return next(self._adjust_target_answer_generator)
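# Illustrative sketch (standalone): the "answer once, then always False"
# generator pattern adjust_target relies on, shown without any Spec involved.
def _demo_make_only_one_call(answer):
    yield answer
    while True:
        yield False

def _demo_adjust_once():
    gen = _demo_make_only_one_call(True)
    assert next(gen) is True   # the first call does the real work
    assert next(gen) is False  # every later call is a cheap no-op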
def _adjust_target(self, spec):
"""Assumes that the architecture and the compiler have been
set already and checks if the current target microarchitecture
is the default and can be optimized by the compiler.
If not, downgrades the microarchitecture until a suitable one
is found. If none can be found raise an error.
Args:
spec: spec to be concretized
Returns:
True if any modification happened, False otherwise
"""
import archspec.cpu
# Try to adjust the target only if it is the default
# target for this platform
current_target = spec.architecture.target
current_platform = spack.platforms.by_name(spec.architecture.platform)
default_target = current_platform.target("default_target")
if PackagePrefs.has_preferred_targets(spec.name):
default_target = self.target_from_package_preferences(spec)
if current_target != default_target or (
self.abstract_spec
and self.abstract_spec.architecture
and self.abstract_spec.architecture.concrete
):
return False
try:
current_target.optimization_flags(spec.compiler)
except archspec.cpu.UnsupportedMicroarchitecture:
microarchitecture = current_target.microarchitecture
for ancestor in microarchitecture.ancestors:
candidate = None
try:
candidate = spack.target.Target(ancestor)
candidate.optimization_flags(spec.compiler)
except archspec.cpu.UnsupportedMicroarchitecture:
continue
if candidate is not None:
msg = (
"{0.name}@{0.version} cannot build optimized "
'binaries for "{1}". Using best target possible: '
'"{2}"'
)
msg = msg.format(spec.compiler, current_target, candidate)
tty.warn(msg)
spec.architecture.target = candidate
return True
else:
raise
return False
@contextmanager
@@ -82,6 +719,19 @@ def find_spec(spec, condition, default=None):
return default # Nothing matched the condition; return default.
def _compiler_concretization_failure(compiler_spec, arch):
# Distinguish between the case that there are compilers for
# the arch but not with the given compiler spec and the case that
# there are no compilers for the arch at all
if not spack.compilers.compilers_for_arch(arch):
available_os_targets = set(
(c.operating_system, c.target) for c in spack.compilers.all_compilers()
)
raise NoCompilersForArchError(arch, available_os_targets)
else:
raise UnavailableCompilerVersionError(compiler_spec, arch)
def concretize_specs_together(*abstract_specs, **kwargs):
"""Given a number of specs as input, tries to concretize them together.
@@ -94,6 +744,12 @@ def concretize_specs_together(*abstract_specs, **kwargs):
Returns:
List of concretized specs
"""
if spack.config.get("config:concretizer", "clingo") == "original":
return _concretize_specs_together_original(*abstract_specs, **kwargs)
return _concretize_specs_together_new(*abstract_specs, **kwargs)
def _concretize_specs_together_new(*abstract_specs, **kwargs):
import spack.solver.asp
allow_deprecated = spack.config.get("config:deprecated", False)
@@ -104,6 +760,51 @@ def concretize_specs_together(*abstract_specs, **kwargs):
return [s.copy() for s in result.specs]
def _concretize_specs_together_original(*abstract_specs, **kwargs):
abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
tmpdir = tempfile.mkdtemp()
builder = spack.repo.MockRepositoryBuilder(tmpdir)
# Split recursive specs, as it seems the concretizer has issues
# respecting conditions on dependents expressed like
# depends_on('foo ^bar@1.0'), see issue #11160
split_specs = [
dep.copy(deps=False) for spec1 in abstract_specs for dep in spec1.traverse(root=True)
]
builder.add_package(
"concretizationroot", dependencies=[(str(x), None, None) for x in split_specs]
)
with spack.repo.use_repositories(builder.root, override=False):
# Spec from a helper package that depends on all the abstract_specs
concretization_root = spack.spec.Spec("concretizationroot")
concretization_root.concretize(tests=kwargs.get("tests", False))
# Retrieve the direct dependencies
concrete_specs = [concretization_root[spec.name].copy() for spec in abstract_specs]
return concrete_specs
class NoCompilersForArchError(spack.error.SpackError):
def __init__(self, arch, available_os_targets):
err_msg = (
"No compilers found"
" for operating system %s and target %s."
"\nIf previous installations have succeeded, the"
" operating system may have been updated." % (arch.os, arch.target)
)
available_os_target_strs = list()
for operating_system, t in available_os_targets:
os_target_str = "%s-%s" % (operating_system, t) if t else operating_system
available_os_target_strs.append(os_target_str)
err_msg += (
"\nCompilers are defined for the following"
" operating systems and targets:\n\t" + "\n\t".join(available_os_target_strs)
)
super().__init__(err_msg, "Run 'spack compiler find' to add compilers.")
class UnavailableCompilerVersionError(spack.error.SpackError):
"""Raised when there is no available compiler that satisfies a
compiler spec."""
@@ -119,3 +820,37 @@ def __init__(self, compiler_spec, arch=None):
"'spack compilers' to see which compilers are already recognized"
" by spack.",
)
class NoValidVersionError(spack.error.SpackError):
"""Raised when there is no way to have a concrete version for a
particular spec."""
def __init__(self, spec):
super().__init__(
"There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)
)
class InsufficientArchitectureInfoError(spack.error.SpackError):
"""Raised when details on architecture cannot be collected from the
system"""
def __init__(self, spec, archs):
super().__init__(
"Cannot determine necessary architecture information for '%s': %s"
% (spec.name, str(archs))
)
class NoBuildError(spack.error.SpecError):
"""Raised when a package is configured with the buildable option False, but
no satisfactory external versions can be found
"""
def __init__(self, spec):
msg = (
"The spec\n '%s'\n is configured as not buildable, "
"and no matching external installs were found"
)
super().__init__(msg % spec)

View File

@@ -99,6 +99,7 @@
"dirty": False,
"build_jobs": min(16, cpus_available()),
"build_stage": "$tempdir/spack-stage",
"concretizer": "clingo",
"license_dir": spack.paths.default_license_dir,
}
}

View File

@@ -136,10 +136,10 @@ def path_to_dict(search_paths: List[str]):
# entry overrides later entries
for search_path in reversed(search_paths):
try:
with os.scandir(search_path) as entries:
path_to_lib.update(
{entry.path: entry.name for entry in entries if entry.is_file()}
)
for lib in os.listdir(search_path):
lib_path = os.path.join(search_path, lib)
if llnl.util.filesystem.is_readable_file(lib_path):
path_to_lib[lib_path] = lib
except OSError as e:
msg = f"cannot scan '{search_path}' for external software: {str(e)}"
llnl.util.tty.debug(msg)
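# Illustrative sketch (standalone, simplified): why the loop above walks the
# search paths in reverse. Keying by file name here (instead of full path)
# makes the precedence visible: the first search path is processed last, so
# its entries override those found in later paths.
import os

def _demo_name_to_path(search_paths):
    name_to_path = {}
    for search_path in reversed(search_paths):
        try:
            for name in os.listdir(search_path):
                full = os.path.join(search_path, name)
                if os.path.isfile(full):
                    name_to_path[name] = full
        except OSError:
            pass  # unreadable or missing directories are skipped, as above
    return name_to_path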
@@ -239,7 +239,7 @@ def update_configuration(
external_entries = pkg_config.get("externals", [])
assert not isinstance(external_entries, bool), "unexpected value for external entry"
all_new_specs.extend([x.spec for x in new_entries])
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
if buildable is False:
pkg_config["buildable"] = False
pkg_to_cfg[package_name] = pkg_config

View File

@@ -12,7 +12,7 @@
import re
import sys
import warnings
from typing import Dict, Iterable, List, Optional, Set, Tuple, Type
from typing import Dict, List, Optional, Set, Tuple, Type
import llnl.util.filesystem
import llnl.util.lang
@@ -62,7 +62,7 @@ def common_windows_package_paths(pkg_cls=None) -> List[str]:
def file_identifier(path):
s = os.stat(path)
return s.st_dev, s.st_ino
return (s.st_dev, s.st_ino)
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
@@ -80,8 +80,6 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
constructed based on the PATH environment variable.
"""
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
# Make sure we don't doubly list /usr/lib and /lib etc.
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
return path_to_dict(search_paths)
@@ -189,7 +187,7 @@ def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[s
return path_to_dict(search_paths)
def _group_by_prefix(paths: List[str]) -> Dict[str, Set[str]]:
def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
groups = collections.defaultdict(set)
for p in paths:
groups[os.path.dirname(p)].add(p)
@@ -245,9 +243,7 @@ def detect_specs(
return []
result = []
for candidate_path, items_in_prefix in _group_by_prefix(
llnl.util.lang.dedupe(paths)
).items():
for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
# TODO: multiple instances of a package can live in the same
# prefix, and a package implementation can return multiple specs
# for one prefix, but without additional details (e.g. about the
@@ -303,17 +299,19 @@ def detect_specs(
return result
def find(
self, *, pkg_name: str, repository, initial_guess: Optional[List[str]] = None
self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
) -> List[DetectedPackage]:
"""For a given package, returns a list of detected specs.
Args:
pkg_name: package being detected
repository: repository to retrieve the package
initial_guess: initial list of paths to search from the caller if None, default paths
are searched. If this is an empty list, nothing will be searched.
initial_guess: initial list of paths to search from the caller
if None, default paths are searched. If this
is an empty list, nothing will be searched.
"""
pkg_cls = repository.get_pkg_class(pkg_name)
import spack.repo
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
patterns = self.search_patterns(pkg=pkg_cls)
if not patterns:
return []
@@ -337,10 +335,13 @@ def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> Lis
def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
executables_by_path = executables_in_path(path_hints=paths)
joined_pattern = re.compile(r"|".join(patterns))
result = [path for path, exe in executables_by_path.items() if joined_pattern.search(exe)]
result.sort()
return result
patterns = [re.compile(x) for x in patterns]
result = []
for compiled_re in patterns:
for path, exe in executables_by_path.items():
if compiled_re.search(exe):
result.append(path)
return list(sorted(set(result)))
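# Illustrative sketch (standalone): matching detected executables against a
# set of regex patterns, as candidate_files does above. The executable names
# and patterns below are made up.
import re

def _demo_candidate_files(executables_by_path, patterns):
    joined = re.compile(r"|".join(patterns))
    return sorted(p for p, exe in executables_by_path.items() if joined.search(exe))

# _demo_candidate_files({"/usr/bin/gcc-12": "gcc-12", "/usr/bin/ls": "ls"},
#                       [r"^gcc", r"^g\+\+"]) -> ["/usr/bin/gcc-12"]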
def prefix_from_path(self, *, path: str) -> str:
result = executable_prefix(path)
@@ -384,7 +385,7 @@ def prefix_from_path(self, *, path: str) -> str:
def by_path(
packages_to_search: Iterable[str],
packages_to_search: List[str],
*,
path_hints: Optional[List[str]] = None,
max_workers: Optional[int] = None,
@@ -398,28 +399,19 @@ def by_path(
path_hints: initial list of paths to be searched
max_workers: maximum number of workers to search for packages in parallel
"""
import spack.repo
# TODO: Packages should be able to define both .libraries and .executables in the future
# TODO: determine_spec_details should get all relevant libraries and executables in one call
executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
result = collections.defaultdict(list)
repository = spack.repo.PATH.ensure_unwrapped()
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
for pkg in packages_to_search:
executable_future = executor.submit(
executables_finder.find,
pkg_name=pkg,
initial_guess=path_hints,
repository=repository,
executables_finder.find, pkg_name=pkg, initial_guess=path_hints
)
library_future = executor.submit(
libraries_finder.find,
pkg_name=pkg,
initial_guess=path_hints,
repository=repository,
libraries_finder.find, pkg_name=pkg, initial_guess=path_hints
)
detected_specs_by_package[pkg] = executable_future, library_future

View File

@@ -32,9 +32,10 @@ class OpenMpi(Package):
"""
import collections
import collections.abc
import functools
import os.path
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Set, Tuple, Union
import llnl.util.lang
import llnl.util.tty.color
@@ -47,7 +48,6 @@ class OpenMpi(Package):
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency
from spack.directives_meta import DirectiveError, DirectiveMeta
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -80,6 +80,22 @@ class OpenMpi(Package):
"redistribute",
]
#: These are variant names used by Spack internally; packages can't use them
reserved_names = [
"arch",
"architecture",
"dev_path",
"namespace",
"operating_system",
"os",
"patches",
"platform",
"target",
]
#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]
_patch_order_index = 0
@@ -139,6 +155,219 @@ def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
return spack.spec.Spec(value)
class DirectiveMeta(type):
"""Flushes the directives that were temporarily stored in the staging
area into the package.
"""
# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
# 1. in the order they were defined
# 2. following the MRO
attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass
# De-duplicates directives from base classes
attr_dict["_directives_to_be_executed"] = [
x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
]
# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
attr_dict["_directives_to_be_executed"].extend(
DirectiveMeta._directives_to_be_executed
)
DirectiveMeta._directives_to_be_executed = []
return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
setattr(cls, d, {})
# Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)
# Ignore any directives executed *within* top-level
# directives by clearing out the queue they're appended to
DirectiveMeta._directives_to_be_executed = []
super(DirectiveMeta, cls).__init__(name, bases, attr_dict)
@staticmethod
def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)
@staticmethod
def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)
@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()
@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in python.
Here's an example directive:
.. code-block:: python
@directive(dicts='versions')
version(pkg, ...):
...
This directive allows you to write:
.. code-block:: python
class Foo(Package):
version(...)
The ``@directive`` decorator handles a couple things for you:
1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.
2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.
The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.
This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names
if isinstance(dicts, str):
dicts = (dicts,)
if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)
# This decorator just returns the directive functions
def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)
# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
if decorated_function.__name__ == "version":
msg = (
'directive "{0}" cannot be used within a "when"'
' context since it does not support a "when=" '
"argument"
)
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)
when_constraints = [
spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
if kwargs.get("when"):
when_constraints.append(spack.spec.Spec(kwargs["when"]))
when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)
kwargs["when"] = when_spec
# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
# lazily. Instead, let the called directive handle them.
# This allows nested directive calls in packages. The
# caller can return the directive if it should be queued.
def remove_directives(arg):
directives = DirectiveMeta._directives_to_be_executed
if isinstance(arg, (list, tuple)):
# Descend into args that are lists or tuples
for a in arg:
remove_directives(a)
else:
# Remove directives args from the exec queue
remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)
# Nasty, but it's the best way I can think of to avoid
# side effects if directive results are passed as args
remove_directives(args)
remove_directives(list(kwargs.values()))
# A directive returns either something that is callable on a
# package or a sequence of them
result = decorated_function(*args, **kwargs)
# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
# wrapped function returns same result as original so
# that we can nest directives
return result
return _wrapper
return _decorator
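# Illustrative sketch (standalone, greatly simplified): the staging pattern
# the metaclass above implements. Calls made in a class body are queued at
# module scope, and the metaclass replays them against the finished class.
# None of the names below exist in Spack; they only mirror the idea.
_staged = []

def demo_directive(value):
    _staged.append(lambda cls: cls.versions.append(value))

class DemoMeta(type):
    def __init__(cls, name, bases, attr_dict):
        cls.versions = []
        for fn in _staged:
            fn(cls)
        _staged.clear()
        super().__init__(name, bases, attr_dict)

class DemoPackage(metaclass=DemoMeta):
    demo_directive("1.0")
    demo_directive("2.0")

# DemoPackage.versions == ["1.0", "2.0"]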
SubmoduleCallback = Callable[["spack.package_base.PackageBase"], Union[str, List[str], bool]]
directive = DirectiveMeta.directive
@@ -617,7 +846,7 @@ def format_error(msg, pkg):
msg += " @*r{{[{0}, variant '{1}']}}"
return llnl.util.tty.color.colorize(msg.format(pkg.name, name))
if name in spack.variant.reserved_names:
if name in reserved_names:
def _raise_reserved_name(pkg):
msg = "The name '%s' is reserved by Spack" % name
@@ -881,6 +1110,10 @@ def _execute_languages(pkg: "spack.package_base.PackageBase"):
return _execute_languages
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
class DependencyError(DirectiveError):
"""This is raised when a dependency specification is invalid."""

View File

@@ -1,234 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import functools
from typing import List, Set
import llnl.util.lang
import spack.error
import spack.spec
#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]
class DirectiveMeta(type):
"""Flushes the directives that were temporarily stored in the staging
area into the package.
"""
# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
# 1. in the order they were defined
# 2. following the MRO
attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass
# De-duplicates directives from base classes
attr_dict["_directives_to_be_executed"] = [
x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
]
# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
attr_dict["_directives_to_be_executed"].extend(
DirectiveMeta._directives_to_be_executed
)
DirectiveMeta._directives_to_be_executed = []
return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
setattr(cls, d, {})
# Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)
# Ignore any directives executed *within* top-level
# directives by clearing out the queue they're appended to
DirectiveMeta._directives_to_be_executed = []
super(DirectiveMeta, cls).__init__(name, bases, attr_dict)
@staticmethod
def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)
@staticmethod
def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)
@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()
@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in python.
Here's an example directive:
.. code-block:: python
@directive(dicts='versions')
version(pkg, ...):
...
This directive allows you to write:
.. code-block:: python
class Foo(Package):
version(...)
The ``@directive`` decorator handles a couple things for you:
1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.
2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.
The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.
This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names
if isinstance(dicts, str):
dicts = (dicts,)
if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)
# This decorator just returns the directive functions
def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)
# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
if decorated_function.__name__ == "version":
msg = (
'directive "{0}" cannot be used within a "when"'
' context since it does not support a "when=" '
"argument"
)
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)
when_constraints = [
spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
if kwargs.get("when"):
when_constraints.append(spack.spec.Spec(kwargs["when"]))
when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)
kwargs["when"] = when_spec
# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
# lazily. Instead, let the called directive handle them.
# This allows nested directive calls in packages. The
# caller can return the directive if it should be queued.
def remove_directives(arg):
directives = DirectiveMeta._directives_to_be_executed
if isinstance(arg, (list, tuple)):
# Descend into args that are lists or tuples
for a in arg:
remove_directives(a)
else:
# Remove directives args from the exec queue
remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)
# Nasty, but it's the best way I can think of to avoid
# side effects if directive results are passed as args
remove_directives(args)
remove_directives(list(kwargs.values()))
# A directive returns either something that is callable on a
# package or a sequence of them
result = decorated_function(*args, **kwargs)
# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
# wrapped function returns same result as original so
# that we can nest directives
return result
return _wrapper
return _decorator
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""

View File

@@ -1214,6 +1214,7 @@ def scope_name(self):
def include_concrete_envs(self):
"""Copy and save the included envs' specs internally"""
lockfile_meta = None
root_hash_seen = set()
concrete_hash_seen = set()
self.included_concrete_spec_data = {}
@@ -1224,26 +1225,37 @@ def include_concrete_envs(self):
raise SpackEnvironmentError(f"Unable to find env at {env_path}")
env = Environment(env_path)
self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}
with open(env.lock_path) as f:
lockfile_as_dict = env._read_lockfile(f)
# Lockfile_meta must match each env and use at least format version 5
if lockfile_meta is None:
lockfile_meta = lockfile_as_dict["_meta"]
elif lockfile_meta != lockfile_as_dict["_meta"]:
raise SpackEnvironmentError("All lockfile _meta values must match")
elif lockfile_meta["lockfile-version"] < 5:
raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
# Copy unique root specs from env
for root_dict in env._concrete_roots_dict():
self.included_concrete_spec_data[env_path] = {"roots": []}
for root_dict in lockfile_as_dict["roots"]:
if root_dict["hash"] not in root_hash_seen:
self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
root_hash_seen.add(root_dict["hash"])
# Copy unique concrete specs from env
for dag_hash, spec_details in env._concrete_specs_dict().items():
if dag_hash not in concrete_hash_seen:
self.included_concrete_spec_data[env_path]["concrete_specs"].update(
{dag_hash: spec_details}
for concrete_spec in lockfile_as_dict["concrete_specs"]:
if concrete_spec not in concrete_hash_seen:
self.included_concrete_spec_data[env_path].update(
{"concrete_specs": lockfile_as_dict["concrete_specs"]}
)
concrete_hash_seen.add(dag_hash)
concrete_hash_seen.add(concrete_spec)
# Copy transitive include data
transitive = env.included_concrete_spec_data
if transitive:
self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
if "include_concrete" in lockfile_as_dict.keys():
self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
"include_concrete"
]
self._read_lockfile_dict(self._to_lockfile_dict())
self.write()
@@ -1632,8 +1644,9 @@ def _concretize_separately(self, tests=False):
i += 1
# Ensure we don't try to bootstrap clingo in parallel
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()
if spack.config.get("config:concretizer", "clingo") == "clingo":
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()
# Ensure all the indexes have been built or updated, since
# otherwise the processes in the pool may timeout on waiting
@@ -1644,7 +1657,7 @@ def _concretize_separately(self, tests=False):
# Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)
_ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)
# Early return if there is nothing to do
if len(args) == 0:
@@ -2161,23 +2174,16 @@ def _get_environment_specs(self, recurse_dependencies=True):
return specs
def _concrete_specs_dict(self):
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = {}
for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
# Assumes no legacy formats, since this was just created.
spec_dict[ht.dag_hash.name] = s.dag_hash()
concrete_specs[s.dag_hash()] = spec_dict
return concrete_specs
def _concrete_roots_dict(self):
hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = self._concrete_specs_dict()
root_specs = self._concrete_roots_dict()
spack_dict = {"version": spack.spack_version}
spack_commit = spack.main.get_spack_commit()
@@ -2198,7 +2204,7 @@ def _to_lockfile_dict(self):
# spack version information
"spack": spack_dict,
# users specs + hashes are the 'roots' of the environment
"roots": root_specs,
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
# Concrete specs by hash, including dependencies
"concrete_specs": concrete_specs,
}

View File

@@ -30,7 +30,6 @@
import shutil
import urllib.error
import urllib.parse
import urllib.request
from pathlib import PurePath
from typing import List, Optional
@@ -54,7 +53,7 @@
import spack.version
import spack.version.git_ref_lookup
from spack.util.compression import decompressor_for
from spack.util.executable import CommandNotFoundError, Executable, which
from spack.util.executable import CommandNotFoundError, which
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
@@ -246,30 +245,38 @@ class URLFetchStrategy(FetchStrategy):
# these are checksum types. The generic 'checksum' is deprecated in favor of
# specific hash names, but we need it for backward compatibility
optional_attrs = [*crypto.hashes.keys(), "checksum"]
optional_attrs = list(crypto.hashes.keys()) + ["checksum"]
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs) -> None:
def __init__(self, url=None, checksum=None, **kwargs):
super().__init__(**kwargs)
self.url = url
# Prefer values in kwargs to the positionals.
self.url = kwargs.get("url", url)
self.mirrors = kwargs.get("mirrors", [])
# digest can be set as the first argument, or from an explicit
# kwarg by the hash name.
self.digest: Optional[str] = checksum
self.digest = kwargs.get("checksum", checksum)
for h in self.optional_attrs:
if h in kwargs:
self.digest = kwargs[h]
self.expand_archive: bool = kwargs.get("expand", True)
self.extra_options: dict = kwargs.get("fetch_options", {})
self._curl: Optional[Executable] = None
self.extension: Optional[str] = kwargs.get("extension", None)
self.expand_archive = kwargs.get("expand", True)
self.extra_options = kwargs.get("fetch_options", {})
self._curl = None
self.extension = kwargs.get("extension", None)
if not self.url:
raise ValueError("URLFetchStrategy requires a url for fetching.")
@property
def curl(self) -> Executable:
def curl(self):
if not self._curl:
self._curl = web_util.require_curl()
try:
self._curl = which("curl", required=True)
except CommandNotFoundError as exc:
tty.error(str(exc))
return self._curl
def source_id(self):
@@ -290,23 +297,27 @@ def candidate_urls(self):
@_needs_stage
def fetch(self):
if self.archive_file:
tty.debug(f"Already downloaded {self.archive_file}")
tty.debug("Already downloaded {0}".format(self.archive_file))
return
errors: List[Exception] = []
url = None
errors = []
for url in self.candidate_urls:
if not web_util.url_exists(url):
tty.debug("URL does not exist: " + url)
continue
try:
self._fetch_from_url(url)
break
except FailedDownloadError as e:
errors.extend(e.exceptions)
else:
raise FailedDownloadError(*errors)
errors.append(str(e))
for msg in errors:
tty.debug(msg)
if not self.archive_file:
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)
raise FailedDownloadError(url)
def _fetch_from_url(self, url):
if spack.config.get("config:url_fetch_method") == "curl":
@@ -325,28 +336,27 @@ def _check_headers(self, headers):
@_needs_stage
def _fetch_urllib(self, url):
save_file = self.stage.save_filename
tty.msg("Fetching {0}".format(url))
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
# Run urllib but grab the mime type from the http headers
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
url, headers, response = web_util.read_from_url(url)
except web_util.SpackWebError as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
if os.path.lexists(save_file):
os.remove(save_file)
raise FailedDownloadError(e) from e
tty.msg(f"Fetching {url}")
msg = "urllib failed to fetch with error {0}".format(e)
raise FailedDownloadError(url, msg)
if os.path.lexists(save_file):
os.remove(save_file)
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)
with open(save_file, "wb") as _open_file:
shutil.copyfileobj(response, _open_file)
self._check_headers(str(response.headers))
self._check_headers(str(headers))
@_needs_stage
def _fetch_curl(self, url):
@@ -355,7 +365,7 @@ def _fetch_curl(self, url):
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + ".part"
tty.msg(f"Fetching {url}")
tty.msg("Fetching {0}".format(url))
if partial_file:
save_args = [
"-C",
@@ -395,8 +405,8 @@ def _fetch_curl(self, url):
try:
web_util.check_curl_code(curl.returncode)
except spack.error.FetchError as e:
raise FailedDownloadError(e) from e
except spack.error.FetchError as err:
raise spack.fetch_strategy.FailedDownloadError(url, str(err))
self._check_headers(headers)
@@ -463,7 +473,7 @@ def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
if not self.digest:
raise NoDigestError(f"Attempt to check {self.__class__.__name__} with no digest.")
raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
verify_checksum(self.archive_file, self.digest)
@@ -474,8 +484,8 @@ def reset(self):
"""
if not self.archive_file:
raise NoArchiveFileError(
f"Tried to reset {self.__class__.__name__} before fetching",
f"Failed on reset() for URL{self.url}",
"Tried to reset URLFetchStrategy before fetching",
"Failed on reset() for URL %s" % self.url,
)
# Remove everything but the archive from the stage
@@ -488,10 +498,14 @@ def reset(self):
self.expand()
def __repr__(self):
return f"{self.__class__.__name__}<{self.url}>"
url = self.url if self.url else "no url"
return "%s<%s>" % (self.__class__.__name__, url)
def __str__(self):
return self.url
if self.url:
return self.url
else:
return "[no url]"
@fetcher
@@ -504,7 +518,7 @@ def fetch(self):
# check whether the cache file exists.
if not os.path.isfile(path):
raise NoCacheError(f"No cache of {path}")
raise NoCacheError("No cache of %s" % path)
# remove old symlink if one is there.
filename = self.stage.save_filename
@@ -514,8 +528,8 @@ def fetch(self):
# Symlink to local cached archive.
symlink(path, filename)
# Remove link if checksum fails, or subsequent fetchers will assume they don't need to
# download.
# Remove link if checksum fails, or subsequent fetchers
# will assume they don't need to download.
if self.digest:
try:
self.check()
@@ -524,12 +538,12 @@ def fetch(self):
raise
# Notify the user how we fetched.
tty.msg(f"Using cached archive: {path}")
tty.msg("Using cached archive: {0}".format(path))
class OCIRegistryFetchStrategy(URLFetchStrategy):
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
super().__init__(url=url, checksum=checksum, **kwargs)
def __init__(self, url=None, checksum=None, **kwargs):
super().__init__(url, checksum, **kwargs)
self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)
@@ -540,13 +554,13 @@ def fetch(self):
try:
response = self._urlopen(self.url)
except (TimeoutError, urllib.error.URLError) as e:
except urllib.error.URLError as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
if os.path.lexists(file):
os.remove(file)
raise FailedDownloadError(e) from e
raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e
if os.path.lexists(file):
os.remove(file)
@@ -574,18 +588,18 @@ def __init__(self, **kwargs):
# Set a URL based on the type of fetch strategy.
self.url = kwargs.get(self.url_attr, None)
if not self.url:
raise ValueError(f"{self.__class__} requires {self.url_attr} argument.")
raise ValueError("%s requires %s argument." % (self.__class__, self.url_attr))
for attr in self.optional_attrs:
setattr(self, attr, kwargs.get(attr, None))
@_needs_stage
def check(self):
tty.debug(f"No checksum needed when fetching with {self.url_attr}")
tty.debug("No checksum needed when fetching with {0}".format(self.url_attr))
@_needs_stage
def expand(self):
tty.debug(f"Source fetched with {self.url_attr} is already expanded.")
tty.debug("Source fetched with %s is already expanded." % self.url_attr)
@_needs_stage
def archive(self, destination, *, exclude: Optional[str] = None):
@@ -605,10 +619,10 @@ def archive(self, destination, *, exclude: Optional[str] = None):
)
def __str__(self):
return f"VCS: {self.url}"
return "VCS: %s" % self.url
def __repr__(self):
return f"{self.__class__}<{self.url}>"
return "%s<%s>" % (self.__class__, self.url)
@fetcher
@@ -711,17 +725,11 @@ class GitFetchStrategy(VCSFetchStrategy):
"submodules",
"get_full_repo",
"submodules_delete",
"git_sparse_paths",
]
git_version_re = r"git version (\S+)"
def __init__(self, **kwargs):
self.commit: Optional[str] = None
self.tag: Optional[str] = None
self.branch: Optional[str] = None
# Discards the keywords in kwargs that may conflict with the next call
# to __init__
forwarded_args = copy.copy(kwargs)
@@ -732,7 +740,6 @@ def __init__(self, **kwargs):
self.submodules = kwargs.get("submodules", False)
self.submodules_delete = kwargs.get("submodules_delete", False)
self.get_full_repo = kwargs.get("get_full_repo", False)
self.git_sparse_paths = kwargs.get("git_sparse_paths", None)
@property
def git_version(self):
@@ -770,71 +777,68 @@ def git(self):
@property
def cachable(self):
return self.cache_enabled and bool(self.commit)
return self.cache_enabled and bool(self.commit or self.tag)
def source_id(self):
# TODO: tree-hash would secure download cache and mirrors, commit only secures checkouts.
return self.commit
return self.commit or self.tag
def mirror_id(self):
if self.commit:
repo_ref = self.commit or self.tag or self.branch
if repo_ref:
repo_path = urllib.parse.urlparse(self.url).path
result = os.path.sep.join(["git", repo_path, self.commit])
result = os.path.sep.join(["git", repo_path, repo_ref])
return result
def _repo_info(self):
args = ""
if self.commit:
args = f" at commit {self.commit}"
elif self.tag:
args = f" at tag {self.tag}"
elif self.branch:
args = f" on branch {self.branch}"
return f"{self.url}{args}"
if self.commit:
args = " at commit {0}".format(self.commit)
elif self.tag:
args = " at tag {0}".format(self.tag)
elif self.branch:
args = " on branch {0}".format(self.branch)
return "{0}{1}".format(self.url, args)
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug(f"Already fetched {self.stage.source_path}")
tty.debug("Already fetched {0}".format(self.stage.source_path))
return
if self.git_sparse_paths:
self._sparse_clone_src()
else:
self._clone_src()
self.submodule_operations()
self.clone(commit=self.commit, branch=self.branch, tag=self.tag)
def bare_clone(self, dest: str) -> None:
def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
"""
Execute a bare clone for metadata only
Clone a repository to a path.
Requires a destination since bare cloning does not provide source
and shouldn't be used for staging.
This method handles cloning from git, but does not require a stage.
Arguments:
dest (str or None): The path into which the code is cloned. If None,
requires a stage and uses the stage's source path.
commit (str or None): A commit to fetch from the remote. Only one of
commit, branch, and tag may be non-None.
branch (str or None): A branch to fetch from the remote.
tag (str or None): A tag to fetch from the remote.
bare (bool): Execute a "bare" git clone (--bare option to git)
"""
# Default to spack source path
tty.debug(f"Cloning git repository: {self._repo_info()}")
dest = dest or self.stage.source_path
tty.debug("Cloning git repository: {0}".format(self._repo_info()))
git = self.git
debug = spack.config.get("config:debug")
# We don't need to worry about which commit/branch/tag is checked out
clone_args = ["clone", "--bare"]
if not debug:
clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)
def _clone_src(self) -> None:
"""Clone a repository to a path using git."""
# Default to spack source path
dest = self.stage.source_path
tty.debug(f"Cloning git repository: {self._repo_info()}")
git = self.git
debug = spack.config.get("config:debug")
if self.commit:
if bare:
# We don't need to worry about which commit/branch/tag is checked out
clone_args = ["clone", "--bare"]
if not debug:
clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)
elif commit:
# Need to do a regular clone and check out everything if
# they asked for a particular commit.
clone_args = ["clone", self.url]
@@ -853,7 +857,7 @@ def _clone_src(self) -> None:
)
with working_dir(dest):
checkout_args = ["checkout", self.commit]
checkout_args = ["checkout", commit]
if not debug:
checkout_args.insert(1, "--quiet")
git(*checkout_args)
@@ -865,10 +869,10 @@ def _clone_src(self) -> None:
args.append("--quiet")
# If we want a particular branch ask for it.
if self.branch:
args.extend(["--branch", self.branch])
elif self.tag and self.git_version >= spack.version.Version("1.8.5.2"):
args.extend(["--branch", self.tag])
if branch:
args.extend(["--branch", branch])
elif tag and self.git_version >= spack.version.Version("1.8.5.2"):
args.extend(["--branch", tag])
# Try to be efficient if we're using a new enough git.
# This checks out only one branch's history
@@ -900,7 +904,7 @@ def _clone_src(self) -> None:
# For tags, be conservative and check them out AFTER
# cloning. Later git versions can do this with clone
# --branch, but older ones fail.
if self.tag and self.git_version < spack.version.Version("1.8.5.2"):
if tag and self.git_version < spack.version.Version("1.8.5.2"):
# pull --tags returns a "special" error code of 1 in
# older versions that we have to ignore.
# see: https://github.com/git/git/commit/19d122b
@@ -913,79 +917,6 @@ def _clone_src(self) -> None:
git(*pull_args, ignore_errors=1)
git(*co_args)
def _sparse_clone_src(self, **kwargs):
"""Use git's sparse checkout feature to clone portions of a git repository"""
dest = self.stage.source_path
git = self.git
if self.git_version < spack.version.Version("2.26.0"):
# technically this should be supported for 2.25, but bumping for OS issues
# see https://github.com/spack/spack/issues/45771
# code paths exist where the package is not set. Ensure some identifier for the
# package that was configured for sparse checkout exists in the error message
identifier = str(self.url)
if self.package:
identifier += f" ({self.package.name})"
tty.warn(
(
f"{identifier} is configured for git sparse-checkout "
"but the git version is too old to support sparse cloning. "
"Cloning the full repository instead."
)
)
self._clone_src()
else:
# default to depth=2 to allow for retention of some git properties
depth = kwargs.get("depth", 2)
needs_fetch = self.branch or self.tag
git_ref = self.branch or self.tag or self.commit
assert git_ref
clone_args = ["clone"]
if needs_fetch:
clone_args.extend(["--branch", git_ref])
if self.get_full_repo:
clone_args.append("--no-single-branch")
else:
clone_args.append("--single-branch")
clone_args.extend(
[f"--depth={depth}", "--no-checkout", "--filter=blob:none", self.url]
)
sparse_args = ["sparse-checkout", "set"]
if callable(self.git_sparse_paths):
sparse_args.extend(self.git_sparse_paths())
else:
sparse_args.extend([p for p in self.git_sparse_paths])
sparse_args.append("--cone")
checkout_args = ["checkout", git_ref]
if not spack.config.get("config:debug"):
clone_args.insert(1, "--quiet")
checkout_args.insert(1, "--quiet")
with temp_cwd():
git(*clone_args)
repo_name = get_single_file(".")
if self.stage:
self.stage.srcdir = repo_name
shutil.move(repo_name, dest)
with working_dir(dest):
git(*sparse_args)
git(*checkout_args)
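For orientation, a hedged sketch of the package-side attribute this sparse-clone path consumes; the class name, URL, and directory names below are placeholders, not a real package.

    from spack.package import Package, version

    class ExampleSparse(Package):
        """Illustrative package that opts into sparse checkout."""

        git = "https://example.com/org/big-monorepo.git"
        # Only these top-level paths are materialized by the sparse clone;
        # with git older than 2.26.0 the code above warns and falls back
        # to a full clone instead.
        git_sparse_paths = ["docs", "cmake"]

        version("1.0", tag="v1.0")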
def submodule_operations(self):
dest = self.stage.source_path
git = self.git
if self.submodules_delete:
with working_dir(dest):
for submodule_to_delete in self.submodules_delete:
@@ -1038,7 +969,7 @@ def protocol_supports_shallow_clone(self):
return not (self.url.startswith("http://") or self.url.startswith("/"))
def __str__(self):
return f"[git] {self._repo_info()}"
return "[git] {0}".format(self._repo_info())
@fetcher
@@ -1362,7 +1293,7 @@ def reset(self):
shutil.move(scrubbed, source_path)
def __str__(self):
return f"[hg] {self.url}"
return "[hg] %s" % self.url
@fetcher
@@ -1371,21 +1302,46 @@ class S3FetchStrategy(URLFetchStrategy):
url_attr = "s3"
def __init__(self, *args, **kwargs):
try:
super().__init__(*args, **kwargs)
except ValueError:
if not kwargs.get("url"):
raise ValueError("S3FetchStrategy requires a url for fetching.")
@_needs_stage
def fetch(self):
if not self.url.startswith("s3://"):
raise spack.error.FetchError(
f"{self.__class__.__name__} can only fetch from s3:// urls."
)
if self.archive_file:
tty.debug(f"Already downloaded {self.archive_file}")
tty.debug("Already downloaded {0}".format(self.archive_file))
return
self._fetch_urllib(self.url)
if not self.archive_file:
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "s3":
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
tty.debug("Fetching {0}".format(self.url))
basename = os.path.basename(parsed_url.path)
with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)
with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)
content_type = web_util.get_header(headers, "Content-type")
if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
llnl.util.filesystem.rename(
os.path.join(self.stage.path, basename), self.stage.save_filename
)
if not self.archive_file:
raise FailedDownloadError(self.url)
@fetcher
class GCSFetchStrategy(URLFetchStrategy):
@@ -1393,22 +1349,43 @@ class GCSFetchStrategy(URLFetchStrategy):
url_attr = "gs"
def __init__(self, *args, **kwargs):
try:
super().__init__(*args, **kwargs)
except ValueError:
if not kwargs.get("url"):
raise ValueError("GCSFetchStrategy requires a url for fetching.")
@_needs_stage
def fetch(self):
if not self.url.startswith("gs"):
raise spack.error.FetchError(
f"{self.__class__.__name__} can only fetch from gs:// urls."
)
if self.archive_file:
tty.debug(f"Already downloaded {self.archive_file}")
tty.debug("Already downloaded {0}".format(self.archive_file))
return
self._fetch_urllib(self.url)
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "gs":
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
tty.debug("Fetching {0}".format(self.url))
basename = os.path.basename(parsed_url.path)
with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)
with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)
content_type = web_util.get_header(headers, "Content-type")
if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
if not self.archive_file:
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
)
raise FailedDownloadError(self.url)
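A minimal sketch of the construction style used on one side of these hunks, where the S3 and GCS fetchers are built with a keyword url and delegate the actual download to the shared urllib path; the bucket and object names are placeholders.

    import spack.fetch_strategy as fs

    s3 = fs.S3FetchStrategy(url="s3://example-bucket/archive-1.0.tar.gz")
    gcs = fs.GCSFetchStrategy(url="gs://example-bucket/archive-1.0.tar.gz")
    # Both fetch() implementations check the URL scheme first and refuse
    # anything that is not s3:// or gs:// respectively.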
@fetcher
@@ -1417,7 +1394,7 @@ class FetchAndVerifyExpandedFile(URLFetchStrategy):
as well as after expanding it."""
def __init__(self, url, archive_sha256: str, expanded_sha256: str):
super().__init__(url=url, checksum=archive_sha256)
super().__init__(url, archive_sha256)
self.expanded_sha256 = expanded_sha256
def expand(self):
@@ -1459,14 +1436,14 @@ def stable_target(fetcher):
return False
def from_url(url: str) -> URLFetchStrategy:
def from_url(url):
"""Given a URL, find an appropriate fetch strategy for it.
Currently just gives you a URLFetchStrategy that uses curl.
TODO: make this return appropriate fetch strategies for other
types of URLs.
"""
return URLFetchStrategy(url=url)
return URLFetchStrategy(url)
def from_kwargs(**kwargs):
@@ -1535,12 +1512,10 @@ def _check_version_attributes(fetcher, pkg, version):
def _extrapolate(pkg, version):
"""Create a fetcher from an extrapolated URL for this version."""
try:
return URLFetchStrategy(url=pkg.url_for_version(version), fetch_options=pkg.fetch_options)
return URLFetchStrategy(pkg.url_for_version(version), fetch_options=pkg.fetch_options)
except spack.package_base.NoURLError:
raise ExtrapolationError(
f"Can't extrapolate a URL for version {version} because "
f"package {pkg.name} defines no URLs"
)
msg = "Can't extrapolate a URL for version %s " "because package %s defines no URLs"
raise ExtrapolationError(msg % (version, pkg.name))
def _from_merged_attrs(fetcher, pkg, version):
@@ -1557,11 +1532,8 @@ def _from_merged_attrs(fetcher, pkg, version):
attrs["fetch_options"] = pkg.fetch_options
attrs.update(pkg.versions[version])
if fetcher.url_attr == "git":
pkg_attr_list = ["submodules", "git_sparse_paths"]
for pkg_attr in pkg_attr_list:
if hasattr(pkg, pkg_attr):
attrs.setdefault(pkg_attr, getattr(pkg, pkg_attr))
if fetcher.url_attr == "git" and hasattr(pkg, "submodules"):
attrs.setdefault("submodules", pkg.submodules)
return fetcher(**attrs)
@@ -1656,9 +1628,11 @@ def for_package_version(pkg, version=None):
raise InvalidArgsError(pkg, version, **args)
def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
def from_url_scheme(url, *args, **kwargs):
"""Finds a suitable FetchStrategy by matching its url_attr with the scheme
in the given url."""
url = kwargs.get("url", url)
parsed_url = urllib.parse.urlparse(url, scheme="file")
scheme_mapping = kwargs.get("scheme_mapping") or {
@@ -1675,9 +1649,11 @@ def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
for fetcher in all_strategies:
url_attr = getattr(fetcher, "url_attr", None)
if url_attr and url_attr == scheme:
return fetcher(url=url, **kwargs)
return fetcher(url, *args, **kwargs)
raise ValueError(f'No FetchStrategy found for url with scheme: "{parsed_url.scheme}"')
raise ValueError(
'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(SCHEME=parsed_url.scheme)
)
def from_list_url(pkg):
@@ -1702,9 +1678,7 @@ def from_list_url(pkg):
)
# construct a fetcher
return URLFetchStrategy(
url=url_from_list, checksum=checksum, fetch_options=pkg.fetch_options
)
return URLFetchStrategy(url_from_list, checksum, fetch_options=pkg.fetch_options)
except KeyError as e:
tty.debug(e)
tty.msg("Cannot find version %s in url_list" % pkg.version)
@@ -1732,10 +1706,10 @@ def store(self, fetcher, relative_dest):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def fetcher(self, target_path: str, digest: Optional[str], **kwargs) -> CacheURLFetchStrategy:
def fetcher(self, target_path, digest, **kwargs):
path = os.path.join(self.root, target_path)
url = url_util.path_to_file_url(path)
return CacheURLFetchStrategy(url=url, checksum=digest, **kwargs)
return CacheURLFetchStrategy(url, digest, **kwargs)
def destroy(self):
shutil.rmtree(self.root, ignore_errors=True)
@@ -1748,9 +1722,9 @@ class NoCacheError(spack.error.FetchError):
class FailedDownloadError(spack.error.FetchError):
"""Raised when a download fails."""
def __init__(self, *exceptions: Exception):
super().__init__("Failed to download")
self.exceptions = exceptions
def __init__(self, url, msg=""):
super().__init__("Failed to fetch file from URL: %s" % url, msg)
self.url = url
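A small sketch of the exception-wrapping form shown in this hunk (the variant that stores the underlying errors rather than a URL string); the URLError message is made up.

    import urllib.error

    from spack.fetch_strategy import FailedDownloadError

    underlying = urllib.error.URLError("connection refused")
    err = FailedDownloadError(underlying)
    print(err.exceptions)   # (URLError('connection refused'),)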
class NoArchiveFileError(spack.error.FetchError):

View File

@@ -37,12 +37,6 @@ def __call__(self, spec):
"""Run this hash on the provided spec."""
return spec.spec_hash(self)
def __repr__(self):
return (
f"SpecHashDescriptor(depflag={self.depflag!r}, "
f"package_hash={self.package_hash!r}, name={self.name!r}, override={self.override!r})"
)
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")
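For illustration, what the __repr__ shown in this hunk would print for the dag_hash descriptor defined just above; the exact depflag value depends on spack.deptypes and is elided here.

    import spack.hash_types as ht

    print(repr(ht.dag_hash))
    # e.g. SpecHashDescriptor(depflag=..., package_hash=True, name='hash', override=None)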

View File

@@ -20,7 +20,6 @@
systems (e.g. modules, lmod, etc.) or to add other custom
features.
"""
import importlib
from llnl.util.lang import ensure_last, list_modules
@@ -47,7 +46,11 @@ def _populate_hooks(cls):
for name in relative_names:
module_name = __name__ + "." + name
module_obj = importlib.import_module(module_name)
# When importing a module from a package, __import__('A.B', ...)
# returns package A when 'fromlist' is empty. If fromlist is not
# empty it returns the submodule B instead
# See: https://stackoverflow.com/a/2725668/771663
module_obj = __import__(module_name, fromlist=[None])
cls._hooks.append((module_name, module_obj))
@property

View File

@@ -23,7 +23,9 @@ def post_install(spec, explicit):
# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
signing_key = bindist.select_signing_key() if mirror.signed else None
with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
bindist.push_or_raise(
spec,
mirror.push_url,
bindist.PushOptions(force=True, regenerate_index=False, unsigned=not mirror.signed),
)
tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")

View File

@@ -757,10 +757,6 @@ def test_process(pkg: Pb, kwargs):
pkg.tester.status(pkg.spec.name, TestStatus.SKIPPED)
return
# Make sure properly named build-time test methods actually run as
# stand-alone tests.
pkg.run_tests = True
# run test methods from the package and all virtuals it provides
v_names = virtuals(pkg)
test_specs = [pkg.spec] + [spack.spec.Spec(v_name) for v_name in sorted(v_names)]

View File

@@ -1611,7 +1611,9 @@ def _add_tasks(self, request: BuildRequest, all_deps):
def _add_compiler_package_to_config(self, pkg: "spack.package_base.PackageBase") -> None:
compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
spack.compilers.find_compilers([compiler_search_prefix])
spack.compilers.add_compilers_to_config(
spack.compilers.find_compilers([compiler_search_prefix])
)
def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
"""

View File

@@ -21,7 +21,6 @@
from typing import List, Optional, Union
import llnl.url
import llnl.util.symlink
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -31,7 +30,6 @@
import spack.fetch_strategy
import spack.mirror
import spack.oci.image
import spack.repo
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
@@ -428,74 +426,51 @@ def _determine_extension(fetcher):
return ext
class MirrorLayout:
"""A ``MirrorLayout`` object describes the relative path of a mirror entry."""
class MirrorReference:
"""A ``MirrorReference`` stores the relative paths where you can store a
package/resource in a mirror directory.
def __init__(self, path: str) -> None:
self.path = path
The appropriate storage location is given by ``storage_path``. The
``cosmetic_path`` property provides a reference that a human could generate
themselves based on reading the details of the package.
A user can iterate over a ``MirrorReference`` object to get all the
possible names that might be used to refer to the resource in a mirror;
this includes names generated by previous naming schemes that are no longer
reported by ``storage_path`` or ``cosmetic_path``.
"""
def __init__(self, cosmetic_path, global_path=None):
self.global_path = global_path
self.cosmetic_path = cosmetic_path
@property
def storage_path(self):
if self.global_path:
return self.global_path
else:
return self.cosmetic_path
def __iter__(self):
"""Yield all paths including aliases where the resource can be found."""
yield self.path
def make_alias(self, root: str) -> None:
"""Make the entry ``root / self.path`` available under a human readable alias"""
pass
if self.global_path:
yield self.global_path
yield self.cosmetic_path
class DefaultLayout(MirrorLayout):
def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
# When we have a digest, it is used as the primary storage location. If not, then we use
# the human-readable alias. In case of mirrors of a VCS checkout, we currently do not have
# a digest, that's why an alias is required and a digest optional.
super().__init__(path=digest_path or alias_path)
self.alias = alias_path
self.digest_path = digest_path
class OCIImageLayout:
"""Follow the OCI Image Layout Specification to archive blobs
def make_alias(self, root: str) -> None:
"""Symlink a human readible path in our mirror to the actual storage location."""
# We already use the human-readable path as the main storage location.
if not self.digest_path:
return
alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
alias_dir = os.path.dirname(alias)
relative_dst = os.path.relpath(digest, start=alias_dir)
mkdirp(alias_dir)
tmp = f"{alias}.tmp"
llnl.util.symlink.symlink(relative_dst, tmp)
try:
os.rename(tmp, alias)
except OSError:
# Clean up the temporary if possible
try:
os.unlink(tmp)
except OSError:
pass
raise
def __iter__(self):
if self.digest_path:
yield self.digest_path
yield self.alias
class OCILayout(MirrorLayout):
"""Follow the OCI Image Layout Specification to archive blobs where paths are of the form
``blobs/<algorithm>/<digest>``"""
Paths are of the form `blobs/<algorithm>/<digest>`
"""
def __init__(self, digest: spack.oci.image.Digest) -> None:
super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))
self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest)
def __iter__(self):
yield self.storage_path
def default_mirror_layout(
fetcher: "spack.fetch_strategy.FetchStrategy",
per_package_ref: str,
spec: Optional["spack.spec.Spec"] = None,
) -> MirrorLayout:
def mirror_archive_paths(fetcher, per_package_ref, spec=None):
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
ext = None
@@ -519,7 +494,7 @@ def default_mirror_layout(
if global_ref and ext:
global_ref += ".%s" % ext
return DefaultLayout(per_package_ref, global_ref)
return MirrorReference(per_package_ref, global_ref)
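To make the two layout flavors above concrete, a hedged sketch of the DefaultLayout variant from this hunk; the archive name, digest path, and mirror root are placeholders.

    import tempfile

    import spack.mirror

    layout = spack.mirror.DefaultLayout(
        "zlib/zlib-1.3.tar.gz",                       # human-readable alias
        "_source-cache/archive/ab/abcd1234.tar.gz",   # primary (digest) storage path
    )
    print(layout.path)       # the digest path is used as the storage location

    root = tempfile.mkdtemp()
    layout.make_alias(root)  # creates root/zlib/zlib-1.3.tar.gz -> relative digest path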
def get_all_versions(specs):

View File

@@ -28,9 +28,11 @@
import inspect
from contextlib import contextmanager
import spack.directives_meta
from llnl.util.lang import caller_locals
import spack.directives
import spack.error
import spack.spec
from spack.spec import Spec
class MultiMethodMeta(type):
@@ -163,9 +165,9 @@ def __init__(self, condition):
condition (str): condition to be met
"""
if isinstance(condition, bool):
self.spec = spack.spec.Spec() if condition else None
self.spec = Spec() if condition else None
else:
self.spec = spack.spec.Spec(condition)
self.spec = Spec(condition)
def __call__(self, method):
"""This annotation lets packages declare multiple versions of
@@ -227,9 +229,11 @@ def install(self, prefix):
platform-specific versions. There's not much we can do to get
around this because of the way decorators work.
"""
assert (
MultiMethodMeta._locals is not None
), "cannot use multimethod, missing MultiMethodMeta metaclass?"
# In Python 2, Get the first definition of the method in the
# calling scope by looking at the caller's locals. In Python 3,
# we handle this using MultiMethodMeta.__prepare__.
if MultiMethodMeta._locals is None:
MultiMethodMeta._locals = caller_locals()
# Create a multimethod with this name if there is not one already
original_method = MultiMethodMeta._locals.get(method.__name__)
@@ -262,17 +266,17 @@ def __enter__(self):
and add their constraint to whatever may be already present in the directive
`when=` argument.
"""
spack.directives_meta.DirectiveMeta.push_to_context(str(self.spec))
spack.directives.DirectiveMeta.push_to_context(str(self.spec))
def __exit__(self, exc_type, exc_val, exc_tb):
spack.directives_meta.DirectiveMeta.pop_from_context()
spack.directives.DirectiveMeta.pop_from_context()
@contextmanager
def default_args(**kwargs):
spack.directives_meta.DirectiveMeta.push_default_args(kwargs)
spack.directives.DirectiveMeta.push_default_args(kwargs)
yield
spack.directives_meta.DirectiveMeta.pop_default_args()
spack.directives.DirectiveMeta.pop_default_args()
class MultiMethodError(spack.error.SpackError):

View File

@@ -6,6 +6,7 @@
import hashlib
import json
import os
import time
import urllib.error
import urllib.parse
import urllib.request
@@ -42,6 +43,11 @@ def create_tarball(spec: spack.spec.Spec, tarfile_path):
return spack.binary_distribution._do_create_tarball(tarfile_path, spec.prefix, buildinfo)
def _log_upload_progress(digest: Digest, size: int, elapsed: float):
elapsed = max(elapsed, 0.001) # guard against division by zero
tty.info(f"Uploaded {digest} ({elapsed:.2f}s, {size / elapsed / 1024 / 1024:.2f} MB/s)")
def with_query_param(url: str, param: str, value: str) -> str:
"""Add a query parameter to a URL
@@ -135,6 +141,8 @@ def upload_blob(
if not force and blob_exists(ref, digest, _urlopen):
return False
start = time.time()
with open(file, "rb") as f:
file_size = os.fstat(f.fileno()).st_size
@@ -159,6 +167,7 @@ def upload_blob(
# Created the blob in one go.
if response.status == 201:
_log_upload_progress(digest, file_size, time.time() - start)
return True
# Otherwise, do another PUT request.
@@ -182,6 +191,8 @@ def upload_blob(
spack.oci.opener.ensure_status(request, response, 201)
# print elapsed time and # MB/s
_log_upload_progress(digest, file_size, time.time() - start)
return True
@@ -390,12 +401,15 @@ def make_stage(
) -> spack.stage.Stage:
_urlopen = _urlopen or spack.oci.opener.urlopen
fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy(
url=url, checksum=digest.digest, _urlopen=_urlopen
url, checksum=digest.digest, _urlopen=_urlopen
)
# Use blobs/<alg>/<encoded> as the cache path, which follows
# the OCI Image Layout Specification. What's missing though,
# is the `oci-layout` and `index.json` files, which are
# required by the spec.
return spack.stage.Stage(
fetch_strategy, mirror_paths=spack.mirror.OCILayout(digest), name=digest.digest, keep=keep
fetch_strategy,
mirror_paths=spack.mirror.OCIImageLayout(digest),
name=digest.digest,
keep=keep,
)

View File

@@ -15,7 +15,6 @@
import functools
import glob
import hashlib
import importlib
import inspect
import io
import os
@@ -198,12 +197,13 @@ def __init__(cls, name, bases, attr_dict):
# that "foo" was a possible executable.
# If a package has the executables or libraries attribute then it's
# assumed to be detectable. Add a tag, so finding them is faster
# assumed to be detectable
if hasattr(cls, "executables") or hasattr(cls, "libraries"):
# To add the tag, we need to copy the tags attribute, and attach it to
# the current class. We don't use append, since it might modify base classes,
# if "tags" is retrieved following the MRO.
cls.tags = getattr(cls, "tags", []) + [DetectablePackageMeta.TAG]
# Append a tag to each detectable package, so that finding them is faster
if not hasattr(cls, "tags"):
setattr(cls, "tags", [DetectablePackageMeta.TAG])
elif DetectablePackageMeta.TAG not in cls.tags:
cls.tags.append(DetectablePackageMeta.TAG)
@classmethod
def platform_executables(cls):
@@ -741,7 +741,7 @@ def __init__(self, spec):
raise ValueError(msg.format(self))
# init internal variables
self._stage: Optional[StageComposite] = None
self._stage = None
self._fetcher = None
self._tester: Optional["PackageTest"] = None
@@ -869,7 +869,7 @@ def module(cls):
We use this to add variables to package modules. This makes
install() methods easier to write (e.g., can call configure())
"""
return importlib.import_module(cls.__module__)
return __import__(cls.__module__, fromlist=[cls.__name__])
@classproperty
def namespace(cls):
@@ -1099,10 +1099,9 @@ def _make_resource_stage(self, root_stage, resource):
root=root_stage,
resource=resource,
name=self._resource_stage(resource),
mirror_paths=spack.mirror.default_mirror_layout(
mirror_paths=spack.mirror.mirror_archive_paths(
resource.fetcher, os.path.join(self.name, pretty_resource_name)
),
mirrors=spack.mirror.MirrorCollection(source=True).values(),
path=self.path,
)
@@ -1114,7 +1113,7 @@ def _make_root_stage(self, fetcher):
# Construct a mirror path (TODO: get this out of package.py)
format_string = "{name}-{version}"
pretty_name = self.spec.format_path(format_string)
mirror_paths = spack.mirror.default_mirror_layout(
mirror_paths = spack.mirror.mirror_archive_paths(
fetcher, os.path.join(self.name, pretty_name), self.spec
)
# Construct a path where the stage should build..
@@ -1123,7 +1122,6 @@ def _make_root_stage(self, fetcher):
stage = Stage(
fetcher,
mirror_paths=mirror_paths,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
name=stage_name,
path=self.path,
search_fn=self._download_search,
@@ -1180,7 +1178,7 @@ def stage(self):
return self._stage
@stage.setter
def stage(self, stage: StageComposite):
def stage(self, stage):
"""Allow a stage object to be set to override the default."""
self._stage = stage
@@ -2428,9 +2426,8 @@ def all_urls(self) -> List[str]:
if hasattr(self, "url") and self.url:
urls.append(self.url)
# fetch from first entry in urls to save time
if hasattr(self, "urls") and self.urls:
urls.append(self.urls[0])
urls.extend(self.urls)
for args in self.versions.values():
if "url" in args:

View File

@@ -319,19 +319,18 @@ def stage(self) -> "spack.stage.Stage":
self.url, archive_sha256=self.archive_sha256, expanded_sha256=self.sha256
)
else:
fetcher = fs.URLFetchStrategy(url=self.url, sha256=self.sha256, expand=False)
fetcher = fs.URLFetchStrategy(self.url, sha256=self.sha256, expand=False)
# The same package can have multiple patches with the same name but
# with different contents, therefore apply a subset of the hash.
name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])
per_package_ref = os.path.join(self.owner.split(".")[-1], name)
mirror_ref = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
mirror_ref = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
self._stage = spack.stage.Stage(
fetcher,
name=f"{spack.stage.stage_prefix}patch-{fetch_digest}",
mirror_paths=mirror_ref,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
)
return self._stage

View File

@@ -149,12 +149,12 @@ def current_repository(self, value):
@contextlib.contextmanager
def switch_repo(self, substitute: "RepoType"):
"""Switch the current repository list for the duration of the context manager."""
old = self._repo
old = self.current_repository
try:
self._repo = substitute
self.current_repository = substitute
yield
finally:
self._repo = old
self.current_repository = old
def find_spec(self, fullname, python_path, target=None):
# "target" is not None only when calling importlib.reload()
@@ -365,9 +365,9 @@ def __init__(self, namespace):
def __getattr__(self, name):
"""Getattr lazily loads modules if they're not already loaded."""
submodule = f"{self.__package__}.{name}"
submodule = self.__package__ + "." + name
try:
setattr(self, name, importlib.import_module(submodule))
setattr(self, name, __import__(submodule))
except ImportError:
msg = "'{0}' object has no attribute {1}"
raise AttributeError(msg.format(type(self), name))
@@ -683,7 +683,7 @@ class RepoPath:
def __init__(
self,
*repos: Union[str, "Repo"],
cache: Optional["spack.caches.FileCacheType"],
cache: "spack.caches.FileCacheType",
overrides: Optional[Dict[str, Any]] = None,
) -> None:
self.repos: List[Repo] = []
@@ -696,7 +696,6 @@ def __init__(
for repo in repos:
try:
if isinstance(repo, str):
assert cache is not None, "cache must hold a value, when repo is a string"
repo = Repo(repo, cache=cache, overrides=overrides)
repo.finder(self)
self.put_last(repo)
@@ -708,10 +707,6 @@ def __init__(
f" spack repo rm {repo}",
)
def ensure_unwrapped(self) -> "RepoPath":
"""Ensure we unwrap this object from any dynamic wrapper (like Singleton)"""
return self
def put_first(self, repo: "Repo") -> None:
"""Add repo first in the search path."""
if isinstance(repo, RepoPath):
@@ -935,16 +930,6 @@ def is_virtual_safe(self, pkg_name: str) -> bool:
def __contains__(self, pkg_name):
return self.exists(pkg_name)
def marshal(self):
return (self.repos,)
@staticmethod
def unmarshal(repos):
return RepoPath(*repos, cache=None)
def __reduce__(self):
return RepoPath.unmarshal, self.marshal()
class Repo:
"""Class representing a package repository in the filesystem.
@@ -1334,20 +1319,6 @@ def __repr__(self) -> str:
def __contains__(self, pkg_name: str) -> bool:
return self.exists(pkg_name)
@staticmethod
def unmarshal(root, cache, overrides):
"""Helper method to unmarshal keyword arguments"""
return Repo(root, cache=cache, overrides=overrides)
def marshal(self):
cache = self._cache
if isinstance(cache, llnl.util.lang.Singleton):
cache = cache.instance
return self.root, cache, self.overrides
def __reduce__(self):
return Repo.unmarshal, self.marshal()
RepoType = Union[Repo, RepoPath]

View File

@@ -11,26 +11,6 @@
import spack.schema.environment
flags: Dict[str, Any] = {
"type": "object",
"additionalProperties": False,
"properties": {
"cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]},
},
}
extra_rpaths: Dict[str, Any] = {"type": "array", "default": [], "items": {"type": "string"}}
implicit_rpaths: Dict[str, Any] = {
"anyOf": [{"type": "array", "items": {"type": "string"}}, {"type": "boolean"}]
}
#: Properties for inclusion in other schemas
properties: Dict[str, Any] = {
"compilers": {
@@ -55,7 +35,18 @@
"fc": {"anyOf": [{"type": "string"}, {"type": "null"}]},
},
},
"flags": flags,
"flags": {
"type": "object",
"additionalProperties": False,
"properties": {
"cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
"ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]},
},
},
"spec": {"type": "string"},
"operating_system": {"type": "string"},
"target": {"type": "string"},
@@ -63,9 +54,18 @@
"modules": {
"anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
},
"implicit_rpaths": implicit_rpaths,
"implicit_rpaths": {
"anyOf": [
{"type": "array", "items": {"type": "string"}},
{"type": "boolean"},
]
},
"environment": spack.schema.environment.definition,
"extra_rpaths": extra_rpaths,
"extra_rpaths": {
"type": "array",
"default": [],
"items": {"type": "string"},
},
},
}
},

View File

@@ -84,6 +84,7 @@
"build_language": {"type": "string"},
"build_jobs": {"type": "integer", "minimum": 1},
"ccache": {"type": "boolean"},
"concretizer": {"type": "string", "enum": ["original", "clingo"]},
"db_lock_timeout": {"type": "integer", "minimum": 1},
"package_lock_timeout": {
"anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}]
@@ -97,9 +98,9 @@
"aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}},
},
"deprecatedProperties": {
"properties": ["concretizer"],
"message": "Spack supports only clingo as a concretizer from v0.23. "
"The config:concretizer config option is ignored.",
"properties": ["terminal_title"],
"message": "config:terminal_title has been replaced by "
"install_status and is ignored",
"error": False,
},
}

View File

@@ -11,8 +11,6 @@
import spack.schema.environment
from .compilers import extra_rpaths, flags, implicit_rpaths
permissions = {
"type": "object",
"additionalProperties": False,
@@ -186,16 +184,7 @@
"type": "object",
"additionalProperties": True,
"properties": {
"compilers": {
"type": "object",
"patternProperties": {
r"(^\w[\w-]*)": {"type": "string"}
},
},
"environment": spack.schema.environment.definition,
"extra_rpaths": extra_rpaths,
"implicit_rpaths": implicit_rpaths,
"flags": flags,
"environment": spack.schema.environment.definition
},
},
},

View File

@@ -32,6 +32,7 @@
import spack.config
import spack.config as sc
import spack.deptypes as dt
import spack.directives
import spack.environment as ev
import spack.error
import spack.package_base
@@ -284,14 +285,16 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
return NoDuplicatesCounter(specs, tests=tests)
def all_compilers_in_config(configuration):
return spack.compilers.all_compilers_from(configuration)
def all_libcs() -> Set[spack.spec.Spec]:
"""Return a set of all libc specs targeted by any configured compiler. If none, fall back to
libc determined from the current Python process if dynamically linked."""
libcs = {
c.default_libc
for c in spack.compilers.all_compilers_from(spack.config.CONFIG)
if c.default_libc
c.default_libc for c in all_compilers_in_config(spack.config.CONFIG) if c.default_libc
}
if libcs:
@@ -610,7 +613,7 @@ def _external_config_with_implicit_externals(configuration):
if not using_libc_compatibility():
return packages_yaml
for compiler in spack.compilers.all_compilers_from(configuration):
for compiler in all_compilers_in_config(configuration):
libc = compiler.default_libc
if libc:
entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path}
@@ -1877,7 +1880,8 @@ def _spec_clauses(
# validate variant value only if spec not concrete
if not spec.concrete:
if not spec.virtual and vname not in spack.variant.reserved_names:
reserved_names = spack.directives.reserved_names
if not spec.virtual and vname not in reserved_names:
pkg_cls = self.pkg_class(spec.name)
try:
variant_def, _ = pkg_cls.variants[vname]
@@ -2998,7 +3002,7 @@ class CompilerParser:
def __init__(self, configuration) -> None:
self.compilers: Set[KnownCompiler] = set()
for c in spack.compilers.all_compilers_from(configuration):
for c in all_compilers_in_config(configuration):
if using_libc_compatibility() and not c_compiler_runs(c):
tty.debug(
f"the C compiler {c.cc} does not exist, or does not run correctly."
@@ -3462,7 +3466,7 @@ def reorder_flags(self):
"""
# reverse compilers so we get highest priority compilers that share a spec
compilers = dict(
(c.spec, c) for c in reversed(spack.compilers.all_compilers_from(spack.config.CONFIG))
(c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
)
cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())

View File

@@ -70,6 +70,7 @@
import spack.compiler
import spack.compilers
import spack.config
import spack.dependency as dp
import spack.deptypes as dt
import spack.error
import spack.hash_types as ht
@@ -1639,7 +1640,7 @@ def _add_flag(self, name, value, propagate):
Known flags currently include "arch"
"""
if propagate and name in vt.reserved_names:
if propagate and name in spack.directives.reserved_names:
raise UnsupportedPropagationError(
f"Propagation with '==' is not supported for '{name}'."
)
@@ -2614,6 +2615,294 @@ def validate_detection(self):
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(self, self.extra_attributes)
def _concretize_helper(self, concretizer, presets=None, visited=None):
"""Recursive helper function for concretize().
This concretizes everything bottom-up. As things are
concretized, they're added to the presets, and ancestors
will prefer the settings of their children.
"""
if presets is None:
presets = {}
if visited is None:
visited = set()
if self.name in visited:
return False
if self.concrete:
visited.add(self.name)
return False
changed = False
# Concretize deps first -- this is a bottom-up process.
for name in sorted(self._dependencies):
# WARNING: This function is an implementation detail of the
# WARNING: original concretizer. Since with that greedy
# WARNING: algorithm we don't allow multiple nodes from
# WARNING: the same package in a DAG, here we hard-code
# WARNING: using index 0 i.e. we assume that we have only
# WARNING: one edge from package "name"
changed |= self._dependencies[name][0].spec._concretize_helper(
concretizer, presets, visited
)
if self.name in presets:
changed |= self.constrain(presets[self.name])
else:
# Concretize virtual dependencies last. Because they're added
# to presets below, their constraints will all be merged, but we'll
# still need to select a concrete package later.
if not self.virtual:
changed |= any(
(
concretizer.concretize_develop(self), # special variant
concretizer.concretize_architecture(self),
concretizer.concretize_compiler(self),
concretizer.adjust_target(self),
# flags must be concretized after compiler
concretizer.concretize_compiler_flags(self),
concretizer.concretize_version(self),
concretizer.concretize_variants(self),
)
)
presets[self.name] = self
visited.add(self.name)
return changed
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert self.virtual
virtuals = (self.name,)
for dep_spec in itertools.chain.from_iterable(self._dependents.values()):
dependent = dep_spec.parent
depflag = dep_spec.depflag
# remove self from all dependents, unless it is already removed
if self.name in dependent._dependencies:
del dependent._dependencies.edges[self.name]
# add the replacement, unless it is already a dep of dependent.
if concrete.name not in dependent._dependencies:
dependent._add_dependency(concrete, depflag=depflag, virtuals=virtuals)
else:
dependent.edges_to_dependencies(name=concrete.name)[0].update_virtuals(
virtuals=virtuals
)
def _expand_virtual_packages(self, concretizer):
"""Find virtual packages in this spec, replace them with providers,
and normalize again to include the provider's (potentially virtual)
dependencies. Repeat until there are no virtual deps.
Precondition: spec is normalized.
.. todo::
If a provider depends on something that conflicts with
other dependencies in the spec being expanded, this can
produce a conflicting spec. For example, if mpich depends
on hwloc@:1.3 but something in the spec needs hwloc@1.4:,
then we should choose an MPI other than mpich. Cases like
this are infrequent, but should implement this before it is
a problem.
"""
# Make an index of stuff this spec already provides
self_index = spack.provider_index.ProviderIndex(
repository=spack.repo.PATH, specs=self.traverse(), restrict=True
)
changed = False
done = False
while not done:
done = True
for spec in list(self.traverse()):
replacement = None
if spec.external:
continue
if spec.virtual:
replacement = self._find_provider(spec, self_index)
if replacement:
# TODO: may break if in-place on self but
# shouldn't happen if root is traversed first.
spec._replace_with(replacement)
done = False
break
if not replacement:
# Get a list of possible replacements in order of
# preference.
candidates = concretizer.choose_virtual_or_external(spec)
# Try the replacements in order, skipping any that cause
# satisfiability problems.
for replacement in candidates:
if replacement is spec:
break
# Replace spec with the candidate and normalize
copy = self.copy()
copy[spec.name]._dup(replacement, deps=False)
try:
# If there are duplicate providers or duplicate
# provider deps, consolidate them and merge
# constraints.
copy.normalize(force=True)
break
except spack.error.SpecError:
# On error, we'll try the next replacement.
continue
# If replacement is external then trim the dependencies
if replacement.external:
if spec._dependencies:
for dep in spec.dependencies():
del dep._dependents.edges[spec.name]
changed = True
spec.clear_dependencies()
replacement.clear_dependencies()
replacement.architecture = self.architecture
# TODO: could this and the stuff in _dup be cleaned up?
def feq(cfield, sfield):
return (not cfield) or (cfield == sfield)
if replacement is spec or (
feq(replacement.name, spec.name)
and feq(replacement.versions, spec.versions)
and feq(replacement.compiler, spec.compiler)
and feq(replacement.architecture, spec.architecture)
and feq(replacement._dependencies, spec._dependencies)
and feq(replacement.variants, spec.variants)
and feq(replacement.external_path, spec.external_path)
and feq(replacement.external_modules, spec.external_modules)
):
continue
# Refine this spec to the candidate. This uses
# replace_with AND dup so that it can work in
# place. TODO: make this more efficient.
if spec.virtual:
spec._replace_with(replacement)
changed = True
if spec._dup(replacement, deps=False, cleardeps=False):
changed = True
self_index.update(spec)
done = False
break
return changed
def _old_concretize(self, tests=False, deprecation_warning=True):
"""A spec is concrete if it describes one build of a package uniquely.
This will ensure that this spec is concrete.
Args:
tests (list or bool): list of packages that will need test
dependencies, or True/False for test all/none
deprecation_warning (bool): enable or disable the deprecation
warning for the old concretizer
If this spec could describe more than one version, variant, or build
of a package, this will add constraints to make it concrete.
Some rigorous validation and checks are also performed on the spec.
Concretizing ensures that it is self-consistent and that it's
consistent with requirements of its packages. See flatten() and
normalize() for more details on this.
"""
import spack.concretize
# Add a warning message to inform users that the original concretizer
# will be removed
if deprecation_warning:
msg = (
"the original concretizer is currently being used.\n\tUpgrade to "
'"clingo" at your earliest convenience. The original concretizer '
"will be removed from Spack in a future version."
)
warnings.warn(msg)
self.replace_hash()
if not self.name:
raise spack.error.SpecError("Attempting to concretize anonymous spec")
if self._concrete:
return
# take the spec apart once before starting the main concretization loop and resolving
# deps, but don't break dependencies during concretization as the spec is built.
user_spec_deps = self.flat_dependencies(disconnect=True)
changed = True
force = False
concretizer = spack.concretize.Concretizer(self.copy())
while changed:
changes = (
self.normalize(force, tests, user_spec_deps, disconnect=False),
self._expand_virtual_packages(concretizer),
self._concretize_helper(concretizer),
)
changed = any(changes)
force = True
visited_user_specs = set()
for dep in self.traverse():
visited_user_specs.add(dep.name)
pkg_cls = spack.repo.PATH.get_pkg_class(dep.name)
visited_user_specs.update(pkg_cls(dep).provided_virtual_names())
extra = set(user_spec_deps.keys()).difference(visited_user_specs)
if extra:
raise InvalidDependencyError(self.name, extra)
Spec.inject_patches_variant(self)
for s in self.traverse():
# TODO: Refactor this into a common method to build external specs
# TODO: or turn external_path into a lazy property
Spec.ensure_external_path_if_external(s)
# assign hashes and mark concrete
self._finalize_concretization()
# If any spec in the DAG is deprecated, throw an error
Spec.ensure_no_deprecated(self)
# Update externals as needed
for dep in self.traverse():
if dep.external:
dep.package.update_external_dependencies()
# Now that the spec is concrete we should check if
# there are declared conflicts
#
# TODO: this needs rethinking, as currently we can only express
# TODO: internal configuration conflicts within one package.
matches = []
for x in self.traverse():
if x.external:
# external specs are already built, don't worry about whether
# it's possible to build that configuration with Spack
continue
for when_spec, conflict_list in x.package_class.conflicts.items():
if x.satisfies(when_spec):
for conflict_spec, msg in conflict_list:
if x.satisfies(conflict_spec):
when = when_spec.copy()
when.name = x.name
matches.append((x, conflict_spec, when, msg))
if matches:
raise ConflictsInSpecError(self, matches)
# Check if we can produce an optimized binary (will throw if
# there are declared inconsistencies)
self.architecture.target.optimization_flags(self.compiler)
def _patches_assigned(self):
"""Whether patches have been assigned to this spec by the concretizer."""
# FIXME: _patches_in_order_of_appearance is attached after concretization
@@ -2743,13 +3032,7 @@ def ensure_no_deprecated(root):
msg += " For each package listed, choose another spec\n"
raise SpecDeprecatedError(msg)
def concretize(self, tests: Union[bool, List[str]] = False) -> None:
"""Concretize the current spec.
Args:
tests: if False disregard 'test' dependencies, if a list of names activate them for
the packages in the list, if True activate 'test' dependencies for all packages.
"""
def _new_concretize(self, tests=False):
import spack.solver.asp
self.replace_hash()
@@ -2783,6 +3066,19 @@ def concretize(self, tests: Union[bool, List[str]] = False) -> None:
concretized = answer[node]
self._dup(concretized)
def concretize(self, tests=False):
"""Concretize the current spec.
Args:
tests (bool or list): if False disregard 'test' dependencies,
if a list of names activate them for the packages in the list,
if True activate 'test' dependencies for all packages.
"""
if spack.config.get("config:concretizer", "clingo") == "clingo":
self._new_concretize(tests)
else:
self._old_concretize(tests)
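A hedged usage sketch of the dispatch above: a plain concretize() call follows whatever config:concretizer is set to, defaulting to clingo; the package name is a placeholder.

    import spack.config
    from spack.spec import Spec

    s = Spec("zlib")
    with spack.config.override("config:concretizer", "original"):
        s.concretize()      # takes the _old_concretize() branch
    print(s.concrete)       # True after a successful concretization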
def _mark_root_concrete(self, value=True):
"""Mark just this spec (not dependencies) concrete."""
if (not value) and self.concrete and self.installed:
@@ -2886,6 +3182,34 @@ def concretized(self, tests=False):
clone.concretize(tests=tests)
return clone
def flat_dependencies(self, disconnect: bool = False):
"""Build DependencyMap of all of this spec's dependencies with their constraints merged.
Arguments:
disconnect: if True, disconnect all dependents and dependencies among nodes in this
spec's DAG.
"""
flat_deps = {}
deptree = self.traverse(root=False)
for spec in deptree:
if spec.name not in flat_deps:
flat_deps[spec.name] = spec
else:
try:
flat_deps[spec.name].constrain(spec)
except spack.error.UnsatisfiableSpecError as e:
# DAG contains two instances of the same package with inconsistent constraints.
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message) from e
if disconnect:
for spec in flat_deps.values():
if not spec.concrete:
spec.clear_edges()
self.clear_dependencies()
return flat_deps
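A small sketch of what flat_dependencies() returns for an abstract spec; the package names are placeholders and nothing here needs to be installed.

    from spack.spec import Spec

    root = Spec("example ^depa@1.2 ^depb+shared")
    flat = root.flat_dependencies()
    print(sorted(flat))      # ['depa', 'depb'], each with its constraints merged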
def index(self, deptype="all"):
"""Return a dictionary that points to all the dependencies in this
spec.
@@ -2895,6 +3219,312 @@ def index(self, deptype="all"):
dm[spec.name].append(spec)
return dm
def _evaluate_dependency_conditions(self, name):
"""Evaluate all the conditions on a dependency with this name.
Args:
name (str): name of dependency to evaluate conditions on.
Returns:
(Dependency): new Dependency object combining all constraints.
If the package depends on <name> in the current spec
configuration, return the constrained dependency and
corresponding dependency types.
If no conditions are True (and we don't depend on it), return
``(None, None)``.
"""
vt.substitute_abstract_variants(self)
# evaluate when specs to figure out constraints on the dependency.
dep = None
for when_spec, deps_by_name in self.package_class.dependencies.items():
if not self.satisfies(when_spec):
continue
for dep_name, dependency in deps_by_name.items():
if dep_name != name:
continue
if dep is None:
dep = dp.Dependency(Spec(self.name), Spec(name), depflag=0)
try:
dep.merge(dependency)
except spack.error.UnsatisfiableSpecError as e:
e.message = (
"Conflicting conditional dependencies for spec"
"\n\n\t{0}\n\n"
"Cannot merge constraint"
"\n\n\t{1}\n\n"
"into"
"\n\n\t{2}".format(self, dependency.spec, dep.spec)
)
raise e
return dep
def _find_provider(self, vdep, provider_index):
"""Find provider for a virtual spec in the provider index.
Raise an exception if there is a conflicting virtual
dependency already in this spec.
"""
assert spack.repo.PATH.is_virtual_safe(vdep.name), vdep
# note that this defensively copies.
providers = provider_index.providers_for(vdep)
# If there is a provider for the vpkg, then use that instead of
# the virtual package.
if providers:
# Remove duplicate providers that can concretize to the same
# result.
for provider in providers:
for spec in providers:
if spec is not provider and provider.intersects(spec):
providers.remove(spec)
# Can't have multiple providers for the same thing in one spec.
if len(providers) > 1:
raise MultipleProviderError(vdep, providers)
return providers[0]
else:
# The user might have required something insufficient for
# pkg_dep -- so we'll get a conflict. e.g., user asked for
# mpi@:1.1 but some package required mpi@2.1:.
required = provider_index.providers_for(vdep.name)
if len(required) > 1:
raise MultipleProviderError(vdep, required)
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
def _merge_dependency(self, dependency, visited, spec_deps, provider_index, tests):
"""Merge dependency information from a Package into this Spec.
Args:
dependency (Dependency): dependency metadata from a package;
this is typically the result of merging *all* matching
dependency constraints from the package.
visited (set): set of dependency nodes already visited by
``normalize()``.
spec_deps (dict): ``dict`` of all dependencies from the spec
being normalized.
provider_index (dict): ``provider_index`` of virtual dep
providers in the ``Spec`` as normalized so far.
NOTE: Caller should assume that this routine owns the
``dependency`` parameter, i.e., it needs to be a copy of any
internal structures.
This is the core of ``normalize()``. There are some basic steps:
* If dep is virtual, evaluate whether it corresponds to an
existing concrete dependency, and merge if so.
* If it's real and it provides some virtual dep, see if it provides
what some virtual dependency wants and merge if so.
* Finally, if none of the above, merge dependency and its
constraints into this spec.
This method returns True if the spec was changed, False otherwise.
"""
changed = False
dep = dependency.spec
# If it's a virtual dependency, try to find an existing
# provider in the spec, and merge that.
virtuals = ()
if spack.repo.PATH.is_virtual_safe(dep.name):
virtuals = (dep.name,)
visited.add(dep.name)
provider = self._find_provider(dep, provider_index)
if provider:
dep = provider
else:
index = spack.provider_index.ProviderIndex(
repository=spack.repo.PATH, specs=[dep], restrict=True
)
items = list(spec_deps.items())
for name, vspec in items:
if not spack.repo.PATH.is_virtual_safe(vspec.name):
continue
if index.providers_for(vspec):
vspec._replace_with(dep)
del spec_deps[vspec.name]
changed = True
else:
required = index.providers_for(vspec.name)
if required:
raise UnsatisfiableProviderSpecError(required[0], dep)
provider_index.update(dep)
# If the spec isn't already in the set of dependencies, add it.
# Note: dep is always owned by this method. If it's from the
# caller, it's a copy from _evaluate_dependency_conditions. If it
# comes from a vdep, it's a defensive copy from _find_provider.
if dep.name not in spec_deps:
if self.concrete:
return False
spec_deps[dep.name] = dep
changed = True
else:
# merge package/vdep information into spec
try:
tty.debug("{0} applying constraint {1}".format(self.name, str(dep)))
changed |= spec_deps[dep.name].constrain(dep)
except spack.error.UnsatisfiableSpecError as e:
fmt = "An unsatisfiable {0}".format(e.constraint_type)
fmt += " constraint has been detected for spec:"
fmt += "\n\n{0}\n\n".format(spec_deps[dep.name].tree(indent=4))
fmt += "while trying to concretize the partial spec:"
fmt += "\n\n{0}\n\n".format(self.tree(indent=4))
fmt += "{0} requires {1} {2} {3}, but spec asked for {4}"
e.message = fmt.format(
self.name, dep.name, e.constraint_type, e.required, e.provided
)
raise
# Add merged spec to my deps and recurse
spec_dependency = spec_deps[dep.name]
if dep.name not in self._dependencies:
self._add_dependency(spec_dependency, depflag=dependency.depflag, virtuals=virtuals)
changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed
def _normalize_helper(self, visited, spec_deps, provider_index, tests):
"""Recursive helper function for _normalize."""
if self.name in visited:
return False
visited.add(self.name)
# If we descend into a virtual spec, there's nothing more
# to normalize. Concretize will finish resolving it later.
if self.virtual or self.external:
return False
# Avoid recursively adding constraints for already-installed packages:
# these may include build dependencies which are not needed for this
# install (since this package is already installed).
if self.concrete and self.installed:
return False
# Combine constraints from package deps with constraints from
# the spec, until nothing changes.
any_change = False
changed = True
while changed:
changed = False
for dep_name in self.package_class.dependency_names():
# Do we depend on dep_name? If so pkg_dep is not None.
dep = self._evaluate_dependency_conditions(dep_name)
# If dep is a needed dependency, merge it.
if dep:
merge = (
# caller requested test dependencies
tests is True
or (tests and self.name in tests)
or
# this is not a test-only dependency
(dep.depflag & ~dt.TEST)
)
if merge:
changed |= self._merge_dependency(
dep, visited, spec_deps, provider_index, tests
)
any_change |= changed
return any_change
def normalize(self, force=False, tests=False, user_spec_deps=None, disconnect=True):
"""When specs are parsed, any dependencies specified are hanging off
the root, and ONLY the ones that were explicitly provided are there.
Normalization turns a partial flat spec into a DAG, where:
1. Known dependencies of the root package are in the DAG.
2. Each node's dependencies dict only contains its known direct
deps.
3. There is only ONE unique spec for each package in the DAG.
* This includes virtual packages. If there is a non-virtual
package that provides a virtual package that is in the spec,
then we replace the virtual package with the non-virtual one.
TODO: normalize should probably implement some form of cycle
detection, to ensure that the spec is actually a DAG.
"""
if not self.name:
raise spack.error.SpecError("Attempting to normalize anonymous spec")
# Set _normal and _concrete to False when forced
if force and not self._concrete:
self._normal = False
if self._normal:
return False
# Ensure first that all packages & compilers in the DAG exist.
self.validate_or_raise()
# Clear the DAG and collect all dependencies in the DAG, which will be
# reapplied as constraints. All dependencies collected this way will
# have been created by a previous execution of 'normalize'.
# A dependency extracted here will only be reintegrated if it is
# discovered to apply according to _normalize_helper, so
# user-specified dependencies are recorded separately in case they
# refer to specs which take several normalization passes to
# materialize.
all_spec_deps = self.flat_dependencies(disconnect=disconnect)
if user_spec_deps:
for name, spec in user_spec_deps.items():
if not name:
msg = "Attempted to normalize anonymous dependency spec"
msg += " %s" % spec
raise InvalidSpecDetected(msg)
if name not in all_spec_deps:
all_spec_deps[name] = spec
else:
all_spec_deps[name].constrain(spec)
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
provider_index = spack.provider_index.ProviderIndex(
repository=spack.repo.PATH, specs=[s for s in all_spec_deps.values()], restrict=True
)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
visited = set()
any_change = self._normalize_helper(visited, all_spec_deps, provider_index, tests)
# remove any leftover dependents outside the spec from, e.g., pruning externals
valid = {id(spec) for spec in all_spec_deps.values()} | {id(self)}
for spec in all_spec_deps.values():
remove = [dep for dep in spec.dependents() if id(dep) not in valid]
for dep in remove:
del spec._dependents.edges[dep.name]
del dep._dependencies.edges[spec.name]
# Mark the spec as normal once done.
self._normal = True
return any_change
def normalized(self):
"""
Return a normalized copy of this spec without modifying this spec.
"""
clone = self.copy()
clone.normalize()
return clone
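A tiny sketch of the normalize()/normalized() pair described above; the package name is a placeholder and must exist in a repository for validate_or_raise() to pass.

    from spack.spec import Spec

    s = Spec("zlib")
    copy = s.normalized()    # returns a normalized copy, leaving s untouched
    s.normalize()            # expands s in place into a DAG of known dependencies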
def validate_or_raise(self):
"""Checks that names and values in this spec are real. If they're not,
it will raise an appropriate exception.
@@ -2935,7 +3565,9 @@ def ensure_valid_variants(spec):
pkg_variants = pkg_cls.variants
# reserved names are variants that may be set on any package
# but are not necessarily recorded by the package's class
not_existing = set(spec.variants) - (set(pkg_variants) | set(vt.reserved_names))
not_existing = set(spec.variants) - (
set(pkg_variants) | set(spack.directives.reserved_names)
)
if not_existing:
raise vt.UnknownVariantError(spec, not_existing)
@@ -4339,9 +4971,9 @@ def attach_git_version_lookup(self):
v.attach_lookup(spack.version.git_ref_lookup.GitRefLookup(self.fullname))
def parse_with_version_concrete(spec_like: Union[str, Spec], compiler: bool = False):
def parse_with_version_concrete(string: str, compiler: bool = False):
"""Same as Spec(string), but interprets @x as @=x"""
s: Union[CompilerSpec, Spec] = CompilerSpec(spec_like) if compiler else Spec(spec_like)
s: Union[CompilerSpec, Spec] = CompilerSpec(string) if compiler else Spec(string)
interpreted_version = s.versions.concrete_range_as_version
if interpreted_version:
s.versions = vn.VersionList([interpreted_version])

View File

@@ -13,7 +13,7 @@
import stat
import sys
import tempfile
from typing import Callable, Dict, Generator, Iterable, List, Optional, Set
from typing import Callable, Dict, Iterable, Optional, Set
import llnl.string
import llnl.util.lang
@@ -40,7 +40,6 @@
import spack.resource
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.lock
import spack.util.path as sup
import spack.util.pattern as pattern
@@ -352,10 +351,8 @@ class Stage(LockableStagingDir):
def __init__(
self,
url_or_fetch_strategy,
*,
name=None,
mirror_paths: Optional[spack.mirror.MirrorLayout] = None,
mirrors: Optional[Iterable[spack.mirror.Mirror]] = None,
mirror_paths=None,
keep=False,
path=None,
lock=True,
@@ -364,30 +361,36 @@ def __init__(
"""Create a stage object.
Parameters:
url_or_fetch_strategy
URL of the archive to be downloaded into this stage, OR a valid FetchStrategy.
URL of the archive to be downloaded into this stage, OR
a valid FetchStrategy.
name
If a name is provided, then this stage is a named stage and will persist between runs
(or if you construct another stage object later). If name is not provided, then this
If a name is provided, then this stage is a named stage
and will persist between runs (or if you construct another
stage object later). If name is not provided, then this
stage will be given a unique name automatically.
mirror_paths
If provided, Stage will search Spack's mirrors for this archive at each of the
provided relative mirror paths before using the default fetch strategy.
If provided, Stage will search Spack's mirrors for
this archive at each of the provided relative mirror paths
before using the default fetch strategy.
keep
By default, when used as a context manager, the Stage is deleted on exit when no
exceptions are raised. Pass True to keep the stage intact even if no exceptions are
raised.
By default, when used as a context manager, the Stage
is deleted on exit when no exceptions are raised.
Pass True to keep the stage intact even if no
exceptions are raised.
path
If provided, the stage path to use for associated builds.
lock
True if the stage directory file lock is to be used, False otherwise.
True if the stage directory file lock is to be used, False
otherwise.
search_fn
The search function that provides the fetch strategy instance.
The search function that provides the fetch strategy
instance.
"""
super().__init__(name, path, keep, lock)
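A hedged usage sketch of the constructor parameters documented above, mirroring how the tests later in this diff drive a Stage (the URL below is a placeholder, not a real package):

from spack.stage import Stage

url = "https://example.com/pkg-1.0.tar.gz"  # placeholder URL
with Stage(url, keep=False) as stage:       # stage directory is removed on clean exit
    stage.fetch()                           # consults cache/mirrors first when configured
    stage.expand_archive()                  # unpacks into stage.source_path
    print(stage.source_path)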
@@ -403,37 +406,30 @@ def __init__(
# self.fetcher can change with mirrors.
self.default_fetcher = self.fetcher
self.search_fn = search_fn
# If we fetch from a mirror, but the original data is from say git, we can currently not
# prove that they are equal (we don't even have a tree hash in package.py). This bool is
# used to skip checksum verification and instead warn the user.
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
self.skip_checksum_for_mirror = not bool(self.default_fetcher.digest)
else:
self.skip_checksum_for_mirror = True
# used for mirrored archives of repositories.
self.skip_checksum_for_mirror = True
self.srcdir = None
self.mirror_layout = mirror_paths
self.mirrors = list(mirrors) if mirrors else []
# Allow users to disable both mirrors and the download cache
self.default_fetcher_only = False
self.mirror_paths = mirror_paths
@property
def expected_archive_files(self):
"""Possible archive file paths."""
paths = []
fnames = []
expanded = True
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
expanded = self.default_fetcher.expand_archive
fnames.append(url_util.default_download_filename(self.default_fetcher.url))
if self.mirror_layout:
fnames.append(os.path.basename(self.mirror_layout.path))
if self.mirror_paths:
fnames.extend(os.path.basename(x) for x in self.mirror_paths)
paths = [os.path.join(self.path, f) for f in fnames]
paths.extend(os.path.join(self.path, f) for f in fnames)
if not expanded:
# If the download file is not compressed, the "archive" is a single file placed in
# Stage.source_path
# If the download file is not compressed, the "archive" is a
# single file placed in Stage.source_path
paths.extend(os.path.join(self.source_path, f) for f in fnames)
return paths
@@ -466,84 +462,104 @@ def source_path(self):
"""Returns the well-known source directory path."""
return os.path.join(self.path, _source_path_subdir)
def _generate_fetchers(self, mirror_only=False) -> Generator[fs.FetchStrategy, None, None]:
fetchers: List[fs.FetchStrategy] = []
def disable_mirrors(self):
"""The Stage will not attempt to look for the associated fetcher
target in any of Spack's mirrors (including the local download cache).
"""
self.mirror_paths = []
def fetch(self, mirror_only=False, err_msg=None):
"""Retrieves the code or archive
Args:
mirror_only (bool): only fetch from a mirror
err_msg (str or None): the error message to display if all fetchers
fail or ``None`` for the default fetch failure message
"""
fetchers = []
if not mirror_only:
fetchers.append(self.default_fetcher)
# If this archive is normally fetched from a URL, then use the same digest.
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
expand = self.default_fetcher.expand_archive
extension = self.default_fetcher.extension
else:
digest = None
expand = True
extension = None
# TODO: move mirror logic out of here and clean it up!
# TODO: Or @alalazo may have some ideas about how to use a
# TODO: CompositeFetchStrategy here.
if not self.default_fetcher_only and self.mirror_layout and self.mirrors:
self.skip_checksum_for_mirror = True
if self.mirror_paths:
# Join URLs of mirror roots with mirror paths. urljoin()
# strips everything past the final '/' in the root, so we
# add a '/' if it is not present.
mirror_urls = [
url_util.join(mirror.fetch_url, rel_path)
for mirror in spack.mirror.MirrorCollection(source=True).values()
if not mirror.fetch_url.startswith("oci://")
for rel_path in self.mirror_paths
]
# If this archive is normally fetched from a tarball URL,
# then use the same digest. `spack mirror` ensures that
# the checksum will be the same.
digest = None
expand = True
extension = None
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
expand = self.default_fetcher.expand_archive
extension = self.default_fetcher.extension
# Have to skip the checksum for things archived from
# repositories. How can this be made safer?
self.skip_checksum_for_mirror = not bool(digest)
# Add URL strategies for all the mirrors with the digest
# Insert fetchers in the order that the URLs are provided.
fetchers[:0] = (
fs.from_url_scheme(
url_util.join(mirror.fetch_url, self.mirror_layout.path),
checksum=digest,
expand=expand,
extension=extension,
for url in reversed(mirror_urls):
fetchers.insert(
0, fs.from_url_scheme(url, digest, expand=expand, extension=extension)
)
for mirror in self.mirrors
if not mirror.fetch_url.startswith("oci://") # no support for mirrors yet
)
if not self.default_fetcher_only and self.mirror_layout and self.default_fetcher.cachable:
fetchers.insert(
0,
spack.caches.FETCH_CACHE.fetcher(
self.mirror_layout.path, digest, expand=expand, extension=extension
),
)
if self.default_fetcher.cachable:
for rel_path in reversed(list(self.mirror_paths)):
cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
rel_path, digest, expand=expand, extension=extension
)
fetchers.insert(0, cache_fetcher)
yield from fetchers
def generate_fetchers():
for fetcher in fetchers:
yield fetcher
# The search function may be expensive, so wait until now to
# call it so the user can stop if a prior fetcher succeeded
if self.search_fn and not mirror_only:
dynamic_fetchers = self.search_fn()
for fetcher in dynamic_fetchers:
yield fetcher
# The search function may be expensive, so wait until now to call it so the user can stop
# if a prior fetcher succeeded
if self.search_fn and not mirror_only:
yield from self.search_fn()
def print_errors(errors):
for msg in errors:
tty.debug(msg)
def fetch(self, mirror_only: bool = False, err_msg: Optional[str] = None) -> None:
"""Retrieves the code or archive
Args:
mirror_only: only fetch from a mirror
err_msg: the error message to display if all fetchers fail or ``None`` for the default
fetch failure message
"""
errors: List[str] = []
for fetcher in self._generate_fetchers(mirror_only):
errors = []
for fetcher in generate_fetchers():
try:
fetcher.stage = self
self.fetcher = fetcher
self.fetcher.fetch()
break
except fs.NoCacheError:
except spack.fetch_strategy.NoCacheError:
# Don't bother reporting when something is not cached.
continue
except fs.FailedDownloadError as f:
errors.extend(f"{fetcher}: {e.__class__.__name__}: {e}" for e in f.exceptions)
continue
except spack.error.SpackError as e:
errors.append(f"{fetcher}: {e.__class__.__name__}: {e}")
errors.append("Fetching from {0} failed.".format(fetcher))
tty.debug(e)
continue
else:
print_errors(errors)
self.fetcher = self.default_fetcher
if err_msg:
raise spack.error.FetchError(err_msg)
raise spack.error.FetchError(
f"All fetchers failed for {self.name}", "\n".join(f" {e}" for e in errors)
)
default_msg = "All fetchers failed for {0}".format(self.name)
raise spack.error.FetchError(err_msg or default_msg, None)
print_errors(errors)
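A minimal sketch of the try-each-fetcher-and-collect-errors pattern that fetch() implements above, with generic objects standing in for Spack's fetch strategies:

def first_successful(fetchers):
    errors = []
    for fetcher in fetchers:  # ordered: download cache, mirrors, then the default fetcher
        try:
            fetcher.fetch()
            return fetcher    # stop at the first fetcher that works
        except Exception as e:  # Spack narrows this to specific fetch error types
            errors.append(f"{fetcher}: {e.__class__.__name__}: {e}")
    raise RuntimeError("All fetchers failed:\n" + "\n".join(f"  {m}" for m in errors))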
def steal_source(self, dest):
"""Copy the source_path directory in its entirety to directory dest
@@ -581,60 +597,56 @@ def steal_source(self, dest):
self.destroy()
def check(self):
"""Check the downloaded archive against a checksum digest."""
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
cache = isinstance(self.fetcher, fs.CacheURLFetchStrategy)
if cache:
secure_msg = "your download cache is in a secure location"
else:
secure_msg = "you trust this mirror and have a secure connection"
tty.warn(
f"Using {'download cache' if cache else 'a mirror'} instead of version control",
"The required sources are normally checked out from a version control system, "
f"but have been archived {'in download cache' if cache else 'on a mirror'}: "
f"{self.fetcher}. Spack lacks a tree hash to verify the integrity of this "
f"archive. Make sure {secure_msg}.",
"Fetching from mirror without a checksum!",
"This package is normally checked out from a version "
"control system, but it has been archived on a spack "
"mirror. This means we cannot know a checksum for the "
"tarball in advance. Be sure that your connection to "
"this mirror is secure!",
)
elif spack.config.get("config:checksum"):
self.fetcher.check()
def cache_local(self):
spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_layout.path)
spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)
def cache_mirror(
self, mirror: spack.caches.MirrorCache, stats: spack.mirror.MirrorStats
) -> None:
def cache_mirror(self, mirror, stats):
"""Perform a fetch if the resource is not already cached
Arguments:
mirror: the mirror to cache this Stage's resource in
stats: this is updated depending on whether the caching operation succeeded or failed
mirror (spack.caches.MirrorCache): the mirror to cache this Stage's
resource in
stats (spack.mirror.MirrorStats): this is updated depending on whether the
caching operation succeeded or failed
"""
if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
# BundleFetchStrategy has no source to fetch. The associated fetcher does nothing but
# the associated stage may still exist. There is currently no method available on the
# fetcher to distinguish this ('cachable' refers to whether the fetcher refers to a
# resource with a fixed ID, which is not the same concept as whether there is anything
# to fetch at all) so we must examine the type of the fetcher.
# BundleFetchStrategy has no source to fetch. The associated
# fetcher does nothing but the associated stage may still exist.
# There is currently no method available on the fetcher to
# distinguish this ('cachable' refers to whether the fetcher
# refers to a resource with a fixed ID, which is not the same
# concept as whether there is anything to fetch at all) so we
# must examine the type of the fetcher.
return
elif mirror.skip_unstable_versions and not fs.stable_target(self.default_fetcher):
if mirror.skip_unstable_versions and not fs.stable_target(self.default_fetcher):
return
elif not self.mirror_layout:
return
absolute_storage_path = os.path.join(mirror.root, self.mirror_layout.path)
absolute_storage_path = os.path.join(mirror.root, self.mirror_paths.storage_path)
if os.path.exists(absolute_storage_path):
stats.already_existed(absolute_storage_path)
else:
self.fetch()
self.check()
mirror.store(self.fetcher, self.mirror_layout.path)
mirror.store(self.fetcher, self.mirror_paths.storage_path)
stats.added(absolute_storage_path)
self.mirror_layout.make_alias(mirror.root)
mirror.symlink(self.mirror_paths)
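The cache_mirror() flow above reduces to a cache-on-miss pattern; a generic sketch with made-up fetch/store callables (not the Spack API):

import os

def cache_if_missing(mirror_root, rel_path, fetch, store, stats):
    target = os.path.join(mirror_root, rel_path)
    if os.path.exists(target):
        stats.already_existed(target)  # the mirror already has this resource
    else:
        fetch()                        # download and verify the source
        store(target)                  # then place it at the mirror path
        stats.added(target)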
def expand_archive(self):
"""Changes to the stage directory and attempt to expand the downloaded
@@ -642,9 +654,9 @@ def expand_archive(self):
downloaded."""
if not self.expanded:
self.fetcher.expand()
tty.debug(f"Created stage in {self.path}")
tty.debug("Created stage in {0}".format(self.path))
else:
tty.debug(f"Already staged {self.name} in {self.path}")
tty.debug("Already staged {0} in {1}".format(self.name, self.path))
def restage(self):
"""Removes the expanded archive path if it exists, then re-expands
@@ -1163,22 +1175,20 @@ def _fetch_and_checksum(url, options, keep_stage, action_fn=None):
try:
url_or_fs = url
if options:
url_or_fs = fs.URLFetchStrategy(url=url, fetch_options=options)
url_or_fs = fs.URLFetchStrategy(url, fetch_options=options)
with Stage(url_or_fs, keep=keep_stage) as stage:
# Fetch the archive
stage.fetch()
archive = stage.archive_file
assert archive is not None, f"Archive not found for {url}"
if action_fn is not None and archive:
if action_fn is not None:
# Only run first_stage_function the first time,
# no need to run it every time
action_fn(archive, url)
action_fn(stage, url)
# Checksum the archive and add it to the list
checksum = spack.util.crypto.checksum(hashlib.sha256, archive)
checksum = spack.util.crypto.checksum(hashlib.sha256, stage.archive_file)
return checksum, None
except fs.FailedDownloadError:
except FailedDownloadError:
return None, f"[WORKER] Failed to fetch {url}"
except Exception as e:
return None, f"[WORKER] Something failed on {url}, skipping. ({e})"
@@ -1198,3 +1208,7 @@ class RestageError(StageError):
class VersionFetchError(StageError):
"""Raised when we can't determine a URL to fetch a package."""
# Keep this in namespace for convenience
FailedDownloadError = fs.FailedDownloadError

View File

@@ -12,7 +12,7 @@
modifications to global state in memory that must be replicated in the
child process.
"""
import importlib
import io
import multiprocessing
import pickle
@@ -118,7 +118,7 @@ def __init__(self, module_patches, class_patches):
def restore(self):
for module_name, attr_name, value in self.module_patches:
value = pickle.load(value)
module = importlib.import_module(module_name)
module = __import__(module_name)
setattr(module, attr_name, value)
for class_fqn, attr_name, value in self.class_patches:
value = pickle.load(value)

View File

@@ -208,6 +208,7 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
],
)
@pytest.mark.usefixtures("mock_packages", "config")
@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
@pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="tests are for x86_64 uarch ranges"
)

View File

@@ -8,9 +8,7 @@
import io
import json
import os
import pathlib
import platform
import shutil
import sys
import tarfile
import urllib.error
@@ -18,11 +16,12 @@
import urllib.response
from pathlib import Path, PurePath
import py
import pytest
import archspec.cpu
from llnl.util.filesystem import copy_tree, join_path, visit_directory_tree
from llnl.util.filesystem import join_path, visit_directory_tree
from llnl.util.symlink import readlink
import spack.binary_distribution as bindist
@@ -82,67 +81,72 @@ def test_mirror(mirror_dir):
@pytest.fixture(scope="module")
def config_directory(tmp_path_factory):
# Copy defaults to a temporary "site" scope
defaults_dir = tmp_path_factory.mktemp("test_configs")
config_path = pathlib.Path(spack.paths.etc_path)
copy_tree(str(config_path / "defaults"), str(defaults_dir / "site"))
# Create a "user" scope
(defaults_dir / "user").mkdir()
# Detect compilers
cfg_scopes = [
spack.config.DirectoryConfigScope(name, str(defaults_dir / name))
for name in [f"site/{platform.system().lower()}", "site", "user"]
]
with spack.config.use_configuration(*cfg_scopes):
_ = spack.compilers.find_compilers(scope="site")
yield defaults_dir
shutil.rmtree(str(defaults_dir))
def config_directory(tmpdir_factory):
tmpdir = tmpdir_factory.mktemp("test_configs")
# restore some sane defaults for packages and config
config_path = py.path.local(spack.paths.etc_path)
modules_yaml = config_path.join("defaults", "modules.yaml")
os_modules_yaml = config_path.join(
"defaults", "%s" % platform.system().lower(), "modules.yaml"
)
packages_yaml = config_path.join("defaults", "packages.yaml")
config_yaml = config_path.join("defaults", "config.yaml")
repos_yaml = config_path.join("defaults", "repos.yaml")
tmpdir.ensure("site", dir=True)
tmpdir.ensure("user", dir=True)
tmpdir.ensure("site/%s" % platform.system().lower(), dir=True)
modules_yaml.copy(tmpdir.join("site", "modules.yaml"))
os_modules_yaml.copy(tmpdir.join("site/%s" % platform.system().lower(), "modules.yaml"))
packages_yaml.copy(tmpdir.join("site", "packages.yaml"))
config_yaml.copy(tmpdir.join("site", "config.yaml"))
repos_yaml.copy(tmpdir.join("site", "repos.yaml"))
yield tmpdir
tmpdir.remove()
@pytest.fixture(scope="function")
def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
def default_config(tmpdir, config_directory, monkeypatch, install_mockery):
# This fixture depends on install_mockery to ensure
# there is a clear order of initialization. The substitution of the
# config scopes here is done on top of the substitution that comes with
# install_mockery
mutable_dir = tmp_path / "mutable_config" / "tmp"
mutable_dir.mkdir(parents=True)
copy_tree(str(config_directory), str(mutable_dir))
mutable_dir = tmpdir.mkdir("mutable_config").join("tmp")
config_directory.copy(mutable_dir)
scopes = [
spack.config.DirectoryConfigScope(name, str(mutable_dir / name))
for name in [f"site/{platform.system().lower()}", "site", "user"]
]
cfg = spack.config.Configuration(
*[
spack.config.DirectoryConfigScope(name, str(mutable_dir))
for name in [f"site/{platform.system().lower()}", "site", "user"]
]
)
with spack.config.use_configuration(*scopes):
spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
njobs = spack.config.get("config:build_jobs")
if not njobs:
spack.config.set("config:build_jobs", 4, scope="user")
extensions = spack.config.get("config:template_dirs")
if not extensions:
spack.config.set(
"config:template_dirs",
[os.path.join(spack.paths.share_path, "templates")],
scope="user",
)
spack.config.CONFIG, old_config = cfg, spack.config.CONFIG
spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
njobs = spack.config.get("config:build_jobs")
if not njobs:
spack.config.set("config:build_jobs", 4, scope="user")
extensions = spack.config.get("config:template_dirs")
if not extensions:
spack.config.set(
"config:template_dirs",
[os.path.join(spack.paths.share_path, "templates")],
scope="user",
)
(mutable_dir / "build_stage").mkdir()
build_stage = spack.config.get("config:build_stage")
if not build_stage:
spack.config.set(
"config:build_stage", [str(mutable_dir / "build_stage")], scope="user"
)
timeout = spack.config.get("config:connect_timeout")
if not timeout:
spack.config.set("config:connect_timeout", 10, scope="user")
mutable_dir.ensure("build_stage", dir=True)
build_stage = spack.config.get("config:build_stage")
if not build_stage:
spack.config.set(
"config:build_stage", [str(mutable_dir.join("build_stage"))], scope="user"
)
timeout = spack.config.get("config:connect_timeout")
if not timeout:
spack.config.set("config:connect_timeout", 10, scope="user")
yield spack.config.CONFIG
yield spack.config.CONFIG
spack.config.CONFIG = old_config
mutable_dir.remove()
@pytest.fixture(scope="function")
@@ -333,7 +337,7 @@ def test_relative_rpaths_install_nondefault(mirror_dir):
buildcache_cmd("install", "-uf", cspec.name)
def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
def test_push_and_fetch_keys(mock_gnupghome):
testpath = str(mock_gnupghome)
mirror = os.path.join(testpath, "mirror")
@@ -353,7 +357,7 @@ def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
assert len(keys) == 1
fpr = keys[0]
bindist._url_push_keys(mirror, keys=[fpr], tmpdir=str(tmp_path), update_index=True)
bindist.push_keys(mirror, keys=[fpr], regenerate_index=True)
# dir 2: import the key from the mirror, and confirm that its fingerprint
# matches the one created above
@@ -460,7 +464,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
assert "libelf" not in cache_list
def test_generate_key_index_failure(monkeypatch, tmp_path):
def test_generate_key_index_failure(monkeypatch):
def list_url(url, recursive=False):
if "fails-listing" in url:
raise Exception("Couldn't list the directory")
@@ -473,13 +477,13 @@ def push_to_url(*args, **kwargs):
monkeypatch.setattr(web_util, "push_to_url", push_to_url)
with pytest.raises(CannotListKeys, match="Encountered problem listing keys"):
bindist.generate_key_index("s3://non-existent/fails-listing", str(tmp_path))
bindist.generate_key_index("s3://non-existent/fails-listing")
with pytest.raises(GenerateIndexError, match="problem pushing .* Couldn't upload"):
bindist.generate_key_index("s3://non-existent/fails-uploading", str(tmp_path))
bindist.generate_key_index("s3://non-existent/fails-uploading")
def test_generate_package_index_failure(monkeypatch, tmp_path, capfd):
def test_generate_package_index_failure(monkeypatch, capfd):
def mock_list_url(url, recursive=False):
raise Exception("Some HTTP error")
@@ -488,16 +492,15 @@ def mock_list_url(url, recursive=False):
test_url = "file:///fake/keys/dir"
with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
bindist._url_generate_package_index(test_url, str(tmp_path))
bindist.generate_package_index(test_url)
assert (
"Warning: Encountered problem listing packages at "
f"{test_url}/{bindist.BUILD_CACHE_RELATIVE_PATH}: Some HTTP error"
f"Warning: Encountered problem listing packages at {test_url}: Some HTTP error"
in capfd.readouterr().err
)
def test_generate_indices_exception(monkeypatch, tmp_path, capfd):
def test_generate_indices_exception(monkeypatch, capfd):
def mock_list_url(url, recursive=False):
raise Exception("Test Exception handling")
@@ -506,10 +509,10 @@ def mock_list_url(url, recursive=False):
url = "file:///fake/keys/dir"
with pytest.raises(GenerateIndexError, match=f"Encountered problem listing keys at {url}"):
bindist.generate_key_index(url, str(tmp_path))
bindist.generate_key_index(url)
with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
bindist._url_generate_package_index(url, str(tmp_path))
bindist.generate_package_index(url)
assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err

View File

@@ -10,43 +10,37 @@
import spack.binary_distribution as bd
import spack.main
import spack.mirror
import spack.spec
import spack.util.url
install = spack.main.SpackCommand("install")
pytestmark = pytest.mark.not_on_windows("does not run on windows")
def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_path):
spec = spack.spec.Spec("trivial-install-test-package").concretized()
spec.package.do_install(fake=True)
def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
with tmpdir.as_cwd():
spec = spack.spec.Spec("trivial-install-test-package").concretized()
install(str(spec))
specs = [spec]
# Runs fine the first time, throws the second time
out_url = spack.util.url.path_to_file_url(str(tmpdir))
bd.push_or_raise(spec, out_url, bd.PushOptions(unsigned=True))
with pytest.raises(bd.NoOverwriteException):
bd.push_or_raise(spec, out_url, bd.PushOptions(unsigned=True))
# populate cache, everything is new
mirror = spack.mirror.Mirror.from_local_path(str(tmp_path))
with bd.make_uploader(mirror) as uploader:
skipped = uploader.push_or_raise(specs)
assert not skipped
# Should work fine with force=True
bd.push_or_raise(spec, out_url, bd.PushOptions(force=True, unsigned=True))
# should skip all
with bd.make_uploader(mirror) as uploader:
skipped = uploader.push_or_raise(specs)
assert skipped == specs
# Remove the tarball and try again.
# This must *also* throw, because of the existing .spec.json file
os.remove(
os.path.join(
bd.build_cache_prefix("."),
bd.tarball_directory_name(spec),
bd.tarball_name(spec, ".spack"),
)
)
# with force=True none should be skipped
with bd.make_uploader(mirror, force=True) as uploader:
skipped = uploader.push_or_raise(specs)
assert not skipped
# Remove the tarball, which should cause the next push to upload it again.
os.remove(
tmp_path
/ bd.BUILD_CACHE_RELATIVE_PATH
/ bd.tarball_directory_name(spec)
/ bd.tarball_name(spec, ".spack")
)
with bd.make_uploader(mirror) as uploader:
skipped = uploader.push_or_raise(specs)
assert not skipped
with pytest.raises(bd.NoOverwriteException):
bd.push_or_raise(spec, out_url, bd.PushOptions(unsigned=True))

View File

@@ -56,6 +56,6 @@ def test_build_systems(url_and_build_system):
url, build_system = url_and_build_system
with spack.stage.Stage(url) as stage:
stage.fetch()
guesser = spack.cmd.create.BuildSystemAndLanguageGuesser()
guesser(stage.archive_file, url)
guesser = spack.cmd.create.BuildSystemGuesser()
guesser(stage, url)
assert build_system == guesser.build_system

View File

@@ -286,7 +286,7 @@ def _fail(self, args):
def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
"""Test that create_buildcache returns a list of objects with the correct
keys and types."""
monkeypatch.setattr(ci, "push_to_build_cache", lambda a, b, c: True)
monkeypatch.setattr(spack.ci, "_push_to_build_cache", lambda a, b, c: True)
results = ci.create_buildcache(
None, destination_mirror_urls=["file:///fake-url-one", "file:///fake-url-two"]

View File

@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
from llnl.util.filesystem import working_dir
@@ -35,10 +33,11 @@ def test_blame_by_percent(mock_packages):
assert "EMAIL" in out
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
def test_blame_file(mock_packages):
"""Sanity check the blame command to make sure it works."""
with working_dir(spack.paths.prefix):
out = blame(os.path.join("bin", "spack"))
out = blame("bin/spack")
assert "LAST_COMMIT" in out
assert "AUTHOR" in out
assert "EMAIL" in out

View File

@@ -7,17 +7,13 @@
import json
import os
import shutil
from typing import List
import pytest
import spack.binary_distribution
import spack.cmd.buildcache
import spack.deptypes
import spack.environment as ev
import spack.error
import spack.main
import spack.mirror
import spack.spec
import spack.util.url
from spack.spec import Spec
@@ -382,22 +378,15 @@ def test_correct_specs_are_pushed(
# Concretize dttop and add it to the temporary database (without prefixes)
spec = default_mock_concretization("dttop")
temporary_store.db.add(spec, directory_layout=None)
slash_hash = f"/{spec.dag_hash()}"
slash_hash = "/{0}".format(spec.dag_hash())
class DontUpload(spack.binary_distribution.Uploader):
def __init__(self):
super().__init__(spack.mirror.Mirror.from_local_path(str(tmpdir)), False, False)
self.pushed = []
packages_to_push = []
def push(self, specs: List[spack.spec.Spec]):
self.pushed.extend(s.name for s in specs)
return [], [] # nothing skipped, nothing errored
def fake_push(node, push_url, options):
assert isinstance(node, Spec)
packages_to_push.append(node.name)
uploader = DontUpload()
monkeypatch.setattr(
spack.binary_distribution, "make_uploader", lambda *args, **kwargs: uploader
)
monkeypatch.setattr(spack.binary_distribution, "push_or_raise", fake_push)
buildcache_create_args = ["create", "--unsigned"]
@@ -409,10 +398,10 @@ def push(self, specs: List[spack.spec.Spec]):
buildcache(*buildcache_create_args)
# Order is not guaranteed, so we can't just compare lists
assert set(uploader.pushed) == set(expected)
assert set(packages_to_push) == set(expected)
# Ensure no duplicates
assert len(set(uploader.pushed)) == len(uploader.pushed)
assert len(set(packages_to_push)) == len(packages_to_push)
@pytest.mark.parametrize("signed", [True, False])
@@ -454,54 +443,3 @@ def test_skip_no_redistribute(mock_packages, config):
filtered = spack.cmd.buildcache._skip_no_redistribute_for_public(specs)
assert not any(s.name == "no-redistribute" for s in filtered)
assert any(s.name == "no-redistribute-dependent" for s in filtered)
def test_best_effort_vs_fail_fast_when_dep_not_installed(tmp_path, mutable_database):
"""When --fail-fast is passed, the push command should fail if it immediately finds an
uninstalled dependency. Otherwise, failure to push one dependency shouldn't prevent the
others from being pushed."""
mirror("add", "--unsigned", "my-mirror", str(tmp_path))
# Uninstall mpich so that its dependent mpileaks can't be pushed
for s in mutable_database.query_local("mpich"):
s.package.do_uninstall(force=True)
with pytest.raises(spack.cmd.buildcache.PackagesAreNotInstalledError, match="mpich"):
buildcache("push", "--update-index", "--fail-fast", "my-mirror", "mpileaks^mpich")
# nothing should be pushed due to --fail-fast.
assert not os.listdir(tmp_path)
assert not spack.binary_distribution.update_cache_and_get_specs()
with pytest.raises(spack.cmd.buildcache.PackageNotInstalledError):
buildcache("push", "--update-index", "my-mirror", "mpileaks^mpich")
specs = spack.binary_distribution.update_cache_and_get_specs()
# everything but mpich should be pushed
mpileaks = mutable_database.query_local("mpileaks^mpich")[0]
assert set(specs) == {s for s in mpileaks.traverse() if s.name != "mpich"}
def test_push_without_build_deps(tmp_path, temporary_store, mock_packages, mutable_config):
"""Spack should not error when build deps are uninstalled and --without-build-dependenies is
passed."""
mirror("add", "--unsigned", "my-mirror", str(tmp_path))
s = spack.spec.Spec("dtrun3").concretized()
s.package.do_install(fake=True)
s["dtbuild3"].package.do_uninstall()
# fails when build deps are required
with pytest.raises(spack.error.SpackError, match="package not installed"):
buildcache(
"push", "--update-index", "--with-build-dependencies", "my-mirror", f"/{s.dag_hash()}"
)
# succeeds when build deps are not required
buildcache(
"push", "--update-index", "--without-build-dependencies", "my-mirror", f"/{s.dag_hash()}"
)
assert spack.binary_distribution.update_cache_and_get_specs() == [s]

File diff suppressed because it is too large

View File

@@ -6,6 +6,7 @@
import filecmp
import os
import shutil
import subprocess
import pytest
@@ -155,6 +156,22 @@ def test_update_with_header(tmpdir):
commands("--update", str(update_file), "--header", str(filename))
@pytest.mark.xfail
def test_no_pipe_error():
"""Make sure we don't see any pipe errors when piping output."""
proc = subprocess.Popen(
["spack", "commands", "--format=rst"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
# Call close() on stdout to cause a broken pipe
proc.stdout.close()
proc.wait()
stderr = proc.stderr.read().decode("utf-8")
assert "Broken pipe" not in stderr
def test_bash_completion():
"""Test the bash completion writer."""
out1 = commands("--format=bash")

View File

@@ -81,6 +81,34 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_execut
assert "gcc" in output
@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
"""Tests that Spack won't mistake Apple's GCC as a "real" GCC, since it's really
Clang with a few tweaks.
"""
gcc_path = mock_executable(
"gcc",
output="""
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
""",
)
os.environ["PATH"] = str(gcc_path.parent)
output = compiler("find", "--scope=site")
assert "gcc" not in output
@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
"""Tests that we can remove a compiler from configuration."""
@@ -103,7 +131,7 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
def test_compiler_add(mutable_config, mock_executable):
def test_compiler_add(mutable_config, mock_packages, mock_executable):
"""Tests that we can add a compiler to configuration."""
expected_version = "4.5.3"
gcc_path = mock_executable(
@@ -121,12 +149,7 @@ def test_compiler_add(mutable_config, mock_executable):
compilers_before_find = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
all=None,
compiler_spec=None,
add_paths=[str(root_dir)],
scope=None,
mixed_toolchain=False,
jobs=1,
all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None, mixed_toolchain=False
)
spack.cmd.compiler.compiler_find(args)
compilers_after_find = set(spack.compilers.all_compiler_specs())
@@ -206,7 +229,7 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir)
for name in ("gcc-8", "g++-8", "gfortran-8"):
shutil.copy(compilers_dir / name, new_dir / name)
# Set PATH to have the new folder searched first
os.environ["PATH"] = f"{str(new_dir)}:{str(compilers_dir)}"
os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))
compiler("find", "--scope=site")

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import tarfile
import pytest
@@ -155,24 +154,24 @@ def test_create_template_bad_name(mock_test_repo, name, expected):
def test_build_system_guesser_no_stage():
"""Test build system guesser when stage not provided."""
guesser = spack.cmd.create.BuildSystemAndLanguageGuesser()
guesser = spack.cmd.create.BuildSystemGuesser()
# Ensure get the expected build system
with pytest.raises(AttributeError, match="'NoneType' object has no attribute"):
guesser(None, "/the/url/does/not/matter")
def test_build_system_guesser_octave(tmp_path):
def test_build_system_guesser_octave():
"""
Test build system guesser for the special case, where the same base URL
identifies the build system rather than guessing the build system from
files contained in the archive.
"""
url, expected = "downloads.sourceforge.net/octave/", "octave"
guesser = spack.cmd.create.BuildSystemAndLanguageGuesser()
guesser = spack.cmd.create.BuildSystemGuesser()
# Ensure get the expected build system
guesser(str(tmp_path / "archive.tar.gz"), url)
guesser(None, url)
assert guesser.build_system == expected
# Also ensure get the correct template
@@ -208,40 +207,3 @@ def _parse_name_offset(path, v):
def test_no_url():
"""Test creation of package without a URL."""
create("--skip-editor", "-n", "create-new-package")
@pytest.mark.parametrize(
"source_files,languages",
[
(["fst.c", "snd.C"], ["c", "cxx"]),
(["fst.c", "snd.cxx"], ["c", "cxx"]),
(["fst.F", "snd.cc"], ["cxx", "fortran"]),
(["fst.f", "snd.c"], ["c", "fortran"]),
(["fst.jl", "snd.py"], []),
],
)
def test_language_and_build_system_detection(tmp_path, source_files, languages):
"""Test that languages are detected from tarball, and the build system is guessed from the
most top-level build system file."""
def add(tar: tarfile.TarFile, name: str, type):
tarinfo = tarfile.TarInfo(name)
tarinfo.type = type
tar.addfile(tarinfo)
tarball = str(tmp_path / "example.tar.gz")
with tarfile.open(tarball, "w:gz") as tar:
add(tar, "./third-party/", tarfile.DIRTYPE)
add(tar, "./third-party/example/", tarfile.DIRTYPE)
add(tar, "./third-party/example/CMakeLists.txt", tarfile.REGTYPE) # false positive
add(tar, "./configure", tarfile.REGTYPE) # actual build system
add(tar, "./src/", tarfile.DIRTYPE)
for file in source_files:
add(tar, f"src/{file}", tarfile.REGTYPE)
guesser = spack.cmd.create.BuildSystemAndLanguageGuesser()
guesser(str(tarball), "https://example.com")
assert guesser.build_system == "autotools"
assert guesser.languages == languages
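A hedged sketch of extension-based language detection consistent with the parametrization above (the extension table is hypothetical, not Spack's actual mapping):

import os

EXT_TO_LANG = {".c": "c", ".h": "c", ".cc": "cxx", ".cxx": "cxx", ".C": "cxx",
               ".f": "fortran", ".F": "fortran", ".f90": "fortran"}

def detect_languages(member_names):
    exts = (os.path.splitext(name)[1] for name in member_names)
    return sorted({EXT_TO_LANG[e] for e in exts if e in EXT_TO_LANG})

assert detect_languages(["src/fst.c", "src/snd.C"]) == ["c", "cxx"]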

View File

@@ -16,6 +16,8 @@
debug = SpackCommand("debug")
pytestmark = pytest.mark.not_on_windows("does not run on windows")
@pytest.mark.db
def test_create_db_tarball(tmpdir, database):
@@ -58,3 +60,4 @@ def test_report():
assert get_version() in out
assert platform.python_version() in out
assert str(architecture) in out
assert spack.config.get("config:concretizer") in out

View File

@@ -1057,6 +1057,7 @@ def test_env_with_included_config_file(mutable_mock_env_path, packages_file):
assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
@pytest.mark.only_clingo("original concretizer does not support requirements")
def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
"""Test ``config change`` with config in the ``spack.yaml`` as well as an
included file scope.
@@ -1132,6 +1133,7 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
spack.spec.Spec("bowtie@1.2.2").concretized()
@pytest.mark.only_clingo("original concretizer does not support requirements")
def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
spack_yaml = tmp_path / ev.manifest_name
spack_yaml.write_text(
@@ -1734,17 +1736,6 @@ def test_env_include_concrete_env_yaml(env_name):
assert test.path in combined_yaml["include_concrete"]
@pytest.mark.regression("45766")
@pytest.mark.parametrize("format", ["v1", "v2", "v3"])
def test_env_include_concrete_old_env(format, tmpdir):
lockfile = os.path.join(spack.paths.test_path, "data", "legacy_env", f"{format}.lock")
# create an env from old .lock file -- this does not update the format
env("create", "old-env", lockfile)
env("create", "--include-concrete", "old-env", "test")
assert ev.read("old-env").all_specs() == ev.read("test").all_specs()
def test_env_bad_include_concrete_env():
with pytest.raises(ev.SpackEnvironmentError):
env("create", "--include-concrete", "nonexistant_env", "combined_env")
@@ -2341,6 +2332,8 @@ def test_stack_concretize_extraneous_deps(tmpdir, mock_packages):
# FIXME: constraints for stacks
# FIXME: This now works for statically-determinable invalid deps
# FIXME: But it still does not work for dynamically determined invalid deps
# if spack.config.get('config:concretizer') == 'clingo':
# pytest.skip('Clingo concretizer does not support soft constraints')
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
@@ -3187,7 +3180,9 @@ def test_concretize_user_specs_together():
e.remove("mpich")
e.add("mpich2")
exc_cls = spack.error.UnsatisfiableSpecError
exc_cls = spack.error.SpackError
if spack.config.get("config:concretizer") == "clingo":
exc_cls = spack.error.UnsatisfiableSpecError
# Concretizing without invalidating the concrete spec for mpileaks fails
with pytest.raises(exc_cls):
@@ -3213,8 +3208,10 @@ def test_duplicate_packages_raise_when_concretizing_together():
e.add("mpileaks~opt")
e.add("mpich")
exc_cls = spack.error.UnsatisfiableSpecError
match = r"You could consider setting `concretizer:unify`"
exc_cls, match = spack.error.SpackError, None
if spack.config.get("config:concretizer") == "clingo":
exc_cls = spack.error.UnsatisfiableSpecError
match = r"You could consider setting `concretizer:unify`"
with pytest.raises(exc_cls, match=match):
e.concretize()

View File

@@ -14,7 +14,6 @@
import spack.cmd.external
import spack.detection
import spack.detection.path
import spack.repo
from spack.main import SpackCommand
from spack.spec import Spec
@@ -56,9 +55,7 @@ def test_find_external_two_instances_same_package(mock_executable):
search_paths = [str(cmake1.parent.parent), str(cmake2.parent.parent)]
finder = spack.detection.path.ExecutablesFinder()
detected_specs = finder.find(
pkg_name="cmake", initial_guess=search_paths, repository=spack.repo.PATH
)
detected_specs = finder.find(pkg_name="cmake", initial_guess=search_paths)
assert len(detected_specs) == 2
spec_to_path = {e.spec: e.prefix for e in detected_specs}
@@ -100,7 +97,7 @@ def test_get_executables(working_env, mock_executable):
# TODO: this test should be made to work, but in the meantime it is
# causing intermittent (spurious) CI failures on all PRs
@pytest.mark.not_on_windows("Test fails intermittently on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Test fails intermittently on Windows")
def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
"""When the user invokes 'spack external find --not-buildable', the config
for any package where Spack finds an external version should be marked as
@@ -117,31 +114,11 @@ def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_execu
@pytest.mark.parametrize(
"names,tags,exclude,expected",
[
# find --all
(
None,
["detectable"],
[],
[
"builtin.mock.find-externals1",
"builtin.mock.gcc",
"builtin.mock.llvm",
"builtin.mock.intel-oneapi-compilers",
],
),
# find --all
(None, ["detectable"], [], ["builtin.mock.find-externals1"]),
# find --all --exclude find-externals1
(
None,
["detectable"],
["builtin.mock.find-externals1"],
["builtin.mock.gcc", "builtin.mock.llvm", "builtin.mock.intel-oneapi-compilers"],
),
(
None,
["detectable"],
["find-externals1"],
["builtin.mock.gcc", "builtin.mock.llvm", "builtin.mock.intel-oneapi-compilers"],
),
(None, ["detectable"], ["builtin.mock.find-externals1"], []),
(None, ["detectable"], ["find-externals1"], []),
# find cmake (and cmake is not detectable)
(["cmake"], ["detectable"], [], []),
],
@@ -266,9 +243,7 @@ def _determine_variants(cls, exes, version_str):
monkeypatch.setattr(gcc_cls, "determine_variants", _determine_variants)
finder = spack.detection.path.ExecutablesFinder()
detected_specs = finder.find(
pkg_name="gcc", initial_guess=[str(search_dir)], repository=spack.repo.PATH
)
detected_specs = finder.find(pkg_name="gcc", initial_guess=[str(search_dir)])
assert len(detected_specs) == 1

View File

@@ -2,13 +2,29 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
from spack.main import SpackCommand
@pytest.mark.xfail
def test_reuse_after_help():
"""Test `spack help` can be called twice with the same SpackCommand."""
help_cmd = SpackCommand("help", subprocess=True)
help_cmd()
# This second invocation will somehow fail because the parser no
# longer works after add_all_commands() is called in
# SpackArgumentParser.format_help_sections().
#
# TODO: figure out why this doesn't work properly and change this
# test to use a single SpackCommand.
#
# It seems that parse_known_args() finds "too few arguments" the
# second time through b/c add_all_commands() ends up leaving extra
# positionals in the parser. But this used to work before we loaded
# commands lazily.
help_cmd()

View File

@@ -30,6 +30,7 @@ def test_spec():
assert "mpich@3.0.4" in output
@pytest.mark.only_clingo("Known failure of the original concretizer")
def test_spec_concretizer_args(mutable_database, do_not_check_runtimes_on_reuse):
"""End-to-end test of CLI concretizer prefs.

View File

@@ -24,12 +24,6 @@
style = spack.main.SpackCommand("style")
ISORT = which("isort")
BLACK = which("black")
FLAKE8 = which("flake8")
MYPY = which("mypy")
@pytest.fixture(autouse=True)
def has_develop_branch(git):
"""spack style requires git and a develop branch to run -- skip if we're missing either."""
@@ -196,8 +190,8 @@ def external_style_root(git, flake8_package_with_errors, tmpdir):
yield tmpdir, py_file
@pytest.mark.skipif(not ISORT, reason="isort is not installed.")
@pytest.mark.skipif(not BLACK, reason="black is not installed.")
@pytest.mark.skipif(not which("isort"), reason="isort is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
def test_fix_style(external_style_root):
"""Make sure spack style --fix works."""
tmpdir, py_file = external_style_root
@@ -215,10 +209,10 @@ def test_fix_style(external_style_root):
assert filecmp.cmp(broken_py, fixed_py)
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@pytest.mark.skipif(not ISORT, reason="isort is not installed.")
@pytest.mark.skipif(not MYPY, reason="mypy is not installed.")
@pytest.mark.skipif(not BLACK, reason="black is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
@pytest.mark.skipif(not which("isort"), reason="isort is not installed.")
@pytest.mark.skipif(not which("mypy"), reason="mypy is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
def test_external_root(external_style_root, capfd):
"""Ensure we can run in a separate root directory w/o configuration files."""
tmpdir, py_file = external_style_root
@@ -244,7 +238,7 @@ def test_external_root(external_style_root, capfd):
assert "lib/spack/spack/dummy.py:7: [F401] 'os' imported but unused" in output
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style(flake8_package, tmpdir):
root_relative = os.path.relpath(flake8_package, spack.paths.prefix)
@@ -270,7 +264,7 @@ def test_style(flake8_package, tmpdir):
assert "spack style checks were clean" in output
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style_with_errors(flake8_package_with_errors):
root_relative = os.path.relpath(flake8_package_with_errors, spack.paths.prefix)
output = style(
@@ -281,8 +275,8 @@ def test_style_with_errors(flake8_package_with_errors):
assert "spack style found errors" in output
@pytest.mark.skipif(not BLACK, reason="black is not installed.")
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style_with_black(flake8_package_with_errors):
output = style("--tool", "black,flake8", flake8_package_with_errors, fail_on_error=False)
assert "black found errors" in output

View File

@@ -11,6 +11,13 @@
versions = SpackCommand("versions")
def test_safe_only_versions():
"""Only test the safe versions of a package.
(Using the deprecated command line argument)
"""
versions("--safe-only", "zlib")
def test_safe_versions():
"""Only test the safe versions of a package."""

View File

@@ -19,6 +19,27 @@
from spack.util.executable import Executable, ProcessError
@pytest.fixture()
def make_args_for_version(monkeypatch):
def _factory(version, path="/usr/bin/gcc"):
class MockOs:
pass
compiler_name = "gcc"
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
monkeypatch.setattr(compiler_cls, "cc_version", lambda x: version)
compiler_id = spack.compilers.CompilerID(
os=MockOs, compiler_name=compiler_name, version=None
)
variation = spack.compilers.NameVariation(prefix="", suffix="")
return spack.compilers.DetectVersionArgs(
id=compiler_id, variation=variation, language="cc", path=path
)
return _factory
def test_multiple_conflicting_compiler_definitions(mutable_config):
compiler_def = {
"compiler": {
@@ -61,6 +82,21 @@ def test_get_compiler_duplicates(mutable_config, compiler_factory):
assert len(duplicates) == 1
@pytest.mark.parametrize(
"input_version,expected_version,expected_error",
[(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)],
)
def test_version_detection_is_empty(
make_args_for_version, input_version, expected_version, expected_error
):
args = make_args_for_version(version=input_version)
result, error = spack.compilers.detect_version(args)
if not error:
assert result.id.version == expected_version
assert error == expected_error
def test_compiler_flags_from_config_are_grouped():
compiler_entry = {
"spec": "intel@17.0.2",
@@ -870,30 +906,51 @@ def prepare_executable(name):
@pytest.mark.parametrize(
"compilers_extra_attributes,expected_length",
"detected_versions,expected_length",
[
# If we detect a C compiler we expect the result to be valid
({"c": "/usr/bin/clang-12", "cxx": "/usr/bin/clang-12"}, 1),
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cc",
path="/usr/bin/clang-12",
),
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
),
],
1,
),
# If we detect only a C++ compiler we expect the result to be discarded
({"cxx": "/usr/bin/clang-12"}, 0),
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
)
],
0,
),
],
)
def test_detection_requires_c_compiler(compilers_extra_attributes, expected_length):
def test_detection_requires_c_compiler(detected_versions, expected_length):
"""Tests that compilers automatically added to the configuration have
at least a C compiler.
"""
packages_yaml = {
"llvm": {
"externals": [
{
"spec": "clang@12.0.0",
"prefix": "/usr",
"extra_attributes": {"compilers": compilers_extra_attributes},
}
]
}
}
result = spack.compilers.CompilerConfigFactory.from_packages_yaml(packages_yaml)
result = spack.compilers.make_compiler_list(detected_versions)
assert len(result) == expected_length

View File

@@ -0,0 +1,471 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test detection of compiler version"""
import pytest
import spack.compilers.aocc
import spack.compilers.arm
import spack.compilers.cce
import spack.compilers.clang
import spack.compilers.fj
import spack.compilers.gcc
import spack.compilers.intel
import spack.compilers.nag
import spack.compilers.nvhpc
import spack.compilers.oneapi
import spack.compilers.pgi
import spack.compilers.xl
import spack.compilers.xl_r
import spack.util.module_cmd
@pytest.mark.parametrize(
"version_str,expected_version",
[
(
"Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n"
"Target: aarch64--linux-gnu\n"
"Thread model: posix\n"
"InstalledDir:\n"
"/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n",
"19.0",
),
(
"Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n"
"Target: aarch64--linux-gnu\n"
"Thread model: posix\n"
"InstalledDir:\n"
"/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n",
"19.3.1",
),
],
)
def test_arm_version_detection(version_str, expected_version):
version = spack.compilers.arm.Arm.extract_version_from_output(version_str)
assert version == expected_version
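A simplified illustration of pulling a version out of banner text like the samples above (a bare regex, not the actual extract_version_from_output implementation):

import re

banner = "Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)"
match = re.search(r"Compiler version ([\d.]+)", banner)
assert match and match.group(1) == "19.3.1"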
@pytest.mark.parametrize(
"version_str,expected_version",
[
("Cray C : Version 8.4.6 Mon Apr 15, 2019 12:13:39\n", "8.4.6"),
("Cray C++ : Version 8.4.6 Mon Apr 15, 2019 12:13:45\n", "8.4.6"),
("Cray clang Version 8.4.6 Mon Apr 15, 2019 12:13:45\n", "8.4.6"),
("Cray Fortran : Version 8.4.6 Mon Apr 15, 2019 12:13:55\n", "8.4.6"),
],
)
def test_cce_version_detection(version_str, expected_version):
version = spack.compilers.cce.Cce.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.regression("10191")
@pytest.mark.parametrize(
"version_str,expected_version",
[
# macOS clang
(
"Apple clang version 11.0.0 (clang-1100.0.33.8)\n"
"Target: x86_64-apple-darwin18.7.0\n"
"Thread model: posix\n"
"InstalledDir: "
"/Applications/Xcode.app/Contents/Developer/Toolchains/"
"XcodeDefault.xctoolchain/usr/bin\n",
"11.0.0",
),
(
"Apple LLVM version 7.0.2 (clang-700.1.81)\n"
"Target: x86_64-apple-darwin15.2.0\n"
"Thread model: posix\n",
"7.0.2",
),
],
)
def test_apple_clang_version_detection(version_str, expected_version):
cls = spack.compilers.class_for_compiler_name("apple-clang")
version = cls.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.regression("10191")
@pytest.mark.parametrize(
"version_str,expected_version",
[
# LLVM Clang
(
"clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n"
"Target: x86_64-pc-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"6.0.1",
),
(
"clang version 3.1 (trunk 149096)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"3.1",
),
(
"clang version 8.0.0-3~ubuntu18.04.1 (tags/RELEASE_800/final)\n"
"Target: x86_64-pc-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"8.0.0",
),
(
"clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n"
"Target: x86_64-pc-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"9.0.1",
),
(
"clang version 8.0.0-3 (tags/RELEASE_800/final)\n"
"Target: aarch64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"8.0.0",
),
(
"clang version 11.0.0\n"
"Target: aarch64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"11.0.0",
),
],
)
def test_clang_version_detection(version_str, expected_version):
version = spack.compilers.clang.Clang.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
# C compiler
(
"fcc (FCC) 4.0.0a 20190314\n"
"simulating gcc version 6.1\n"
"Copyright FUJITSU LIMITED 2019",
"4.0.0a",
),
# C++ compiler
(
"FCC (FCC) 4.0.0a 20190314\n"
"simulating gcc version 6.1\n"
"Copyright FUJITSU LIMITED 2019",
"4.0.0a",
),
# Fortran compiler
("frt (FRT) 4.0.0a 20190314\n" "Copyright FUJITSU LIMITED 2019", "4.0.0a"),
],
)
def test_fj_version_detection(version_str, expected_version):
version = spack.compilers.fj.Fj.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
# Output of -dumpversion changed to return only major from GCC 7
("4.4.7\n", "4.4.7"),
("7\n", "7"),
],
)
def test_gcc_version_detection(version_str, expected_version):
version = spack.compilers.gcc.Gcc.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
(
"icpc (ICC) 12.1.5 20120612\n"
"Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n",
"12.1.5",
),
(
"ifort (IFORT) 12.1.5 20120612\n"
"Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n",
"12.1.5",
),
],
)
def test_intel_version_detection(version_str, expected_version):
version = spack.compilers.intel.Intel.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
( # ICX/ICPX
"Intel(R) oneAPI DPC++ Compiler 2021.1.2 (2020.10.0.1214)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /made/up/path",
"2021.1.2",
),
( # ICX/ICPX
"Intel(R) oneAPI DPC++ Compiler 2021.2.0 (2021.2.0.20210317)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /made/up/path",
"2021.2.0",
),
( # ICX/ICPX
"Intel(R) oneAPI DPC++/C++ Compiler 2021.3.0 (2021.3.0.20210619)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /made/up/path",
"2021.3.0",
),
( # ICX/ICPX
"Intel(R) oneAPI DPC++/C++ Compiler 2021.4.0 (2021.4.0.20210924)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /made/up/path",
"2021.4.0",
),
( # IFX
"ifx (IFORT) 2021.1.2 Beta 20201214\n"
"Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
"2021.1.2",
),
( # IFX
"ifx (IFORT) 2021.2.0 Beta 20210317\n"
"Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
"2021.2.0",
),
( # IFX
"ifx (IFORT) 2021.3.0 Beta 20210619\n"
"Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
"2021.3.0",
),
( # IFX
"ifx (IFORT) 2021.4.0 Beta 20210924\n"
"Copyright (C) 1985-2021 Intel Corporation. All rights reserved.",
"2021.4.0",
),
( # IFX
"ifx (IFORT) 2022.0.0 20211123\n"
"Copyright (C) 1985-2021 Intel Corporation. All rights reserved.",
"2022.0.0",
),
( # IFX
"ifx (IFX) 2023.1.0 20230320\n"
"Copyright (C) 1985-2023 Intel Corporation. All rights reserved.",
"2023.1.0",
),
],
)
def test_oneapi_version_detection(version_str, expected_version):
version = spack.compilers.oneapi.Oneapi.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
(
"NAG Fortran Compiler Release 6.0(Hibiya) Build 1037\n"
"Product NPL6A60NA for x86-64 Linux\n",
"6.0.1037",
)
],
)
def test_nag_version_detection(version_str, expected_version):
version = spack.compilers.nag.Nag.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
# C compiler on x86-64
(
"nvc 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C++ compiler on x86-64
(
"nvc++ 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# Fortran compiler on x86-64
(
"nvfortran 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C compiler on Power
(
"nvc 20.9-0 linuxpower target on Linuxpower\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C++ compiler on Power
(
"nvc++ 20.9-0 linuxpower target on Linuxpower\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# Fortran compiler on Power
(
"nvfortran 20.9-0 linuxpower target on Linuxpower\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C compiler on Arm
(
"nvc 20.9-0 linuxarm64 target on aarch64 Linux\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C++ compiler on Arm
(
"nvc++ 20.9-0 linuxarm64 target on aarch64 Linux\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# Fortran compiler on Arm
(
"nvfortran 20.9-0 linuxarm64 target on aarch64 Linux\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
],
)
def test_nvhpc_version_detection(version_str, expected_version):
version = spack.compilers.nvhpc.Nvhpc.extract_version_from_output(version_str)
assert version == expected_version

@pytest.mark.parametrize(
"version_str,expected_version",
[
# Output on x86-64
(
"pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge\n"
"The Portland Group - PGI Compilers and Tools\n"
"Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.\n",
"15.10",
),
# Output on PowerPC
(
"pgcc 17.4-0 linuxpower target on Linuxpower\n"
"PGI Compilers and Tools\n"
"Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.\n",
"17.4",
),
# Output when LLVM-enabled
(
"pgcc-llvm 18.4-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
"PGI Compilers and Tools\n"
"Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.\n",
"18.4",
),
],
)
def test_pgi_version_detection(version_str, expected_version):
version = spack.compilers.pgi.Pgi.extract_version_from_output(version_str)
assert version == expected_version

@pytest.mark.parametrize(
"version_str,expected_version",
[
("IBM XL C/C++ for Linux, V11.1 (5724-X14)\n" "Version: 11.01.0000.0000\n", "11.1"),
("IBM XL Fortran for Linux, V13.1 (5724-X16)\n" "Version: 13.01.0000.0000\n", "13.1"),
("IBM XL C/C++ for AIX, V11.1 (5724-X13)\n" "Version: 11.01.0000.0009\n", "11.1"),
(
"IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0\n" "Version: 09.00.0000.0017\n",
"9.0",
),
],
)
def test_xl_version_detection(version_str, expected_version):
version = spack.compilers.xl.Xl.extract_version_from_output(version_str)
assert version == expected_version
version = spack.compilers.xl_r.XlR.extract_version_from_output(version_str)
assert version == expected_version

@pytest.mark.parametrize(
"version_str,expected_version",
[
# This applies to C,C++ and FORTRAN compiler
(
"AMD clang version 12.0.0 (CLANG: AOCC_3_1_0-Build#126 2021_06_07)"
"(based on LLVM Mirror.Version.12.0.0)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"3.1.0",
),
(
"AMD clang version 12.0.0 (CLANG: AOCC_3.0.0-Build#78 2020_12_10)"
"(based on LLVM Mirror.Version.12.0.0)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"3.0.0",
),
(
"AMD clang version 11.0.0 (CLANG: AOCC_2.3.0-Build#85 2020_11_10)"
"(based on LLVM Mirror.Version.11.0.0)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"2.3.0",
),
(
"AMD clang version 10.0.0 (CLANG: AOCC_2.2.0-Build#93 2020_06_25)"
"(based on LLVM Mirror.Version.10.0.0)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"2.2.0",
),
],
)
def test_aocc_version_detection(version_str, expected_version):
version = spack.compilers.aocc.Aocc.extract_version_from_output(version_str)
assert version == expected_version

@pytest.mark.regression("33901")
@pytest.mark.parametrize(
"version_str",
[
(
"Apple clang version 11.0.0 (clang-1100.0.33.8)\n"
"Target: x86_64-apple-darwin18.7.0\n"
"Thread model: posix\n"
"InstalledDir: "
"/Applications/Xcode.app/Contents/Developer/Toolchains/"
"XcodeDefault.xctoolchain/usr/bin\n"
),
(
"Apple LLVM version 7.0.2 (clang-700.1.81)\n"
"Target: x86_64-apple-darwin15.2.0\n"
"Thread model: posix\n"
),
],
)
def test_apple_clang_not_detected_as_cce(version_str):
version = spack.compilers.cce.Cce.extract_version_from_output(version_str)
assert version == "unknown"

View File

@@ -425,6 +425,9 @@ def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12
spec.concretize()
assert spec.satisfies("cflags=-O2")
@pytest.mark.only_clingo(
"Optional compiler propagation isn't deprecated for original concretizer"
)
@pytest.mark.parametrize(
"spec_str,expected,not_expected",
[
@@ -469,6 +472,7 @@ def test_compiler_inherited_upwards(self):
for dep in spec.traverse():
assert "%clang" in dep
@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer")
def test_architecture_deep_inheritance(self, mock_targets, compiler_factory):
"""Make sure that indirect dependencies receive architecture
information from the root even when partial architecture information
@@ -533,6 +537,9 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
with pytest.raises(spack.error.SpackError):
s.concretize()
@pytest.mark.only_clingo(
"Optional compiler propagation isn't deprecated for original concretizer"
)
@pytest.mark.parametrize(
"spec_str,expected_propagation",
[
@@ -560,6 +567,9 @@ def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagat
for key, expected_satisfies in expected_propagation:
spec[key].satisfies(expected_satisfies)
@pytest.mark.only_clingo(
"Optional compiler propagation isn't deprecated for original concretizer"
)
def test_concretize_propagated_variant_is_not_passed_to_dependent(self):
"""Test a package variant value was passed from its parent."""
spec = Spec("ascent~~shared +adios2 ^adios2+shared")
@@ -568,6 +578,9 @@ def test_concretize_propagated_variant_is_not_passed_to_dependent(self):
assert spec.satisfies("^adios2+shared")
assert spec.satisfies("^bzip2~shared")
@pytest.mark.only_clingo(
"Optional compiler propagation isn't deprecated for original concretizer"
)
def test_concretize_propagate_specified_variant(self):
"""Test that only the specified variant is propagated to the dependencies"""
spec = Spec("parent-foo-bar ~~foo")
@@ -576,6 +589,7 @@ def test_concretize_propagate_specified_variant(self):
assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo")
assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar")
@pytest.mark.only_clingo("Original concretizer is allowed to forego variant propagation")
def test_concretize_propagate_multivalue_variant(self):
"""Test that multivalue variants are propagating the specified value(s)
to their dependecies. The dependencies should not have the default value"""
@@ -632,6 +646,11 @@ def test_virtual_is_fully_expanded_for_mpileaks(self):
assert all(not d.dependencies(name="mpi") for d in spec.traverse())
assert all(x in spec for x in ("zmpi", "mpi"))
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
spec = Spec("indirect-mpich")
spec.normalize()
spec.concretize()
@pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:15.0.0"])
def test_compiler_inheritance(self, compiler_str):
spec_str = "mpileaks %{0}".format(compiler_str)
@@ -727,6 +746,7 @@ def test_conflicts_in_spec(self, conflict_spec):
with pytest.raises(spack.error.SpackError):
s.concretize()
@pytest.mark.only_clingo("Testing debug statements specific to new concretizer")
def test_conflicts_show_cores(self, conflict_spec, monkeypatch):
s = Spec(conflict_spec)
with pytest.raises(spack.error.SpackError) as e:
@@ -900,6 +920,7 @@ def test_concretize_anonymous_dep(self, spec_str):
("bowtie@1.2.2 os=redhat6", "%gcc@11.1.0"),
],
)
@pytest.mark.only_clingo("Original concretizer cannot work around conflicts")
def test_compiler_conflicts_in_package_py(
self, spec_str, expected_str, clang12_with_flags, gcc11_with_flags
):
@@ -1015,6 +1036,7 @@ def test_patching_dependencies(self, spec_str, patched_deps):
("quantum-espresso~veritas", ["^libelf@0.8.13"]),
],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_working_around_conflicting_defaults(self, spec_str, expected):
s = Spec(spec_str).concretized()
@@ -1027,6 +1049,7 @@ def test_working_around_conflicting_defaults(self, spec_str, expected):
"spec_str,expected",
[("cmake", ["%clang"]), ("cmake %gcc", ["%gcc"]), ("cmake %clang", ["%clang"])],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_external_package_and_compiler_preferences(self, spec_str, expected, mutable_config):
packages_yaml = {
"all": {"compiler": ["clang", "gcc"]},
@@ -1043,6 +1066,7 @@ def test_external_package_and_compiler_preferences(self, spec_str, expected, mut
assert s.satisfies(condition)
@pytest.mark.regression("5651")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_package_with_constraint_not_met_by_external(self):
"""Check that if we have an external package A at version X.Y in
packages.yaml, but our spec doesn't allow X.Y as a version, then
@@ -1057,6 +1081,7 @@ def test_package_with_constraint_not_met_by_external(self):
assert not s["libelf"].external
@pytest.mark.regression("9744")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_cumulative_version_ranges_with_different_length(self):
s = Spec("cumulative-vrange-root").concretized()
assert s.concrete
@@ -1084,6 +1109,7 @@ def test_dependency_conditional_on_another_dependency_state(self):
@pytest.mark.parametrize(
"spec_str,expected", [("cmake %gcc", "%gcc"), ("cmake %clang", "%clang")]
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_compiler_constraint_with_external_package(self, spec_str, expected):
packages_yaml = {
"cmake": {"externals": [{"spec": "cmake@3.4.3", "prefix": "/usr"}], "buildable": False}
@@ -1128,9 +1154,18 @@ def test_compiler_in_nonbuildable_external_package(
spack.config.set("packages", packages_yaml)
s = Spec(spec_str).concretized()
if xfailold and spack.config.get("config:concretizer") == "original":
pytest.xfail("This only works on the ASP-based concretizer")
assert s.satisfies(expected)
assert "external-common-perl" not in [d.name for d in s.dependencies()]
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_external_packages_have_consistent_hash(self):
s, t = Spec("externaltool"), Spec("externaltool")
s._old_concretize(), t._new_concretize()
assert s.dag_hash() == t.dag_hash()
def test_external_that_would_require_a_virtual_dependency(self):
s = Spec("requires-virtual").concretized()
@@ -1147,6 +1182,7 @@ def test_transitive_conditional_virtual_dependency(self, mutable_config):
assert s.satisfies("^[virtuals=stuff] externalvirtual")
@pytest.mark.regression("20040")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_conditional_provides_or_depends_on(self):
# Check that we can concretize correctly a spec that can either
# provide a virtual or depend on it based on the value of a variant
@@ -1185,6 +1221,7 @@ def test_activating_test_dependencies(self, spec_str, tests_arg, with_dep, witho
assert not node.dependencies(deptype="test"), msg.format(pkg_name)
@pytest.mark.regression("20019")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_compiler_match_is_preferred_to_newer_version(self, compiler_factory):
# This spec depends on openblas. Openblas has a conflict
# that doesn't allow newer versions with gcc@4.4.0. Check
@@ -1203,6 +1240,7 @@ def test_target_ranges_in_conflicts(self):
with pytest.raises(spack.error.SpackError):
Spec("impossible-concretization").concretized()
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_target_compatibility(self):
with pytest.raises(spack.error.SpackError):
Spec("libdwarf target=x86_64 ^libelf target=x86_64_v2").concretized()
@@ -1219,6 +1257,7 @@ def test_variant_not_default(self):
assert "+foo+bar+baz" in d
@pytest.mark.regression("20055")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_custom_compiler_version(self, mutable_config, compiler_factory, monkeypatch):
mutable_config.set(
"compilers", [compiler_factory(spec="gcc@10foo", operating_system="redhat6")]
@@ -1319,6 +1358,7 @@ def mock_fn(*args, **kwargs):
{"add_variant": True, "delete_variant": True},
],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_installed_packages_when_package_def_changes(
self, context, mutable_database, repo_with_changing_recipe
):
@@ -1348,6 +1388,7 @@ def test_reuse_installed_packages_when_package_def_changes(
# Structure and package hash will be different without reuse
assert root.dag_hash() != new_root_without_reuse.dag_hash()
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
@pytest.mark.regression("43663")
def test_no_reuse_when_variant_condition_does_not_hold(self, mutable_database, mock_packages):
spack.config.set("concretizer:reuse", True)
@@ -1363,6 +1404,7 @@ def test_no_reuse_when_variant_condition_does_not_hold(self, mutable_database, m
new2 = Spec("conditional-variant-pkg +two_whens").concretized()
assert new2.satisfies("@2 +two_whens +version_based")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_with_flags(self, mutable_database, mutable_config):
spack.config.set("concretizer:reuse", True)
spec = Spec("pkg-a cflags=-g cxxflags=-g").concretized()
@@ -1383,6 +1425,7 @@ def test_concretization_of_test_dependencies(self):
@pytest.mark.parametrize(
"spec_str", ["wrong-variant-in-conflicts", "wrong-variant-in-depends-on"]
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_error_message_for_inconsistent_variants(self, spec_str):
s = Spec(spec_str)
with pytest.raises(RuntimeError, match="not found in package"):
@@ -1487,6 +1530,7 @@ def test_multivalued_variants_from_cli(self, spec_str, expected_dict):
("deprecated-versions@=1.1.0", "deprecated-versions@1.1.0"),
],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_deprecated_versions_not_selected(self, spec_str, expected):
with spack.config.override("config:deprecated", True):
s = Spec(spec_str).concretized()
@@ -1547,6 +1591,7 @@ def test_non_default_provider_of_multiple_virtuals(self):
"spec_str,expect_installed",
[("mpich", True), ("mpich+debug", False), ("mpich~debug", True)],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_concrete_specs_are_not_modified_on_reuse(
self, mutable_database, spec_str, expect_installed
):
@@ -1560,6 +1605,7 @@ def test_concrete_specs_are_not_modified_on_reuse(
assert s.satisfies(spec_str)
@pytest.mark.regression("26721,19736")
@pytest.mark.only_clingo("Original concretizer cannot use sticky variants")
def test_sticky_variant_in_package(self):
# Here we test that a sticky variant cannot be changed from its default value
# by the ASP solver if not set explicitly. The package used in the test needs
@@ -1575,6 +1621,7 @@ def test_sticky_variant_in_package(self):
assert s.satisfies("%clang") and s.satisfies("~allow-gcc")
@pytest.mark.regression("42172")
@pytest.mark.only_clingo("Original concretizer cannot use sticky variants")
@pytest.mark.parametrize(
"spec,allow_gcc",
[
@@ -1597,6 +1644,7 @@ def test_sticky_variant_in_external(self, spec, allow_gcc):
assert s["sticky-variant"].satisfies("+allow-gcc")
assert s["sticky-variant"].external
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_do_not_invent_new_concrete_versions_unless_necessary(self):
# ensure we select a known satisfying version rather than creating
# a new '2.7' version.
@@ -1618,12 +1666,14 @@ def test_do_not_invent_new_concrete_versions_unless_necessary(self):
("conditional-values-in-variant foo=foo", True),
],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_conditional_values_in_variants(self, spec_str, valid):
s = Spec(spec_str)
raises = pytest.raises((RuntimeError, spack.error.UnsatisfiableSpecError))
with llnl.util.lang.nullcontext() if valid else raises:
s.concretize()
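
The conditional-expectation idiom above (a null context manager when the spec is valid, pytest.raises otherwise) is generally useful. Below is a self-contained sketch of the same pattern using the standard library's contextlib.nullcontext and made-up test data; it is not tied to Spack:

import contextlib
import pytest

@pytest.mark.parametrize("value,valid", [(3, True), (-1, False)])
def test_conditional_expectation_pattern(value, valid):
    # One parametrized test either expects success or a specific exception.
    expectation = contextlib.nullcontext() if valid else pytest.raises(ValueError)
    with expectation:
        if value < 0:
            raise ValueError("negative input")
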
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_conditional_values_in_conditional_variant(self):
"""Test that conditional variants play well with conditional possible values"""
s = Spec("conditional-values-in-variant@1.50.0").concretized()
@@ -1632,6 +1682,7 @@ def test_conditional_values_in_conditional_variant(self):
s = Spec("conditional-values-in-variant@1.60.0").concretized()
assert "cxxstd" in s.variants
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_target_granularity(self):
# The test architecture uses core2 as the default target. Check that when
# we configure Spack for "generic" granularity we concretize for x86_64
@@ -1642,6 +1693,7 @@ def test_target_granularity(self):
with spack.config.override("concretizer:targets", {"granularity": "generic"}):
assert s.concretized().satisfies("target=%s" % generic_target)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_host_compatible_concretization(self):
# Check that after setting "host_compatible" to false we cannot concretize.
# Here we use "k10" to set a target non-compatible with the current host
@@ -1654,6 +1706,7 @@ def test_host_compatible_concretization(self):
with pytest.raises(spack.error.SpackError):
s.concretized()
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_add_microarchitectures_on_explicit_request(self):
# Check that if we consider only "generic" targets, we can still solve for
# specific microarchitectures on explicit requests
@@ -1662,6 +1715,7 @@ def test_add_microarchitectures_on_explicit_request(self):
assert s.satisfies("target=k10")
@pytest.mark.regression("29201")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_recipe):
"""Test that we can reuse installed specs with versions not
declared in package.py
@@ -1676,6 +1730,7 @@ def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_rec
assert root.dag_hash() == new_root.dag_hash()
@pytest.mark.regression("29201")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_installed_version_is_selected_only_for_reuse(
self, mutable_database, repo_with_changing_recipe
):
@@ -1753,6 +1808,7 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, mo
(["mpi", "mpich"], 1, 1),
],
)
@pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
def test_best_effort_coconcretize(self, specs, expected, libc_offset):
specs = [Spec(s) for s in specs]
solver = spack.solver.asp.Solver()
@@ -1797,6 +1853,7 @@ def test_best_effort_coconcretize(self, specs, expected, libc_offset):
(["hdf5+mpi", "zmpi", "mpich"], "mpich", 2),
],
)
@pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occurances):
"""Test package preferences during coconcretization."""
specs = [Spec(s) for s in specs]
@@ -1812,6 +1869,7 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occura
counter += 1
assert counter == occurances, concrete_specs
@pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
def test_solve_in_rounds_all_unsolved(self, monkeypatch, mock_packages):
specs = [Spec(x) for x in ["libdwarf%gcc", "libdwarf%clang"]]
solver = spack.solver.asp.Solver()
@@ -1827,6 +1885,7 @@ def test_solve_in_rounds_all_unsolved(self, monkeypatch, mock_packages):
):
list(solver.solve_in_rounds(specs))
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_coconcretize_reuse_and_virtuals(self):
reusable_specs = []
for s in ["mpileaks ^mpich", "zmpi"]:
@@ -1843,6 +1902,7 @@ def test_coconcretize_reuse_and_virtuals(self):
assert "zmpi" in spec
@pytest.mark.regression("30864")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_misleading_error_message_on_version(self, mutable_database):
# For this bug to be triggered we need a reusable dependency
# that is not optimal in terms of optimization scores.
@@ -1859,6 +1919,7 @@ def test_misleading_error_message_on_version(self, mutable_database):
solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
@pytest.mark.regression("31148")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_version_weight_and_provenance(self):
"""Test package preferences during coconcretization."""
reusable_specs = [Spec(spec_str).concretized() for spec_str in ("pkg-b@0.9", "pkg-b@1.0")]
@@ -1890,6 +1951,7 @@ def test_version_weight_and_provenance(self):
assert criterion in result.criteria, criterion
assert result_spec.satisfies("^pkg-b@1.0")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_succeeds_with_config_compatible_os(self):
root_spec = Spec("pkg-b")
s = root_spec.concretized()
@@ -1915,6 +1977,7 @@ def test_git_hash_assigned_version_is_preferred(self):
assert hash in str(c)
@pytest.mark.parametrize("git_ref", ("a" * 40, "0.2.15", "main"))
@pytest.mark.only_clingo("Original concretizer cannot account for git hashes")
def test_git_ref_version_is_equivalent_to_specified_version(self, git_ref):
s = Spec("develop-branch-version@git.%s=develop" % git_ref)
c = s.concretized()
@@ -1924,6 +1987,7 @@ def test_git_ref_version_is_equivalent_to_specified_version(self, git_ref):
assert s.satisfies("@0.1:")
@pytest.mark.parametrize("git_ref", ("a" * 40, "0.2.15", "fbranch"))
@pytest.mark.only_clingo("Original concretizer cannot account for git hashes")
def test_git_ref_version_succeeds_with_unknown_version(self, git_ref):
# main is not defined in the package.py for this file
s = Spec("develop-branch-version@git.%s=main" % git_ref)
@@ -1931,6 +1995,7 @@ def test_git_ref_version_succeeds_with_unknown_version(self, git_ref):
assert s.satisfies("develop-branch-version@main")
@pytest.mark.regression("31484")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_installed_externals_are_reused(
self, mutable_database, repo_with_changing_recipe, tmp_path
):
@@ -1962,6 +2027,7 @@ def test_installed_externals_are_reused(
assert external3.dag_hash() == external1.dag_hash()
@pytest.mark.regression("31484")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_user_can_select_externals_with_require(self, mutable_database, tmp_path):
"""Test that users have means to select an external even in presence of reusable specs."""
external_conf = {
@@ -1990,6 +2056,7 @@ def test_user_can_select_externals_with_require(self, mutable_database, tmp_path
assert mpi_spec.name == "multi-provider-mpi"
@pytest.mark.regression("31484")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch):
"""Test that installed specs do not trigger conflicts. This covers for the rare case
where a conflict is added on a package after a spec matching the conflict was installed.
@@ -2010,6 +2077,7 @@ def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch
assert s.satisfies("~debug"), s
@pytest.mark.regression("32471")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_require_targets_are_allowed(self, mutable_database):
"""Test that users can set target constraints under the require attribute."""
# Configuration to be added to packages.yaml
@@ -2184,6 +2252,7 @@ def test_unsolved_specs_raises_error(self, monkeypatch, mock_packages):
solver.driver.solve(setup, specs, reuse=[])
@pytest.mark.regression("43141")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_clear_error_when_unknown_compiler_requested(self, mock_packages):
"""Tests that the solver can report a case where the compiler cannot be set"""
with pytest.raises(
@@ -2286,6 +2355,7 @@ def test_virtuals_are_annotated_on_edges(self, spec_str):
edges = spec.edges_to_dependencies(name="callpath")
assert len(edges) == 1 and edges[0].virtuals == ()
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
@pytest.mark.db
@pytest.mark.parametrize(
"spec_str,mpi_name",
@@ -2300,6 +2370,7 @@ def test_virtuals_are_reconstructed_on_reuse(self, spec_str, mpi_name, mutable_d
assert len(mpi_edges) == 1
assert "mpi" in mpi_edges[0].virtuals
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_dont_define_new_version_from_input_if_checksum_required(self, working_env):
os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
with pytest.raises(spack.error.UnsatisfiableSpecError):
@@ -2336,6 +2407,7 @@ def test_reuse_python_from_cli_and_extension_from_db(self, mutable_database):
("hdf5 ^gmake", {"gmake": "duplicates.test", "hdf5": "duplicates.test"}),
],
)
@pytest.mark.only_clingo("Uses specs requiring multiple gmake specs")
def test_select_lower_priority_package_from_repository_stack(
self, spec_str, expected_namespaces
):
@@ -2351,6 +2423,7 @@ def test_select_lower_priority_package_from_repository_stack(
assert s[name].concrete
assert s[name].namespace == namespace
@pytest.mark.only_clingo("Old concretizer cannot reuse")
def test_reuse_specs_from_non_available_compilers(self, mutable_config, mutable_database):
"""Tests that we can reuse specs with compilers that are not configured locally."""
# All the specs in the mutable DB have been compiled with %gcc@=10.2.1
@@ -2421,6 +2494,7 @@ def test_spec_with_build_dep_from_json(self, tmp_path):
assert s["dttop"].dag_hash() == build_dep.dag_hash()
@pytest.mark.regression("44040")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_exclude_specs_from_reuse(self, monkeypatch):
"""Tests that we can exclude a spec from reuse when concretizing, and that the spec
is not added back to the solve as a dependency of another reusable spec.
@@ -2470,6 +2544,7 @@ def test_exclude_specs_from_reuse(self, monkeypatch):
[],
],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_include_specs_from_externals_and_libcs(
self, included_externals, mutable_config, tmp_path
):
@@ -2502,6 +2577,7 @@ def test_include_specs_from_externals_and_libcs(
assert result["deprecated-versions"].satisfies("@1.0.0")
@pytest.mark.regression("44085")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_can_reuse_concrete_externals_for_dependents(self, mutable_config, tmp_path):
"""Test that external specs that are in the DB can be reused. This means they are
preferred to concretizing another external from packages.yaml
@@ -2524,6 +2600,7 @@ def test_can_reuse_concrete_externals_for_dependents(self, mutable_config, tmp_p
sombrero = result.specs[0]
assert sombrero["externaltool"].dag_hash() == external_spec.dag_hash()
@pytest.mark.only_clingo("Original concretizer cannot reuse")
def test_cannot_reuse_host_incompatible_libc(self):
"""Test whether reuse concretization correctly fails to reuse a spec with a host
incompatible libc."""
@@ -2604,6 +2681,7 @@ def duplicates_test_repository():
@pytest.mark.usefixtures("mutable_config", "duplicates_test_repository")
@pytest.mark.only_clingo("Not supported by the original concretizer")
class TestConcretizeSeparately:
"""Collects test on separate concretization"""
@@ -2822,6 +2900,7 @@ def edges_test_repository():
@pytest.mark.usefixtures("mutable_config", "edges_test_repository")
@pytest.mark.only_clingo("Edge properties not supported by the original concretizer")
class TestConcretizeEdges:
"""Collects tests on edge properties"""
@@ -2972,6 +3051,7 @@ def test_concretization_version_order():
]
@pytest.mark.only_clingo("Original concretizer cannot reuse specs")
@pytest.mark.parametrize(
"roots,reuse_yaml,expected,not_expected,expected_length",
[
@@ -3046,6 +3126,7 @@ def test_spec_filters(specs, include, exclude, expected):
assert f.selected_specs() == expected
@pytest.mark.only_clingo("clingo only reuse feature being tested")
@pytest.mark.regression("38484")
def test_git_ref_version_can_be_reused(install_mockery, do_not_check_runtimes_on_reuse):
first_spec = spack.spec.Spec("git-ref-package@git.2.1.5=2.1.5~opt").concretized()
@@ -3063,6 +3144,7 @@ def test_git_ref_version_can_be_reused(install_mockery, do_not_check_runtimes_on
assert third_spec.dag_hash() == first_spec.dag_hash()
@pytest.mark.only_clingo("clingo only reuse feature being tested")
@pytest.mark.parametrize("standard_version", ["2.0.0", "2.1.5", "2.1.6"])
def test_reuse_prefers_standard_over_git_versions(
standard_version, install_mockery, do_not_check_runtimes_on_reuse

View File

@@ -17,7 +17,10 @@
from spack.environment.environment import ViewDescriptor
from spack.version import Version
pytestmark = [pytest.mark.usefixtures("enable_runtimes")]
pytestmark = [
pytest.mark.only_clingo("Original concretizer does not support compiler runtimes"),
pytest.mark.usefixtures("enable_runtimes"),
]
def _concretize_with_reuse(*, root_str, reused_str):

View File

@@ -8,7 +8,10 @@
import spack.solver.asp
import spack.spec
pytestmark = [pytest.mark.not_on_windows("Windows uses old concretizer")]
pytestmark = [
pytest.mark.not_on_windows("Windows uses old concretizer"),
pytest.mark.only_clingo("Original concretizer does not support configuration requirements"),
]
version_error_messages = [
"Cannot satisfy 'fftw@:1.0' and 'fftw@1.1:",

View File

@@ -113,6 +113,7 @@ def test_preferred_compilers(self, compiler_str, spec_str):
spec = spack.spec.Spec(spec_str).concretized()
assert spec.compiler == CompilerSpec(compiler_str)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_preferred_target(self, mutable_mock_repo):
"""Test preferred targets are applied correctly"""
spec = concretize("mpich")
@@ -142,6 +143,7 @@ def test_preferred_versions(self):
spec = concretize("mpileaks")
assert spec.version == Version("2.2")
@pytest.mark.only_clingo("This behavior is not enforced for the old concretizer")
def test_preferred_versions_mixed_version_types(self):
update_packages("mixedversions", "version", ["=2.0"])
spec = concretize("mixedversions")
@@ -223,6 +225,7 @@ def test_preferred(self):
spec.concretize()
assert spec.version == Version("3.5.0")
@pytest.mark.only_clingo("This behavior is not enforced for the old concretizer")
def test_preferred_undefined_raises(self):
"""Preference should not specify an undefined version"""
update_packages("python", "version", ["3.5.0.1"])
@@ -230,6 +233,7 @@ def test_preferred_undefined_raises(self):
with pytest.raises(spack.config.ConfigError):
spec.concretize()
@pytest.mark.only_clingo("This behavior is not enforced for the old concretizer")
def test_preferred_truncated(self):
"""Versions without "=" are treated as version ranges: if there is
a satisfying version defined in the package.py, we should use that
@@ -506,6 +510,7 @@ def test_sticky_variant_accounts_for_packages_yaml(self):
assert s.satisfies("%gcc") and s.satisfies("+allow-gcc")
@pytest.mark.regression("41134")
@pytest.mark.only_clingo("Not backporting the fix to the old concretizer")
def test_default_preference_variant_different_type_does_not_error(self):
"""Tests that a different type for an existing variant in the 'all:' section of
packages.yaml doesn't fail with an error.

View File

@@ -19,7 +19,10 @@
from spack.test.conftest import create_test_repo
from spack.util.url import path_to_file_url
pytestmark = [pytest.mark.not_on_windows("Windows uses old concretizer")]
pytestmark = [
pytest.mark.not_on_windows("Windows uses old concretizer"),
pytest.mark.only_clingo("Original concretizer does not support configuration requirements"),
]
def update_packages_config(conf_str):

View File

@@ -18,7 +18,6 @@
import spack.config
import spack.directory_layout
import spack.environment as ev
import spack.fetch_strategy
import spack.main
import spack.package_base
import spack.paths
@@ -307,14 +306,14 @@ def test_add_config_path(mutable_config):
@pytest.mark.regression("17543,23259")
def test_add_config_path_with_enumerated_type(mutable_config):
spack.config.add("config:flags:keep_werror:all")
assert spack.config.get("config")["flags"]["keep_werror"] == "all"
spack.config.add("config:concretizer:clingo")
assert spack.config.get("config")["concretizer"] == "clingo"
spack.config.add("config:flags:keep_werror:specific")
assert spack.config.get("config")["flags"]["keep_werror"] == "specific"
spack.config.add("config:concretizer:original")
assert spack.config.get("config")["concretizer"] == "original"
with pytest.raises(spack.config.ConfigError):
spack.config.add("config:flags:keep_werror:foo")
spack.config.add("config:concretizer:foo")
def test_add_config_filename(mock_low_high_config, tmpdir):

View File

@@ -56,10 +56,8 @@
import spack.util.executable
import spack.util.git
import spack.util.gpg
import spack.util.parallel
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web
import spack.version
from spack.fetch_strategy import URLFetchStrategy
from spack.util.pattern import Bunch
@@ -706,10 +704,11 @@ def configuration_dir(tmpdir_factory, linux_os):
tmpdir.ensure("user", dir=True)
# Fill out config.yaml, compilers.yaml and modules.yaml templates.
solver = os.environ.get("SPACK_TEST_SOLVER", "clingo")
locks = sys.platform != "win32"
config = tmpdir.join("site", "config.yaml")
config_template = test_config / "config.yaml"
config.write(config_template.read_text().format(install_tree_root, locks))
config.write(config_template.read_text().format(install_tree_root, solver, locks))
target = str(archspec.cpu.host().family)
compilers = tmpdir.join("site", "compilers.yaml")
@@ -1003,7 +1002,7 @@ def temporary_store(tmpdir, request):
def mock_fetch(mock_archive, monkeypatch):
"""Fake the URL for a package so it downloads from a file."""
monkeypatch.setattr(
spack.package_base.PackageBase, "fetcher", URLFetchStrategy(url=mock_archive.url)
spack.package_base.PackageBase, "fetcher", URLFetchStrategy(mock_archive.url)
)
@@ -1418,24 +1417,6 @@ def mock_git_repository(git, tmpdir_factory):
r1 = rev_hash(branch)
r1_file = branch_file
multiple_directories_branch = "many_dirs"
num_dirs = 3
num_files = 2
dir_files = []
for i in range(num_dirs):
for j in range(num_files):
dir_files.append(f"dir{i}/file{j}")
git("checkout", "-b", multiple_directories_branch)
for f in dir_files:
repodir.ensure(f, file=True)
git("add", f)
git("-c", "commit.gpgsign=false", "commit", "-m", "many_dirs add files")
# restore default
git("checkout", default_branch)
# Map of version -> bunch. Each bunch includes; all the args
# that must be specified as part of a version() declaration (used to
# manufacture a version for the 'git-test' package); the associated
@@ -1455,11 +1436,6 @@ def mock_git_repository(git, tmpdir_factory):
"default-no-per-version-git": Bunch(
revision=default_branch, file=r0_file, args={"branch": default_branch}
),
"many-directories": Bunch(
revision=multiple_directories_branch,
file=dir_files[0],
args={"git": url, "branch": multiple_directories_branch},
),
}
t = Bunch(
@@ -1836,7 +1812,12 @@ def __call__(self, *args, **kwargs):
tty.msg("curl: (22) The requested URL returned error: 404")
self.returncode = 22
monkeypatch.setattr(spack.util.web, "require_curl", MockCurl)
def mock_curl(*args):
return MockCurl()
monkeypatch.setattr(spack.util.web, "_curl", mock_curl)
yield
@pytest.fixture(scope="function")
@@ -1975,18 +1956,26 @@ def nullify_globals(request, monkeypatch):
def pytest_runtest_setup(item):
# Skip tests if they are marked only clingo and are run with the original concretizer
only_clingo_marker = item.get_closest_marker(name="only_clingo")
if only_clingo_marker and os.environ.get("SPACK_TEST_SOLVER") == "original":
pytest.skip(*only_clingo_marker.args)
# Skip tests if they are marked only original and are run with clingo
only_original_marker = item.get_closest_marker(name="only_original")
if only_original_marker and os.environ.get("SPACK_TEST_SOLVER", "clingo") == "clingo":
pytest.skip(*only_original_marker.args)
# Skip tests marked "not_on_windows" if they're run on Windows
not_on_windows_marker = item.get_closest_marker(name="not_on_windows")
if not_on_windows_marker and sys.platform == "win32":
pytest.skip(*not_on_windows_marker.args)
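
For orientation, here is a hedged sketch of how individual tests opt into one solver via these markers; it assumes the `only_clingo` and `only_original` marks are registered in Spack's pytest configuration and that SPACK_TEST_SOLVER selects the solver, as in the setup hook above:

import pytest

@pytest.mark.only_clingo("Relies on backtracking that the original concretizer lacks")
def test_clingo_only_behavior():
    ...

@pytest.mark.only_original("Covers legacy behavior kept only in the greedy concretizer")
def test_original_only_behavior():
    ...
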
@pytest.fixture(autouse=True)
@pytest.fixture(scope="function")
def disable_parallel_buildcache_push(monkeypatch):
"""Disable process pools in tests."""
monkeypatch.setattr(
spack.util.parallel, "make_concurrent_executor", spack.util.parallel.SequentialExecutor
)
monkeypatch.setattr(spack.cmd.buildcache, "_make_pool", spack.cmd.buildcache.NoPool)
def _root_path(x, y, *, path):

Some files were not shown because too many files have changed in this diff.