Compare commits


4 Commits

Author | SHA1 | Message | Date
Wouter Deconinck | a5f8cfd3f3 | RPackages with custom url: url -> urls=[url] | 2024-08-20 13:27:57 -05:00
Wouter Deconinck | d38b4024bc | RPackages with bioc: remove redundant homepage and url | 2024-08-20 13:27:24 -05:00
Wouter Deconinck | 4d09bc13d9 | RPackage: urls=[bioc/src/contrib,data/annotation/src/contrib] for bioc | 2024-08-20 13:22:50 -05:00
Wouter Deconinck | 1794cd598c | RPackage: urls=[src/contrib,src/contrib/Archive], list_url=src/contrib | 2024-08-20 13:21:41 -05:00
440 changed files with 6019 additions and 6885 deletions

View File

@@ -893,9 +893,8 @@ The valid variables for a ``when`` clause are:
#. ``env``. The user environment (usually ``os.environ`` in Python).
#. ``hostname``. The hostname of the system.
#. ``full_hostname``. The fully qualified hostname of the system.
#. ``hostname``. The hostname of the system (if ``hostname`` is an
executable in the user's PATH).
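For example, a ``when`` clause is an ordinary Python expression over these
variables (a sketch with illustrative values):

.. code-block:: python

   # gate a definition on the machine it is concretized on
   hostname.startswith("login") and env.get("SPACK_STACK", "") == "prod"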
^^^^^^^^^^^^^^^^^^^^^^^^
SpecLists as Constraints

View File

@@ -1263,11 +1263,6 @@ Git fetching supports the following parameters to ``version``:
option ``--depth 1`` will be used if the version of git and the specified
transport protocol support it, and ``--single-branch`` will be used if the
version of git supports it.
* ``git_sparse_paths``: use ``sparse-checkout`` to clone only these relative paths.
This feature requires ``git`` version ``2.25.0`` or later and is useful for
large repositories that have separate portions which can be built independently.
If a provided path is a directory, all of its subdirectories and files
are also cloned.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
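A minimal sketch of the version-level usage (names illustrative):

.. code-block:: python

   version("2.1.0", tag="v2.1.0")     # fetch a tag...
   version("develop", branch="main")  # ...or a branch, but never both on one version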
@@ -1366,41 +1361,6 @@ Submodules
For more information about git submodules see the manpage of git: ``man
git-submodule``.
Sparse-Checkout
You can supply ``git_sparse_paths`` at the package or version level to use git's
sparse-checkout feature. Only the paths specified in the ``git_sparse_paths``
attribute, plus the files in the top-level directory, are then cloned.
This feature lets you clone only what you need from a large repository.
Note that sparse-checkout is a newer git feature and requires git ``2.25.0`` or greater.
If ``git_sparse_paths`` is supplied and the git version is too old,
a warning is issued and that package falls back to the standard cloning operations instead.
``git_sparse_paths`` should be supplied as a list of paths, a callable function for versions,
or a more complex package attribute using the ``@property`` decorator (a sketch of that
variant follows the example below). A callable implementation of ``git_sparse_paths``
should return a list.
.. code-block:: python

   def sparse_path_function(package):
       """A callable function that can be used inside a version."""
       # Paths can be directories or files; all subdirectories and files are included.
       paths = ["doe", "rae", "me/file.cpp"]
       if package.spec.version > Version("1.2.0"):
           paths.extend(["fae"])
       return paths

   class MyPackage(Package):
       # can also be a package attribute that will be used if not specified in versions
       git_sparse_paths = ["doe", "rae"]

       # use the package attribute
       version("1.0.0")
       version("1.1.0")

       # use the function
       version("1.1.5", git_sparse_paths=sparse_path_function)
       version("1.2.0", git_sparse_paths=sparse_path_function)
       version("1.2.5", git_sparse_paths=sparse_path_function)
.. _github-fetch:
^^^^^^

View File

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
astunparse
----------------

View File

@@ -1265,29 +1265,27 @@ def _distro_release_info(self) -> Dict[str, str]:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
else:
try:
with os.scandir(self.etc_dir) as it:
etc_files = [
p.path for p in it
if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
]
basenames = [
basename
for basename in os.listdir(self.etc_dir)
if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
and os.path.isfile(os.path.join(self.etc_dir, basename))
]
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
etc_files.sort()
basenames.sort()
except OSError:
# This may occur when /etc is not readable but we can't be
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
etc_files = [
os.path.join(self.etc_dir, basename)
for basename in _DISTRO_RELEASE_BASENAMES
]
for filepath in etc_files:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
basenames = _DISTRO_RELEASE_BASENAMES
for basename in basenames:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
if match is None:
continue
filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches.
if "name" not in distro_info:

View File

@@ -231,6 +231,96 @@ def is_host_name(instance):
return True
try:
# The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
import idna
except ImportError:
pass
else:
@_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
def is_idn_host_name(instance):
if not isinstance(instance, str_types):
return True
idna.encode(instance)
return True
try:
import rfc3987
except ImportError:
try:
from rfc3986_validator import validate_rfc3986
except ImportError:
pass
else:
@_checks_drafts(name="uri")
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI_reference")
else:
@_checks_drafts(draft7="iri", raises=ValueError)
def is_iri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI")
@_checks_drafts(draft7="iri-reference", raises=ValueError)
def is_iri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI_reference")
@_checks_drafts(name="uri", raises=ValueError)
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI_reference")
try:
from strict_rfc3339 import validate_rfc3339
except ImportError:
try:
from rfc3339_validator import validate_rfc3339
except ImportError:
validate_rfc3339 = None
if validate_rfc3339:
@_checks_drafts(name="date-time")
def is_datetime(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3339(instance)
@_checks_drafts(draft7="time")
def is_time(instance):
if not isinstance(instance, str_types):
return True
return is_datetime("1970-01-01T" + instance)
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
@@ -250,3 +340,86 @@ def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
try:
import webcolors
except ImportError:
pass
else:
def is_css_color_code(instance):
return webcolors.normalize_hex(instance)
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
def is_css21_color(instance):
if (
not isinstance(instance, str_types) or
instance.lower() in webcolors.css21_names_to_hex
):
return True
return is_css_color_code(instance)
def is_css3_color(instance):
if instance.lower() in webcolors.css3_names_to_hex:
return True
return is_css_color_code(instance)
try:
import jsonpointer
except ImportError:
pass
else:
@_checks_drafts(
draft6="json-pointer",
draft7="json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_json_pointer(instance):
if not isinstance(instance, str_types):
return True
return jsonpointer.JsonPointer(instance)
# TODO: I don't want to maintain this, so it
# needs to go either into jsonpointer (pending
# https://github.com/stefankoegl/python-json-pointer/issues/34) or
# into a new external library.
@_checks_drafts(
draft7="relative-json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_relative_json_pointer(instance):
# Definition taken from:
# https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
if not isinstance(instance, str_types):
return True
non_negative_integer, rest = [], ""
for i, character in enumerate(instance):
if character.isdigit():
non_negative_integer.append(character)
continue
if not non_negative_integer:
return False
rest = instance[i:]
break
return (rest == "#") or jsonpointer.JsonPointer(rest)
try:
import uritemplate.exceptions
except ImportError:
pass
else:
@_checks_drafts(
draft6="uri-template",
draft7="uri-template",
raises=uritemplate.exceptions.InvalidTemplate,
)
def is_uri_template(
instance,
template_validator=uritemplate.Validator().force_balanced_braces(),
):
template = uritemplate.URITemplate(instance)
return template_validator.validate(template)

View File

@@ -47,11 +47,7 @@ def decorator(factory):
def partial_uarch(
name: str = "",
vendor: str = "",
features: Optional[Set[str]] = None,
generation: int = 0,
cpu_part: str = "",
name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
) -> Microarchitecture:
"""Construct a partial microarchitecture, from information gathered during system scan."""
return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
features=features or set(),
compilers={},
generation=generation,
cpu_part=cpu_part,
)
@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
return partial_uarch(
vendor=_canonicalize_aarch64_vendor(data),
features=_feature_set(data, key="Features"),
cpu_part=data.get("CPU part", ""),
)
if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
generic_candidates = [c for c in candidates if c.vendor == "generic"]
best_generic = max(generic_candidates, key=sorting_fn)
# Relevant for AArch64. Filter on "cpu_part" if we have any match
if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
# Filter the candidates to be descendant of the best generic candidate.
# This is to avoid that the lack of a niche feature that can be disabled
# from e.g. BIOS prevents detection of a reasonably performant architecture

View File

@@ -2,7 +2,9 @@
# Archspec Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Types and functions to manage information on CPU microarchitectures."""
"""Types and functions to manage information
on CPU microarchitectures.
"""
import functools
import platform
import re
@@ -63,24 +65,21 @@ class Microarchitecture:
passed in as argument above.
* versions: versions that support this micro-architecture.
generation (int): generation of the micro-architecture, if relevant.
cpu_part (str): cpu part of the architecture, if relevant.
generation (int): generation of the micro-architecture, if
relevant.
"""
# pylint: disable=too-many-arguments,too-many-instance-attributes
# pylint: disable=too-many-arguments
#: Aliases for micro-architecture's features
feature_aliases = FEATURE_ALIASES
def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.name = name
self.parents = parents
self.vendor = vendor
self.features = features
self.compilers = compilers
# Only relevant for PowerPC
self.generation = generation
# Only relevant for AArch64
self.cpu_part = cpu_part
# Cache the ancestor computation
self._ancestors = None
@@ -112,7 +111,6 @@ def __eq__(self, other):
and self.parents == other.parents # avoid ancestors here
and self.compilers == other.compilers
and self.generation == other.generation
and self.cpu_part == other.cpu_part
)
@coerce_target_names
@@ -145,8 +143,7 @@ def __repr__(self):
cls_name = self.__class__.__name__
fmt = (
cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
"{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
"cpu_part={0.cpu_part!r})"
"{0.features!r}, {0.compilers!r}, {0.generation!r})"
)
return fmt.format(self)
@@ -193,7 +190,6 @@ def to_dict(self):
"generation": self.generation,
"parents": [str(x) for x in self.parents],
"compilers": self.compilers,
"cpupart": self.cpu_part,
}
@staticmethod
@@ -206,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
features=set(data["features"]),
compilers=data.get("compilers", {}),
generation=data.get("generation", 0),
cpu_part=data.get("cpupart", ""),
)
def optimization_flags(self, compiler, version):
@@ -365,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
features = set(values["features"])
compilers = values.get("compilers", {})
generation = values.get("generation", 0)
cpu_part = values.get("cpupart", "")
targets[name] = Microarchitecture(
name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
)
targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
known_targets = {}
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]

View File

@@ -2225,14 +2225,10 @@
],
"nvhpc": [
{
"versions": "21.11:23.8",
"versions": "21.11:",
"name": "zen3",
"flags": "-tp {name}",
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
},
{
"versions": "23.9:",
"flags": "-tp {name}"
"warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
}
]
}
@@ -2715,8 +2711,7 @@
"flags": "-mcpu=thunderx2t99"
}
]
},
"cpupart": "0x0af"
}
},
"a64fx": {
"from": ["armv8.2a"],
@@ -2784,8 +2779,7 @@
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
}
]
},
"cpupart": "0x001"
}
},
"cortex_a72": {
"from": ["aarch64"],
@@ -2822,8 +2816,7 @@
"flags" : "-mcpu=cortex-a72"
}
]
},
"cpupart": "0xd08"
}
},
"neoverse_n1": {
"from": ["cortex_a72", "armv8.2a"],
@@ -2909,8 +2902,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd0c"
}
},
"neoverse_v1": {
"from": ["neoverse_n1", "armv8.4a"],
@@ -2934,6 +2926,8 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
@@ -3034,8 +3028,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd40"
}
},
"neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"],
@@ -3059,10 +3052,13 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
@@ -3070,12 +3066,18 @@
"sb",
"dcpodp",
"sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
"bf16",
"dgh"
],
"compilers" : {
"gcc": [
@@ -3100,19 +3102,15 @@
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:11.3.99",
"versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.4:11.99",
"flags" : "-mcpu=neoverse-v2"
},
{
"versions": "12.0:12.2.99",
"versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
},
{
"versions": "12.3:",
"versions": "13.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
@@ -3147,113 +3145,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd4f"
},
"neoverse_n2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"dcpodp",
"sve2",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:10.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-n1",
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd49"
}
},
"m1": {
"from": ["armv8.4a"],
@@ -3319,8 +3211,7 @@
"flags" : "-mcpu=apple-m1"
}
]
},
"cpupart": "0x022"
}
},
"m2": {
"from": ["m1", "armv8.5a"],
@@ -3398,8 +3289,7 @@
"flags" : "-mcpu=apple-m2"
}
]
},
"cpupart": "0x032"
}
},
"arm": {
"from": [],

View File

@@ -52,9 +52,6 @@
}
}
}
},
"cpupart": {
"type": "string"
}
},
"required": [
@@ -110,4 +107,4 @@
"additionalProperties": false
}
}
}
}

View File

@@ -1,45 +0,0 @@
diff --git a/lib/spack/external/_vendoring/distro/distro.py b/lib/spack/external/_vendoring/distro/distro.py
index 89e1868047..50c3b18d4d 100644
--- a/lib/spack/external/_vendoring/distro/distro.py
+++ b/lib/spack/external/_vendoring/distro/distro.py
@@ -1265,27 +1265,29 @@ def _distro_release_info(self) -> Dict[str, str]:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
else:
try:
- basenames = [
- basename
- for basename in os.listdir(self.etc_dir)
- if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
- and os.path.isfile(os.path.join(self.etc_dir, basename))
- ]
+ with os.scandir(self.etc_dir) as it:
+ etc_files = [
+ p.path for p in it
+ if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
+ ]
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
- basenames.sort()
+ etc_files.sort()
except OSError:
# This may occur when /etc is not readable but we can't be
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
- basenames = _DISTRO_RELEASE_BASENAMES
- for basename in basenames:
- match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ etc_files = [
+ os.path.join(self.etc_dir, basename)
+ for basename in _DISTRO_RELEASE_BASENAMES
+ ]
+
+ for filepath in etc_files:
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
if match is None:
continue
- filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches.
if "name" not in distro_info:

View File

@@ -13,191 +13,3 @@ index 6b630cdfbb..1791fe7fbf 100644
-__version__ = metadata.version("jsonschema")
+
+__version__ = "3.2.0"
diff --git a/lib/spack/external/_vendoring/jsonschema/_format.py b/lib/spack/external/_vendoring/jsonschema/_format.py
index 281a7cfcff..29061e3661 100644
--- a/lib/spack/external/_vendoring/jsonschema/_format.py
+++ b/lib/spack/external/_vendoring/jsonschema/_format.py
@@ -231,96 +231,6 @@ def is_host_name(instance):
return True
-try:
- # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
- import idna
-except ImportError:
- pass
-else:
- @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
- def is_idn_host_name(instance):
- if not isinstance(instance, str_types):
- return True
- idna.encode(instance)
- return True
-
-
-try:
- import rfc3987
-except ImportError:
- try:
- from rfc3986_validator import validate_rfc3986
- except ImportError:
- pass
- else:
- @_checks_drafts(name="uri")
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI_reference")
-
-else:
- @_checks_drafts(draft7="iri", raises=ValueError)
- def is_iri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI")
-
- @_checks_drafts(draft7="iri-reference", raises=ValueError)
- def is_iri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI_reference")
-
- @_checks_drafts(name="uri", raises=ValueError)
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI_reference")
-
-
-try:
- from strict_rfc3339 import validate_rfc3339
-except ImportError:
- try:
- from rfc3339_validator import validate_rfc3339
- except ImportError:
- validate_rfc3339 = None
-
-if validate_rfc3339:
- @_checks_drafts(name="date-time")
- def is_datetime(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3339(instance)
-
- @_checks_drafts(draft7="time")
- def is_time(instance):
- if not isinstance(instance, str_types):
- return True
- return is_datetime("1970-01-01T" + instance)
-
-
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
@@ -340,86 +250,3 @@ def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
-try:
- import webcolors
-except ImportError:
- pass
-else:
- def is_css_color_code(instance):
- return webcolors.normalize_hex(instance)
-
- @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
- def is_css21_color(instance):
- if (
- not isinstance(instance, str_types) or
- instance.lower() in webcolors.css21_names_to_hex
- ):
- return True
- return is_css_color_code(instance)
-
- def is_css3_color(instance):
- if instance.lower() in webcolors.css3_names_to_hex:
- return True
- return is_css_color_code(instance)
-
-
-try:
- import jsonpointer
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="json-pointer",
- draft7="json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_json_pointer(instance):
- if not isinstance(instance, str_types):
- return True
- return jsonpointer.JsonPointer(instance)
-
- # TODO: I don't want to maintain this, so it
- # needs to go either into jsonpointer (pending
- # https://github.com/stefankoegl/python-json-pointer/issues/34) or
- # into a new external library.
- @_checks_drafts(
- draft7="relative-json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_relative_json_pointer(instance):
- # Definition taken from:
- # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
- if not isinstance(instance, str_types):
- return True
- non_negative_integer, rest = [], ""
- for i, character in enumerate(instance):
- if character.isdigit():
- non_negative_integer.append(character)
- continue
-
- if not non_negative_integer:
- return False
-
- rest = instance[i:]
- break
- return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
-try:
- import uritemplate.exceptions
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="uri-template",
- draft7="uri-template",
- raises=uritemplate.exceptions.InvalidTemplate,
- )
- def is_uri_template(
- instance,
- template_validator=uritemplate.Validator().force_balanced_braces(),
- ):
- template = uritemplate.URITemplate(instance)
- return template_validator.validate(template)

View File

@@ -1624,12 +1624,6 @@ def remove_linked_tree(path):
shutil.rmtree(os.path.realpath(path), **kwargs)
os.unlink(path)
else:
if sys.platform == "win32":
# Adding this prefix allows shutil to remove long paths on windows
# https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
long_path_pfx = "\\\\?\\"
if not path.startswith(long_path_pfx):
path = long_path_pfx + path
shutil.rmtree(path, **kwargs)

View File

@@ -6,6 +6,7 @@
import collections.abc
import contextlib
import functools
import inspect
import itertools
import os
import re
@@ -15,7 +16,7 @@
from typing import Any, Callable, Iterable, List, Tuple
# Ignore emacs backups when listing modules
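# (illustration: these patterns match names such as ".#module.py" and "module.py~")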
ignore_modules = r"^\.#|~$"
ignore_modules = [r"^\.#", "~$"]
def index_by(objects, *funcs):
@@ -83,6 +84,20 @@ def index_by(objects, *funcs):
return result
def caller_locals():
"""This will return the locals of the *parent* of the caller.
This allows a function to insert variables into its caller's
scope. Yes, this is some black magic, and yes it's useful
for implementing things like depends_on and provides.
"""
# Passing zero here skips line context for speed.
stack = inspect.stack(0)
try:
return stack[2][0].f_locals
finally:
del stack
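# Illustrative sketch (hypothetical helper): class bodies execute with a real
# dict as their local namespace, so a helper can inject names into the class
# being defined, which is how directives like depends_on() take effect:
#
#     def inject_flag():
#         caller_locals()["injected"] = True
#
#     class Example:
#         inject_flag()  # afterwards Example.injected is True
#
# (CPython does not persist writes to f_locals of ordinary function frames,
# so the trick is only reliable at module and class scope.)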
def attr_setdefault(obj, name, value):
"""Like dict.setdefault, but for objects."""
if not hasattr(obj, name):
@@ -90,6 +105,15 @@ def attr_setdefault(obj, name, value):
return getattr(obj, name)
def has_method(cls, name):
for base in inspect.getmro(cls):
if base is object:
continue
if name in base.__dict__:
return True
return False
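# Illustrative contrast with hasattr() (hypothetical class):
#
#     class A:
#         def foo(self): ...
#
#     has_method(A, "foo")      # True: found in A.__dict__
#     has_method(A, "__str__")  # False: defined only on object, which is skipped
#     hasattr(A, "__str__")     # True: hasattr also sees object's attributes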
def union_dicts(*dicts):
"""Use update() to combine all dicts into one.
@@ -154,22 +178,19 @@ def list_modules(directory, **kwargs):
order."""
list_directories = kwargs.setdefault("directories", True)
ignore = re.compile(ignore_modules)
for name in os.listdir(directory):
if name == "__init__.py":
continue
with os.scandir(directory) as it:
for entry in it:
if entry.name == "__init__.py" or entry.name == "__pycache__":
continue
path = os.path.join(directory, name)
if list_directories and os.path.isdir(path):
init_py = os.path.join(path, "__init__.py")
if os.path.isfile(init_py):
yield name
if (
list_directories
and entry.is_dir()
and os.path.isfile(os.path.join(entry.path, "__init__.py"))
):
yield entry.name
elif entry.name.endswith(".py") and entry.is_file() and not ignore.search(entry.name):
yield entry.name[:-3] # strip .py
elif name.endswith(".py"):
if not any(re.search(pattern, name) for pattern in ignore_modules):
yield re.sub(".py$", "", name)
def decorator_with_or_without_args(decorator):
@@ -216,8 +237,8 @@ def setter(name, value):
value.__name__ = name
setattr(cls, name, value)
if not hasattr(cls, "_cmp_key"):
raise TypeError(f"'{cls.__name__}' doesn't define _cmp_key().")
if not has_method(cls, "_cmp_key"):
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
@@ -367,8 +388,8 @@ def cd_fun():
TypeError: If the class does not have a ``_cmp_iter`` method
"""
if not hasattr(cls, "_cmp_iter"):
raise TypeError(f"'{cls.__name__}' doesn't define _cmp_iter().")
if not has_method(cls, "_cmp_iter"):
raise TypeError("'%s' doesn't define _cmp_iter()." % cls.__name__)
# comparison operators are implemented in terms of lazy_eq and lazy_lt
def eq(self, other):
@@ -843,19 +864,20 @@ def uniq(sequence):
return uniq_list
def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
def elide_list(line_list, max_num=10):
"""Takes a long list and limits it to a smaller number of elements,
replacing intervening elements with '...'. For example::
elide_list(["1", "2", "3", "4", "5", "6"], 4)
elide_list([1,2,3,4,5,6], 4)
gives::
["1", "2", "3", "...", "6"]
[1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
return [*line_list[: max_num - 1], "...", line_list[-1]]
return line_list
return line_list[: max_num - 1] + ["..."] + line_list[-1:]
else:
return line_list
@contextlib.contextmanager

View File

@@ -10,7 +10,6 @@
import errno
import io
import multiprocessing
import multiprocessing.connection
import os
import re
import select

lib/spack/spack/abi.py (new file, 131 lines)
View File

@@ -0,0 +1,131 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from llnl.util.lang import memoized
import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.util.executable import Executable, ProcessError
class ABI:
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec
) -> bool:
"""Return true if architecture of target spec is ABI compatible
to the architecture of constraint spec. If either the target
or constraint specs have no architecture, target is also defined
as architecture ABI compatible to constraint."""
return (
not target.architecture
or not constraint.architecture
or target.architecture.intersects(constraint.architecture)
)
@memoized
def _gcc_get_libstdcxx_version(self, version):
"""Returns gcc ABI compatibility info by getting the library version of
a compiler's libstdc++ or libgcc_s"""
from spack.build_environment import dso_suffix
spec = spack.spec.CompilerSpec("gcc", version)
compilers = spack.compilers.compilers_for_spec(spec)
if not compilers:
return None
compiler = compilers[0]
rungcc = None
libname = None
output = None
if compiler.cxx:
rungcc = Executable(compiler.cxx)
libname = "libstdc++." + dso_suffix
elif compiler.cc:
rungcc = Executable(compiler.cc)
libname = "libgcc_s." + dso_suffix
else:
return None
try:
# Some gcc's are actually clang and don't respond properly to
# --print-file-name (they just print the filename, not the
# full path). Ignore these and expect them to be handled as clang.
if Clang.default_version(rungcc.exe[0]) != "unknown":
return None
output = rungcc("--print-file-name=%s" % libname, output=str)
except ProcessError:
return None
if not output:
return None
libpath = os.path.realpath(output.strip())
if not libpath:
return None
return os.path.basename(libpath)
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
are ABI compatible."""
plib = self._gcc_get_libstdcxx_version(pversion)
clib = self._gcc_get_libstdcxx_version(cversion)
if not plib or not clib:
return False
return plib == clib
def _intel_compiler_compare(
self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
) -> bool:
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
# Test major and minor versions. Ignore build version.
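# e.g. up_to(2) reduces 19.0.4 and 19.0.2 both to 19.0, so they compare equal (illustrative values)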
pv = pversion.lo
cv = cversion.lo
return pv.up_to(2) == cv.up_to(2)
def compiler_compatible(
self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
) -> bool:
"""Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
if parent.compiler.name != child.compiler.name:
# Different compiler families are assumed ABI incompatible
return False
if loose:
return True
# TODO: Can we move the specialized ABI matching stuff
# TODO: into compiler classes?
for pversion in parent.compiler.versions:
for cversion in child.compiler.versions:
# For a few compilers use specialized comparisons.
# Otherwise match on version match.
if pversion.intersects(cversion):
return True
elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
pversion, cversion
):
return True
elif parent.compiler.name == "intel" and self._intel_compiler_compare(
pversion, cversion
):
return True
return False
def compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
) -> bool:
"""Returns true if target spec is ABI compatible to constraint spec"""
return self.architecture_compatible(target, constraint) and self.compiler_compatible(
target, constraint, loose=loose
)
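# Hypothetical usage sketch (assumes a working Spack session; spec strings illustrative):
#
#     import spack.spec
#     from spack.abi import ABI
#
#     target = spack.spec.Spec("zlib %gcc@12.2.0").concretized()
#     constraint = spack.spec.Spec("zlib %gcc@12.1.0").concretized()
#     ABI().compatible(target, constraint, loose=True)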

View File

@@ -39,9 +39,9 @@ def _search_duplicate_compilers(error_cls):
import collections
import collections.abc
import glob
import inspect
import io
import itertools
import os
import pathlib
import pickle
import re
@@ -210,11 +210,6 @@ def _search_duplicate_compilers(error_cls):
group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
)
#: Sanity checks on packages.yaml
config_repos = AuditClass(
group="configs", tag="CFG-REPOS", description="Sanity checks on repositories", kwargs=()
)
@config_packages
def _search_duplicate_specs_in_externals(error_cls):
@@ -372,27 +367,6 @@ def _ensure_all_virtual_packages_have_default_providers(error_cls):
]
@config_repos
def _ensure_no_folders_without_package_py(error_cls):
"""Check that we don't leave any folder without a package.py in repos"""
errors = []
for repository in spack.repo.PATH.repos:
missing = []
for entry in os.scandir(repository.packages_path):
if not entry.is_dir():
continue
package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
if not package_py.exists():
missing.append(entry.path)
if missing:
summary = (
f"The '{repository.namespace}' repository misses a package.py file"
f" in the following folders"
)
errors.append(error_cls(summary=summary, details=[f"{x}" for x in missing]))
return errors
def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
@@ -524,7 +498,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
name_definitions = collections.defaultdict(list)
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for cls_item in pkg_cls.__mro__:
for cls_item in inspect.getmro(pkg_cls):
for name in RESERVED_NAMES:
current_value = cls_item.__dict__.get(name)
if current_value is None:
@@ -553,7 +527,7 @@ def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
badname_regex, errors = re.compile(r"[_A-Z]"), []
for pkg_name in pkgs:
if badname_regex.search(pkg_name):
error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
error_msg = "Package name '{}' is either lowercase or conatine '_'".format(pkg_name)
errors.append(error_cls(error_msg, []))
return errors

View File

@@ -6,6 +6,7 @@
import codecs
import collections
import concurrent.futures
import contextlib
import copy
import hashlib
import io
@@ -24,7 +25,7 @@
import urllib.request
import warnings
from contextlib import closing
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
from typing import Dict, Generator, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
import llnl.util.filesystem as fsys
import llnl.util.lang
@@ -957,7 +958,7 @@ def _spec_files_from_cache(url: str):
raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(url))
def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
def generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
"""Create or replace the build cache index on the given mirror. The
buildcache index contains an entry for each binary package under the
cache_prefix.
@@ -1118,7 +1119,7 @@ def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuil
return ExistsInBuildcache(signed, unsigned, tarball)
def _url_upload_tarball_and_specfile(
def _upload_tarball_and_specfile(
spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
):
files = BuildcacheFiles(spec, tmpdir, out_url)
@@ -1153,146 +1154,49 @@ def _url_upload_tarball_and_specfile(
)
class Uploader:
def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
self.mirror = mirror
self.force = force
self.update_index = update_index
self.tmpdir: str
self.executor: concurrent.futures.Executor
def __enter__(self):
self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
self._executor = spack.util.parallel.make_concurrent_executor()
self.tmpdir = self._tmpdir.__enter__()
self.executor = self._executor.__enter__()
return self
def __exit__(self, *args):
self._executor.__exit__(*args)
self._tmpdir.__exit__(*args)
def push_or_raise(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
skipped, errors = self.push(specs)
if errors:
raise PushToBuildCacheError(
f"Failed to push {len(errors)} specs to {self.mirror.push_url}:\n"
+ "\n".join(
f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors
)
)
return skipped
def push(
self, specs: List[spack.spec.Spec]
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
raise NotImplementedError
def tag(self, tag: str, roots: List[spack.spec.Spec]):
"""Make a list of selected specs together available under the given tag"""
pass
class OCIUploader(Uploader):
def __init__(
self,
mirror: spack.mirror.Mirror,
force: bool,
update_index: bool,
base_image: Optional[str],
) -> None:
super().__init__(mirror, force, update_index)
self.target_image = spack.oci.oci.image_from_mirror(mirror)
self.base_image = ImageReference.from_string(base_image) if base_image else None
def push(
self, specs: List[spack.spec.Spec]
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
skipped, base_images, checksums, upload_errors = _oci_push(
target_image=self.target_image,
base_image=self.base_image,
installed_specs_with_deps=specs,
force=self.force,
tmpdir=self.tmpdir,
executor=self.executor,
)
self._base_images = base_images
self._checksums = checksums
# only update index if any binaries were uploaded
if self.update_index and len(skipped) + len(upload_errors) < len(specs):
_oci_update_index(self.target_image, self.tmpdir, self.executor)
return skipped, upload_errors
def tag(self, tag: str, roots: List[spack.spec.Spec]):
tagged_image = self.target_image.with_tag(tag)
# _push_oci may not populate self._base_images if binaries were already in the registry
for spec in roots:
_oci_update_base_images(
base_image=self.base_image,
target_image=self.target_image,
spec=spec,
base_image_cache=self._base_images,
)
_oci_put_manifest(
self._base_images, self._checksums, tagged_image, self.tmpdir, None, None, *roots
)
class URLUploader(Uploader):
def __init__(
self,
mirror: spack.mirror.Mirror,
force: bool,
update_index: bool,
signing_key: Optional[str],
) -> None:
super().__init__(mirror, force, update_index)
self.url = mirror.push_url
self.signing_key = signing_key
def push(
self, specs: List[spack.spec.Spec]
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
return _url_push(
specs,
out_url=self.url,
force=self.force,
update_index=self.update_index,
signing_key=self.signing_key,
tmpdir=self.tmpdir,
executor=self.executor,
)
def make_uploader(
mirror: spack.mirror.Mirror,
force: bool = False,
update_index: bool = False,
signing_key: Optional[str] = None,
base_image: Optional[str] = None,
) -> Uploader:
"""Builder for the appropriate uploader based on the mirror type"""
if mirror.push_url.startswith("oci://"):
return OCIUploader(
mirror=mirror, force=force, update_index=update_index, base_image=base_image
)
else:
return URLUploader(
mirror=mirror, force=force, update_index=update_index, signing_key=signing_key
)
def _format_spec(spec: Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
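# Illustrative output: "zlib@1.3.1/abcdefg" (name, version, first 7 hash characters; values hypothetical)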
@contextlib.contextmanager
def default_push_context() -> Generator[Tuple[str, concurrent.futures.Executor], None, None]:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
yield tmpdir, executor
def push_or_raise(
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
force: bool = False,
update_index: bool = False,
) -> List[Spec]:
"""Same as push, but raises an exception on error. Returns a list of skipped specs already
present in the build cache when force=False."""
skipped, errors = push(specs, out_url, signing_key, force, update_index)
if errors:
raise PushToBuildCacheError(
f"Failed to push {len(errors)} specs to {out_url}:\n"
+ "\n".join(f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors)
)
return skipped
def push(
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
force: bool = False,
update_index: bool = False,
) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
"""Pushes to the provided build cache, and returns a list of skipped specs that were already
present (when force=False). Does not raise on error."""
with default_push_context() as (tmpdir, executor):
return _push(specs, out_url, signing_key, force, update_index, tmpdir, executor)
class FancyProgress:
def __init__(self, total: int):
self.n = 0
@@ -1330,7 +1234,7 @@ def fail(self) -> None:
tty.info(f"{self.pre}Failed to push {self.pretty_spec}")
def _url_push(
def _push(
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
@@ -1375,7 +1279,7 @@ def _url_push(
upload_futures = [
executor.submit(
_url_upload_tarball_and_specfile,
_upload_tarball_and_specfile,
spec,
tmpdir,
out_url,
@@ -1405,12 +1309,12 @@ def _url_push(
if signing_key:
keys_tmpdir = os.path.join(tmpdir, "keys")
os.mkdir(keys_tmpdir)
_url_push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
if update_index:
index_tmpdir = os.path.join(tmpdir, "index")
os.mkdir(index_tmpdir)
_url_generate_package_index(out_url, index_tmpdir)
generate_package_index(out_url, index_tmpdir)
return skipped, errors
@@ -1527,9 +1431,12 @@ def _oci_put_manifest(
for s in expected_blobs:
# If a layer for a dependency has gone missing (due to removed manifest in the registry, a
# failed push, or a local forced uninstall), we cannot create a runnable container image.
# If an OCI registry is only used for storage, this is not a hard error, but for now we
# raise an exception unconditionally, until someone requests a more lenient behavior.
checksum = checksums.get(s.dag_hash())
if checksum:
config["rootfs"]["diff_ids"].append(str(checksum.uncompressed_digest))
if not checksum:
raise MissingLayerError(f"missing layer for {_format_spec(s)}")
config["rootfs"]["diff_ids"].append(str(checksum.uncompressed_digest))
# Set the environment variables
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
@@ -1574,7 +1481,6 @@ def _oci_put_manifest(
"size": checksums[s.dag_hash()].size,
}
for s in expected_blobs
if s.dag_hash() in checksums
),
],
}
@@ -1613,7 +1519,7 @@ def _oci_update_base_images(
)
def _oci_push(
def _push_oci(
*,
target_image: ImageReference,
base_image: Optional[ImageReference],
@@ -2561,6 +2467,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
with spack.util.path.filter_padding():
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force)
spec.package.windows_establish_runtime_linkage()
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, spack.store.STORE.layout)
@@ -2738,7 +2645,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
)
def _url_push_keys(
def push_keys(
*mirrors: Union[spack.mirror.Mirror, str],
keys: List[str],
tmpdir: str,
@@ -3189,3 +3096,7 @@ class CannotListKeys(GenerateIndexError):
class PushToBuildCacheError(spack.error.SpackError):
"""Raised when unable to push objects to binary mirror"""
class MissingLayerError(spack.error.SpackError):
"""Raised when a required layer for a dependency is missing in an OCI registry."""

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import importlib
import os.path
import re
import sys
@@ -29,7 +28,7 @@
def _python_import(module: str) -> bool:
try:
importlib.import_module(module)
__import__(module)
except ImportError:
return False
return True

View File

@@ -143,7 +143,11 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()
new_compilers = spack.compilers.find_new_compilers(
mixed_toolchain=sys.platform == "darwin"
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers)
@contextlib.contextmanager
@@ -152,7 +156,7 @@ def _ensure_bootstrap_configuration() -> Generator:
bootstrap_store_path = store_path()
user_configuration = _read_and_sanitize_configuration()
with spack.environment.no_active_environment():
with spack.platforms.use_platform(
with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
spack.platforms.real_host()
), spack.repo.use_repositories(spack.paths.packages_path):
# Default configuration scopes excluding command line

View File

@@ -124,7 +124,7 @@ def _development_requirements() -> List[RequiredResponseType]:
# Ensure we trigger environment modifications if we have an environment
if BootstrapEnvironment.spack_yaml().exists():
with BootstrapEnvironment() as env:
env.load()
env.update_syspath_and_environ()
return [
_required_executable(

View File

@@ -457,12 +457,9 @@ def set_wrapper_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
# Find ccache binary and hand it to build environment
if spack.config.get("config:ccache"):
# Enable ccache in the compiler wrapper
env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
else:
# Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
env.set("CCACHE_DISABLE", "1")
# Gather information about various types of dependencies
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -1559,7 +1556,7 @@ def __init__(self, package):
#: Modules for the classes in the MRO up to PackageBase
modules_in_mro = []
for cls in type(package).__mro__:
for cls in inspect.getmro(type(package)):
module = cls.module
if module == self.current_module:

View File

@@ -108,11 +108,6 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
if _supports_compilation_databases(pkg):
args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
# Enable MACOSX_RPATH by default when cmake_minimum_required < 3
# https://cmake.org/cmake/help/latest/policy/CMP0042.html
if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
def generator(*names: str, default: Optional[str] = None):
"""The build system generator to use.

View File

@@ -138,14 +138,14 @@ def cuda_flags(arch_list):
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.6")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -17,7 +17,7 @@
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import HeaderList, LibraryList, join_path
from llnl.util.filesystem import HeaderList, LibraryList
import spack.builder
import spack.config
@@ -120,12 +120,6 @@ def skip_modules(self) -> Iterable[str]:
"""
return []
@property
def bindir(self) -> str:
"""Path to Python package's bindir, bin on unix like OS's Scripts on Windows"""
windows = self.spec.satisfies("platform=windows")
return join_path(self.spec.prefix, "Scripts" if windows else "bin")
def view_file_conflicts(self, view, merge_map):
"""Report all file conflicts, excepting special cases for python.
Specifically, this does not report errors for duplicate

View File

@@ -85,20 +85,28 @@ def homepage(cls):
return "https://bioconductor.org/packages/" + cls.bioc
@lang.classproperty
def url(cls):
def urls(cls):
if cls.cran:
return (
return [
"https://cloud.r-project.org/src/contrib/"
+ cls.cran
+ "_"
+ str(list(cls.versions)[0])
+ ".tar.gz"
)
+ f"{cls.cran}_{str(list(cls.versions)[0])}.tar.gz",
"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+ f"{cls.cran}_{str(list(cls.versions)[0])}.tar.gz",
]
elif cls.bioc:
return [
"https://bioconductor.org/packages/release/bioc/src/contrib/"
+ f"{cls.bioc}_{str(list(cls.versions)[0])}.tar.gz",
"https://bioconductor.org/packages/release/data/annotation/src/contrib/"
+ f"{cls.bioc}_{str(list(cls.versions)[0])}.tar.gz",
]
else:
return [cls.url]
@lang.classproperty
def list_url(cls):
if cls.cran:
return "https://cloud.r-project.org/src/contrib/Archive/" + cls.cran + "/"
return "https://cloud.r-project.org/src/contrib/"
@property
def git(self):

View File

@@ -12,7 +12,6 @@
from llnl.util import lang
import spack.build_environment
import spack.multimethod
#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS = {}
@@ -296,11 +295,7 @@ def _decorator(fn):
return _decorator
class BuilderMeta(
PhaseCallbacksMeta,
spack.multimethod.MultiMethodMeta,
type(collections.abc.Sequence), # type: ignore
):
class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)): # type: ignore
pass

View File

@@ -9,11 +9,11 @@
import llnl.util.lang
from llnl.util.filesystem import mkdirp
from llnl.util.symlink import symlink
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.paths
import spack.util.file_cache
import spack.util.path
@@ -74,6 +74,23 @@ def store(self, fetcher, relative_dest):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def symlink(self, mirror_ref):
"""Symlink a human readible path in our mirror to the actual
storage location."""
cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
storage_path = os.path.join(self.root, mirror_ref.storage_path)
relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
if not os.path.exists(cosmetic_path):
if os.path.lexists(cosmetic_path):
# In this case the link itself exists but it is broken: remove
# it and recreate it (in order to fix any symlinks broken prior
# to https://github.com/spack/spack/pull/13908)
os.unlink(cosmetic_path)
mkdirp(os.path.dirname(cosmetic_path))
symlink(relative_dst, cosmetic_path)
#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (

View File

@@ -1110,8 +1110,7 @@ def main_script_replacements(cmd):
cdash_handler.populate_buildgroup(all_job_names)
except (SpackError, HTTPError, URLError, TimeoutError) as err:
tty.warn(f"Problem populating buildgroup: {err}")
elif cdash_config:
# warn only if there was actually a CDash configuration.
else:
tty.warn("Unable to populate buildgroup without CDash credentials")
service_job_retries = {
@@ -1383,10 +1382,8 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
"""
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
signing_key = bindist.select_signing_key() if sign_binaries else None
mirror = spack.mirror.Mirror.from_url(mirror_url)
try:
with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
bindist.push_or_raise([spec], out_url=mirror_url, signing_key=signing_key)
return True
except bindist.PushToBuildCacheError as e:
tty.error(f"Problem writing to {mirror_url}: {e}")
@@ -1436,6 +1433,10 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
job_log_dir: path into which build log should be copied
"""
tty.debug(f"job spec: {job_spec}")
if not job_spec:
msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
tty.error(msg)
return
try:
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import importlib
import os
import re
import sys
@@ -115,8 +114,8 @@ def get_module(cmd_name):
try:
# Try to import the command from the built-in directory
module_name = f"{__name__}.{pname}"
module = importlib.import_module(module_name)
module_name = "%s.%s" % (__name__, pname)
module = __import__(module_name, fromlist=[pname, SETUP_PARSER, DESCRIPTION], level=0)
tty.debug("Imported {0} from built-in commands".format(pname))
except ImportError:
module = spack.extensions.get_module(cmd_name)

View File

@@ -37,6 +37,7 @@
from spack import traverse
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import ImageReference
from spack.spec import Spec, save_dependency_specfiles
description = "create, download and install binary packages"
@@ -391,8 +392,13 @@ def push_fn(args):
else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
mirror = args.mirror
assert isinstance(mirror, spack.mirror.Mirror)
mirror: spack.mirror.Mirror = args.mirror
# Check if this is an OCI image.
try:
target_image = spack.oci.oci.image_from_mirror(mirror)
except ValueError:
target_image = None
push_url = mirror.push_url
@@ -403,11 +409,14 @@ def push_fn(args):
unsigned = not (args.key or args.signed)
# For OCI images, we require dependencies to be pushed for now.
if mirror.push_url.startswith("oci://") and not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
if target_image:
if "dependencies" not in args.things_to_install:
tty.die("Dependencies must be pushed for OCI images.")
if not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
unsigned = True
# Select a signing key, or None if unsigned.
@@ -438,17 +447,49 @@ def push_fn(args):
(s, PackageNotInstalledError("package not installed")) for s in not_installed
)
with bindist.make_uploader(
mirror=mirror,
force=args.force,
update_index=args.update_index,
signing_key=signing_key,
base_image=args.base_image,
) as uploader:
skipped, upload_errors = uploader.push(specs=specs)
failed.extend(upload_errors)
if not upload_errors and args.tag:
uploader.tag(args.tag, roots)
with bindist.default_push_context() as (tmpdir, executor):
if target_image:
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
skipped, base_images, checksums, upload_errors = bindist._push_oci(
target_image=target_image,
base_image=base_image,
installed_specs_with_deps=specs,
force=args.force,
tmpdir=tmpdir,
executor=executor,
)
if upload_errors:
failed.extend(upload_errors)
# Apart from creating manifests for each individual spec, we allow users to create a
# separate image tag for all root specs and their runtime dependencies.
elif args.tag:
tagged_image = target_image.with_tag(args.tag)
# _push_oci may not populate base_images if binaries were already in the registry
for spec in roots:
bindist._oci_update_base_images(
base_image=base_image,
target_image=target_image,
spec=spec,
base_image_cache=base_images,
)
bindist._oci_put_manifest(
base_images, checksums, tagged_image, tmpdir, None, None, *roots
)
tty.info(f"Tagged {tagged_image}")
else:
skipped, upload_errors = bindist._push(
specs,
out_url=push_url,
force=args.force,
update_index=args.update_index,
signing_key=signing_key,
tmpdir=tmpdir,
executor=executor,
)
failed.extend(upload_errors)
if skipped:
if len(specs) == 1:
@@ -460,7 +501,7 @@ def push_fn(args):
"The following {} specs were skipped as they already exist in the buildcache:\n"
" {}\n"
" Use --force to overwrite them.".format(
len(skipped), ", ".join(elide_list([_format_spec(s) for s in skipped], 5))
len(skipped), ", ".join(elide_list(skipped, 5))
)
)
@@ -481,6 +522,13 @@ def push_fn(args):
),
)
# Update the OCI index if requested
if target_image and len(skipped) < len(specs) and args.update_index:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
bindist._oci_update_index(target_image, tmpdir, executor)
def install_fn(args):
"""install from a binary package"""
@@ -768,7 +816,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
url = mirror.push_url
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
bindist._url_generate_package_index(url, tmpdir)
bindist.generate_package_index(url, tmpdir)
if update_keys:
keys_url = url_util.join(

View File

@@ -50,7 +50,6 @@ def setup_parser(subparser):
default=lambda: spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
arguments.add_common_arguments(find_parser, ["jobs"])
# Remove
remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove compiler by spec")
@@ -79,21 +78,25 @@ def setup_parser(subparser):
def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration.
"""
# None signals spack.compiler.find_compilers to use its default logic
paths = args.add_paths or None
new_compilers = spack.compilers.find_compilers(
path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,
# Below scope=None because we want new compilers that don't appear
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(
paths, scope=None, mixed_toolchain=args.mixed_toolchain
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
n = len(new_compilers)
s = "s" if n > 1 else ""
filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
tty.msg(f"Added {n:d} new compiler{s} to {filename}")
compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
colify(reversed(compiler_strs), indent=4)
config = spack.config.CONFIG
filename = config.get_config_filename(args.scope, "compilers")
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
else:
tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:")

View File

@@ -6,7 +6,6 @@
import re
import sys
import urllib.parse
from typing import List
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -15,15 +14,9 @@
import spack.stage
import spack.util.web
from spack.spec import Spec
from spack.url import (
UndetectableNameError,
UndetectableVersionError,
find_versions_of_archive,
parse_name,
parse_version,
)
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.executable import ProcessError, which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
@@ -96,20 +89,14 @@ class BundlePackageTemplate:
url_def = " # There is no URL since there is no code to download."
body_def = " # There is no need for install() since there is no code."
def __init__(self, name: str, versions, languages: List[str]):
def __init__(self, name, versions):
self.name = name
self.class_name = mod_to_class(name)
self.versions = versions
self.languages = languages
def write(self, pkg_path):
"""Writes the new package file."""
all_deps = [f' depends_on("{lang}", type="build")' for lang in self.languages]
if all_deps and self.dependencies:
all_deps.append("")
all_deps.append(self.dependencies)
# Write out a template for the file
with open(pkg_path, "w") as pkg_file:
pkg_file.write(
@@ -119,7 +106,7 @@ def write(self, pkg_path):
base_class_name=self.base_class_name,
url_def=self.url_def,
versions=self.versions,
dependencies="\n".join(all_deps),
dependencies=self.dependencies,
body_def=self.body_def,
)
)
@@ -138,8 +125,8 @@ def install(self, spec, prefix):
url_line = ' url = "{url}"'
def __init__(self, name, url, versions, languages: List[str]):
super().__init__(name, versions, languages)
def __init__(self, name, url, versions):
super().__init__(name, versions)
self.url_def = self.url_line.format(url=url)
@@ -227,13 +214,13 @@ def luarocks_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
if not name.startswith("lua-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to lua-{0}".format(name))
name = "lua-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class MesonPackageTemplate(PackageTemplate):
@@ -334,14 +321,14 @@ class RacketPackageTemplate(PackageTemplate):
# subdirectory = None
"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
if not name.startswith("rkt-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to rkt-{0}".format(name))
name = "rkt-{0}".format(name)
self.body_def = self.body_def.format(name[4:])
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class PythonPackageTemplate(PackageTemplate):
@@ -374,7 +361,7 @@ def config_settings(self, spec, prefix):
settings = {}
return settings"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
@@ -428,7 +415,7 @@ def __init__(self, name, url, versions, languages: List[str]):
+ self.url_line
)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class RPackageTemplate(PackageTemplate):
@@ -447,7 +434,7 @@ def configure_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
if not name.startswith("r-"):
# Make it more obvious that we are renaming the package
@@ -467,7 +454,7 @@ def __init__(self, name, url, versions, languages: List[str]):
if bioc:
self.url_line = ' url = "{0}"\n' ' bioc = "{1}"'.format(url, r_name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class PerlmakePackageTemplate(PackageTemplate):
@@ -487,14 +474,14 @@ def configure_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
if not name.startswith("perl-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to perl-{0}".format(name))
name = "perl-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class PerlbuildPackageTemplate(PerlmakePackageTemplate):
@@ -519,7 +506,7 @@ class OctavePackageTemplate(PackageTemplate):
# FIXME: Add additional dependencies if required.
# depends_on("octave-foo", type=("build", "run"))"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name octave-splines`, don't rename it
# octave-octave-splines
if not name.startswith("octave-"):
@@ -527,7 +514,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to octave-{0}".format(name))
name = "octave-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class RubyPackageTemplate(PackageTemplate):
@@ -547,7 +534,7 @@ def build(self, spec, prefix):
# FIXME: If not needed delete this function
pass"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name ruby-numpy`, don't rename it
# ruby-ruby-numpy
if not name.startswith("ruby-"):
@@ -555,7 +542,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to ruby-{0}".format(name))
name = "ruby-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class MakefilePackageTemplate(PackageTemplate):
@@ -593,14 +580,14 @@ def configure_args(self, spec, prefix):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
templates = {
@@ -671,48 +658,8 @@ def setup_parser(subparser):
)
#: C file extensions
C_EXT = {".c"}
#: C++ file extensions
CXX_EXT = {
".C",
".c++",
".cc",
".ccm",
".cpp",
".CPP",
".cxx",
".h++",
".hh",
".hpp",
".hxx",
".inl",
".ipp",
".ixx",
".tcc",
".tpp",
}
#: Fortran file extensions
FORTRAN_EXT = {
".f77",
".F77",
".f90",
".F90",
".f95",
".F95",
".f",
".F",
".for",
".FOR",
".ftn",
".FTN",
}
class BuildSystemAndLanguageGuesser:
"""An instance of BuildSystemAndLanguageGuesser provides a callable object to be used
class BuildSystemGuesser:
"""An instance of BuildSystemGuesser provides a callable object to be used
during ``spack create``. By passing this object to ``spack checksum``, we
can take a peek at the fetched tarball and discern the build system it uses.
"""
@@ -720,119 +667,81 @@ class BuildSystemAndLanguageGuesser:
def __init__(self):
"""Sets the default build system."""
self.build_system = "generic"
self._c = False
self._cxx = False
self._fortran = False
# List of files in the archive ordered by their depth in the directory tree.
self._file_entries: List[str] = []
def __call__(self, archive: str, url: str) -> None:
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from."""
if url is not None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
return
if url.endswith(".gem"):
self.build_system = "ruby"
return
if url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
return
if url.endswith(".rock"):
self.build_system = "lua"
return
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
]
# Peek inside the compressed file.
if archive.endswith(".zip") or ".zip#" in archive:
if stage.archive_file.endswith(".zip") or ".zip#" in stage.archive_file:
try:
unzip = which("unzip")
assert unzip is not None
output = unzip("-lq", archive, output=str)
except Exception:
output = unzip("-lq", stage.archive_file, output=str)
except ProcessError:
output = ""
else:
try:
tar = which("tar")
assert tar is not None
output = tar("tf", archive, output=str)
except Exception:
output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
except ProcessError:
output = ""
self._file_entries[:] = output.splitlines()
lines = output.splitlines()
# Files closest to the root should be considered first when determining build system.
self._file_entries.sort(key=lambda p: p.count("/"))
self._determine_build_system(url)
self._determine_language()
def _determine_build_system(self, url: str) -> None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
elif url.endswith(".gem"):
self.build_system = "ruby"
elif url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
elif url.endswith(".rock"):
self.build_system = "lua"
elif self._file_entries:
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(re.compile(pattern), build_system)
for pattern, build_system in (
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
)
]
# Determine the build system based on the files contained in the archive.
for file in self._file_entries:
for pattern, build_system in clues:
if pattern.search(file):
self.build_system = build_system
return
def _determine_language(self):
for entry in self._file_entries:
_, ext = os.path.splitext(entry)
if not self._c and ext in C_EXT:
self._c = True
elif not self._cxx and ext in CXX_EXT:
self._cxx = True
elif not self._fortran and ext in FORTRAN_EXT:
self._fortran = True
if self._c and self._cxx and self._fortran:
return
@property
def languages(self) -> List[str]:
langs: List[str] = []
if self._c:
langs.append("c")
if self._cxx:
langs.append("cxx")
if self._fortran:
langs.append("fortran")
return langs
# Determine the build system based on the files contained
# in the archive.
for pattern, bs in clues:
if any(re.search(pattern, line) for line in lines):
self.build_system = bs
break
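The clue table above is order-sensitive: the first regular expression that matches any file in the archive listing decides the build system. A standalone sketch of that first-match scan (file names are made up):

.. code-block:: python

   import re

   CLUES = [
       (r"/CMakeLists\.txt$", "cmake"),
       (r"/configure$", "autotools"),
       (r"/(GNU)?[Mm]akefile$", "makefile"),
   ]

   def guess_build_system(lines, default="generic"):
       # Order matters: a project shipping both CMakeLists.txt and a
       # Makefile is classified as cmake, which appears first in the table.
       for pattern, build_system in CLUES:
           if any(re.search(pattern, line) for line in lines):
               return build_system
       return default

   print(guess_build_system(["pkg-1.0/CMakeLists.txt", "pkg-1.0/Makefile"]))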
def get_name(name, url):
@@ -902,7 +811,7 @@ def get_url(url):
def get_versions(args, name):
"""Returns a list of versions and hashes for a package.
Also returns a BuildSystemAndLanguageGuesser object.
Also returns a BuildSystemGuesser object.
Returns default values if no URL is provided.
@@ -911,7 +820,7 @@ def get_versions(args, name):
name (str): The name of the package
Returns:
tuple: versions and hashes, and a BuildSystemAndLanguageGuesser object
tuple: versions and hashes, and a BuildSystemGuesser object
"""
# Default version with hash
@@ -925,7 +834,7 @@ def get_versions(args, name):
# version("1.2.4")"""
# Default guesser
guesser = BuildSystemAndLanguageGuesser()
guesser = BuildSystemGuesser()
valid_url = True
try:
@@ -938,7 +847,7 @@ def get_versions(args, name):
if args.url is not None and args.template != "bundle" and valid_url:
# Find available versions
try:
url_dict = find_versions_of_archive(args.url)
url_dict = spack.url.find_versions_of_archive(args.url)
if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
if url_dict_filtered is None:
@@ -965,7 +874,7 @@ def get_versions(args, name):
return versions, guesser
def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGuesser) -> str:
def get_build_system(template, url, guesser):
"""Determine the build system template.
If a template is specified, always use that. Otherwise, if a URL
@@ -973,10 +882,11 @@ def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGue
build system it uses. Otherwise, use a generic template by default.
Args:
template: ``--template`` argument given to ``spack create``
url: ``url`` argument given to ``spack create``
guesser: The first_stage_function given to ``spack checksum`` which records the build
system it detects
template (str): ``--template`` argument given to ``spack create``
url (str): ``url`` argument given to ``spack create``
guesser (BuildSystemGuesser): The first_stage_function given to
``spack checksum`` which records the build system it detects
Returns:
str: The name of the build system template to use
@@ -1050,7 +960,7 @@ def create(parser, args):
build_system = get_build_system(args.template, url, guesser)
# Create the package template object
constr_args = {"name": name, "versions": versions, "languages": guesser.languages}
constr_args = {"name": name, "versions": versions}
package_class = templates[build_system]
if package_class != BundlePackageTemplate:
constr_args["url"] = url

View File

@@ -10,10 +10,8 @@
import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.stage
import spack.util.path
import spack.version
from spack.cmd.common import arguments
@@ -64,7 +62,7 @@ def change_fn(section):
spack.config.change_or_add("develop", find_fn, change_fn)
def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
def _retrieve_develop_source(spec, abspath):
# "steal" the source code via staging API. We ask for a stage
# to be created, then copy it afterwards somewhere else. It would be
# better if we can create the `source_path` directly into its final
@@ -73,13 +71,13 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
# We construct a package class ourselves, rather than asking for
# Spec.package, since Spec only allows this when it is concrete
package = pkg_cls(spec)
source_stage: spack.stage.Stage = package.stage[0]
source_stage = package.stage[0]
if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
source_stage.fetcher.get_full_repo = True
# If we retrieved this version before and cached it, we may have
# done so without cloning the full git repo; likewise, any
# mirror might store an instance with truncated history.
source_stage.default_fetcher_only = True
source_stage.disable_mirrors()
source_stage.fetcher.set_package(package)
package.stage.steal_source(abspath)

View File

@@ -468,30 +468,32 @@ def env_remove(args):
This removes an environment managed by Spack. Directory environments
and manifests embedded in repositories should be removed manually.
"""
remove_envs = []
read_envs = []
valid_envs = []
bad_envs = []
invalid_envs = []
for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
valid_envs.append(env)
valid_envs.append(env_name)
if env_name in args.rm_env:
remove_envs.append(env)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
invalid_envs.append(env_name)
if env_name in args.rm_env:
bad_envs.append(env_name)
# Check whether remove_env is included by another env before trying to remove it
for env in valid_envs:
for remove_env in remove_envs:
# Check if env is linked to another before trying to remove
for name in valid_envs:
# don't check whether the environment is included in itself
if env.name == remove_env.name:
if name == env_name:
continue
if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
environ = ev.Environment(ev.root(name))
if ev.root(env_name) in environ.included_concrete_envs:
msg = f'Environment "{env_name}" is being used by environment "{name}"'
if args.force:
tty.warn(msg)
else:
@@ -504,7 +506,7 @@ def env_remove(args):
if not answer:
tty.die("Will not remove any environments")
for env in remove_envs:
for env in read_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")

View File

@@ -224,7 +224,7 @@ def gpg_publish(args):
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution._url_push_keys(
spack.binary_distribution.push_keys(
mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
)

View File

@@ -502,7 +502,7 @@ def print_licenses(pkg, args):
def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
pkg = pkg_cls(spec)
# Output core package information

View File

@@ -23,6 +23,11 @@ def setup_parser(subparser):
output.add_argument(
"-s", "--safe", action="store_true", help="only list safe versions of the package"
)
output.add_argument(
"--safe-only",
action="store_true",
help="[deprecated] only list safe versions of the package",
)
output.add_argument(
"-r", "--remote", action="store_true", help="only list remote versions of the package"
)
@@ -42,13 +47,17 @@ def versions(parser, args):
safe_versions = pkg.versions
if args.safe_only:
tty.warn('"--safe-only" is deprecated. Use "--safe" instead.')
args.safe = args.safe_only
if not (args.remote or args.new):
if sys.stdout.isatty():
tty.msg("Safe versions (already checksummed):")
if not safe_versions:
if sys.stdout.isatty():
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug("Manually add versions to the package.")
else:
colify(sorted(safe_versions, reverse=True), indent=2)
@@ -74,12 +83,12 @@ def versions(parser, args):
if not remote_versions:
if sys.stdout.isatty():
if not fetched_versions:
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug(
"Check the list_url and list_depth attributes of "
"the package to help Spack find versions."
)
else:
tty.warn(f"Found no unchecksummed versions for {pkg.name}")
tty.warn("Found no unchecksummed versions for {0}".format(pkg.name))
else:
colify(sorted(remote_versions, reverse=True), indent=2)

View File

@@ -29,9 +29,6 @@
__all__ = ["Compiler"]
PATH_INSTANCE_VARS = ["cc", "cxx", "f77", "fc"]
FLAG_INSTANCE_VARS = ["cflags", "cppflags", "cxxflags", "fflags"]
@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
@@ -703,30 +700,6 @@ def compiler_environment(self):
os.environ.clear()
os.environ.update(backup_env)
def to_dict(self):
flags_dict = {fname: " ".join(fvals) for fname, fvals in self.flags.items()}
flags_dict.update(
{attr: getattr(self, attr, None) for attr in FLAG_INSTANCE_VARS if hasattr(self, attr)}
)
result = {
"spec": str(self.spec),
"paths": {attr: getattr(self, attr, None) for attr in PATH_INSTANCE_VARS},
"flags": flags_dict,
"operating_system": str(self.operating_system),
"target": str(self.target),
"modules": self.modules or [],
"environment": self.environment or {},
"extra_rpaths": self.extra_rpaths or [],
}
if self.enable_implicit_rpaths is not None:
result["implicit_rpaths"] = self.enable_implicit_rpaths
if self.alias:
result["alias"] = self.alias
return result
class CompilerAccessError(spack.error.SpackError):
def __init__(self, compiler, paths):

View File

@@ -6,11 +6,12 @@
"""This module contains functions related to finding compilers on the
system and configuring Spack to use multiple compilers.
"""
import importlib
import collections
import itertools
import multiprocessing.pool
import os
import sys
import warnings
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Tuple
import archspec.cpu
@@ -21,15 +22,16 @@
import spack.compiler
import spack.config
import spack.error
import spack.operating_systems
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.version
from spack.operating_systems import windows_os
from spack.util.environment import get_path
from spack.util.naming import mod_to_class
_path_instance_vars = ["cc", "cxx", "f77", "fc"]
_flags_instance_vars = ["cflags", "cppflags", "cxxflags", "fflags"]
_other_instance_vars = [
"modules",
"operating_system",
@@ -61,10 +63,6 @@
}
#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"
def pkg_spec_for_compiler(cspec):
"""Return the spec of the package that provides the compiler."""
for spec, package in _compiler_to_pkg.items():
@@ -87,7 +85,29 @@ def converter(cspec_like, *args, **kwargs):
def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
return {"compiler": compiler.to_dict()}
d = {}
d["spec"] = str(compiler.spec)
d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
d["flags"].update(
dict(
(attr, getattr(compiler, attr, None))
for attr in _flags_instance_vars
if hasattr(compiler, attr)
)
)
d["operating_system"] = str(compiler.operating_system)
d["target"] = str(compiler.target)
d["modules"] = compiler.modules or []
d["environment"] = compiler.environment or {}
d["extra_rpaths"] = compiler.extra_rpaths or []
if compiler.enable_implicit_rpaths is not None:
d["implicit_rpaths"] = compiler.enable_implicit_rpaths
if compiler.alias:
d["alias"] = compiler.alias
return {"compiler": d}
def get_compiler_config(
@@ -107,7 +127,7 @@ def get_compiler_config(
# Do not init config because there is a non-empty scope
return config
find_compilers(scope=scope)
_init_compiler_config(configuration, scope=scope)
config = configuration.get("compilers", scope=scope)
return config
@@ -116,8 +136,125 @@ def get_compiler_config_from_packages(
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
"""Return the compiler configuration from packages.yaml"""
packages_yaml = configuration.get("packages", scope=scope)
return CompilerConfigFactory.from_packages_yaml(packages_yaml)
config = configuration.get("packages", scope=scope)
if not config:
return []
packages = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in config.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
packages.extend(_compiler_config_from_package_config(externals_config))
return packages
def _compiler_config_from_package_config(config):
compilers = []
for entry in config:
compiler = _compiler_config_from_external(entry)
if compiler:
compilers.append(compiler)
return compilers
def _compiler_config_from_external(config):
extra_attributes_key = "extra_attributes"
compilers_key = "compilers"
c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
# Allow `@x.y.z` instead of `@=x.y.z`
spec = spack.spec.parse_with_version_concrete(config["spec"])
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
err_header = f"The external spec '{spec}' cannot be used as a compiler"
# If 'extra_attributes' is missing, this entry may not be meant to be used
# as a compiler, so just leave a debug message rather than a loud warning.
if extra_attributes_key not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
return None
extra_attributes = config[extra_attributes_key]
# If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
if compilers_key not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
)
return None
attribute_compilers = extra_attributes[compilers_key]
if c_key not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{extra_attributes_key}:{compilers_key}'"
)
return None
c_compiler = attribute_compilers[c_key]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if cxx_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if fortran_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
paths = {
"cc": c_compiler,
"cxx": attribute_compilers.get(cxx_key, None),
"fc": attribute_compilers.get(fortran_key, None),
"f77": attribute_compilers.get(fortran_key, None),
}
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": config.get("modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
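For orientation, the ``config`` argument consumed by ``_compiler_config_from_external`` is a single entry from a package's ``externals`` list in ``packages.yaml``. A minimal sketch of the expected shape (paths are illustrative):

.. code-block:: python

   external_entry = {
       "spec": "gcc@12.3.0",
       "prefix": "/usr",
       "extra_attributes": {
           "compilers": {
               "c": "/usr/bin/gcc",             # mandatory: no C compiler, no entry
               "cxx": "/usr/bin/g++",           # optional
               "fortran": "/usr/bin/gfortran",  # optional; reused for both fc and f77
           }
       },
   }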
def _init_compiler_config(
configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
configuration.set("compilers", compilers_dict, scope=scope)
def compiler_config_files():
@@ -141,7 +278,9 @@ def add_compilers_to_config(compilers, scope=None):
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -190,7 +329,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
@@ -239,77 +380,79 @@ def all_compiler_specs(scope=None, init_config=True):
def find_compilers(
path_hints: Optional[List[str]] = None,
*,
scope: Optional[str] = None,
mixed_toolchain: bool = False,
max_workers: Optional[int] = None,
path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
) -> List["spack.compiler.Compiler"]:
"""Searches for compiler in the paths given as argument. If any new compiler is found, the
configuration is updated, and the list of new compiler objects is returned.
"""Return the list of compilers found in the paths given as arguments.
Args:
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: configuration scope to modify
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
max_workers: number of processes used to search for compilers
"""
import spack.detection
known_compilers = set(all_compilers(init_config=False))
if path_hints is None:
path_hints = get_path("PATH")
default_paths = fs.search_paths_for_executables(*path_hints)
if sys.platform == "win32":
default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)
detected_packages = spack.detection.by_path(
compiler_pkgs, path_hints=default_paths, max_workers=max_workers
# To detect the version of the compilers, we dispatch a certain number
# of function calls to different workers. Here we construct the list
# of arguments for each call.
arguments = []
for o in all_os_classes():
search_paths = getattr(o, "compiler_search_paths", default_paths)
arguments.extend(arguments_to_detect_version_fn(o, search_paths))
# Here we map the function arguments to the corresponding calls
tp = multiprocessing.pool.ThreadPool()
try:
detected_versions = tp.map(detect_version, arguments)
finally:
tp.close()
def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
value, error = item
if error is None:
return True
try:
# Guard against a UnicodeEncodeError when a non-ASCII
# character appears in the error message
tty.debug(error)
except UnicodeEncodeError:
pass
return False
def remove_errors(
item: Tuple[Optional[DetectVersionArgs], Optional[str]]
) -> DetectVersionArgs:
value, _ = item
assert value is not None
return value
return make_compiler_list(
[remove_errors(detected) for detected in detected_versions if valid_version(detected)],
mixed_toolchain=mixed_toolchain,
)
valid_compilers = {}
for name, detected in detected_packages.items():
compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x.spec)]
if not compilers:
continue
valid_compilers[name] = compilers
def _has_fortran_compilers(x):
if "compilers" not in x.spec.extra_attributes:
return False
def find_new_compilers(
path_hints: Optional[List[str]] = None,
scope: Optional[str] = None,
*,
mixed_toolchain: bool = False,
):
"""Same as ``find_compilers`` but return only the compilers that are not
already in compilers.yaml.
return "fortran" in x.spec.extra_attributes["compilers"]
Args:
path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: scope to look for a compiler. If None consider the merged configuration.
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
"""
compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)
if mixed_toolchain:
gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
if gccs:
best_gcc = sorted(
gccs, key=lambda x: spack.spec.parse_with_version_concrete(x.spec).version
)[-1]
gfortran = best_gcc.spec.extra_attributes["compilers"]["fortran"]
for name in ("llvm", "apple-clang"):
if name not in valid_compilers:
continue
candidates = valid_compilers[name]
for candidate in candidates:
if _has_fortran_compilers(candidate):
continue
candidate.spec.extra_attributes["compilers"]["fortran"] = gfortran
new_compilers = []
for name, detected in valid_compilers.items():
for config in CompilerConfigFactory.from_specs([x.spec for x in detected]):
c = _compiler_from_config_entry(config["compiler"])
if c in known_compilers:
continue
new_compilers.append(c)
add_compilers_to_config(new_compilers, scope=scope)
return new_compilers
return select_new_compilers(compilers, scope)
def select_new_compilers(compilers, scope=None):
@@ -319,9 +462,7 @@ def select_new_compilers(compilers, scope=None):
compilers_not_in_config = []
for c in compilers:
arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
same_specs = compilers_for_spec(
c.spec, arch_spec=arch_spec, scope=scope, init_config=False
)
same_specs = compilers_for_spec(c.spec, arch_spec, scope=scope, init_config=False)
if not same_specs:
compilers_not_in_config.append(c)
@@ -369,9 +510,8 @@ def replace_apple_clang(name):
return [replace_apple_clang(name) for name in all_compiler_module_names()]
@llnl.util.lang.memoized
def all_compiler_module_names() -> List[str]:
return list(llnl.util.lang.list_modules(spack.paths.compilers_path))
return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]
@_auto_compiler_spec
@@ -391,12 +531,7 @@ def find(compiler_spec, scope=None, init_config=True):
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
"""Return specs of available compilers that match the supplied
compiler spec. Return an empty list if nothing found."""
return [
c.spec
for c in compilers_for_spec(
compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
)
]
return [c.spec for c in compilers_for_spec(compiler_spec, arch_spec, scope, True, init_config)]
def all_compilers(scope=None, init_config=True):
@@ -418,11 +553,14 @@ def all_compilers_from(configuration, scope=None, init_config=True):
@_auto_compiler_spec
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
def compilers_for_spec(
compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
matches = set(find(compiler_spec, scope, init_config))
compilers = []
for cspec in matches:
@@ -431,7 +569,7 @@ def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config
def compilers_for_arch(arch_spec, scope=None):
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
config = all_compilers_config(spack.config.CONFIG, scope=scope)
return list(get_compilers(config, arch_spec=arch_spec))
@@ -463,15 +601,13 @@ def compiler_from_dict(items):
os = items.get("operating_system", None)
target = items.get("target", None)
if not (
"paths" in items and all(n in items["paths"] for n in spack.compiler.PATH_INSTANCE_VARS)
):
if not ("paths" in items and all(n in items["paths"] for n in _path_instance_vars)):
raise InvalidCompilerConfigurationError(cspec)
cls = class_for_compiler_name(cspec.name)
compiler_paths = []
for c in spack.compiler.PATH_INSTANCE_VARS:
for c in _path_instance_vars:
compiler_path = items["paths"][c]
if compiler_path != "None":
compiler_paths.append(compiler_path)
@@ -599,6 +735,24 @@ def compiler_for_spec(compiler_spec, arch_spec):
return compilers[0]
@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
config = spack.config.CONFIG
scope_to_compilers = {}
for scope in config.scopes:
compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec, scope=scope)
if compilers:
scope_to_compilers[scope] = compilers
cfg_file_to_duplicates = {}
for scope, compilers in scope_to_compilers.items():
config_file = config.get_config_filename(scope, "compilers")
cfg_file_to_duplicates[config_file] = compilers
return cfg_file_to_duplicates
@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
"""Given a compiler module name, get the corresponding Compiler class."""
@@ -612,7 +766,7 @@ def class_for_compiler_name(compiler_name):
submodule_name = compiler_name.replace("-", "_")
module_name = ".".join(["spack", "compilers", submodule_name])
module_obj = importlib.import_module(module_name)
module_obj = __import__(module_name, fromlist=[None])
cls = getattr(module_obj, mod_to_class(compiler_name))
# make a note of the name in the module so we can get to it easily.
@@ -621,10 +775,272 @@ def class_for_compiler_name(compiler_name):
return cls
def all_os_classes():
"""
Return the list of classes for all operating systems available on
this platform
"""
classes = []
platform = spack.platforms.host()
for os_class in platform.operating_sys.values():
classes.append(os_class)
return classes
def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
#: Gathers the attribute values by which a detected compiler is considered
#: unique in Spack.
#:
#: - os: the operating system
#: - compiler_name: the name of the compiler (e.g. 'gcc', 'clang', etc.)
#: - version: the version of the compiler
#:
CompilerID = collections.namedtuple("CompilerID", ["os", "compiler_name", "version"])
#: Variations on a matched compiler name
NameVariation = collections.namedtuple("NameVariation", ["prefix", "suffix"])
#: Groups together the arguments needed by `detect_version`. The four entries
#: in the tuple are:
#:
#: - id: An instance of the CompilerID named tuple (version can be set to None
#: as it will be detected later)
#: - variation: a NameVariation for file being tested
#: - language: compiler language being tested (one of 'cc', 'cxx', 'fc', 'f77')
#: - path: full path to the executable being tested
#:
DetectVersionArgs = collections.namedtuple(
"DetectVersionArgs", ["id", "variation", "language", "path"]
)
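Reusing the named tuples defined just above, a hypothetical candidate for a suffixed gcc, before its version has been detected, might look like:

.. code-block:: python

   gcc12_candidate = DetectVersionArgs(
       # Illustrative only: in real detection `os` is an OperatingSystem
       # instance rather than a string, and the version starts out as None.
       id=CompilerID(os="ubuntu22.04", compiler_name="gcc", version=None),
       variation=NameVariation(prefix=None, suffix="-12"),
       language="cc",
       path="/usr/bin/gcc-12",
   )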
def arguments_to_detect_version_fn(
operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
) -> List[DetectVersionArgs]:
"""Returns a list of DetectVersionArgs tuples to be used in a
corresponding function to detect compiler versions.
The ``operating_system`` instance can customize the behavior of this
function by providing a method with the same name.
Args:
operating_system: the operating system on which we are looking for compilers
paths: paths to search for compilers
Returns:
List of DetectVersionArgs tuples. Each item in the list will later be
mapped to the corresponding function call to detect the version of the
compilers in this OS.
"""
def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
command_arguments: List[DetectVersionArgs] = []
files_to_be_tested = fs.files_in(*search_paths)
for compiler_name in supported_compilers_for_host_platform():
compiler_cls = class_for_compiler_name(compiler_name)
for language in ("cc", "cxx", "f77", "fc"):
# Select only the files matching a regexp
for (file, full_path), regexp in itertools.product(
files_to_be_tested, compiler_cls.search_regexps(language)
):
match = regexp.match(file)
if match:
compiler_id = CompilerID(operating_system, compiler_name, None)
detect_version_args = DetectVersionArgs(
id=compiler_id,
variation=NameVariation(*match.groups()),
language=language,
path=full_path,
)
command_arguments.append(detect_version_args)
return command_arguments
fn = getattr(operating_system, "arguments_to_detect_version_fn", _default)
return fn(paths)
def detect_version(
detect_version_args: DetectVersionArgs,
) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
"""Computes the version of a compiler and adds it to the information
passed as input.
As this function is meant to be executed by worker processes it won't
raise any exception but instead will return a (value, error) tuple that
needs to be checked by the code dispatching the calls.
Args:
detect_version_args: information on the compiler for which we should detect the version.
Returns:
A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
version of the compiler was computed correctly and the first argument
of the tuple will contain it. Otherwise ``error`` is a string
containing an explanation on why the version couldn't be computed.
"""
def _default(fn_args):
compiler_id = fn_args.id
language = fn_args.language
compiler_cls = class_for_compiler_name(compiler_id.compiler_name)
path = fn_args.path
# Get compiler names and the callback to detect their versions
callback = getattr(compiler_cls, f"{language}_version")
try:
version = callback(path)
if version and str(version).strip() and version != "unknown":
value = fn_args._replace(id=compiler_id._replace(version=version))
return value, None
error = f"Couldn't get version for compiler {path}".format(path)
except spack.util.executable.ProcessError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except spack.util.executable.ProcessTimeoutError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
path, e.__class__.__name__, str(e)
)
return None, error
operating_system = detect_version_args.id.os
fn = getattr(operating_system, "detect_version", _default)
return fn(detect_version_args)
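``detect_version`` deliberately returns a ``(value, error)`` pair instead of raising, because it runs inside a thread pool where exceptions are awkward to collect. A standalone sketch of the same pattern, with ``probe`` standing in for the real version callback:

.. code-block:: python

   from multiprocessing.pool import ThreadPool

   def probe(path):
       # Return (result, None) on success and (None, message) on failure,
       # so the dispatcher filters errors without a try/except per worker.
       if "clang" in path:
           return f"{path}: 15.0.0", None
       return None, f"Couldn't get version for compiler {path}"

   with ThreadPool(2) as tp:
       results = tp.map(probe, ["/usr/bin/clang", "/usr/bin/ghost-cc"])

   good = [value for value, error in results if error is None]
   bad = [error for value, error in results if error is not None]
   print(good, bad)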
def make_compiler_list(
detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
) -> List["spack.compiler.Compiler"]:
"""Process a list of detected versions and turn them into a list of
compiler specs.
Args:
detected_versions: list of DetectVersionArgs containing a valid version
mixed_toolchain: allow mixing compilers from different toolchains if a language is missing
Returns:
list: list of Compiler objects
"""
group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn)
# Gather items in a dictionary by the id, name variation and language
compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
compiler_id, name_variation, language = sort_key
by_compiler_id = compilers_d.setdefault(compiler_id, {})
by_name_variation = by_compiler_id.setdefault(name_variation, {})
by_name_variation[language] = next(x.path for x in group)
def _default_make_compilers(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
compiler_cls = class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
# TODO: johnwparent - revisit the following line as per discussion at:
# https://github.com/spack/spack/pull/33385/files#r1040036318
target = archspec.cpu.host()
compiler = compiler_cls(spec, operating_system, str(target.family), paths)
return [compiler]
# For compilers with the same compiler id:
#
# - Prefer with C compiler to without
# - Prefer with C++ compiler to without
# - Prefer no variations to variations (e.g., clang to clang-gpu)
#
sort_fn = lambda variation: (
"cc" not in by_compiler_id[variation], # None last
"cxx" not in by_compiler_id[variation], # None last
getattr(variation, "prefix", None),
getattr(variation, "suffix", None),
)
# Flatten to a list of compiler id, primary variation and compiler dictionary
flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
for compiler_id, by_compiler_id in compilers_d.items():
ordered = sorted(by_compiler_id, key=sort_fn)
selected_variation = ordered[0]
selected = by_compiler_id[selected_variation]
# Fill any missing parts from subsequent entries (without mixing toolchains)
for lang in ["cxx", "f77", "fc"]:
if lang not in selected:
next_lang = next(
(by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None
)
if next_lang:
selected[lang] = next_lang
flat_compilers.append((compiler_id, selected_variation, selected))
# Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
if mixed_toolchain:
make_mixed_toolchain(flat_compilers)
# Finally, create the compiler list
compilers: List["spack.compiler.Compiler"] = []
for compiler_id, _, compiler in flat_compilers:
make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
candidates = make_compilers(compiler_id, compiler)
compilers.extend(x for x in candidates if x.cc is not None)
return compilers
def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
"""Add missing compilers across toolchains when they are missing for a particular language.
This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
fortran compiler (no flang)."""
# First collect the clangs that are missing a fortran compiler
clangs_without_flang = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name in ("clang", "apple-clang")
and "f77" not in compiler
and "fc" not in compiler
]
if not clangs_without_flang:
return
# Filter on GCCs with fortran compiler
gccs_with_fortran = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
]
# Sort these GCCs by "best variation" (no prefix / suffix first)
gccs_with_fortran.sort(
key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
)
# Attach the optimal GCC fortran compiler to the clangs that don't have one
for clang_id, _, clang_compiler in clangs_without_flang:
gcc_compiler = next(
(gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
)
if not gcc_compiler:
continue
# Update the fc / f77 entries
clang_compiler["f77"] = gcc_compiler["f77"]
clang_compiler["fc"] = gcc_compiler["fc"]
def is_mixed_toolchain(compiler):
"""Returns True if the current compiler is a mixed toolchain,
False otherwise.
@@ -671,164 +1087,20 @@ def name_matches(name, name_list):
return False
_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"
class CompilerConfigFactory:
"""Class aggregating all ways of constructing a list of compiler config entries."""
@staticmethod
def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
result = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for s in specs:
if s.name not in compiler_package_names:
continue
candidate = CompilerConfigFactory.from_external_spec(s)
if candidate is None:
continue
result.append(candidate)
return result
@staticmethod
def from_packages_yaml(packages_yaml) -> List[dict]:
compiler_specs = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in packages_yaml.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
current_specs = []
for current_external in externals_config:
compiler = CompilerConfigFactory._spec_from_external_config(current_external)
if compiler:
current_specs.append(compiler)
compiler_specs.extend(current_specs)
return CompilerConfigFactory.from_specs(compiler_specs)
@staticmethod
def _spec_from_external_config(config):
# Allow `@x.y.z` instead of `@=x.y.z`
err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
# If 'extra_attributes' is missing, this entry may not be meant to be used
# as a compiler, so just leave a debug message, but don't be loud with a warning.
if _EXTRA_ATTRIBUTES_KEY not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
return None
extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
result = spack.spec.Spec(
str(spack.spec.parse_with_version_concrete(config["spec"])),
external_modules=config.get("modules"),
)
result.extra_attributes = extra_attributes
return result
@staticmethod
def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
spec = spack.spec.parse_with_version_concrete(spec)
extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
if extra_attributes is None:
return None
paths = CompilerConfigFactory._extract_compiler_paths(spec)
if paths is None:
return None
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)
compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": getattr(spec, "external_modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
@staticmethod
def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
err_header = f"The external spec '{spec}' cannot be used as a compiler"
extra_attributes = spec.extra_attributes
# If I have 'extra_attributes' warn if 'compilers' is missing,
# or we don't have a C compiler
if _COMPILERS_KEY not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
)
return None
attribute_compilers = extra_attributes[_COMPILERS_KEY]
if _C_KEY not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
)
return None
c_compiler = attribute_compilers[_C_KEY]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if _CXX_KEY not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if _FORTRAN_KEY not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
return {
"cc": c_compiler,
"cxx": attribute_compilers.get(_CXX_KEY, None),
"fc": attribute_compilers.get(_FORTRAN_KEY, None),
"f77": attribute_compilers.get(_FORTRAN_KEY, None),
}
@staticmethod
def _extract_os_and_target(spec: "spack.spec.Spec"):
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
return operating_system, target
class InvalidCompilerConfigurationError(spack.error.SpackError):
def __init__(self, compiler_spec):
super().__init__(
f'Invalid configuration for [compiler "{compiler_spec}"]: ',
f"Compiler configuration must contain entries for "
f"all compilers: {spack.compiler.PATH_INSTANCE_VARS}",
'Invalid configuration for [compiler "%s"]: ' % compiler_spec,
"Compiler configuration must contain entries for all compilers: %s"
% _path_instance_vars,
)
class NoCompilersError(spack.error.SpackError):
def __init__(self):
super().__init__("Spack could not find any compilers!")
class UnknownCompilerError(spack.error.SpackError):
def __init__(self, compiler_name):
super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))
@@ -839,3 +1111,25 @@ def __init__(self, compiler_spec, target):
super().__init__(
"No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
)
class CompilerDuplicateError(spack.error.SpackError):
def __init__(self, compiler_spec, arch_spec):
config_file_to_duplicates = get_compiler_duplicates(compiler_spec, arch_spec)
duplicate_table = list((x, len(y)) for x, y in config_file_to_duplicates.items())
descriptor = lambda num: "time" if num == 1 else "times"
duplicate_msg = lambda cfgfile, count: "{0}: {1} {2}".format(
cfgfile, str(count), descriptor(count)
)
msg = (
"Compiler configuration contains entries with duplicate"
+ " specification ({0}, {1})".format(compiler_spec, arch_spec)
+ " in the following files:\n\t"
+ "\n\t".join(duplicate_msg(x, y) for x, y in duplicate_table)
)
super().__init__(msg)
class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
def __init__(self, compiler_spec):
super().__init__("Multiple compilers satisfy spec %s" % compiler_spec)

View File

@@ -223,30 +223,6 @@ def get_oneapi_root(pth: str):
)
self.msvc_compiler_environment = CmdCall(*env_cmds)
@property
def cxx11_flag(self):
return "/std:c++11"
@property
def cxx14_flag(self):
return "/std:c++14"
@property
def cxx17_flag(self):
return "/std:c++17"
@property
def cxx20_flag(self):
return "/std:c++20"
@property
def c11_flag(self):
return "/std:c11"
@property
def c17_flag(self):
return "/std:c17"
@property
def msvc_version(self):
"""This is the VCToolset version *NOT* the actual version of the cl compiler

View File

@@ -8,6 +8,7 @@
from contextlib import contextmanager
from itertools import chain
import spack.abi
import spack.compilers
import spack.config
import spack.environment

View File

@@ -1090,7 +1090,7 @@ def validate(
def read_config_file(
path: str, schema: Optional[YamlConfigDict] = None
filename: str, schema: Optional[YamlConfigDict] = None
) -> Optional[YamlConfigDict]:
"""Read a YAML configuration file.
@@ -1100,9 +1100,21 @@ def read_config_file(
# to preserve flexibility in calling convention (don't need to provide
# schema when it's not necessary) while allowing us to validate against a
# known schema when the top-level key could be incorrect.
if not os.path.exists(filename):
# Ignore nonexistent files.
tty.debug(f"Skipping nonexistent config path {filename}", level=3)
return None
elif not os.path.isfile(filename):
raise ConfigFileError(f"Invalid configuration. {filename} exists but is not a file.")
elif not os.access(filename, os.R_OK):
raise ConfigFileError(f"Config file is not readable: {filename}")
try:
with open(path) as f:
tty.debug(f"Reading config from file {path}")
tty.debug(f"Reading config from file {filename}")
with open(filename) as f:
data = syaml.load_config(f)
if data:
@@ -1113,20 +1125,15 @@ def read_config_file(
return data
except FileNotFoundError:
# Ignore nonexistent files.
tty.debug(f"Skipping nonexistent config path {path}", level=3)
return None
except OSError as e:
raise ConfigFileError(f"Path is not a file or is not readable: {path}: {str(e)}") from e
except StopIteration as e:
raise ConfigFileError(f"Config file is empty or is not a valid YAML dict: {path}") from e
except StopIteration:
raise ConfigFileError(f"Config file is empty or is not a valid YAML dict: {filename}")
except syaml.SpackYAMLError as e:
raise ConfigFileError(str(e)) from e
except OSError as e:
raise ConfigFileError(f"Error reading configuration file {filename}: {str(e)}") from e
def _override(string: str) -> bool:
"""Test if a spack YAML string is an override.

View File

@@ -45,9 +45,7 @@ def __reduce__(self):
def restore(
spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
) -> "DetectedPackage":
spec = spack.spec.Spec.from_detection(
spec_str=spec_str, external_path=prefix, extra_attributes=extra_attributes
)
spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
return DetectedPackage(spec=spec, prefix=prefix)
@@ -241,7 +239,7 @@ def update_configuration(
external_entries = pkg_config.get("externals", [])
assert not isinstance(external_entries, bool), "unexpected value for external entry"
all_new_specs.extend([x.spec for x in new_entries])
all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
if buildable is False:
pkg_config["buildable"] = False
pkg_to_cfg[package_name] = pkg_config

View File

@@ -62,7 +62,7 @@ def common_windows_package_paths(pkg_cls=None) -> List[str]:
def file_identifier(path):
s = os.stat(path)
return s.st_dev, s.st_ino
return (s.st_dev, s.st_ino)
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
@@ -80,8 +80,6 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
constructed based on the PATH environment variable.
"""
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
# Make sure we don't doubly list /usr/lib and /lib etc
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
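# Illustrative: on distros where /lib is a symlink to /usr/lib, both paths share
# one (st_dev, st_ino) pair, so the dedupe keyed on file_identifier keeps only one.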
return path_to_dict(search_paths)

View File

@@ -104,9 +104,7 @@ def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[
@property
def expected_specs(self) -> List[spack.spec.Spec]:
return [
spack.spec.Spec.from_detection(
item.spec, external_path=self.tmpdir.name, extra_attributes=item.extra_attributes
)
spack.spec.Spec.from_detection(item.spec, extra_attributes=item.extra_attributes)
for item in self.test.results
]

View File

@@ -32,9 +32,10 @@ class OpenMpi(Package):
"""
import collections
import collections.abc
import functools
import os.path
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Set, Tuple, Union
import llnl.util.lang
import llnl.util.tty.color
@@ -47,7 +48,6 @@ class OpenMpi(Package):
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency
from spack.directives_meta import DirectiveError, DirectiveMeta
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -80,6 +80,22 @@ class OpenMpi(Package):
"redistribute",
]
#: These are variant names used by Spack internally; packages can't use them
reserved_names = [
"arch",
"architecture",
"dev_path",
"namespace",
"operating_system",
"os",
"patches",
"platform",
"target",
]
#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]
_patch_order_index = 0
@@ -139,6 +155,219 @@ def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
return spack.spec.Spec(value)
class DirectiveMeta(type):
"""Flushes the directives that were temporarily stored in the staging
area into the package.
"""
# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
# 1. in the order they were defined
# 2. following the MRO
attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass
# De-duplicates directives from base classes
attr_dict["_directives_to_be_executed"] = [
x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
]
# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
attr_dict["_directives_to_be_executed"].extend(
DirectiveMeta._directives_to_be_executed
)
DirectiveMeta._directives_to_be_executed = []
return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
setattr(cls, d, {})
# Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)
# Ignore any directives executed *within* top-level
# directives by clearing out the queue they're appended to
DirectiveMeta._directives_to_be_executed = []
super(DirectiveMeta, cls).__init__(name, bases, attr_dict)
@staticmethod
def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)
@staticmethod
def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)
@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()
@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in python.
Here's an example directive:
.. code-block:: python
@directive(dicts='versions')
def version(pkg, ...):
...
This directive allows you to write:
.. code-block:: python
class Foo(Package):
version(...)
The ``@directive`` decorator handles a couple of things for you:
1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.
2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.
The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.
This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names
if isinstance(dicts, str):
dicts = (dicts,)
if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)
# This decorator just returns the directive functions
def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)
# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
if decorated_function.__name__ == "version":
msg = (
'directive "{0}" cannot be used within a "when"'
' context since it does not support a "when=" '
"argument"
)
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)
when_constraints = [
spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
if kwargs.get("when"):
when_constraints.append(spack.spec.Spec(kwargs["when"]))
when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)
kwargs["when"] = when_spec
# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
# lazily. Instead, let the called directive handle them.
# This allows nested directive calls in packages. The
# caller can return the directive if it should be queued.
def remove_directives(arg):
directives = DirectiveMeta._directives_to_be_executed
if isinstance(arg, (list, tuple)):
# Descend into args that are lists or tuples
for a in arg:
remove_directives(a)
else:
# Remove directives args from the exec queue
remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)
# Nasty, but it's the best way I can think of to avoid
# side effects if directive results are passed as args
remove_directives(args)
remove_directives(list(kwargs.values()))
# A directive returns either something that is callable on a
# package or a sequence of them
result = decorated_function(*args, **kwargs)
# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
# wrapped function returns same result as original so
# that we can nest directives
return result
return _wrapper
return _decorator
SubmoduleCallback = Callable[["spack.package_base.PackageBase"], Union[str, List[str], bool]]
directive = DirectiveMeta.directive
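As a rough sketch of the machinery above (the ``greet`` directive and the
``greetings`` dictionary are hypothetical, purely for illustration):
.. code-block:: python
   @directive(dicts="greetings")
   def greet(message):
       # A directive returns a callable (or a sequence of them); DirectiveMeta
       # queues it at class-definition time and later invokes it with the class.
       def _execute_greet(pkg):
           pkg.greetings[message] = True
       return _execute_greet
   class Foo(Package):
       # Queued here; flushed by DirectiveMeta.__init__ for classes defined in
       # the spack.pkg namespace, after which Foo.greetings == {"hello": True}.
       greet("hello")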
@@ -617,7 +846,7 @@ def format_error(msg, pkg):
msg += " @*r{{[{0}, variant '{1}']}}"
return llnl.util.tty.color.colorize(msg.format(pkg.name, name))
if name in spack.variant.reserved_names:
if name in reserved_names:
def _raise_reserved_name(pkg):
msg = "The name '%s' is reserved by Spack" % name
@@ -881,6 +1110,10 @@ def _execute_languages(pkg: "spack.package_base.PackageBase"):
return _execute_languages
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
class DependencyError(DirectiveError):
"""This is raised when a dependency specification is invalid."""

View File

@@ -1,234 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import functools
from typing import List, Set
import llnl.util.lang
import spack.error
import spack.spec
#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]
class DirectiveMeta(type):
"""Flushes the directives that were temporarily stored in the staging
area into the package.
"""
# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
# 1. in the order they were defined
# 2. following the MRO
attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass
# De-duplicates directives from base classes
attr_dict["_directives_to_be_executed"] = [
x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
]
# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
attr_dict["_directives_to_be_executed"].extend(
DirectiveMeta._directives_to_be_executed
)
DirectiveMeta._directives_to_be_executed = []
return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
setattr(cls, d, {})
# Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)
# Ignore any directives executed *within* top-level
# directives by clearing out the queue they're appended to
DirectiveMeta._directives_to_be_executed = []
super(DirectiveMeta, cls).__init__(name, bases, attr_dict)
@staticmethod
def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)
@staticmethod
def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)
@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()
@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in python.
Here's an example directive:
.. code-block:: python
@directive(dicts='versions')
def version(pkg, ...):
...
This directive allows you to write:
.. code-block:: python
class Foo(Package):
version(...)
The ``@directive`` decorator handles a couple of things for you:
1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.
2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.
The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.
This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names
if isinstance(dicts, str):
dicts = (dicts,)
if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))
# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)
# This decorator just returns the directive functions
def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)
# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
if decorated_function.__name__ == "version":
msg = (
'directive "{0}" cannot be used within a "when"'
' context since it does not support a "when=" '
"argument"
)
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)
when_constraints = [
spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
if kwargs.get("when"):
when_constraints.append(spack.spec.Spec(kwargs["when"]))
when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)
kwargs["when"] = when_spec
# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
# lazily. Instead, let the called directive handle them.
# This allows nested directive calls in packages. The
# caller can return the directive if it should be queued.
def remove_directives(arg):
directives = DirectiveMeta._directives_to_be_executed
if isinstance(arg, (list, tuple)):
# Descend into args that are lists or tuples
for a in arg:
remove_directives(a)
else:
# Remove directives args from the exec queue
remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)
# Nasty, but it's the best way I can think of to avoid
# side effects if directive results are passed as args
remove_directives(args)
remove_directives(list(kwargs.values()))
# A directive returns either something that is callable on a
# package or a sequence of them
result = decorated_function(*args, **kwargs)
# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
# wrapped function returns same result as original so
# that we can nest directives
return result
return _wrapper
return _decorator
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""

View File

@@ -1214,6 +1214,7 @@ def scope_name(self):
def include_concrete_envs(self):
"""Copy and save the included envs' specs internally"""
lockfile_meta = None
root_hash_seen = set()
concrete_hash_seen = set()
self.included_concrete_spec_data = {}
@@ -1224,26 +1225,37 @@ def include_concrete_envs(self):
raise SpackEnvironmentError(f"Unable to find env at {env_path}")
env = Environment(env_path)
self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}
with open(env.lock_path) as f:
lockfile_as_dict = env._read_lockfile(f)
# Lockfile_meta must match each env and use at least format version 5
if lockfile_meta is None:
lockfile_meta = lockfile_as_dict["_meta"]
elif lockfile_meta != lockfile_as_dict["_meta"]:
raise SpackEnvironmentError("All lockfile _meta values must match")
elif lockfile_meta["lockfile-version"] < 5:
raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
# Copy unique root specs from env
for root_dict in env._concrete_roots_dict():
self.included_concrete_spec_data[env_path] = {"roots": []}
for root_dict in lockfile_as_dict["roots"]:
if root_dict["hash"] not in root_hash_seen:
self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
root_hash_seen.add(root_dict["hash"])
# Copy unique concrete specs from env
for dag_hash, spec_details in env._concrete_specs_dict().items():
if dag_hash not in concrete_hash_seen:
self.included_concrete_spec_data[env_path]["concrete_specs"].update(
{dag_hash: spec_details}
for concrete_spec in lockfile_as_dict["concrete_specs"]:
if concrete_spec not in concrete_hash_seen:
self.included_concrete_spec_data[env_path].update(
{"concrete_specs": lockfile_as_dict["concrete_specs"]}
)
concrete_hash_seen.add(dag_hash)
concrete_hash_seen.add(concrete_spec)
# Copy transitive include data
transitive = env.included_concrete_spec_data
if transitive:
self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
if "include_concrete" in lockfile_as_dict.keys():
self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
"include_concrete"
]
self._read_lockfile_dict(self._to_lockfile_dict())
self.write()
@@ -1644,7 +1656,7 @@ def _concretize_separately(self, tests=False):
# Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)
_ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)
# Early return if there is nothing to do
if len(args) == 0:
@@ -2161,23 +2173,16 @@ def _get_environment_specs(self, recurse_dependencies=True):
return specs
def _concrete_specs_dict(self):
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = {}
for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
# Assumes no legacy formats, since this was just created.
spec_dict[ht.dag_hash.name] = s.dag_hash()
concrete_specs[s.dag_hash()] = spec_dict
return concrete_specs
def _concrete_roots_dict(self):
hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = self._concrete_specs_dict()
root_specs = self._concrete_roots_dict()
spack_dict = {"version": spack.spack_version}
spack_commit = spack.main.get_spack_commit()
@@ -2198,7 +2203,7 @@ def _to_lockfile_dict(self):
# spack version information
"spack": spack_dict,
# users specs + hashes are the 'roots' of the environment
"roots": root_specs,
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
# Concrete specs by hash, including dependencies
"concrete_specs": concrete_specs,
}

View File

@@ -54,7 +54,7 @@
import spack.version
import spack.version.git_ref_lookup
from spack.util.compression import decompressor_for
from spack.util.executable import CommandNotFoundError, Executable, which
from spack.util.executable import CommandNotFoundError, which
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
@@ -246,28 +246,33 @@ class URLFetchStrategy(FetchStrategy):
# these are checksum types. The generic 'checksum' is deprecated for
# specific hash names, but we need it for backward compatibility
optional_attrs = [*crypto.hashes.keys(), "checksum"]
optional_attrs = list(crypto.hashes.keys()) + ["checksum"]
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs) -> None:
def __init__(self, url=None, checksum=None, **kwargs):
super().__init__(**kwargs)
self.url = url
# Prefer values in kwargs to the positionals.
self.url = kwargs.get("url", url)
self.mirrors = kwargs.get("mirrors", [])
# digest can be set as the first argument, or from an explicit
# kwarg by the hash name.
self.digest: Optional[str] = checksum
self.digest = kwargs.get("checksum", checksum)
for h in self.optional_attrs:
if h in kwargs:
self.digest = kwargs[h]
self.expand_archive: bool = kwargs.get("expand", True)
self.extra_options: dict = kwargs.get("fetch_options", {})
self._curl: Optional[Executable] = None
self.extension: Optional[str] = kwargs.get("extension", None)
self.expand_archive = kwargs.get("expand", True)
self.extra_options = kwargs.get("fetch_options", {})
self._curl = None
self.extension = kwargs.get("extension", None)
if not self.url:
raise ValueError("URLFetchStrategy requires a url for fetching.")
@property
def curl(self) -> Executable:
def curl(self):
if not self._curl:
self._curl = web_util.require_curl()
return self._curl
@@ -343,8 +348,8 @@ def _fetch_urllib(self, url):
if os.path.lexists(save_file):
os.remove(save_file)
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)
with open(save_file, "wb") as _open_file:
shutil.copyfileobj(response, _open_file)
self._check_headers(str(response.headers))
@@ -463,7 +468,7 @@ def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
if not self.digest:
raise NoDigestError(f"Attempt to check {self.__class__.__name__} with no digest.")
raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
verify_checksum(self.archive_file, self.digest)
@@ -474,8 +479,8 @@ def reset(self):
"""
if not self.archive_file:
raise NoArchiveFileError(
f"Tried to reset {self.__class__.__name__} before fetching",
f"Failed on reset() for URL{self.url}",
"Tried to reset URLFetchStrategy before fetching",
"Failed on reset() for URL %s" % self.url,
)
# Remove everything but the archive from the stage
@@ -488,10 +493,14 @@ def reset(self):
self.expand()
def __repr__(self):
return f"{self.__class__.__name__}<{self.url}>"
url = self.url if self.url else "no url"
return "%s<%s>" % (self.__class__.__name__, url)
def __str__(self):
return self.url
if self.url:
return self.url
else:
return "[no url]"
@fetcher
@@ -504,7 +513,7 @@ def fetch(self):
# check whether the cache file exists.
if not os.path.isfile(path):
raise NoCacheError(f"No cache of {path}")
raise NoCacheError("No cache of %s" % path)
# remove old symlink if one is there.
filename = self.stage.save_filename
@@ -514,8 +523,8 @@ def fetch(self):
# Symlink to local cached archive.
symlink(path, filename)
# Remove link if checksum fails, or subsequent fetchers will assume they don't need to
# download.
# Remove link if checksum fails, or subsequent fetchers
# will assume they don't need to download.
if self.digest:
try:
self.check()
@@ -524,12 +533,12 @@ def fetch(self):
raise
# Notify the user how we fetched.
tty.msg(f"Using cached archive: {path}")
tty.msg("Using cached archive: {0}".format(path))
class OCIRegistryFetchStrategy(URLFetchStrategy):
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
super().__init__(url=url, checksum=checksum, **kwargs)
def __init__(self, url=None, checksum=None, **kwargs):
super().__init__(url, checksum, **kwargs)
self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)
@@ -574,18 +583,18 @@ def __init__(self, **kwargs):
# Set a URL based on the type of fetch strategy.
self.url = kwargs.get(self.url_attr, None)
if not self.url:
raise ValueError(f"{self.__class__} requires {self.url_attr} argument.")
raise ValueError("%s requires %s argument." % (self.__class__, self.url_attr))
for attr in self.optional_attrs:
setattr(self, attr, kwargs.get(attr, None))
@_needs_stage
def check(self):
tty.debug(f"No checksum needed when fetching with {self.url_attr}")
tty.debug("No checksum needed when fetching with {0}".format(self.url_attr))
@_needs_stage
def expand(self):
tty.debug(f"Source fetched with {self.url_attr} is already expanded.")
tty.debug("Source fetched with %s is already expanded." % self.url_attr)
@_needs_stage
def archive(self, destination, *, exclude: Optional[str] = None):
@@ -605,10 +614,10 @@ def archive(self, destination, *, exclude: Optional[str] = None):
)
def __str__(self):
return f"VCS: {self.url}"
return "VCS: %s" % self.url
def __repr__(self):
return f"{self.__class__}<{self.url}>"
return "%s<%s>" % (self.__class__, self.url)
@fetcher
@@ -711,17 +720,11 @@ class GitFetchStrategy(VCSFetchStrategy):
"submodules",
"get_full_repo",
"submodules_delete",
"git_sparse_paths",
]
git_version_re = r"git version (\S+)"
def __init__(self, **kwargs):
self.commit: Optional[str] = None
self.tag: Optional[str] = None
self.branch: Optional[str] = None
# Discards the keywords in kwargs that may conflict with the next call
# to __init__
forwarded_args = copy.copy(kwargs)
@@ -732,7 +735,6 @@ def __init__(self, **kwargs):
self.submodules = kwargs.get("submodules", False)
self.submodules_delete = kwargs.get("submodules_delete", False)
self.get_full_repo = kwargs.get("get_full_repo", False)
self.git_sparse_paths = kwargs.get("git_sparse_paths", None)
@property
def git_version(self):
@@ -770,71 +772,68 @@ def git(self):
@property
def cachable(self):
return self.cache_enabled and bool(self.commit)
return self.cache_enabled and bool(self.commit or self.tag)
def source_id(self):
# TODO: tree-hash would secure download cache and mirrors, commit only secures checkouts.
return self.commit
return self.commit or self.tag
def mirror_id(self):
if self.commit:
repo_ref = self.commit or self.tag or self.branch
if repo_ref:
repo_path = urllib.parse.urlparse(self.url).path
result = os.path.sep.join(["git", repo_path, self.commit])
result = os.path.sep.join(["git", repo_path, repo_ref])
return result
def _repo_info(self):
args = ""
if self.commit:
args = f" at commit {self.commit}"
elif self.tag:
args = f" at tag {self.tag}"
elif self.branch:
args = f" on branch {self.branch}"
return f"{self.url}{args}"
if self.commit:
args = " at commit {0}".format(self.commit)
elif self.tag:
args = " at tag {0}".format(self.tag)
elif self.branch:
args = " on branch {0}".format(self.branch)
return "{0}{1}".format(self.url, args)
@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug(f"Already fetched {self.stage.source_path}")
tty.debug("Already fetched {0}".format(self.stage.source_path))
return
if self.git_sparse_paths:
self._sparse_clone_src()
else:
self._clone_src()
self.submodule_operations()
self.clone(commit=self.commit, branch=self.branch, tag=self.tag)
def bare_clone(self, dest: str) -> None:
def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
"""
Execute a bare clone for metadata only
Clone a repository to a path.
Requires a destination since bare cloning does not provide source
and shouldn't be used for staging.
This method handles cloning from git, but does not require a stage.
Arguments:
dest (str or None): The path into which the code is cloned. If None,
requires a stage and uses the stage's source path.
commit (str or None): A commit to fetch from the remote. Only one of
commit, branch, and tag may be non-None.
branch (str or None): A branch to fetch from the remote.
tag (str or None): A tag to fetch from the remote.
bare (bool): Execute a "bare" git clone (--bare option to git)
"""
# Default to spack source path
tty.debug(f"Cloning git repository: {self._repo_info()}")
dest = dest or self.stage.source_path
tty.debug("Cloning git repository: {0}".format(self._repo_info()))
git = self.git
debug = spack.config.get("config:debug")
# We don't need to worry about which commit/branch/tag is checked out
clone_args = ["clone", "--bare"]
if not debug:
clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)
def _clone_src(self) -> None:
"""Clone a repository to a path using git."""
# Default to spack source path
dest = self.stage.source_path
tty.debug(f"Cloning git repository: {self._repo_info()}")
git = self.git
debug = spack.config.get("config:debug")
if self.commit:
if bare:
# We don't need to worry about which commit/branch/tag is checked out
clone_args = ["clone", "--bare"]
if not debug:
clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)
elif commit:
# Need to do a regular clone and check out everything if
# they asked for a particular commit.
clone_args = ["clone", self.url]
@@ -853,7 +852,7 @@ def _clone_src(self) -> None:
)
with working_dir(dest):
checkout_args = ["checkout", self.commit]
checkout_args = ["checkout", commit]
if not debug:
checkout_args.insert(1, "--quiet")
git(*checkout_args)
@@ -865,10 +864,10 @@ def _clone_src(self) -> None:
args.append("--quiet")
# If we want a particular branch ask for it.
if self.branch:
args.extend(["--branch", self.branch])
elif self.tag and self.git_version >= spack.version.Version("1.8.5.2"):
args.extend(["--branch", self.tag])
if branch:
args.extend(["--branch", branch])
elif tag and self.git_version >= spack.version.Version("1.8.5.2"):
args.extend(["--branch", tag])
# Try to be efficient if we're using a new enough git.
# This checks out only one branch's history
@@ -900,7 +899,7 @@ def _clone_src(self) -> None:
# For tags, be conservative and check them out AFTER
# cloning. Later git versions can do this with clone
# --branch, but older ones fail.
if self.tag and self.git_version < spack.version.Version("1.8.5.2"):
if tag and self.git_version < spack.version.Version("1.8.5.2"):
# pull --tags returns a "special" error code of 1 in
# older versions that we have to ignore.
# see: https://github.com/git/git/commit/19d122b
@@ -913,79 +912,6 @@ def _clone_src(self) -> None:
git(*pull_args, ignore_errors=1)
git(*co_args)
def _sparse_clone_src(self, **kwargs):
"""Use git's sparse checkout feature to clone portions of a git repository"""
dest = self.stage.source_path
git = self.git
if self.git_version < spack.version.Version("2.26.0"):
# technically this should be supported for 2.25, but bumping for OS issues
# see https://github.com/spack/spack/issues/45771
# code paths exist where the package is not set. Ensure some identifier for
# the package that was configured for sparse checkout exists in the error message
identifier = str(self.url)
if self.package:
identifier += f" ({self.package.name})"
tty.warn(
(
f"{identifier} is configured for git sparse-checkout "
"but the git version is too old to support sparse cloning. "
"Cloning the full repository instead."
)
)
self._clone_src()
else:
# default to depth=2 to allow for retention of some git properties
depth = kwargs.get("depth", 2)
needs_fetch = self.branch or self.tag
git_ref = self.branch or self.tag or self.commit
assert git_ref
clone_args = ["clone"]
if needs_fetch:
clone_args.extend(["--branch", git_ref])
if self.get_full_repo:
clone_args.append("--no-single-branch")
else:
clone_args.append("--single-branch")
clone_args.extend(
[f"--depth={depth}", "--no-checkout", "--filter=blob:none", self.url]
)
sparse_args = ["sparse-checkout", "set"]
if callable(self.git_sparse_paths):
sparse_args.extend(self.git_sparse_paths())
else:
sparse_args.extend([p for p in self.git_sparse_paths])
sparse_args.append("--cone")
checkout_args = ["checkout", git_ref]
if not spack.config.get("config:debug"):
clone_args.insert(1, "--quiet")
checkout_args.insert(1, "--quiet")
with temp_cwd():
git(*clone_args)
repo_name = get_single_file(".")
if self.stage:
self.stage.srcdir = repo_name
shutil.move(repo_name, dest)
with working_dir(dest):
git(*sparse_args)
git(*checkout_args)
def submodule_operations(self):
dest = self.stage.source_path
git = self.git
if self.submodules_delete:
with working_dir(dest):
for submodule_to_delete in self.submodules_delete:
@@ -1038,7 +964,7 @@ def protocol_supports_shallow_clone(self):
return not (self.url.startswith("http://") or self.url.startswith("/"))
def __str__(self):
return f"[git] {self._repo_info()}"
return "[git] {0}".format(self._repo_info())
@fetcher
@@ -1362,7 +1288,7 @@ def reset(self):
shutil.move(scrubbed, source_path)
def __str__(self):
return f"[hg] {self.url}"
return "[hg] %s" % self.url
@fetcher
@@ -1371,16 +1297,47 @@ class S3FetchStrategy(URLFetchStrategy):
url_attr = "s3"
def __init__(self, *args, **kwargs):
try:
super().__init__(*args, **kwargs)
except ValueError:
if not kwargs.get("url"):
raise ValueError("S3FetchStrategy requires a url for fetching.")
@_needs_stage
def fetch(self):
if not self.url.startswith("s3://"):
raise spack.error.FetchError(
f"{self.__class__.__name__} can only fetch from s3:// urls."
)
if self.archive_file:
tty.debug(f"Already downloaded {self.archive_file}")
return
self._fetch_urllib(self.url)
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "s3":
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
basename = os.path.basename(parsed_url.path)
request = urllib.request.Request(
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
)
with working_dir(self.stage.path):
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
raise FailedDownloadError(e) from e
tty.debug(f"Fetching {self.url}")
with open(basename, "wb") as f:
shutil.copyfileobj(response, f)
content_type = web_util.get_header(response.headers, "Content-type")
if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
if not self.archive_file:
raise FailedDownloadError(
RuntimeError(f"Missing archive {self.archive_file} after fetching")
@@ -1393,17 +1350,46 @@ class GCSFetchStrategy(URLFetchStrategy):
url_attr = "gs"
def __init__(self, *args, **kwargs):
try:
super().__init__(*args, **kwargs)
except ValueError:
if not kwargs.get("url"):
raise ValueError("GCSFetchStrategy requires a url for fetching.")
@_needs_stage
def fetch(self):
if not self.url.startswith("gs"):
raise spack.error.FetchError(
f"{self.__class__.__name__} can only fetch from gs:// urls."
)
if self.archive_file:
tty.debug(f"Already downloaded {self.archive_file}")
tty.debug("Already downloaded {0}".format(self.archive_file))
return
self._fetch_urllib(self.url)
parsed_url = urllib.parse.urlparse(self.url)
if parsed_url.scheme != "gs":
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
basename = os.path.basename(parsed_url.path)
request = urllib.request.Request(
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
)
with working_dir(self.stage.path):
try:
response = web_util.urlopen(request)
except (TimeoutError, urllib.error.URLError) as e:
raise FailedDownloadError(e) from e
tty.debug(f"Fetching {self.url}")
with open(basename, "wb") as f:
shutil.copyfileobj(response, f)
content_type = web_util.get_header(response.headers, "Content-type")
if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
if not self.archive_file:
raise FailedDownloadError(
@@ -1417,7 +1403,7 @@ class FetchAndVerifyExpandedFile(URLFetchStrategy):
as well as after expanding it."""
def __init__(self, url, archive_sha256: str, expanded_sha256: str):
super().__init__(url=url, checksum=archive_sha256)
super().__init__(url, archive_sha256)
self.expanded_sha256 = expanded_sha256
def expand(self):
@@ -1459,14 +1445,14 @@ def stable_target(fetcher):
return False
def from_url(url: str) -> URLFetchStrategy:
def from_url(url):
"""Given a URL, find an appropriate fetch strategy for it.
Currently just gives you a URLFetchStrategy that uses curl.
TODO: make this return appropriate fetch strategies for other
types of URLs.
"""
return URLFetchStrategy(url=url)
return URLFetchStrategy(url)
def from_kwargs(**kwargs):
@@ -1535,12 +1521,10 @@ def _check_version_attributes(fetcher, pkg, version):
def _extrapolate(pkg, version):
"""Create a fetcher from an extrapolated URL for this version."""
try:
return URLFetchStrategy(url=pkg.url_for_version(version), fetch_options=pkg.fetch_options)
return URLFetchStrategy(pkg.url_for_version(version), fetch_options=pkg.fetch_options)
except spack.package_base.NoURLError:
raise ExtrapolationError(
f"Can't extrapolate a URL for version {version} because "
f"package {pkg.name} defines no URLs"
)
msg = "Can't extrapolate a URL for version %s " "because package %s defines no URLs"
raise ExtrapolationError(msg % (version, pkg.name))
def _from_merged_attrs(fetcher, pkg, version):
@@ -1557,11 +1541,8 @@ def _from_merged_attrs(fetcher, pkg, version):
attrs["fetch_options"] = pkg.fetch_options
attrs.update(pkg.versions[version])
if fetcher.url_attr == "git":
pkg_attr_list = ["submodules", "git_sparse_paths"]
for pkg_attr in pkg_attr_list:
if hasattr(pkg, pkg_attr):
attrs.setdefault(pkg_attr, getattr(pkg, pkg_attr))
if fetcher.url_attr == "git" and hasattr(pkg, "submodules"):
attrs.setdefault("submodules", pkg.submodules)
return fetcher(**attrs)
@@ -1656,9 +1637,11 @@ def for_package_version(pkg, version=None):
raise InvalidArgsError(pkg, version, **args)
def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
def from_url_scheme(url, *args, **kwargs):
"""Finds a suitable FetchStrategy by matching its url_attr with the scheme
in the given url."""
url = kwargs.get("url", url)
parsed_url = urllib.parse.urlparse(url, scheme="file")
scheme_mapping = kwargs.get("scheme_mapping") or {
@@ -1675,9 +1658,11 @@ def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
for fetcher in all_strategies:
url_attr = getattr(fetcher, "url_attr", None)
if url_attr and url_attr == scheme:
return fetcher(url=url, **kwargs)
return fetcher(url, *args, **kwargs)
raise ValueError(f'No FetchStrategy found for url with scheme: "{parsed_url.scheme}"')
raise ValueError(
'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(SCHEME=parsed_url.scheme)
)
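Illustrative dispatch, assuming the usual scheme mapping (http/https map to the
``url`` attribute):
.. code-block:: python
   from_url_scheme("https://example.com/pkg-1.0.tar.gz")  # -> URLFetchStrategy
   from_url_scheme("s3://bucket/pkg-1.0.tar.gz")          # -> S3FetchStrategy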
def from_list_url(pkg):
@@ -1702,9 +1687,7 @@ def from_list_url(pkg):
)
# construct a fetcher
return URLFetchStrategy(
url=url_from_list, checksum=checksum, fetch_options=pkg.fetch_options
)
return URLFetchStrategy(url_from_list, checksum, fetch_options=pkg.fetch_options)
except KeyError as e:
tty.debug(e)
tty.msg("Cannot find version %s in url_list" % pkg.version)
@@ -1732,10 +1715,10 @@ def store(self, fetcher, relative_dest):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def fetcher(self, target_path: str, digest: Optional[str], **kwargs) -> CacheURLFetchStrategy:
def fetcher(self, target_path, digest, **kwargs):
path = os.path.join(self.root, target_path)
url = url_util.path_to_file_url(path)
return CacheURLFetchStrategy(url=url, checksum=digest, **kwargs)
return CacheURLFetchStrategy(url, digest, **kwargs)
def destroy(self):
shutil.rmtree(self.root, ignore_errors=True)

View File

@@ -20,7 +20,6 @@
systems (e.g. modules, lmod, etc.) or to add other custom
features.
"""
import importlib
from llnl.util.lang import ensure_last, list_modules
@@ -47,7 +46,11 @@ def _populate_hooks(cls):
for name in relative_names:
module_name = __name__ + "." + name
module_obj = importlib.import_module(module_name)
# When importing a module from a package, __import__('A.B', ...)
# returns package A when 'fromlist' is empty. If fromlist is not
# empty it returns the submodule B instead
# See: https://stackoverflow.com/a/2725668/771663
module_obj = __import__(module_name, fromlist=[None])
cls._hooks.append((module_name, module_obj))
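# Illustrative comparison of the two import styles above (module name is an
# example): __import__("spack.hooks.sbang") returns the top-level 'spack'
# package, __import__("spack.hooks.sbang", fromlist=[None]) returns the
# 'spack.hooks.sbang' submodule, and importlib.import_module("spack.hooks.sbang")
# also returns the submodule directly.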
@property

View File

@@ -24,6 +24,5 @@ def post_install(spec, explicit):
# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
signing_key = bindist.select_signing_key() if mirror.signed else None
with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
bindist.push_or_raise([spec], out_url=mirror.push_url, signing_key=signing_key, force=True)
tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")

View File

@@ -488,6 +488,7 @@ def _process_binary_cache_tarball(
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
pkg.windows_establish_runtime_linkage()
if hasattr(pkg, "_post_buildcache_install_hook"):
pkg._post_buildcache_install_hook()
@@ -1610,7 +1611,9 @@ def _add_tasks(self, request: BuildRequest, all_deps):
def _add_compiler_package_to_config(self, pkg: "spack.package_base.PackageBase") -> None:
compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
spack.compilers.find_compilers([compiler_search_prefix])
spack.compilers.add_compilers_to_config(
spack.compilers.find_compilers([compiler_search_prefix])
)
def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
"""

View File

@@ -21,7 +21,6 @@
from typing import List, Optional, Union
import llnl.url
import llnl.util.symlink
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -31,7 +30,6 @@
import spack.fetch_strategy
import spack.mirror
import spack.oci.image
import spack.repo
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
@@ -428,74 +426,51 @@ def _determine_extension(fetcher):
return ext
class MirrorLayout:
"""A ``MirrorLayout`` object describes the relative path of a mirror entry."""
class MirrorReference:
"""A ``MirrorReference`` stores the relative paths where you can store a
package/resource in a mirror directory.
def __init__(self, path: str) -> None:
self.path = path
The appropriate storage location is given by ``storage_path``. The
``cosmetic_path`` property provides a reference that a human could generate
themselves based on reading the details of the package.
A user can iterate over a ``MirrorReference`` object to get all the
possible names that might be used to refer to the resource in a mirror;
this includes names generated by previous naming schemes that are no-longer
reported by ``storage_path`` or ``cosmetic_path``.
"""
def __init__(self, cosmetic_path, global_path=None):
self.global_path = global_path
self.cosmetic_path = cosmetic_path
@property
def storage_path(self):
if self.global_path:
return self.global_path
else:
return self.cosmetic_path
def __iter__(self):
"""Yield all paths including aliases where the resource can be found."""
yield self.path
def make_alias(self, root: str) -> None:
"""Make the entry ``root / self.path`` available under a human readable alias"""
pass
if self.global_path:
yield self.global_path
yield self.cosmetic_path
class DefaultLayout(MirrorLayout):
def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
# When we have a digest, it is used as the primary storage location. If not, then we use
# the human-readable alias. In case of mirrors of a VCS checkout, we currently do not have
# a digest, that's why an alias is required and a digest optional.
super().__init__(path=digest_path or alias_path)
self.alias = alias_path
self.digest_path = digest_path
class OCIImageLayout:
"""Follow the OCI Image Layout Specification to archive blobs
def make_alias(self, root: str) -> None:
"""Symlink a human readible path in our mirror to the actual storage location."""
# We already use the human-readable path as the main storage location.
if not self.digest_path:
return
alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
alias_dir = os.path.dirname(alias)
relative_dst = os.path.relpath(digest, start=alias_dir)
mkdirp(alias_dir)
tmp = f"{alias}.tmp"
llnl.util.symlink.symlink(relative_dst, tmp)
try:
os.rename(tmp, alias)
except OSError:
# Clean up the temporary if possible
try:
os.unlink(tmp)
except OSError:
pass
raise
def __iter__(self):
if self.digest_path:
yield self.digest_path
yield self.alias
class OCILayout(MirrorLayout):
"""Follow the OCI Image Layout Specification to archive blobs where paths are of the form
``blobs/<algorithm>/<digest>``"""
Paths are of the form `blobs/<algorithm>/<digest>`
"""
def __init__(self, digest: spack.oci.image.Digest) -> None:
super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))
self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest)
def __iter__(self):
yield self.storage_path
def default_mirror_layout(
fetcher: "spack.fetch_strategy.FetchStrategy",
per_package_ref: str,
spec: Optional["spack.spec.Spec"] = None,
) -> MirrorLayout:
def mirror_archive_paths(fetcher, per_package_ref, spec=None):
"""Returns a ``MirrorReference`` object which keeps track of the relative
storage path of the resource associated with the specified ``fetcher``."""
ext = None
@@ -519,7 +494,7 @@ def default_mirror_layout(
if global_ref and ext:
global_ref += ".%s" % ext
return DefaultLayout(per_package_ref, global_ref)
return MirrorReference(per_package_ref, global_ref)
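Either return type is consumed the same way: iterate the layout to try each
known relative path under a mirror. A sketch, with ``mirror_url`` and the fetch
attempt as placeholders:
.. code-block:: python
   layout = mirror_archive_paths(fetcher, "zlib/zlib-1.3.tar.gz")
   for relative_path in layout:
       candidate = url_util.join(mirror_url, relative_path)
       # try fetching `candidate`; on failure, fall through to the next name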
def get_all_versions(specs):

View File

@@ -25,11 +25,14 @@
so package authors should use their judgement.
"""
import functools
import inspect
from contextlib import contextmanager
import spack.directives_meta
from llnl.util.lang import caller_locals
import spack.directives
import spack.error
import spack.spec
from spack.spec import Spec
class MultiMethodMeta(type):
@@ -132,7 +135,7 @@ def __call__(self, package_or_builder_self, *args, **kwargs):
# its superclasses for successive calls. We don't have that
# information within `SpecMultiMethod`, because it is not
# associated with the package class.
for cls in package_or_builder_self.__class__.__mro__[1:]:
for cls in inspect.getmro(package_or_builder_self.__class__)[1:]:
superself = cls.__dict__.get(self.__name__, None)
if isinstance(superself, SpecMultiMethod):
@@ -162,9 +165,9 @@ def __init__(self, condition):
condition (str): condition to be met
"""
if isinstance(condition, bool):
self.spec = spack.spec.Spec() if condition else None
self.spec = Spec() if condition else None
else:
self.spec = spack.spec.Spec(condition)
self.spec = Spec(condition)
def __call__(self, method):
"""This annotation lets packages declare multiple versions of
@@ -226,9 +229,11 @@ def install(self, prefix):
platform-specific versions. There's not much we can do to get
around this because of the way decorators work.
"""
assert (
MultiMethodMeta._locals is not None
), "cannot use multimethod, missing MultiMethodMeta metaclass?"
# In Python 2, get the first definition of the method in the
# calling scope by looking at the caller's locals. In Python 3,
# we handle this using MultiMethodMeta.__prepare__.
if MultiMethodMeta._locals is None:
MultiMethodMeta._locals = caller_locals()
# Create a multimethod with this name if there is not one already
original_method = MultiMethodMeta._locals.get(method.__name__)
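For context, this is the canonical ``@when`` usage the machinery above supports
(a sketch based on documented behavior):
.. code-block:: python
   class Foo(Package):
       def install(self, spec, prefix):
           ...  # default implementation
       @when("@2.0:")
       def install(self, spec, prefix):
           ...  # chosen instead when the package spec satisfies @2.0: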
@@ -261,17 +266,17 @@ def __enter__(self):
and add their constraint to whatever may be already present in the directive
`when=` argument.
"""
spack.directives_meta.DirectiveMeta.push_to_context(str(self.spec))
spack.directives.DirectiveMeta.push_to_context(str(self.spec))
def __exit__(self, exc_type, exc_val, exc_tb):
spack.directives_meta.DirectiveMeta.pop_from_context()
spack.directives.DirectiveMeta.pop_from_context()
@contextmanager
def default_args(**kwargs):
spack.directives_meta.DirectiveMeta.push_default_args(kwargs)
spack.directives.DirectiveMeta.push_default_args(kwargs)
yield
spack.directives_meta.DirectiveMeta.pop_default_args()
spack.directives.DirectiveMeta.pop_default_args()
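The ``default_args`` context manager is typically used inside a package class
body to share keyword arguments across several directives, e.g.:
.. code-block:: python
   class Foo(Package):
       with default_args(type=("build", "run")):
           depends_on("python")
           depends_on("py-numpy")  # both get type=("build", "run")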
class MultiMethodError(spack.error.SpackError):

View File

@@ -390,12 +390,15 @@ def make_stage(
) -> spack.stage.Stage:
_urlopen = _urlopen or spack.oci.opener.urlopen
fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy(
url=url, checksum=digest.digest, _urlopen=_urlopen
url, checksum=digest.digest, _urlopen=_urlopen
)
# Use blobs/<alg>/<encoded> as the cache path, which follows
# the OCI Image Layout Specification. What's missing, though, are
# the `oci-layout` and `index.json` files required by the spec.
return spack.stage.Stage(
fetch_strategy, mirror_paths=spack.mirror.OCILayout(digest), name=digest.digest, keep=keep
fetch_strategy,
mirror_paths=spack.mirror.OCIImageLayout(digest),
name=digest.digest,
keep=keep,
)

View File

@@ -15,7 +15,6 @@
import functools
import glob
import hashlib
import importlib
import inspect
import io
import os
@@ -246,7 +245,10 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
if version_str:
objs_by_version[version_str].append(obj)
except Exception as e:
tty.debug(f"Cannot detect the version of '{obj}' [{str(e)}]")
msg = (
"An error occurred when trying to detect " 'the version of "{0}" [{1}]'
)
tty.debug(msg.format(obj, str(e)))
specs = []
for version_str, objs in objs_by_version.items():
@@ -259,23 +261,27 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
if isinstance(variant, str):
variant = (variant, {})
variant_str, extra_attributes = variant
spec_str = f"{cls.name}@{version_str} {variant_str}"
spec_str = "{0}@{1} {2}".format(cls.name, version_str, variant_str)
# Pop a few reserved keys from extra attributes, since
# they have a different semantics
external_path = extra_attributes.pop("prefix", None)
external_modules = extra_attributes.pop("modules", None)
try:
spec = spack.spec.Spec.from_detection(
spec = spack.spec.Spec(
spec_str,
external_path=external_path,
external_modules=external_modules,
extra_attributes=extra_attributes,
)
except Exception as e:
tty.debug(f'Parsing failed [spec_str="{spec_str}", error={str(e)}]')
msg = 'Parsing failed [spec_str="{0}", error={1}]'
tty.debug(msg.format(spec_str, str(e)))
else:
specs.append(spec)
specs.append(
spack.spec.Spec.from_detection(
spec, extra_attributes=extra_attributes
)
)
return sorted(specs)
@@ -734,7 +740,7 @@ def __init__(self, spec):
raise ValueError(msg.format(self))
# init internal variables
self._stage: Optional[StageComposite] = None
self._stage = None
self._fetcher = None
self._tester: Optional["PackageTest"] = None
@@ -862,7 +868,7 @@ def module(cls):
We use this to add variables to package modules. This makes
install() methods easier to write (e.g., can call configure())
"""
return importlib.import_module(cls.__module__)
return __import__(cls.__module__, fromlist=[cls.__name__])
@classproperty
def namespace(cls):
@@ -878,7 +884,7 @@ def fullname(cls):
def fullnames(cls):
"""Fullnames for this package and any packages from which it inherits."""
fullnames = []
for cls in cls.__mro__:
for cls in inspect.getmro(cls):
namespace = getattr(cls, "namespace", None)
if namespace:
fullnames.append("%s.%s" % (namespace, cls.name))
@@ -1092,10 +1098,9 @@ def _make_resource_stage(self, root_stage, resource):
root=root_stage,
resource=resource,
name=self._resource_stage(resource),
mirror_paths=spack.mirror.default_mirror_layout(
mirror_paths=spack.mirror.mirror_archive_paths(
resource.fetcher, os.path.join(self.name, pretty_resource_name)
),
mirrors=spack.mirror.MirrorCollection(source=True).values(),
path=self.path,
)
@@ -1107,7 +1112,7 @@ def _make_root_stage(self, fetcher):
# Construct a mirror path (TODO: get this out of package.py)
format_string = "{name}-{version}"
pretty_name = self.spec.format_path(format_string)
mirror_paths = spack.mirror.default_mirror_layout(
mirror_paths = spack.mirror.mirror_archive_paths(
fetcher, os.path.join(self.name, pretty_name), self.spec
)
# Construct a path where the stage should build.
@@ -1116,7 +1121,6 @@ def _make_root_stage(self, fetcher):
stage = Stage(
fetcher,
mirror_paths=mirror_paths,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
name=stage_name,
path=self.path,
search_fn=self._download_search,
@@ -1173,7 +1177,7 @@ def stage(self):
return self._stage
@stage.setter
def stage(self, stage: StageComposite):
def stage(self, stage):
"""Allow a stage object to be set to override the default."""
self._stage = stage
@@ -1467,7 +1471,6 @@ def do_fetch(self, mirror_only=False):
checksum
and (self.version not in self.versions)
and (not isinstance(self.version, GitVersion))
and ("dev_path" not in self.spec.variants)
):
tty.warn(
"There is no checksum on file to fetch %s safely."

View File

@@ -5,11 +5,10 @@
import stat
import warnings
import spack.config
import spack.error
import spack.repo
import spack.spec
from spack.config import ConfigError
from spack.util.path import canonicalize_path
from spack.version import Version
_lesser_spec_types = {"compiler": spack.spec.CompilerSpec, "version": Version}
@@ -155,6 +154,44 @@ def preferred_variants(cls, pkg_name):
)
def spec_externals(spec):
"""Return a list of external specs (w/external directory path filled in),
one for each known external installation.
"""
# break circular import.
from spack.util.module_cmd import path_from_modules # noqa: F401
def _package(maybe_abstract_spec):
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
return pkg_cls(maybe_abstract_spec)
allpkgs = spack.config.get("packages")
names = set([spec.name])
names |= set(vspec.name for vspec in _package(spec).virtuals_provided)
external_specs = []
for name in names:
pkg_config = allpkgs.get(name, {})
pkg_externals = pkg_config.get("externals", [])
for entry in pkg_externals:
spec_str = entry["spec"]
external_path = entry.get("prefix", None)
if external_path:
external_path = canonicalize_path(external_path)
external_modules = entry.get("modules", None)
external_spec = spack.spec.Spec.from_detection(
spack.spec.Spec(
spec_str, external_path=external_path, external_modules=external_modules
),
extra_attributes=entry.get("extra_attributes", {}),
)
if external_spec.intersects(spec):
external_specs.append(external_spec)
# Defensively copy returned specs
return [s.copy() for s in external_specs]
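A minimal usage sketch of the lookup above, assuming a hypothetical ``mypkg`` entry in ``packages.yaml`` (the function lives in ``spack.package_prefs`` per the surrounding hunks; the package name and prefix here are illustrative):

.. code-block:: python

    # Hypothetical packages.yaml entry this function would pick up:
    #   packages:
    #     mypkg:
    #       externals:
    #       - spec: mypkg@2.1
    #         prefix: /opt/mypkg
    import spack.spec
    from spack.package_prefs import spec_externals

    for external in spec_externals(spack.spec.Spec("mypkg")):
        # each returned spec is a defensive copy with external_path filled in
        print(external, external.external_path)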
def is_spec_buildable(spec):
"""Return true if the spec is configured as buildable"""
allpkgs = spack.config.get("packages")

View File

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import hashlib
import inspect
import os
import os.path
import pathlib
@@ -184,8 +185,8 @@ def __init__(
# search mro to look for the file
abs_path: Optional[str] = None
# At different times we call FilePatch on instances and classes
pkg_cls = pkg if isinstance(pkg, type) else pkg.__class__
for cls in pkg_cls.__mro__: # type: ignore
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
for cls in inspect.getmro(pkg_cls): # type: ignore
if not hasattr(cls, "module"):
# We've gone too far up the MRO
break
@@ -318,19 +319,18 @@ def stage(self) -> "spack.stage.Stage":
self.url, archive_sha256=self.archive_sha256, expanded_sha256=self.sha256
)
else:
fetcher = fs.URLFetchStrategy(url=self.url, sha256=self.sha256, expand=False)
fetcher = fs.URLFetchStrategy(self.url, sha256=self.sha256, expand=False)
# The same package can have multiple patches with the same name but
# with different contents, therefore apply a subset of the hash.
name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])
per_package_ref = os.path.join(self.owner.split(".")[-1], name)
mirror_ref = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
mirror_ref = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
self._stage = spack.stage.Stage(
fetcher,
name=f"{spack.stage.stage_prefix}patch-{fetch_digest}",
mirror_paths=mirror_ref,
mirrors=spack.mirror.MirrorCollection(source=True).values(),
)
return self._stage

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
from ._functions import _host, by_name, platforms, reset
from ._functions import _host, by_name, platforms, prevent_cray_detection, reset
from ._platform import Platform
from .darwin import Darwin
from .freebsd import FreeBSD
@@ -23,6 +23,7 @@
"host",
"by_name",
"reset",
"prevent_cray_detection",
]
#: The "real" platform of the host running Spack. This should not be changed

View File

@@ -2,8 +2,12 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import llnl.util.lang
import spack.util.environment
from .darwin import Darwin
from .freebsd import FreeBSD
from .linux import Linux
@@ -53,3 +57,14 @@ def by_name(name):
"""
platform_cls = cls_by_name(name)
return platform_cls() if platform_cls else None
@contextlib.contextmanager
def prevent_cray_detection():
"""Context manager that prevents the detection of the Cray platform"""
reset()
try:
with spack.util.environment.set_env(MODULEPATH=""):
yield
finally:
reset()
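A usage sketch: anything probing the platform inside the block sees an empty ``MODULEPATH``, so Cray module hints cannot leak into detection:

.. code-block:: python

    import spack.platforms

    with spack.platforms.prevent_cray_detection():
        # platform caches are reset on entry and again on exit
        host = spack.platforms.host()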

View File

@@ -365,9 +365,9 @@ def __init__(self, namespace):
def __getattr__(self, name):
"""Getattr lazily loads modules if they're not already loaded."""
submodule = f"{self.__package__}.{name}"
submodule = self.__package__ + "." + name
try:
setattr(self, name, importlib.import_module(submodule))
setattr(self, name, __import__(submodule))
except ImportError:
msg = "'{0}' object has no attribute {1}"
raise AttributeError(msg.format(type(self), name))
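The lazy-loading trick above, reduced to a standalone sketch (using the stdlib ``json`` package purely for illustration):

.. code-block:: python

    import importlib

    class LazyNamespace:
        __package__ = "json"  # any importable package works here

        def __getattr__(self, name):
            # only called when 'name' is missing; the first access imports
            # the submodule and caches it on the instance via setattr
            try:
                module = importlib.import_module(f"{self.__package__}.{name}")
            except ImportError:
                raise AttributeError(f"{type(self)} object has no attribute {name}")
            setattr(self, name, module)
            return module

    ns = LazyNamespace()
    ns.decoder  # triggers the import of json.decoder exactly once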
@@ -1281,7 +1281,7 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]
raise RepoError(msg) from e
cls = getattr(module, class_name)
if not isinstance(cls, type):
if not inspect.isclass(cls):
tty.die(f"{pkg_name}.{class_name} is not a class")
# Clear any prior changes to class attributes in case the class was loaded from the

View File

@@ -32,6 +32,7 @@
import spack.config
import spack.config as sc
import spack.deptypes as dt
import spack.directives
import spack.environment as ev
import spack.error
import spack.package_base
@@ -284,14 +285,16 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
return NoDuplicatesCounter(specs, tests=tests)
def all_compilers_in_config(configuration):
return spack.compilers.all_compilers_from(configuration)
def all_libcs() -> Set[spack.spec.Spec]:
"""Return a set of all libc specs targeted by any configured compiler. If none, fall back to
libc determined from the current Python process if dynamically linked."""
libcs = {
c.default_libc
for c in spack.compilers.all_compilers_from(spack.config.CONFIG)
if c.default_libc
c.default_libc for c in all_compilers_in_config(spack.config.CONFIG) if c.default_libc
}
if libcs:
@@ -610,7 +613,7 @@ def _external_config_with_implicit_externals(configuration):
if not using_libc_compatibility():
return packages_yaml
for compiler in spack.compilers.all_compilers_from(configuration):
for compiler in all_compilers_in_config(configuration):
libc = compiler.default_libc
if libc:
entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path}
@@ -1877,7 +1880,8 @@ def _spec_clauses(
# validate variant value only if spec not concrete
if not spec.concrete:
if not spec.virtual and vname not in spack.variant.reserved_names:
reserved_names = spack.directives.reserved_names
if not spec.virtual and vname not in reserved_names:
pkg_cls = self.pkg_class(spec.name)
try:
variant_def, _ = pkg_cls.variants[vname]
@@ -2998,7 +3002,7 @@ class CompilerParser:
def __init__(self, configuration) -> None:
self.compilers: Set[KnownCompiler] = set()
for c in spack.compilers.all_compilers_from(configuration):
for c in all_compilers_in_config(configuration):
if using_libc_compatibility() and not c_compiler_runs(c):
tty.debug(
f"the C compiler {c.cc} does not exist, or does not run correctly."
@@ -3462,7 +3466,7 @@ def reorder_flags(self):
"""
# reverse compilers so we get highest priority compilers that share a spec
compilers = dict(
(c.spec, c) for c in reversed(spack.compilers.all_compilers_from(spack.config.CONFIG))
(c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
)
cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())

View File

@@ -51,7 +51,6 @@
import collections
import collections.abc
import enum
import io
import itertools
import os
import pathlib
@@ -1428,7 +1427,7 @@ def __init__(
# init an empty spec that matches anything.
self.name = None
self.versions = vn.VersionList(":")
self.variants = VariantMap(self)
self.variants = vt.VariantMap(self)
self.architecture = None
self.compiler = None
self.compiler_flags = FlagMap(self)
@@ -1640,7 +1639,7 @@ def _add_flag(self, name, value, propagate):
Known flags currently include "arch"
"""
if propagate and name in vt.reserved_names:
if propagate and name in spack.directives.reserved_names:
raise UnsupportedPropagationError(
f"Propagation with '==' is not supported for '{name}'."
)
@@ -2578,27 +2577,22 @@ def from_signed_json(stream):
return Spec.from_dict(extracted_json)
@staticmethod
def from_detection(
spec_str: str,
*,
external_path: str,
external_modules: Optional[List[str]] = None,
extra_attributes: Optional[Dict] = None,
) -> "Spec":
def from_detection(spec_str, extra_attributes=None):
"""Construct a spec from a spec string determined during external
detection and attach extra attributes to it.
Args:
spec_str: spec string
external_path: prefix of the external spec
external_modules: optional module files to be loaded when the external spec is used
extra_attributes: dictionary containing extra attributes
spec_str (str): spec string
extra_attributes (dict): dictionary containing extra attributes
Returns:
spack.spec.Spec: external spec
"""
s = Spec(spec_str, external_path=external_path, external_modules=external_modules)
s = Spec(spec_str)
extra_attributes = syaml.sorted_dict(extra_attributes or {})
# This is needed to be able to validate multi-valued variants,
# otherwise they'll still be abstract in the context of detection.
substitute_abstract_variants(s)
vt.substitute_abstract_variants(s)
s.extra_attributes = extra_attributes
return s
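A sketch of the simpler call style shown above, reusing the ``cmake`` spec string that appears in the detection tests later in this diff (the extra attributes are illustrative):

.. code-block:: python

    import spack.spec

    s = spack.spec.Spec.from_detection(
        "cmake@3.17.2", extra_attributes={"compilers": {"c": "/usr/bin/cc"}}
    )
    assert s.extra_attributes  # sorted and attached to the returned spec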
@@ -2921,7 +2915,7 @@ def validate_or_raise(self):
# Ensure correctness of variants (if the spec is not virtual)
if not spec.virtual:
Spec.ensure_valid_variants(spec)
substitute_abstract_variants(spec)
vt.substitute_abstract_variants(spec)
@staticmethod
def ensure_valid_variants(spec):
@@ -2941,7 +2935,9 @@ def ensure_valid_variants(spec):
pkg_variants = pkg_cls.variants
# reserved names are variants that may be set on any package
# but are not necessarily recorded by the package's class
not_existing = set(spec.variants) - (set(pkg_variants) | set(vt.reserved_names))
not_existing = set(spec.variants) - (
set(pkg_variants) | set(spack.directives.reserved_names)
)
if not_existing:
raise vt.UnknownVariantError(spec, not_existing)
@@ -3890,7 +3886,7 @@ def format_attribute(match_object: Match) -> str:
if part.startswith("_"):
raise SpecFormatStringError("Attempted to format private attribute")
else:
if part == "variants" and isinstance(current, VariantMap):
if part == "variants" and isinstance(current, vt.VariantMap):
# subscript instead of getattr for variant names
current = current[part]
else:
@@ -4345,155 +4341,9 @@ def attach_git_version_lookup(self):
v.attach_lookup(spack.version.git_ref_lookup.GitRefLookup(self.fullname))
class VariantMap(lang.HashableMap):
"""Map containing variant instances. New values can be added only
if the key is not already present."""
def __init__(self, spec: Spec):
super().__init__()
self.spec = spec
def __setitem__(self, name, vspec):
# Raise a TypeError if vspec is not of the right type
if not isinstance(vspec, vt.AbstractVariant):
raise TypeError(
"VariantMap accepts only values of variant types "
f"[got {type(vspec).__name__} instead]"
)
# Raise an error if the variant was already in this map
if name in self.dict:
msg = 'Cannot specify variant "{0}" twice'.format(name)
raise vt.DuplicateVariantError(msg)
# Raise an error if name and vspec.name don't match
if name != vspec.name:
raise KeyError(
f'Inconsistent key "{name}", must be "{vspec.name}" to ' "match VariantSpec"
)
# Set the item
super().__setitem__(name, vspec)
def substitute(self, vspec):
"""Substitutes the entry under ``vspec.name`` with ``vspec``.
Args:
vspec: variant spec to be substituted
"""
if vspec.name not in self:
raise KeyError(f"cannot substitute a key that does not exist [{vspec.name}]")
# Set the item
super().__setitem__(vspec.name, vspec)
def satisfies(self, other):
return all(k in self and self[k].satisfies(other[k]) for k in other)
def intersects(self, other):
return all(self[k].intersects(other[k]) for k in other if k in self)
def constrain(self, other: "VariantMap") -> bool:
"""Add all variants in other that aren't in self to self. Also constrain all multi-valued
variants that are already present. Return True iff self changed"""
if other.spec is not None and other.spec._concrete:
for k in self:
if k not in other:
raise vt.UnsatisfiableVariantSpecError(self[k], "<absent>")
changed = False
for k in other:
if k in self:
# If they are not compatible raise an error
if not self[k].compatible(other[k]):
raise vt.UnsatisfiableVariantSpecError(self[k], other[k])
# If they are compatible merge them
changed |= self[k].constrain(other[k])
else:
# If it is not present copy it straight away
self[k] = other[k].copy()
changed = True
return changed
@property
def concrete(self):
"""Returns True if the spec is concrete in terms of variants.
Returns:
bool: True or False
"""
return self.spec._concrete or all(v in self for v in self.spec.package_class.variants)
def copy(self) -> "VariantMap":
clone = VariantMap(self.spec)
for name, variant in self.items():
clone[name] = variant.copy()
return clone
def __str__(self):
if not self:
return ""
# print keys in order
sorted_keys = sorted(self.keys())
# Separate boolean variants from key-value pairs as they print
# differently. All booleans go first to avoid ' ~foo' strings that
# break spec reuse in zsh.
bool_keys = []
kv_keys = []
for key in sorted_keys:
bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key)
# add spaces before and after key/value variants.
string = io.StringIO()
for key in bool_keys:
string.write(str(self[key]))
for key in kv_keys:
string.write(" ")
string.write(str(self[key]))
return string.getvalue()
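A small sketch of the merge semantics of ``constrain`` above (``mpileaks`` is the usual example package; any spec with boolean variants behaves the same):

.. code-block:: python

    import spack.spec

    a = spack.spec.Spec("mpileaks +debug")
    b = spack.spec.Spec("mpileaks +debug ~shared")

    # variants in b that a lacks are copied over; True means a changed
    assert a.variants.constrain(b.variants)
    assert str(a.variants) == "+debug~shared"  # booleans print first, sorted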
def substitute_abstract_variants(spec: Spec):
"""Uses the information in `spec.package` to turn any variant that needs
it into a SingleValuedVariant.
This method is best effort. All variants that can be substituted will be
substituted before any error is raised.
Args:
spec: spec on which to operate the substitution
"""
# This method needs to be best effort so that it works in matrix exclusion
# in $spack/lib/spack/spack/spec_list.py
failed = []
for name, v in spec.variants.items():
if name == "dev_path":
spec.variants.substitute(vt.SingleValuedVariant(name, v._original_value))
continue
elif name in vt.reserved_names:
continue
elif name not in spec.package_class.variants:
failed.append(name)
continue
pkg_variant, _ = spec.package_class.variants[name]
new_variant = pkg_variant.make_variant(v._original_value)
pkg_variant.validate_or_raise(new_variant, spec.package_class)
spec.variants.substitute(new_variant)
# Raise all errors at once
if failed:
raise vt.UnknownVariantError(spec, failed)
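An illustration of the best-effort contract, assuming the builtin ``zlib`` package (which defines a ``shared`` variant) and addressing the function via ``spack.variant`` as one side of this diff does: known variants are substituted before the unknown ones are raised together.

.. code-block:: python

    import spack.spec
    import spack.variant as vt

    s = spack.spec.Spec("zlib +shared madeup=1")  # 'madeup' is not a zlib variant
    try:
        vt.substitute_abstract_variants(s)  # 'shared' is still substituted
    except vt.UnknownVariantError as e:
        print(e)  # reports only the unknown variant(s)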
def parse_with_version_concrete(spec_like: Union[str, Spec], compiler: bool = False):
def parse_with_version_concrete(string: str, compiler: bool = False):
"""Same as Spec(string), but interprets @x as @=x"""
s: Union[CompilerSpec, Spec] = CompilerSpec(spec_like) if compiler else Spec(spec_like)
s: Union[CompilerSpec, Spec] = CompilerSpec(string) if compiler else Spec(string)
interpreted_version = s.versions.concrete_range_as_version
if interpreted_version:
s.versions = vn.VersionList([interpreted_version])
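A sketch of the reinterpretation this performs: a plain ``@x`` range is pinned to the exact version ``@=x``:

.. code-block:: python

    from spack.spec import parse_with_version_concrete

    s = parse_with_version_concrete("zlib@1.2.13")
    print(s)  # expected: zlib@=1.2.13, per the @x -> @=x rule above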
@@ -4892,7 +4742,6 @@ def get_host_environment() -> Dict[str, Any]:
"architecture": arch_spec,
"arch_str": str(arch_spec),
"hostname": socket.gethostname(),
"full_hostname": socket.getfqdn(),
}

View File

@@ -5,7 +5,6 @@
import itertools
from typing import List
import spack.spec
import spack.variant
from spack.error import SpackError
from spack.spec import Spec
@@ -226,7 +225,7 @@ def _expand_matrix_constraints(matrix_config):
# Catch exceptions because we want to be able to operate on
# abstract specs without needing package information
try:
spack.spec.substitute_abstract_variants(test_spec)
spack.variant.substitute_abstract_variants(test_spec)
except spack.variant.UnknownVariantError:
pass

View File

@@ -13,7 +13,7 @@
import stat
import sys
import tempfile
from typing import Callable, Dict, Generator, Iterable, List, Optional, Set
from typing import Callable, Dict, Iterable, List, Optional, Set
import llnl.string
import llnl.util.lang
@@ -352,10 +352,8 @@ class Stage(LockableStagingDir):
def __init__(
self,
url_or_fetch_strategy,
*,
name=None,
mirror_paths: Optional[spack.mirror.MirrorLayout] = None,
mirrors: Optional[Iterable[spack.mirror.Mirror]] = None,
mirror_paths=None,
keep=False,
path=None,
lock=True,
@@ -364,30 +362,36 @@ def __init__(
"""Create a stage object.
Parameters:
url_or_fetch_strategy
URL of the archive to be downloaded into this stage, OR a valid FetchStrategy.
URL of the archive to be downloaded into this stage, OR
a valid FetchStrategy.
name
If a name is provided, then this stage is a named stage and will persist between runs
(or if you construct another stage object later). If name is not provided, then this
If a name is provided, then this stage is a named stage
and will persist between runs (or if you construct another
stage object later). If name is not provided, then this
stage will be given a unique name automatically.
mirror_paths
If provided, Stage will search Spack's mirrors for this archive at each of the
provided relative mirror paths before using the default fetch strategy.
If provided, Stage will search Spack's mirrors for
this archive at each of the provided relative mirror paths
before using the default fetch strategy.
keep
By default, when used as a context manager, the Stage is deleted on exit when no
exceptions are raised. Pass True to keep the stage intact even if no exceptions are
raised.
By default, when used as a context manager, the Stage
is deleted on exit when no exceptions are raised.
Pass True to keep the stage intact even if no
exceptions are raised.
path
If provided, the stage path to use for associated builds.
lock
True if the stage directory file lock is to be used, False otherwise.
True if the stage directory file lock is to be used, False
otherwise.
search_fn
The search function that provides the fetch strategy instance.
The search function that provides the fetch strategy
instance.
"""
super().__init__(name, path, keep, lock)
@@ -403,37 +407,30 @@ def __init__(
# self.fetcher can change with mirrors.
self.default_fetcher = self.fetcher
self.search_fn = search_fn
# If we fetch from a mirror, but the original data comes from, say, git, we currently
# cannot prove that they are equal (we don't even have a tree hash in package.py). This
# bool is used to skip checksum verification and warn the user instead.
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
self.skip_checksum_for_mirror = not bool(self.default_fetcher.digest)
else:
self.skip_checksum_for_mirror = True
# used for mirrored archives of repositories.
self.skip_checksum_for_mirror = True
self.srcdir = None
self.mirror_layout = mirror_paths
self.mirrors = list(mirrors) if mirrors else []
# Allow users to disable both mirrors and the download cache
self.default_fetcher_only = False
self.mirror_paths = mirror_paths
@property
def expected_archive_files(self):
"""Possible archive file paths."""
paths = []
fnames = []
expanded = True
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
expanded = self.default_fetcher.expand_archive
fnames.append(url_util.default_download_filename(self.default_fetcher.url))
if self.mirror_layout:
fnames.append(os.path.basename(self.mirror_layout.path))
if self.mirror_paths:
fnames.extend(os.path.basename(x) for x in self.mirror_paths)
paths = [os.path.join(self.path, f) for f in fnames]
paths.extend(os.path.join(self.path, f) for f in fnames)
if not expanded:
# If the download file is not compressed, the "archive" is a single file placed in
# Stage.source_path
# If the download file is not compressed, the "archive" is a
# single file placed in Stage.source_path
paths.extend(os.path.join(self.source_path, f) for f in fnames)
return paths
@@ -466,63 +463,80 @@ def source_path(self):
"""Returns the well-known source directory path."""
return os.path.join(self.path, _source_path_subdir)
def _generate_fetchers(self, mirror_only=False) -> Generator[fs.FetchStrategy, None, None]:
fetchers: List[fs.FetchStrategy] = []
def disable_mirrors(self):
"""The Stage will not attempt to look for the associated fetcher
target in any of Spack's mirrors (including the local download cache).
"""
self.mirror_paths = []
def fetch(self, mirror_only=False, err_msg=None):
"""Retrieves the code or archive
Args:
mirror_only (bool): only fetch from a mirror
err_msg (str or None): the error message to display if all fetchers
fail or ``None`` for the default fetch failure message
"""
fetchers = []
if not mirror_only:
fetchers.append(self.default_fetcher)
# If this archive is normally fetched from a URL, then use the same digest.
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
expand = self.default_fetcher.expand_archive
extension = self.default_fetcher.extension
else:
digest = None
expand = True
extension = None
# TODO: move mirror logic out of here and clean it up!
# TODO: Or @alalazo may have some ideas about how to use a
# TODO: CompositeFetchStrategy here.
if not self.default_fetcher_only and self.mirror_layout and self.mirrors:
self.skip_checksum_for_mirror = True
if self.mirror_paths:
# Join URLs of mirror roots with mirror paths. Because
# urljoin() strips everything past the final '/' in the
# root, we add a '/' if it is not present.
mirror_urls = [
url_util.join(mirror.fetch_url, rel_path)
for mirror in spack.mirror.MirrorCollection(source=True).values()
if not mirror.fetch_url.startswith("oci://")
for rel_path in self.mirror_paths
]
# If this archive is normally fetched from a tarball URL,
# then use the same digest. `spack mirror` ensures that
# the checksum will be the same.
digest = None
expand = True
extension = None
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
expand = self.default_fetcher.expand_archive
extension = self.default_fetcher.extension
# Have to skip the checksum for things archived from
# repositories. How can this be made safer?
self.skip_checksum_for_mirror = not bool(digest)
# Add URL strategies for all the mirrors with the digest
# Insert fetchers in the order that the URLs are provided.
fetchers[:0] = (
fs.from_url_scheme(
url_util.join(mirror.fetch_url, self.mirror_layout.path),
checksum=digest,
expand=expand,
extension=extension,
for url in reversed(mirror_urls):
fetchers.insert(
0, fs.from_url_scheme(url, digest, expand=expand, extension=extension)
)
for mirror in self.mirrors
if not mirror.fetch_url.startswith("oci://") # no support for mirrors yet
)
if not self.default_fetcher_only and self.mirror_layout and self.default_fetcher.cachable:
fetchers.insert(
0,
spack.caches.FETCH_CACHE.fetcher(
self.mirror_layout.path, digest, expand=expand, extension=extension
),
)
if self.default_fetcher.cachable:
for rel_path in reversed(list(self.mirror_paths)):
cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
rel_path, digest, expand=expand, extension=extension
)
fetchers.insert(0, cache_fetcher)
yield from fetchers
def generate_fetchers():
for fetcher in fetchers:
yield fetcher
# The search function may be expensive, so wait until now to
# call it so the user can stop if a prior fetcher succeeded
if self.search_fn and not mirror_only:
dynamic_fetchers = self.search_fn()
for fetcher in dynamic_fetchers:
yield fetcher
# The search function may be expensive, so wait until now to call it so the user can stop
# if a prior fetcher succeeded
if self.search_fn and not mirror_only:
yield from self.search_fn()
def fetch(self, mirror_only: bool = False, err_msg: Optional[str] = None) -> None:
"""Retrieves the code or archive
Args:
mirror_only: only fetch from a mirror
err_msg: the error message to display if all fetchers fail or ``None`` for the default
fetch failure message
"""
errors: List[str] = []
for fetcher in self._generate_fetchers(mirror_only):
for fetcher in generate_fetchers():
try:
fetcher.stage = self
self.fetcher = fetcher
@@ -581,60 +595,56 @@ def steal_source(self, dest):
self.destroy()
def check(self):
"""Check the downloaded archive against a checksum digest."""
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
cache = isinstance(self.fetcher, fs.CacheURLFetchStrategy)
if cache:
secure_msg = "your download cache is in a secure location"
else:
secure_msg = "you trust this mirror and have a secure connection"
tty.warn(
f"Using {'download cache' if cache else 'a mirror'} instead of version control",
"The required sources are normally checked out from a version control system, "
f"but have been archived {'in download cache' if cache else 'on a mirror'}: "
f"{self.fetcher}. Spack lacks a tree hash to verify the integrity of this "
f"archive. Make sure {secure_msg}.",
"Fetching from mirror without a checksum!",
"This package is normally checked out from a version "
"control system, but it has been archived on a spack "
"mirror. This means we cannot know a checksum for the "
"tarball in advance. Be sure that your connection to "
"this mirror is secure!",
)
elif spack.config.get("config:checksum"):
self.fetcher.check()
def cache_local(self):
spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_layout.path)
spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)
def cache_mirror(
self, mirror: spack.caches.MirrorCache, stats: spack.mirror.MirrorStats
) -> None:
def cache_mirror(self, mirror, stats):
"""Perform a fetch if the resource is not already cached
Arguments:
mirror: the mirror to cache this Stage's resource in
stats: this is updated depending on whether the caching operation succeeded or failed
mirror (spack.caches.MirrorCache): the mirror to cache this Stage's
resource in
stats (spack.mirror.MirrorStats): this is updated depending on whether the
caching operation succeeded or failed
"""
if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
# BundleFetchStrategy has no source to fetch. The associated fetcher does nothing but
# the associated stage may still exist. There is currently no method available on the
# fetcher to distinguish this ('cachable' refers to whether the fetcher refers to a
# resource with a fixed ID, which is not the same concept as whether there is anything
# to fetch at all) so we must examine the type of the fetcher.
# BundleFetchStrategy has no source to fetch. The associated
# fetcher does nothing but the associated stage may still exist.
# There is currently no method available on the fetcher to
# distinguish this ('cachable' refers to whether the fetcher
# refers to a resource with a fixed ID, which is not the same
# concept as whether there is anything to fetch at all) so we
# must examine the type of the fetcher.
return
elif mirror.skip_unstable_versions and not fs.stable_target(self.default_fetcher):
if mirror.skip_unstable_versions and not fs.stable_target(self.default_fetcher):
return
elif not self.mirror_layout:
return
absolute_storage_path = os.path.join(mirror.root, self.mirror_layout.path)
absolute_storage_path = os.path.join(mirror.root, self.mirror_paths.storage_path)
if os.path.exists(absolute_storage_path):
stats.already_existed(absolute_storage_path)
else:
self.fetch()
self.check()
mirror.store(self.fetcher, self.mirror_layout.path)
mirror.store(self.fetcher, self.mirror_paths.storage_path)
stats.added(absolute_storage_path)
self.mirror_layout.make_alias(mirror.root)
mirror.symlink(self.mirror_paths)
def expand_archive(self):
"""Changes to the stage directory and attempt to expand the downloaded
@@ -642,9 +652,9 @@ def expand_archive(self):
downloaded."""
if not self.expanded:
self.fetcher.expand()
tty.debug(f"Created stage in {self.path}")
tty.debug("Created stage in {0}".format(self.path))
else:
tty.debug(f"Already staged {self.name} in {self.path}")
tty.debug("Already staged {0} in {1}".format(self.name, self.path))
def restage(self):
"""Removes the expanded archive path if it exists, then re-expands
@@ -1163,20 +1173,18 @@ def _fetch_and_checksum(url, options, keep_stage, action_fn=None):
try:
url_or_fs = url
if options:
url_or_fs = fs.URLFetchStrategy(url=url, fetch_options=options)
url_or_fs = fs.URLFetchStrategy(url, fetch_options=options)
with Stage(url_or_fs, keep=keep_stage) as stage:
# Fetch the archive
stage.fetch()
archive = stage.archive_file
assert archive is not None, f"Archive not found for {url}"
if action_fn is not None and archive:
if action_fn is not None:
# Only run first_stage_function the first time,
# no need to run it every time
action_fn(archive, url)
action_fn(stage, url)
# Checksum the archive and add it to the list
checksum = spack.util.crypto.checksum(hashlib.sha256, archive)
checksum = spack.util.crypto.checksum(hashlib.sha256, stage.archive_file)
return checksum, None
except fs.FailedDownloadError:
return None, f"[WORKER] Failed to fetch {url}"
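For reference, the same staging flow as ``_fetch_and_checksum`` above, written out against a hypothetical tarball URL:

.. code-block:: python

    import hashlib

    import spack.stage
    import spack.util.crypto

    url = "https://example.com/pkg-1.0.tar.gz"  # illustrative only
    with spack.stage.Stage(url, keep=False) as stage:
        stage.fetch()  # raises if every fetcher fails
        sha256 = spack.util.crypto.checksum(hashlib.sha256, stage.archive_file)
        stage.expand_archive()  # unpacks into stage.source_path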

View File

@@ -12,7 +12,7 @@
modifications to global state in memory that must be replicated in the
child process.
"""
import importlib
import io
import multiprocessing
import pickle
@@ -118,7 +118,7 @@ def __init__(self, module_patches, class_patches):
def restore(self):
for module_name, attr_name, value in self.module_patches:
value = pickle.load(value)
module = importlib.import_module(module_name)
module = __import__(module_name)
setattr(module, attr_name, value)
for class_fqn, attr_name, value in self.class_patches:
value = pickle.load(value)

View File

@@ -0,0 +1,66 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
""" Test ABI compatibility helpers"""
import pytest
from spack.abi import ABI
from spack.spec import Spec
@pytest.mark.parametrize(
"target,constraint,expected",
[
("foo", "bar", True),
("platform=linux", "foo", True),
("foo", "arch=linux-fedora31-x86_64", True),
("arch=linux-fedora31-skylake", "arch=linux-fedora31-skylake", True),
("arch=linux-fedora31-skylake", "arch=linux-fedora31-x86_64", False),
("platform=linux os=fedora31", "arch=linux-fedora31-x86_64", True),
("platform=linux", "arch=linux-fedora31-x86_64", True),
("platform=linux os=fedora31", "platform=linux", True),
("platform=darwin", "arch=linux-fedora31-x86_64", False),
("os=fedora31", "platform=linux", True),
],
)
def test_architecture_compatibility(target, constraint, expected):
assert ABI().architecture_compatible(Spec(target), Spec(constraint)) == expected
@pytest.mark.parametrize(
"target,constraint,loose,expected",
[
("foo", "bar", False, True),
("%gcc", "foo", False, True),
("foo", "%gcc", False, True),
("%gcc", "%gcc", False, True),
("%gcc", "%intel", False, False),
("%gcc", "%clang", False, False),
("%gcc@9.1", "%gcc@9.2", False, False), # TODO should be true ?
("%gcc@9.2.1", "%gcc@9.2.2", False, False), # TODO should be true ?
("%gcc@4.9", "%gcc@9.2", False, False),
("%clang@5", "%clang@6", False, False),
("%gcc@9.1", "%gcc@9.2", True, True),
("%gcc@9.2.1", "%gcc@9.2.2", True, True),
("%gcc@4.9", "%gcc@9.2", True, True),
("%clang@5", "%clang@6", True, True),
],
)
def test_compiler_compatibility(target, constraint, loose, expected):
assert ABI().compiler_compatible(Spec(target), Spec(constraint), loose=loose) == expected
@pytest.mark.parametrize(
"target,constraint,loose,expected",
[
("foo", "bar", False, True),
("%gcc", "platform=linux", False, True),
("%gcc@9.2.1", "%gcc@8.3.1 platform=linux", False, False),
("%gcc@9.2.1", "%gcc@8.3.1 platform=linux", True, True),
("%gcc@9.2.1 arch=linux-fedora31-skylake", "%gcc@9.2.1 platform=linux", False, True),
],
)
def test_compatibility(target, constraint, loose, expected):
assert ABI().compatible(Spec(target), Spec(constraint), loose=loose) == expected

View File

@@ -8,9 +8,7 @@
import io
import json
import os
import pathlib
import platform
import shutil
import sys
import tarfile
import urllib.error
@@ -18,11 +16,12 @@
import urllib.response
from pathlib import Path, PurePath
import py
import pytest
import archspec.cpu
from llnl.util.filesystem import copy_tree, join_path, visit_directory_tree
from llnl.util.filesystem import join_path, visit_directory_tree
from llnl.util.symlink import readlink
import spack.binary_distribution as bindist
@@ -82,67 +81,72 @@ def test_mirror(mirror_dir):
@pytest.fixture(scope="module")
def config_directory(tmp_path_factory):
# Copy defaults to a temporary "site" scope
defaults_dir = tmp_path_factory.mktemp("test_configs")
config_path = pathlib.Path(spack.paths.etc_path)
copy_tree(str(config_path / "defaults"), str(defaults_dir / "site"))
# Create a "user" scope
(defaults_dir / "user").mkdir()
# Detect compilers
cfg_scopes = [
spack.config.DirectoryConfigScope(name, str(defaults_dir / name))
for name in [f"site/{platform.system().lower()}", "site", "user"]
]
with spack.config.use_configuration(*cfg_scopes):
_ = spack.compilers.find_compilers(scope="site")
yield defaults_dir
shutil.rmtree(str(defaults_dir))
def config_directory(tmpdir_factory):
tmpdir = tmpdir_factory.mktemp("test_configs")
# restore some sane defaults for packages and config
config_path = py.path.local(spack.paths.etc_path)
modules_yaml = config_path.join("defaults", "modules.yaml")
os_modules_yaml = config_path.join(
"defaults", "%s" % platform.system().lower(), "modules.yaml"
)
packages_yaml = config_path.join("defaults", "packages.yaml")
config_yaml = config_path.join("defaults", "config.yaml")
repos_yaml = config_path.join("defaults", "repos.yaml")
tmpdir.ensure("site", dir=True)
tmpdir.ensure("user", dir=True)
tmpdir.ensure("site/%s" % platform.system().lower(), dir=True)
modules_yaml.copy(tmpdir.join("site", "modules.yaml"))
os_modules_yaml.copy(tmpdir.join("site/%s" % platform.system().lower(), "modules.yaml"))
packages_yaml.copy(tmpdir.join("site", "packages.yaml"))
config_yaml.copy(tmpdir.join("site", "config.yaml"))
repos_yaml.copy(tmpdir.join("site", "repos.yaml"))
yield tmpdir
tmpdir.remove()
@pytest.fixture(scope="function")
def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
def default_config(tmpdir, config_directory, monkeypatch, install_mockery):
# This fixture depends on install_mockery to ensure
# there is a clear order of initialization. The substitution of the
# config scopes here is done on top of the substitution that comes with
# install_mockery
mutable_dir = tmp_path / "mutable_config" / "tmp"
mutable_dir.mkdir(parents=True)
copy_tree(str(config_directory), str(mutable_dir))
mutable_dir = tmpdir.mkdir("mutable_config").join("tmp")
config_directory.copy(mutable_dir)
scopes = [
spack.config.DirectoryConfigScope(name, str(mutable_dir / name))
for name in [f"site/{platform.system().lower()}", "site", "user"]
]
cfg = spack.config.Configuration(
*[
spack.config.DirectoryConfigScope(name, str(mutable_dir))
for name in [f"site/{platform.system().lower()}", "site", "user"]
]
)
with spack.config.use_configuration(*scopes):
spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
njobs = spack.config.get("config:build_jobs")
if not njobs:
spack.config.set("config:build_jobs", 4, scope="user")
extensions = spack.config.get("config:template_dirs")
if not extensions:
spack.config.set(
"config:template_dirs",
[os.path.join(spack.paths.share_path, "templates")],
scope="user",
)
spack.config.CONFIG, old_config = cfg, spack.config.CONFIG
spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
njobs = spack.config.get("config:build_jobs")
if not njobs:
spack.config.set("config:build_jobs", 4, scope="user")
extensions = spack.config.get("config:template_dirs")
if not extensions:
spack.config.set(
"config:template_dirs",
[os.path.join(spack.paths.share_path, "templates")],
scope="user",
)
(mutable_dir / "build_stage").mkdir()
build_stage = spack.config.get("config:build_stage")
if not build_stage:
spack.config.set(
"config:build_stage", [str(mutable_dir / "build_stage")], scope="user"
)
timeout = spack.config.get("config:connect_timeout")
if not timeout:
spack.config.set("config:connect_timeout", 10, scope="user")
mutable_dir.ensure("build_stage", dir=True)
build_stage = spack.config.get("config:build_stage")
if not build_stage:
spack.config.set(
"config:build_stage", [str(mutable_dir.join("build_stage"))], scope="user"
)
timeout = spack.config.get("config:connect_timeout")
if not timeout:
spack.config.set("config:connect_timeout", 10, scope="user")
yield spack.config.CONFIG
yield spack.config.CONFIG
spack.config.CONFIG = old_config
mutable_dir.remove()
@pytest.fixture(scope="function")
@@ -353,7 +357,7 @@ def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
assert len(keys) == 1
fpr = keys[0]
bindist._url_push_keys(mirror, keys=[fpr], tmpdir=str(tmp_path), update_index=True)
bindist.push_keys(mirror, keys=[fpr], tmpdir=str(tmp_path), update_index=True)
# dir 2: import the key from the mirror, and confirm that its fingerprint
# matches the one created above
@@ -488,7 +492,7 @@ def mock_list_url(url, recursive=False):
test_url = "file:///fake/keys/dir"
with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
bindist._url_generate_package_index(test_url, str(tmp_path))
bindist.generate_package_index(test_url, str(tmp_path))
assert (
"Warning: Encountered problem listing packages at "
@@ -509,7 +513,7 @@ def mock_list_url(url, recursive=False):
bindist.generate_key_index(url, str(tmp_path))
with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
bindist._url_generate_package_index(url, str(tmp_path))
bindist.generate_package_index(url, str(tmp_path))
assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err

View File

@@ -10,7 +10,6 @@
import spack.binary_distribution as bd
import spack.main
import spack.mirror
import spack.spec
import spack.util.url
@@ -23,21 +22,17 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
specs = [spec]
# populate cache, everything is new
mirror = spack.mirror.Mirror.from_local_path(str(tmp_path))
with bd.make_uploader(mirror) as uploader:
skipped = uploader.push_or_raise(specs)
assert not skipped
# Runs fine the first time, second time it's a no-op
out_url = spack.util.url.path_to_file_url(str(tmp_path))
skipped = bd.push_or_raise(specs, out_url, signing_key=None)
assert not skipped
# should skip all
with bd.make_uploader(mirror) as uploader:
skipped = uploader.push_or_raise(specs)
assert skipped == specs
skipped = bd.push_or_raise(specs, out_url, signing_key=None)
assert skipped == specs
# with force=True none should be skipped
with bd.make_uploader(mirror, force=True) as uploader:
skipped = uploader.push_or_raise(specs)
assert not skipped
# Should work fine with force=True
skipped = bd.push_or_raise(specs, out_url, signing_key=None, force=True)
assert not skipped
# Remove the tarball, which should cause push to push.
os.remove(
@@ -47,6 +42,5 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
/ bd.tarball_name(spec, ".spack")
)
with bd.make_uploader(mirror) as uploader:
skipped = uploader.push_or_raise(specs)
assert not skipped
skipped = bd.push_or_raise(specs, out_url, signing_key=None)
assert not skipped

View File

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
import platform
import posixpath
@@ -592,7 +593,7 @@ def test_setting_attributes(self, default_mock_concretization):
# We can also propagate the settings to classes in the MRO
module_wrapper.propagate_changes_to_mro()
for cls in type(s.package).__mro__:
for cls in inspect.getmro(type(s.package)):
current_module = cls.module
if current_module == spack.package_base:
break

View File

@@ -56,6 +56,6 @@ def test_build_systems(url_and_build_system):
url, build_system = url_and_build_system
with spack.stage.Stage(url) as stage:
stage.fetch()
guesser = spack.cmd.create.BuildSystemAndLanguageGuesser()
guesser(stage.archive_file, url)
guesser = spack.cmd.create.BuildSystemGuesser()
guesser(stage, url)
assert build_system == guesser.build_system

View File

@@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
from llnl.util.filesystem import working_dir
@@ -35,10 +33,11 @@ def test_blame_by_percent(mock_packages):
assert "EMAIL" in out
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
def test_blame_file(mock_packages):
"""Sanity check the blame command to make sure it works."""
with working_dir(spack.paths.prefix):
out = blame(os.path.join("bin", "spack"))
out = blame("bin/spack")
assert "LAST_COMMIT" in out
assert "AUTHOR" in out
assert "EMAIL" in out

View File

@@ -7,7 +7,6 @@
import json
import os
import shutil
from typing import List
import pytest
@@ -17,7 +16,6 @@
import spack.environment as ev
import spack.error
import spack.main
import spack.mirror
import spack.spec
import spack.util.url
from spack.spec import Spec
@@ -382,22 +380,18 @@ def test_correct_specs_are_pushed(
# Concretize dttop and add it to the temporary database (without prefixes)
spec = default_mock_concretization("dttop")
temporary_store.db.add(spec, directory_layout=None)
slash_hash = f"/{spec.dag_hash()}"
slash_hash = "/{0}".format(spec.dag_hash())
class DontUpload(spack.binary_distribution.Uploader):
def __init__(self):
super().__init__(spack.mirror.Mirror.from_local_path(str(tmpdir)), False, False)
self.pushed = []
packages_to_push = []
def push(self, specs: List[spack.spec.Spec]):
self.pushed.extend(s.name for s in specs)
return [], [] # nothing skipped, nothing errored
def fake_push(specs, *args, **kwargs):
assert all(isinstance(s, Spec) for s in specs)
packages_to_push.extend(s.name for s in specs)
skipped = []
errors = []
return skipped, errors
uploader = DontUpload()
monkeypatch.setattr(
spack.binary_distribution, "make_uploader", lambda *args, **kwargs: uploader
)
monkeypatch.setattr(spack.binary_distribution, "_push", fake_push)
buildcache_create_args = ["create", "--unsigned"]
@@ -409,10 +403,10 @@ def push(self, specs: List[spack.spec.Spec]):
buildcache(*buildcache_create_args)
# Order is not guaranteed, so we can't just compare lists
assert set(uploader.pushed) == set(expected)
assert set(packages_to_push) == set(expected)
# Ensure no duplicates
assert len(set(uploader.pushed)) == len(uploader.pushed)
assert len(set(packages_to_push)) == len(packages_to_push)
@pytest.mark.parametrize("signed", [True, False])

File diff suppressed because it is too large

View File

@@ -6,6 +6,7 @@
import filecmp
import os
import shutil
import subprocess
import pytest
@@ -155,6 +156,22 @@ def test_update_with_header(tmpdir):
commands("--update", str(update_file), "--header", str(filename))
@pytest.mark.xfail
def test_no_pipe_error():
"""Make sure we don't see any pipe errors when piping output."""
proc = subprocess.Popen(
["spack", "commands", "--format=rst"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
# Call close() on stdout to cause a broken pipe
proc.stdout.close()
proc.wait()
stderr = proc.stderr.read().decode("utf-8")
assert "Broken pipe" not in stderr
def test_bash_completion():
"""Test the bash completion writer."""
out1 = commands("--format=bash")

View File

@@ -81,6 +81,34 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_execut
assert "gcc" in output
@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
"""Tests that Spack won't mistake Apple's GCC as a "real" GCC, since it's really
Clang with a few tweaks.
"""
gcc_path = mock_executable(
"gcc",
output="""
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
elif [ "$1" = "--version" ]; then
echo "Configured with: --prefix=/dummy"
echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
echo "Target: x86_64-apple-darwin18.7.0"
echo "Thread model: posix"
echo "InstalledDir: /dummy"
else
echo "clang: error: no input files"
fi
""",
)
os.environ["PATH"] = str(gcc_path.parent)
output = compiler("find", "--scope=site")
assert "gcc" not in output
@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
"""Tests that we can remove a compiler from configuration."""
@@ -103,7 +131,7 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
def test_compiler_add(mutable_config, mock_executable):
def test_compiler_add(mutable_config, mock_packages, mock_executable):
"""Tests that we can add a compiler to configuration."""
expected_version = "4.5.3"
gcc_path = mock_executable(
@@ -121,12 +149,7 @@ def test_compiler_add(mutable_config, mock_executable):
compilers_before_find = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
all=None,
compiler_spec=None,
add_paths=[str(root_dir)],
scope=None,
mixed_toolchain=False,
jobs=1,
all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None, mixed_toolchain=False
)
spack.cmd.compiler.compiler_find(args)
compilers_after_find = set(spack.compilers.all_compiler_specs())
@@ -206,7 +229,7 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir)
for name in ("gcc-8", "g++-8", "gfortran-8"):
shutil.copy(compilers_dir / name, new_dir / name)
# Set PATH to have the new folder searched first
os.environ["PATH"] = f"{str(new_dir)}:{str(compilers_dir)}"
os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))
compiler("find", "--scope=site")

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import tarfile
import pytest
@@ -155,24 +154,24 @@ def test_create_template_bad_name(mock_test_repo, name, expected):
def test_build_system_guesser_no_stage():
"""Test build system guesser when stage not provided."""
guesser = spack.cmd.create.BuildSystemAndLanguageGuesser()
guesser = spack.cmd.create.BuildSystemGuesser()
# Ensure we get the expected build system
with pytest.raises(AttributeError, match="'NoneType' object has no attribute"):
guesser(None, "/the/url/does/not/matter")
def test_build_system_guesser_octave(tmp_path):
def test_build_system_guesser_octave():
"""
Test build system guesser for the special case, where the same base URL
identifies the build system rather than guessing the build system from
files contained in the archive.
"""
url, expected = "downloads.sourceforge.net/octave/", "octave"
guesser = spack.cmd.create.BuildSystemAndLanguageGuesser()
guesser = spack.cmd.create.BuildSystemGuesser()
# Ensure we get the expected build system
guesser(str(tmp_path / "archive.tar.gz"), url)
guesser(None, url)
assert guesser.build_system == expected
# Also ensure we get the correct template
@@ -208,40 +207,3 @@ def _parse_name_offset(path, v):
def test_no_url():
"""Test creation of package without a URL."""
create("--skip-editor", "-n", "create-new-package")
@pytest.mark.parametrize(
"source_files,languages",
[
(["fst.c", "snd.C"], ["c", "cxx"]),
(["fst.c", "snd.cxx"], ["c", "cxx"]),
(["fst.F", "snd.cc"], ["cxx", "fortran"]),
(["fst.f", "snd.c"], ["c", "fortran"]),
(["fst.jl", "snd.py"], []),
],
)
def test_language_and_build_system_detection(tmp_path, source_files, languages):
"""Test that languages are detected from tarball, and the build system is guessed from the
most top-level build system file."""
def add(tar: tarfile.TarFile, name: str, type):
tarinfo = tarfile.TarInfo(name)
tarinfo.type = type
tar.addfile(tarinfo)
tarball = str(tmp_path / "example.tar.gz")
with tarfile.open(tarball, "w:gz") as tar:
add(tar, "./third-party/", tarfile.DIRTYPE)
add(tar, "./third-party/example/", tarfile.DIRTYPE)
add(tar, "./third-party/example/CMakeLists.txt", tarfile.REGTYPE) # false positive
add(tar, "./configure", tarfile.REGTYPE) # actual build system
add(tar, "./src/", tarfile.DIRTYPE)
for file in source_files:
add(tar, f"src/{file}", tarfile.REGTYPE)
guesser = spack.cmd.create.BuildSystemAndLanguageGuesser()
guesser(str(tarball), "https://example.com")
assert guesser.build_system == "autotools"
assert guesser.languages == languages

View File

@@ -1734,17 +1734,6 @@ def test_env_include_concrete_env_yaml(env_name):
assert test.path in combined_yaml["include_concrete"]
@pytest.mark.regression("45766")
@pytest.mark.parametrize("format", ["v1", "v2", "v3"])
def test_env_include_concrete_old_env(format, tmpdir):
lockfile = os.path.join(spack.paths.test_path, "data", "legacy_env", f"{format}.lock")
# create an env from old .lock file -- this does not update the format
env("create", "old-env", lockfile)
env("create", "--include-concrete", "old-env", "test")
assert ev.read("old-env").all_specs() == ev.read("test").all_specs()
def test_env_bad_include_concrete_env():
with pytest.raises(ev.SpackEnvironmentError):
env("create", "--include-concrete", "nonexistant_env", "combined_env")

View File

@@ -72,12 +72,8 @@ def test_find_external_two_instances_same_package(mock_executable):
def test_find_external_update_config(mutable_config):
entries = [
spack.detection.DetectedPackage(
Spec.from_detection("cmake@1.foo", external_path="/x/y1/"), "/x/y1/"
),
spack.detection.DetectedPackage(
Spec.from_detection("cmake@3.17.2", external_path="/x/y2/"), "/x/y2/"
),
spack.detection.DetectedPackage(Spec.from_detection("cmake@1.foo"), "/x/y1/"),
spack.detection.DetectedPackage(Spec.from_detection("cmake@3.17.2"), "/x/y2/"),
]
pkg_to_entries = {"cmake": entries}
@@ -104,7 +100,7 @@ def test_get_executables(working_env, mock_executable):
# TODO: this test should be made to work, but in the meantime it is
# causing intermittent (spurious) CI failures on all PRs
@pytest.mark.not_on_windows("Test fails intermittently on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Test fails intermittently on Windows")
def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
"""When the user invokes 'spack external find --not-buildable', the config
for any package where Spack finds an external version should be marked as
@@ -225,8 +221,10 @@ def fail():
assert "Skipping manifest and continuing" in output
def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
"""Checks that 'spack find external' doesn't overwrite an existing spec in packages.yaml."""
def test_find_external_merge(mutable_config, mutable_mock_repo):
"""Check that 'spack find external' doesn't overwrite an existing spec
entry in packages.yaml.
"""
pkgs_cfg_init = {
"find-externals1": {
"externals": [{"spec": "find-externals1@1.1", "prefix": "/preexisting-prefix/"}],
@@ -236,12 +234,8 @@ def test_find_external_merge(mutable_config, mutable_mock_repo, tmp_path):
mutable_config.update_config("packages", pkgs_cfg_init)
entries = [
spack.detection.DetectedPackage(
Spec.from_detection("find-externals1@1.1", external_path="/x/y1/"), "/x/y1/"
),
spack.detection.DetectedPackage(
Spec.from_detection("find-externals1@1.2", external_path="/x/y2/"), "/x/y2/"
),
spack.detection.DetectedPackage(Spec.from_detection("find-externals1@1.1"), "/x/y1/"),
spack.detection.DetectedPackage(Spec.from_detection("find-externals1@1.2"), "/x/y2/"),
]
pkg_to_entries = {"find-externals1": entries}
scope = spack.config.default_modify_scope("packages")

View File

@@ -2,13 +2,29 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
from spack.main import SpackCommand
@pytest.mark.xfail
def test_reuse_after_help():
"""Test `spack help` can be called twice with the same SpackCommand."""
help_cmd = SpackCommand("help", subprocess=True)
help_cmd()
# This second invocation will somehow fail because the parser no
# longer works after add_all_commands() is called in
# SpackArgumentParser.format_help_sections().
#
# TODO: figure out why this doesn't work properly and change this
# test to use a single SpackCommand.
#
# It seems that parse_known_args() finds "too few arguments" the
# second time through b/c add_all_commands() ends up leaving extra
# positionals in the parser. But this used to work before we loaded
# commands lazily.
help_cmd()

View File

@@ -24,12 +24,6 @@
style = spack.main.SpackCommand("style")
ISORT = which("isort")
BLACK = which("black")
FLAKE8 = which("flake8")
MYPY = which("mypy")
@pytest.fixture(autouse=True)
def has_develop_branch(git):
"""spack style requires git and a develop branch to run -- skip if we're missing either."""
@@ -196,8 +190,8 @@ def external_style_root(git, flake8_package_with_errors, tmpdir):
yield tmpdir, py_file
@pytest.mark.skipif(not ISORT, reason="isort is not installed.")
@pytest.mark.skipif(not BLACK, reason="black is not installed.")
@pytest.mark.skipif(not which("isort"), reason="isort is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
def test_fix_style(external_style_root):
"""Make sure spack style --fix works."""
tmpdir, py_file = external_style_root
@@ -215,10 +209,10 @@ def test_fix_style(external_style_root):
assert filecmp.cmp(broken_py, fixed_py)
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@pytest.mark.skipif(not ISORT, reason="isort is not installed.")
@pytest.mark.skipif(not MYPY, reason="mypy is not installed.")
@pytest.mark.skipif(not BLACK, reason="black is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
@pytest.mark.skipif(not which("isort"), reason="isort is not installed.")
@pytest.mark.skipif(not which("mypy"), reason="mypy is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
def test_external_root(external_style_root, capfd):
"""Ensure we can run in a separate root directory w/o configuration files."""
tmpdir, py_file = external_style_root
@@ -244,7 +238,7 @@ def test_external_root(external_style_root, capfd):
assert "lib/spack/spack/dummy.py:7: [F401] 'os' imported but unused" in output
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style(flake8_package, tmpdir):
root_relative = os.path.relpath(flake8_package, spack.paths.prefix)
@@ -270,7 +264,7 @@ def test_style(flake8_package, tmpdir):
assert "spack style checks were clean" in output
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style_with_errors(flake8_package_with_errors):
root_relative = os.path.relpath(flake8_package_with_errors, spack.paths.prefix)
output = style(
@@ -281,8 +275,8 @@ def test_style_with_errors(flake8_package_with_errors):
assert "spack style found errors" in output
@pytest.mark.skipif(not BLACK, reason="black is not installed.")
@pytest.mark.skipif(not FLAKE8, reason="flake8 is not installed.")
@pytest.mark.skipif(not which("black"), reason="black is not installed.")
@pytest.mark.skipif(not which("flake8"), reason="flake8 is not installed.")
def test_style_with_black(flake8_package_with_errors):
output = style("--tool", "black,flake8", flake8_package_with_errors, fail_on_error=False)
assert "black found errors" in output

View File

@@ -11,6 +11,13 @@
versions = SpackCommand("versions")
def test_safe_only_versions():
"""Only test the safe versions of a package.
(Using the deprecated command line argument)
"""
versions("--safe-only", "zlib")
def test_safe_versions():
"""Only test the safe versions of a package."""

View File

@@ -19,6 +19,27 @@
from spack.util.executable import Executable, ProcessError
@pytest.fixture()
def make_args_for_version(monkeypatch):
def _factory(version, path="/usr/bin/gcc"):
class MockOs:
pass
compiler_name = "gcc"
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
monkeypatch.setattr(compiler_cls, "cc_version", lambda x: version)
compiler_id = spack.compilers.CompilerID(
os=MockOs, compiler_name=compiler_name, version=None
)
variation = spack.compilers.NameVariation(prefix="", suffix="")
return spack.compilers.DetectVersionArgs(
id=compiler_id, variation=variation, language="cc", path=path
)
return _factory
def test_multiple_conflicting_compiler_definitions(mutable_config):
compiler_def = {
"compiler": {
@@ -42,6 +63,40 @@ def test_multiple_conflicting_compiler_definitions(mutable_config):
assert cmp.f77 == "f77"
def test_get_compiler_duplicates(mutable_config, compiler_factory):
# In this case there is only one instance of the specified compiler in
# the test configuration (so it is not actually a duplicate), but the
# method behaves the same.
cnl_compiler = compiler_factory(spec="gcc@4.5.0", operating_system="CNL")
# CNL compiler has no target attribute, and this is essential to make detection pass
del cnl_compiler["compiler"]["target"]
mutable_config.set(
"compilers", [compiler_factory(spec="gcc@4.5.0", operating_system="SuSE11"), cnl_compiler]
)
cfg_file_to_duplicates = spack.compilers.get_compiler_duplicates(
"gcc@4.5.0", spack.spec.ArchSpec("cray-CNL-xeon")
)
assert len(cfg_file_to_duplicates) == 1
cfg_file, duplicates = next(iter(cfg_file_to_duplicates.items()))
assert len(duplicates) == 1
@pytest.mark.parametrize(
"input_version,expected_version,expected_error",
[(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)],
)
def test_version_detection_is_empty(
make_args_for_version, input_version, expected_version, expected_error
):
args = make_args_for_version(version=input_version)
result, error = spack.compilers.detect_version(args)
if not error:
assert result.id.version == expected_version
assert error == expected_error
def test_compiler_flags_from_config_are_grouped():
compiler_entry = {
"spec": "intel@17.0.2",
@@ -851,30 +906,51 @@ def prepare_executable(name):
@pytest.mark.parametrize(
"compilers_extra_attributes,expected_length",
"detected_versions,expected_length",
[
# If we detect a C compiler we expect the result to be valid
({"c": "/usr/bin/clang-12", "cxx": "/usr/bin/clang-12"}, 1),
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cc",
path="/usr/bin/clang-12",
),
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
),
],
1,
),
# If we detect only a C++ compiler we expect the result to be discarded
({"cxx": "/usr/bin/clang-12"}, 0),
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
)
],
0,
),
],
)
def test_detection_requires_c_compiler(compilers_extra_attributes, expected_length):
def test_detection_requires_c_compiler(detected_versions, expected_length):
"""Tests that compilers automatically added to the configuration have
at least a C compiler.
"""
packages_yaml = {
"llvm": {
"externals": [
{
"spec": "clang@12.0.0",
"prefix": "/usr",
"extra_attributes": {"compilers": compilers_extra_attributes},
}
]
}
}
result = spack.compilers.CompilerConfigFactory.from_packages_yaml(packages_yaml)
result = spack.compilers.make_compiler_list(detected_versions)
assert len(result) == expected_length
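The assertion above encodes a simple rule: a detected toolchain is only worth keeping if it provides a C compiler. A minimal sketch of that filtering rule, with illustrative names rather than Spack's actual API:

.. code-block:: python

   from typing import Dict, List

   def keep_only_toolchains_with_cc(toolchains: List[Dict[str, str]]) -> List[Dict[str, str]]:
       """Discard detected toolchains lacking a C compiler (sketch).

       Each toolchain maps a language name ("cc", "cxx", "fc", ...) to the
       path of the corresponding executable.
       """
       return [t for t in toolchains if "cc" in t]

   detected = [
       {"cc": "/usr/bin/clang-12", "cxx": "/usr/bin/clang++-12"},  # kept
       {"cxx": "/usr/bin/clang++-12"},  # discarded: no C compiler
   ]
   assert len(keep_only_toolchains_with_cc(detected)) == 1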

View File

@@ -0,0 +1,471 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test detection of compiler version"""
import pytest
import spack.compilers.aocc
import spack.compilers.arm
import spack.compilers.cce
import spack.compilers.clang
import spack.compilers.fj
import spack.compilers.gcc
import spack.compilers.intel
import spack.compilers.nag
import spack.compilers.nvhpc
import spack.compilers.oneapi
import spack.compilers.pgi
import spack.compilers.xl
import spack.compilers.xl_r
import spack.util.module_cmd
@pytest.mark.parametrize(
"version_str,expected_version",
[
(
"Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n"
"Target: aarch64--linux-gnu\n"
"Thread model: posix\n"
"InstalledDir:\n"
"/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n",
"19.0",
),
(
"Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n"
"Target: aarch64--linux-gnu\n"
"Thread model: posix\n"
"InstalledDir:\n"
"/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n",
"19.3.1",
),
],
)
def test_arm_version_detection(version_str, expected_version):
version = spack.compilers.arm.Arm.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
("Cray C : Version 8.4.6 Mon Apr 15, 2019 12:13:39\n", "8.4.6"),
("Cray C++ : Version 8.4.6 Mon Apr 15, 2019 12:13:45\n", "8.4.6"),
("Cray clang Version 8.4.6 Mon Apr 15, 2019 12:13:45\n", "8.4.6"),
("Cray Fortran : Version 8.4.6 Mon Apr 15, 2019 12:13:55\n", "8.4.6"),
],
)
def test_cce_version_detection(version_str, expected_version):
version = spack.compilers.cce.Cce.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.regression("10191")
@pytest.mark.parametrize(
"version_str,expected_version",
[
# macOS clang
(
"Apple clang version 11.0.0 (clang-1100.0.33.8)\n"
"Target: x86_64-apple-darwin18.7.0\n"
"Thread model: posix\n"
"InstalledDir: "
"/Applications/Xcode.app/Contents/Developer/Toolchains/"
"XcodeDefault.xctoolchain/usr/bin\n",
"11.0.0",
),
(
"Apple LLVM version 7.0.2 (clang-700.1.81)\n"
"Target: x86_64-apple-darwin15.2.0\n"
"Thread model: posix\n",
"7.0.2",
),
],
)
def test_apple_clang_version_detection(version_str, expected_version):
cls = spack.compilers.class_for_compiler_name("apple-clang")
version = cls.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.regression("10191")
@pytest.mark.parametrize(
"version_str,expected_version",
[
# LLVM Clang
(
"clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n"
"Target: x86_64-pc-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"6.0.1",
),
(
"clang version 3.1 (trunk 149096)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"3.1",
),
(
"clang version 8.0.0-3~ubuntu18.04.1 (tags/RELEASE_800/final)\n"
"Target: x86_64-pc-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"8.0.0",
),
(
"clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n"
"Target: x86_64-pc-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"9.0.1",
),
(
"clang version 8.0.0-3 (tags/RELEASE_800/final)\n"
"Target: aarch64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"8.0.0",
),
(
"clang version 11.0.0\n"
"Target: aarch64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /usr/bin\n",
"11.0.0",
),
],
)
def test_clang_version_detection(version_str, expected_version):
version = spack.compilers.clang.Clang.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
# C compiler
(
"fcc (FCC) 4.0.0a 20190314\n"
"simulating gcc version 6.1\n"
"Copyright FUJITSU LIMITED 2019",
"4.0.0a",
),
# C++ compiler
(
"FCC (FCC) 4.0.0a 20190314\n"
"simulating gcc version 6.1\n"
"Copyright FUJITSU LIMITED 2019",
"4.0.0a",
),
# Fortran compiler
("frt (FRT) 4.0.0a 20190314\n" "Copyright FUJITSU LIMITED 2019", "4.0.0a"),
],
)
def test_fj_version_detection(version_str, expected_version):
version = spack.compilers.fj.Fj.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
# Output of -dumpversion changed to return only major from GCC 7
("4.4.7\n", "4.4.7"),
("7\n", "7"),
],
)
def test_gcc_version_detection(version_str, expected_version):
version = spack.compilers.gcc.Gcc.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
(
"icpc (ICC) 12.1.5 20120612\n"
"Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n",
"12.1.5",
),
(
"ifort (IFORT) 12.1.5 20120612\n"
"Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n",
"12.1.5",
),
],
)
def test_intel_version_detection(version_str, expected_version):
version = spack.compilers.intel.Intel.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
( # ICX/ICPX
"Intel(R) oneAPI DPC++ Compiler 2021.1.2 (2020.10.0.1214)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /made/up/path",
"2021.1.2",
),
( # ICX/ICPX
"Intel(R) oneAPI DPC++ Compiler 2021.2.0 (2021.2.0.20210317)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /made/up/path",
"2021.2.0",
),
( # ICX/ICPX
"Intel(R) oneAPI DPC++/C++ Compiler 2021.3.0 (2021.3.0.20210619)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /made/up/path",
"2021.3.0",
),
( # ICX/ICPX
"Intel(R) oneAPI DPC++/C++ Compiler 2021.4.0 (2021.4.0.20210924)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n"
"InstalledDir: /made/up/path",
"2021.4.0",
),
( # IFX
"ifx (IFORT) 2021.1.2 Beta 20201214\n"
"Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
"2021.1.2",
),
( # IFX
"ifx (IFORT) 2021.2.0 Beta 20210317\n"
"Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
"2021.2.0",
),
( # IFX
"ifx (IFORT) 2021.3.0 Beta 20210619\n"
"Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
"2021.3.0",
),
( # IFX
"ifx (IFORT) 2021.4.0 Beta 20210924\n"
"Copyright (C) 1985-2021 Intel Corporation. All rights reserved.",
"2021.4.0",
),
( # IFX
"ifx (IFORT) 2022.0.0 20211123\n"
"Copyright (C) 1985-2021 Intel Corporation. All rights reserved.",
"2022.0.0",
),
( # IFX
"ifx (IFX) 2023.1.0 20230320\n"
"Copyright (C) 1985-2023 Intel Corporation. All rights reserved.",
"2023.1.0",
),
],
)
def test_oneapi_version_detection(version_str, expected_version):
version = spack.compilers.oneapi.Oneapi.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
(
"NAG Fortran Compiler Release 6.0(Hibiya) Build 1037\n"
"Product NPL6A60NA for x86-64 Linux\n",
"6.0.1037",
)
],
)
def test_nag_version_detection(version_str, expected_version):
version = spack.compilers.nag.Nag.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
# C compiler on x86-64
(
"nvc 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C++ compiler on x86-64
(
"nvc++ 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# Fortran compiler on x86-64
(
"nvfortran 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C compiler on Power
(
"nvc 20.9-0 linuxpower target on Linuxpower\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C++ compiler on Power
(
"nvc++ 20.9-0 linuxpower target on Linuxpower\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# Fortran compiler on Power
(
"nvfortran 20.9-0 linuxpower target on Linuxpower\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C compiler on Arm
(
"nvc 20.9-0 linuxarm64 target on aarch64 Linux\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# C++ compiler on Arm
(
"nvc++ 20.9-0 linuxarm64 target on aarch64 Linux\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
# Fortran compiler on Arm
(
"nvfortran 20.9-0 linuxarm64 target on aarch64 Linux\n"
"NVIDIA Compilers and Tools\n"
"Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
"20.9",
),
],
)
def test_nvhpc_version_detection(version_str, expected_version):
version = spack.compilers.nvhpc.Nvhpc.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
# Output on x86-64
(
"pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge\n"
"The Portland Group - PGI Compilers and Tools\n"
"Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.\n",
"15.10",
),
# Output on PowerPC
(
"pgcc 17.4-0 linuxpower target on Linuxpower\n"
"PGI Compilers and Tools\n"
"Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.\n",
"17.4",
),
# Output when LLVM-enabled
(
"pgcc-llvm 18.4-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
"PGI Compilers and Tools\n"
"Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.\n",
"18.4",
),
],
)
def test_pgi_version_detection(version_str, expected_version):
version = spack.compilers.pgi.Pgi.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
("IBM XL C/C++ for Linux, V11.1 (5724-X14)\n" "Version: 11.01.0000.0000\n", "11.1"),
("IBM XL Fortran for Linux, V13.1 (5724-X16)\n" "Version: 13.01.0000.0000\n", "13.1"),
("IBM XL C/C++ for AIX, V11.1 (5724-X13)\n" "Version: 11.01.0000.0009\n", "11.1"),
(
"IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0\n" "Version: 09.00.0000.0017\n",
"9.0",
),
],
)
def test_xl_version_detection(version_str, expected_version):
version = spack.compilers.xl.Xl.extract_version_from_output(version_str)
assert version == expected_version
version = spack.compilers.xl_r.XlR.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.parametrize(
"version_str,expected_version",
[
# This applies to the C, C++, and Fortran compilers
(
"AMD clang version 12.0.0 (CLANG: AOCC_3_1_0-Build#126 2021_06_07)"
"(based on LLVM Mirror.Version.12.0.0)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"3.1.0",
),
(
"AMD clang version 12.0.0 (CLANG: AOCC_3.0.0-Build#78 2020_12_10)"
"(based on LLVM Mirror.Version.12.0.0)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"3.0.0",
),
(
"AMD clang version 11.0.0 (CLANG: AOCC_2.3.0-Build#85 2020_11_10)"
"(based on LLVM Mirror.Version.11.0.0)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"2.3.0",
),
(
"AMD clang version 10.0.0 (CLANG: AOCC_2.2.0-Build#93 2020_06_25)"
"(based on LLVM Mirror.Version.10.0.0)\n"
"Target: x86_64-unknown-linux-gnu\n"
"Thread model: posix\n",
"2.2.0",
),
],
)
def test_aocc_version_detection(version_str, expected_version):
version = spack.compilers.aocc.Aocc.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.regression("33901")
@pytest.mark.parametrize(
"version_str",
[
(
"Apple clang version 11.0.0 (clang-1100.0.33.8)\n"
"Target: x86_64-apple-darwin18.7.0\n"
"Thread model: posix\n"
"InstalledDir: "
"/Applications/Xcode.app/Contents/Developer/Toolchains/"
"XcodeDefault.xctoolchain/usr/bin\n"
),
(
"Apple LLVM version 7.0.2 (clang-700.1.81)\n"
"Target: x86_64-apple-darwin15.2.0\n"
"Thread model: posix\n"
),
],
)
def test_apple_clang_not_detected_as_cce(version_str):
version = spack.compilers.cce.Cce.extract_version_from_output(version_str)
assert version == "unknown"
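All of the parametrized cases above exercise the same mechanism: each compiler class scrapes its version number out of the tool's ``--version`` banner. A hedged, self-contained approximation of that pattern (the regex here is illustrative, not Spack's exact implementation; note it would not cover bare outputs like GCC's ``7``):

.. code-block:: python

   import re

   def extract_version_from_output(output: str) -> str:
       """Pull a dotted version out of a --version banner, else "unknown"."""
       match = re.search(r"version\s+([0-9]+(?:\.[0-9]+)+)", output)
       return match.group(1) if match else "unknown"

   assert extract_version_from_output(
       "Apple clang version 11.0.0 (clang-1100.0.33.8)"
   ) == "11.0.0"
   assert extract_version_from_output("not a compiler banner") == "unknown"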

View File

@@ -1003,7 +1003,7 @@ def temporary_store(tmpdir, request):
def mock_fetch(mock_archive, monkeypatch):
"""Fake the URL for a package so it downloads from a file."""
monkeypatch.setattr(
spack.package_base.PackageBase, "fetcher", URLFetchStrategy(url=mock_archive.url)
spack.package_base.PackageBase, "fetcher", URLFetchStrategy(mock_archive.url)
)
@@ -1418,24 +1418,6 @@ def mock_git_repository(git, tmpdir_factory):
r1 = rev_hash(branch)
r1_file = branch_file
multiple_directories_branch = "many_dirs"
num_dirs = 3
num_files = 2
dir_files = []
for i in range(num_dirs):
for j in range(num_files):
dir_files.append(f"dir{i}/file{j}")
git("checkout", "-b", multiple_directories_branch)
for f in dir_files:
repodir.ensure(f, file=True)
git("add", f)
git("-c", "commit.gpgsign=false", "commit", "-m", "many_dirs add files")
# restore default
git("checkout", default_branch)
# Map of version -> bunch. Each bunch includes; all the args
# that must be specified as part of a version() declaration (used to
# manufacture a version for the 'git-test' package); the associated
@@ -1455,11 +1437,6 @@ def mock_git_repository(git, tmpdir_factory):
"default-no-per-version-git": Bunch(
revision=default_branch, file=r0_file, args={"branch": default_branch}
),
"many-directories": Bunch(
revision=multiple_directories_branch,
file=dir_files[0],
args={"git": url, "branch": multiple_directories_branch},
),
}
t = Bunch(

View File

@@ -27,7 +27,7 @@ def test_listing_possible_os():
assert expected_os in output
@pytest.mark.not_on_windows("test unsupported on Windows")
@pytest.mark.skipif(str(spack.platforms.host()) == "windows", reason="test unsupported on Windows")
@pytest.mark.maybeslow
@pytest.mark.requires_executables("git")
def test_bootstrap_phase(minimal_configuration, config_dumper, capsys):

View File

@@ -16,7 +16,6 @@
import spack
import spack.cmd
import spack.cmd.external
import spack.compilers
import spack.config
import spack.cray_manifest as cray_manifest

View File

@@ -1,10 +0,0 @@
<html>
<head>
This is the root page.
</head>
<body>
This is a page with a Vue javascript drop down with links as used in GitLab.
<div class="js-source-code-dropdown" data-css-class="" data-download-artifacts="[]" data-download-links="[{&quot;text&quot;:&quot;tar.gz&quot;,&quot;path&quot;:&quot;/foo-5.0.0.tar.gz&quot;}]"></div>
</body>
</html>
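This removed fixture fed Spack's link-scraping code a GitLab-style page where the download links live in a JSON-encoded ``data-download-links`` attribute rather than in ``<a>`` tags. A hedged sketch of how such an attribute can be decoded with only the standard library (the parser class is hypothetical, not Spack's implementation):

.. code-block:: python

   import json
   from html.parser import HTMLParser

   class DownloadLinkParser(HTMLParser):
       """Collect paths from GitLab-style data-download-links attributes."""

       def __init__(self):
           super().__init__()
           self.paths = []

       def handle_starttag(self, tag, attrs):
           for name, value in attrs:
               if name == "data-download-links" and value:
                   # The attribute holds a JSON list of {"text", "path"} items;
                   # HTMLParser has already unescaped &quot; to plain quotes.
                   self.paths.extend(item["path"] for item in json.loads(value))

   parser = DownloadLinkParser()
   parser.feed(
       "<div data-download-links="
       "'[{\"text\":\"tar.gz\",\"path\":\"/foo-5.0.0.tar.gz\"}]'></div>"
   )
   assert parser.paths == ["/foo-5.0.0.tar.gz"]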

View File

@@ -3,21 +3,54 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
import spack.config
import spack.error
import spack.fetch_strategy
import spack.stage
def test_gcsfetchstrategy_downloaded(tmp_path):
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_gcsfetchstrategy_without_url(_fetch_method):
"""Ensure constructor with no URL fails."""
with spack.config.override("config:url_fetch_method", _fetch_method):
with pytest.raises(ValueError):
spack.fetch_strategy.GCSFetchStrategy(None)
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_gcsfetchstrategy_bad_url(tmpdir, _fetch_method):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
with spack.config.override("config:url_fetch_method", _fetch_method):
fetcher = spack.fetch_strategy.GCSFetchStrategy(url="file:///does-not-exist")
assert fetcher is not None
with spack.stage.Stage(fetcher, path=testpath) as stage:
assert stage is not None
assert fetcher.archive_file is None
with pytest.raises(spack.error.FetchError):
fetcher.fetch()
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_gcsfetchstrategy_downloaded(tmpdir, _fetch_method):
"""Ensure fetch with archive file already downloaded is a noop."""
archive = tmp_path / "gcs.tar.gz"
testpath = str(tmpdir)
archive = os.path.join(testpath, "gcs.tar.gz")
class Archived_GCSFS(spack.fetch_strategy.GCSFetchStrategy):
@property
def archive_file(self):
return str(archive)
with spack.config.override("config:url_fetch_method", _fetch_method):
fetcher = Archived_GCSFS(url="gs://example/gcs.tar.gz")
with spack.stage.Stage(fetcher, path=str(tmp_path)):
fetcher.fetch()
class Archived_GCSFS(spack.fetch_strategy.GCSFetchStrategy):
@property
def archive_file(self):
return archive
url = "gcs:///{0}".format(archive)
fetcher = Archived_GCSFS(url=url)
with spack.stage.Stage(fetcher, path=testpath):
fetcher.fetch()
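Both the old and new versions of this test rely on the same contract: when ``archive_file`` already points at an existing file, ``fetch()`` must be a noop instead of re-downloading. A minimal sketch of that guard, independent of the real GCSFetchStrategy:

.. code-block:: python

   import os

   class CachedFetcher:
       """Sketch of a fetcher whose fetch() short-circuits on a cached file."""

       def __init__(self, url: str, destination: str):
           self.url = url
           self.destination = destination

       @property
       def archive_file(self):
           # Present only once the file exists on disk.
           return self.destination if os.path.exists(self.destination) else None

       def fetch(self):
           if self.archive_file:
               return  # already downloaded: nothing to do
           raise NotImplementedError("would download self.url here")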

View File

@@ -390,38 +390,3 @@ def submodules_callback(package):
assert not os.path.isfile(file_path)
file_path = os.path.join(s.package.stage.source_path, "third_party/submodule1/r0_file_1")
assert not os.path.isfile(file_path)
@pytest.mark.disable_clean_stage_check
def test_git_sparse_paths_partial_clone(
mock_git_repository, git_version, default_mock_concretization, mutable_mock_repo, monkeypatch
):
"""
Test a partial clone of the repository when using the git_sparse_paths property
"""
type_of_test = "many-directories"
sparse_paths = ["dir0"]
omitted_paths = ["dir1", "dir2"]
t = mock_git_repository.checks[type_of_test]
args = copy.copy(t.args)
args["git_sparse_paths"] = sparse_paths
s = default_mock_concretization("git-test")
monkeypatch.setitem(s.package.versions, Version("git"), args)
s.package.do_stage()
with working_dir(s.package.stage.source_path):
# top-level directory files are cloned via sparse-checkout
assert os.path.isfile("r0_file")
for p in sparse_paths:
assert os.path.isdir(p)
if git_version < Version("2.26.0.0"):
# older versions of git should fall back to a full clone
for p in omitted_paths:
assert os.path.isdir(p)
else:
for p in omitted_paths:
assert not os.path.isdir(p)
# fixture file is in the sparse-path expansion tree
assert os.path.isfile(t.file)
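For reference, a sparse clone like the one this test stages can be reproduced with plain git (git >= 2.25; the URL and branch in the usage example are placeholders):

.. code-block:: python

   import subprocess

   def sparse_clone(url: str, dest: str, branch: str, paths: list) -> None:
       """Clone only the given top-level paths of a repository (sketch)."""
       # --no-checkout defers populating the worktree until paths are chosen.
       subprocess.run(["git", "clone", "--no-checkout", url, dest], check=True)
       subprocess.run(["git", "-C", dest, "sparse-checkout", "set", *paths], check=True)
       subprocess.run(["git", "-C", dest, "checkout", branch], check=True)

   # e.g. sparse_clone("https://example.com/repo.git", "repo", "main", ["dir0"])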

View File

@@ -610,9 +610,10 @@ def test_install_from_binary_with_missing_patch_succeeds(
temporary_store.db.add(s, directory_layout=temporary_store.layout, explicit=True)
# Push it to a binary cache
mirror = spack.mirror.Mirror.from_local_path(str(tmp_path / "my_build_cache"))
with binary_distribution.make_uploader(mirror=mirror) as uploader:
uploader.push_or_raise([s])
build_cache = tmp_path / "my_build_cache"
binary_distribution.push_or_raise(
[s], out_url=build_cache.as_uri(), signing_key=None, force=False
)
# Now re-install it.
s.package.do_uninstall()
@@ -623,7 +624,7 @@ def test_install_from_binary_with_missing_patch_succeeds(
s.package.do_install()
# Binary install: succeeds, we don't need the patch.
spack.mirror.add(mirror)
spack.mirror.add(spack.mirror.Mirror.from_local_path(str(build_cache)))
s.package.do_install(package_cache_only=True, dependencies_cache_only=True, unsigned=True)
assert temporary_store.db.query_local_by_spec_hash(s.dag_hash())
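The mirror registered here is just a local directory exposed as a ``file://`` URL. ``pathlib.Path.as_uri()`` does that conversion; a tiny POSIX example (the path is illustrative):

.. code-block:: python

   from pathlib import Path

   # as_uri() requires an absolute path and renders it as a file:// URL.
   assert Path("/tmp/my_build_cache").as_uri() == "file:///tmp/my_build_cache"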

View File

@@ -493,13 +493,11 @@ def fake_package_list(compiler, architecture, pkgs):
def test_bootstrapping_compilers_with_different_names_from_spec(
install_mockery, mutable_config, mock_fetch, archspec_host_is_spack_test_host
):
"""Tests that, when we bootstrap '%oneapi' we can translate it to the
'intel-oneapi-compilers' package.
"""
with spack.config.override("config:install_missing_compilers", True):
with spack.concretize.disable_compiler_existence_check():
spec = spack.spec.Spec("trivial-install-test-package%oneapi@=22.2.0").concretized()
spec.package.do_install()
assert (
spack.spec.CompilerSpec("oneapi@=22.2.0") in spack.compilers.all_compiler_specs()
)
@@ -584,7 +582,7 @@ def test_clear_failures_success(tmpdir):
assert os.path.isfile(failures.locker.lock_path)
@pytest.mark.not_on_windows("chmod does not prevent removal on Win")
@pytest.mark.xfail(sys.platform == "win32", reason="chmod does not prevent removal on Win")
def test_clear_failures_errs(tmpdir, capsys):
"""Test the clear_failures exception paths."""
failures = spack.database.FailureTracker(str(tmpdir), default_timeout=0.1)
@@ -751,6 +749,29 @@ def test_install_task_use_cache(install_mockery, monkeypatch):
assert request.pkg_id in installer.installed
def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
config_msg = "mock add_compilers_to_config"
def _add(_compilers):
tty.msg(config_msg)
installer = create_installer(["pkg-a"], {})
task = create_build_task(installer.build_requests[0].pkg)
task.compiler = True
# Preclude any meaningful side-effects
monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _true)
monkeypatch.setattr(inst.PackageInstaller, "_setup_install_dir", _noop)
monkeypatch.setattr(spack.build_environment, "start_build_process", _noop)
monkeypatch.setattr(spack.database.Database, "add", _noop)
monkeypatch.setattr(spack.compilers, "add_compilers_to_config", _add)
installer._install_task(task, None)
out = capfd.readouterr()[0]
assert config_msg in out
def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
"""Test _release_lock for supposed write lock with exception."""
installer = create_installer(["trivial-install-test-package"], {})

View File

@@ -274,7 +274,7 @@ def test_symlinks_false(self, stage):
assert not os.path.islink("dest/2")
check_added_exe_permissions("source/2", "dest/2")
@pytest.mark.not_on_windows("Broken symlinks not allowed on Windows")
@pytest.mark.skipif(sys.platform == "win32", reason="Broken symlinks not allowed on Windows")
def test_allow_broken_symlinks(self, stage):
"""Test installing with a broken symlink."""
with fs.working_dir(str(stage)):

View File

@@ -4,13 +4,19 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import multiprocessing
import os
import signal
import sys
import time
from types import ModuleType
from typing import Optional
import pytest
import llnl.util.lang as lang
import llnl.util.tty.log as log
import llnl.util.tty.pty as pty
from spack.util.executable import which
@@ -167,3 +173,342 @@ def test_log_subproc_and_echo_output_capfd(capfd, tmpdir):
print("logged")
assert capfd.readouterr()[0] == "echo\n"
#
# Tests below use a pseudoterminal to test llnl.util.tty.log
#
def simple_logger(**kwargs):
"""Mock logger (minion) process for testing log.keyboard_input."""
running = [True]
def handler(signum, frame):
running[0] = False
signal.signal(signal.SIGUSR1, handler)
log_path = kwargs["log_path"]
with log.log_output(log_path):
while running[0]:
print("line")
time.sleep(1e-3)
def mock_shell_fg(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.fg()
ctl.status()
ctl.wait_enabled()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_fg_no_termios(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.fg()
ctl.status()
ctl.wait_disabled_fg()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_bg(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.bg()
ctl.status()
ctl.wait_disabled()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_tstp_cont(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.tstp()
ctl.wait_stopped()
ctl.cont()
ctl.wait_running()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_tstp_tstp_cont(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.tstp()
ctl.wait_stopped()
ctl.tstp()
ctl.wait_stopped()
ctl.cont()
ctl.wait_running()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_tstp_tstp_cont_cont(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.tstp()
ctl.wait_stopped()
ctl.tstp()
ctl.wait_stopped()
ctl.cont()
ctl.wait_running()
ctl.cont()
ctl.wait_running()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_bg_fg(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.bg()
ctl.status()
ctl.wait_disabled()
ctl.fg()
ctl.status()
ctl.wait_enabled()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_bg_fg_no_termios(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.bg()
ctl.status()
ctl.wait_disabled()
ctl.fg()
ctl.status()
ctl.wait_disabled_fg()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_fg_bg(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.fg()
ctl.status()
ctl.wait_enabled()
ctl.bg()
ctl.status()
ctl.wait_disabled()
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_fg_bg_no_termios(proc, ctl, **kwargs):
"""PseudoShell controller function for test_foreground_background."""
ctl.fg()
ctl.status()
ctl.wait_disabled_fg()
ctl.bg()
ctl.status()
ctl.wait_disabled()
os.kill(proc.pid, signal.SIGUSR1)
@contextlib.contextmanager
def no_termios():
saved = log.termios
log.termios = None
try:
yield
finally:
log.termios = saved
@pytest.mark.skipif(not which("ps"), reason="requires ps utility")
@pytest.mark.skipif(not termios, reason="requires termios support")
@pytest.mark.parametrize(
"test_fn,termios_on_or_off",
[
# tests with termios
(mock_shell_fg, lang.nullcontext),
(mock_shell_bg, lang.nullcontext),
(mock_shell_bg_fg, lang.nullcontext),
(mock_shell_fg_bg, lang.nullcontext),
(mock_shell_tstp_cont, lang.nullcontext),
(mock_shell_tstp_tstp_cont, lang.nullcontext),
(mock_shell_tstp_tstp_cont_cont, lang.nullcontext),
# tests without termios
(mock_shell_fg_no_termios, no_termios),
(mock_shell_bg, no_termios),
(mock_shell_bg_fg_no_termios, no_termios),
(mock_shell_fg_bg_no_termios, no_termios),
(mock_shell_tstp_cont, no_termios),
(mock_shell_tstp_tstp_cont, no_termios),
(mock_shell_tstp_tstp_cont_cont, no_termios),
],
)
@pytest.mark.xfail(reason="Fails almost consistently when run with coverage and xdist")
def test_foreground_background(test_fn, termios_on_or_off, tmpdir):
"""Functional tests for foregrounding and backgrounding a logged process.
This ensures that things like SIGTTOU are not raised and that
terminal settings are corrected on foreground/background and on
process stop and start.
"""
shell = pty.PseudoShell(test_fn, simple_logger)
log_path = str(tmpdir.join("log.txt"))
# run the shell test
with termios_on_or_off():
shell.start(log_path=log_path, debug=True)
exitcode = shell.join()
# processes completed successfully
assert exitcode == 0
# assert log was created
assert os.path.exists(log_path)
def synchronized_logger(**kwargs):
"""Mock logger (minion) process for testing log.keyboard_input.
This logger synchronizes with the parent process to test that 'v' can
toggle output. It is used in ``test_foreground_background_output`` below.
"""
running = [True]
def handler(signum, frame):
running[0] = False
signal.signal(signal.SIGUSR1, handler)
log_path = kwargs["log_path"]
write_lock = kwargs["write_lock"]
v_lock = kwargs["v_lock"]
sys.stderr.write(os.getcwd() + "\n")
with log.log_output(log_path) as logger:
with logger.force_echo():
print("forced output")
while running[0]:
with write_lock:
if v_lock.acquire(False): # non-blocking acquire
print("off")
v_lock.release()
else:
print("on") # lock held; v is toggled on
time.sleep(1e-2)
def mock_shell_v_v(proc, ctl, **kwargs):
"""Controller function for test_foreground_background_output."""
write_lock = kwargs["write_lock"]
v_lock = kwargs["v_lock"]
ctl.fg()
ctl.wait_enabled()
time.sleep(0.1)
write_lock.acquire() # suspend writing
v_lock.acquire() # enable v lock
ctl.write(b"v") # toggle v on stdin
time.sleep(0.1)
write_lock.release() # resume writing
time.sleep(0.1)
write_lock.acquire() # suspend writing
ctl.write(b"v") # toggle v on stdin
time.sleep(0.1)
v_lock.release() # disable v lock
write_lock.release() # resume writing
time.sleep(0.1)
os.kill(proc.pid, signal.SIGUSR1)
def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
"""Controller function for test_foreground_background_output."""
write_lock = kwargs["write_lock"]
v_lock = kwargs["v_lock"]
ctl.fg()
ctl.wait_disabled_fg()
time.sleep(0.1)
write_lock.acquire() # suspend writing
v_lock.acquire() # enable v lock
ctl.write(b"v\n") # toggle v on stdin
time.sleep(0.1)
write_lock.release() # resume writing
time.sleep(0.1)
write_lock.acquire() # suspend writing
ctl.write(b"v\n") # toggle v on stdin
time.sleep(0.1)
v_lock.release() # disable v lock
write_lock.release() # resume writing
time.sleep(0.1)
os.kill(proc.pid, signal.SIGUSR1)
@pytest.mark.skipif(not which("ps"), reason="requires ps utility")
@pytest.mark.skipif(not termios, reason="requires termios support")
@pytest.mark.parametrize(
"test_fn,termios_on_or_off",
[(mock_shell_v_v, lang.nullcontext), (mock_shell_v_v_no_termios, no_termios)],
)
@pytest.mark.xfail(reason="Fails almost consistently when run with coverage and xdist")
def test_foreground_background_output(test_fn, capfd, termios_on_or_off, tmpdir):
"""Tests hitting 'v' toggles output, and that force_echo works."""
if sys.version_info >= (3, 8) and sys.platform == "darwin" and termios_on_or_off == no_termios:
return
shell = pty.PseudoShell(test_fn, synchronized_logger)
log_path = str(tmpdir.join("log.txt"))
# Locks for synchronizing with minion
write_lock = multiprocessing.Lock() # must be held by minion to write
v_lock = multiprocessing.Lock() # held while controller is in v mode
with termios_on_or_off():
shell.start(write_lock=write_lock, v_lock=v_lock, debug=True, log_path=log_path)
exitcode = shell.join()
out, err = capfd.readouterr()
print(err) # will be shown if something goes wrong
print(out)
# processes completed successfully
assert exitcode == 0
# split output into lines
output = out.strip().split("\n")
# also get lines of log file
assert os.path.exists(log_path)
with open(log_path) as logfile:
log_data = logfile.read().strip().split("\n")
# Controller and minion process coordinate with locks such that the
# minion writes "off" when echo is off, and "on" when echo is on. The
# output should contain mostly "on" lines, but may contain "off"
# lines if the controller is slow. The important thing to observe
# here is that we eventually start seeing "on".
assert ["forced output", "on"] == lang.uniq(output) or [
"forced output",
"off",
"on",
] == lang.uniq(output)
# log should be off for a while, then on, then off
assert ["forced output", "off", "on", "off"] == lang.uniq(log_data) and log_data.count(
"off"
) > 2  # the log keeps the "off" lines that were omitted from the terminal output
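The comparisons above hinge on the semantics of ``lang.uniq``: it collapses runs of equal consecutive elements rather than deduplicating globally, so a log that toggles off/on/off reduces to exactly that toggle sequence. An illustrative equivalent built on ``itertools.groupby`` (assuming, as the assertions imply, consecutive-run semantics):

.. code-block:: python

   from itertools import groupby

   def uniq(seq):
       """Collapse runs of equal consecutive elements (sketch of lang.uniq)."""
       return [key for key, _ in groupby(seq)]

   assert uniq(["off", "off", "on", "on", "off"]) == ["off", "on", "off"]
   assert uniq(["off", "on", "off"]) == ["off", "on", "off"]  # already collapsed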

View File

@@ -9,13 +9,16 @@
This just tests whether the right args are getting passed to make.
"""
import os
import sys
import pytest
from spack.build_environment import MakeExecutable
from spack.util.environment import path_put_first
pytestmark = pytest.mark.not_on_windows("MakeExecutable not supported on Windows")
pytestmark = pytest.mark.skipif(
sys.platform == "win32", reason="MakeExecutable not supported on Windows"
)
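A module-level ``pytestmark`` like the one introduced here applies its mark to every test in the file, which is how the whole module gets skipped on Windows in one line:

.. code-block:: python

   import sys

   import pytest

   # Applied by pytest to every test function defined in this module.
   pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="POSIX-only tests")

   def test_posix_only():
       assert sys.platform != "win32"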
@pytest.fixture(autouse=True)

View File

@@ -10,21 +10,20 @@
from llnl.util.symlink import resolve_link_target_relative_to_the_link
import spack.caches
import spack.config
import spack.fetch_strategy
import spack.mirror
import spack.patch
import spack.repo
import spack.stage
import spack.util.executable
import spack.util.spack_json as sjson
import spack.util.url as url_util
from spack.spec import Spec
from spack.stage import Stage
from spack.util.executable import which
from spack.util.spack_yaml import SpackYAMLError
pytestmark = [pytest.mark.usefixtures("mutable_config", "mutable_mock_repo")]
pytestmark = [
pytest.mark.not_on_windows("does not run on windows"),
pytest.mark.usefixtures("mutable_config", "mutable_mock_repo"),
]
# paths in repos that shouldn't be in the mirror tarballs.
exclude = [".hg", ".git", ".svn"]
@@ -52,7 +51,7 @@ def set_up_package(name, repository, url_attr):
def check_mirror():
with spack.stage.Stage("spack-mirror-test") as stage:
with Stage("spack-mirror-test") as stage:
mirror_root = os.path.join(stage.path, "test-mirror")
# register mirror with spack config
mirrors = {"spack-mirror-test": url_util.path_to_file_url(mirror_root)}
@@ -67,8 +66,8 @@ def check_mirror():
for spec in specs:
fetcher = spec.package.fetcher
per_package_ref = os.path.join(spec.name, "-".join([spec.name, str(spec.version)]))
mirror_layout = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
expected_path = os.path.join(mirror_root, mirror_layout.path)
mirror_paths = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
expected_path = os.path.join(mirror_root, mirror_paths.storage_path)
assert os.path.exists(expected_path)
# Now try to fetch each package.
@@ -203,12 +202,14 @@ def test_invalid_json_mirror_collection(invalid_json, error_message):
def test_mirror_archive_paths_no_version(mock_packages, mock_archive):
spec = Spec("trivial-install-test-package@=nonexistingversion").concretized()
fetcher = spack.fetch_strategy.URLFetchStrategy(url=mock_archive.url)
spack.mirror.default_mirror_layout(fetcher, "per-package-ref", spec)
fetcher = spack.fetch_strategy.URLFetchStrategy(mock_archive.url)
spack.mirror.mirror_archive_paths(fetcher, "per-package-ref", spec)
def test_mirror_with_url_patches(mock_packages, monkeypatch):
spec = Spec("patch-several-dependencies").concretized()
spec = Spec("patch-several-dependencies")
spec.concretize()
files_cached_in_mirror = set()
def record_store(_class, fetcher, relative_dst, cosmetic_path=None):
@@ -227,25 +228,30 @@ def successful_expand(_class):
def successful_apply(*args, **kwargs):
pass
def successful_make_alias(*args, **kwargs):
def successful_symlink(*args, **kwargs):
pass
with spack.stage.Stage("spack-mirror-test") as stage:
with Stage("spack-mirror-test") as stage:
mirror_root = os.path.join(stage.path, "test-mirror")
monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "fetch", successful_fetch)
monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "expand", successful_expand)
monkeypatch.setattr(spack.patch, "apply_patch", successful_apply)
monkeypatch.setattr(spack.caches.MirrorCache, "store", record_store)
monkeypatch.setattr(spack.mirror.DefaultLayout, "make_alias", successful_make_alias)
monkeypatch.setattr(spack.caches.MirrorCache, "symlink", successful_symlink)
with spack.config.override("config:checksum", False):
spack.mirror.create(mirror_root, list(spec.traverse()))
assert {
"abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz",
}.issubset(files_cached_in_mirror)
assert not (
set(
[
"abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz",
]
)
- files_cached_in_mirror
)
class MockFetcher:
@@ -260,21 +266,23 @@ def archive(dst):
@pytest.mark.regression("14067")
def test_mirror_layout_make_alias(tmpdir):
def test_mirror_cache_symlinks(tmpdir):
"""Confirm that the cosmetic symlink created in the mirror cache (which may
be relative) targets the storage path correctly.
"""
alias = os.path.join("zlib", "zlib-1.2.11.tar.gz")
path = os.path.join("_source-cache", "archive", "c3", "c3e5.tar.gz")
cache = spack.caches.MirrorCache(root=str(tmpdir), skip_unstable_versions=False)
layout = spack.mirror.DefaultLayout(alias, path)
cosmetic_path = "zlib/zlib-1.2.11.tar.gz"
global_path = "_source-cache/archive/c3/c3e5.tar.gz"
cache = spack.caches.MirrorCache(str(tmpdir), False)
reference = spack.mirror.MirrorReference(cosmetic_path, global_path)
cache.store(MockFetcher(), layout.path)
layout.make_alias(cache.root)
cache.store(MockFetcher(), reference.storage_path)
cache.symlink(reference)
link_target = resolve_link_target_relative_to_the_link(os.path.join(cache.root, layout.alias))
link_target = resolve_link_target_relative_to_the_link(
os.path.join(cache.root, reference.cosmetic_path)
)
assert os.path.exists(link_target)
assert os.path.normpath(link_target) == os.path.join(cache.root, layout.path)
assert os.path.normpath(link_target) == os.path.join(cache.root, reference.storage_path)
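The helper used in the final assertions resolves a symlink's target against the directory containing the link, so relative cosmetic links still compare equal to the absolute storage path. A hedged equivalent of that behavior:

.. code-block:: python

   import os

   def resolve_relative_to_link(link: str) -> str:
       """Resolve a symlink target; relative targets are taken relative to
       the link's own directory (sketch of the llnl.util.symlink helper)."""
       target = os.readlink(link)
       if os.path.isabs(target):
           return target
       return os.path.join(os.path.dirname(link), target)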
@pytest.mark.regression("31627")

View File

@@ -11,7 +11,6 @@
import os
import pathlib
import re
import urllib.error
from contextlib import contextmanager
import pytest
@@ -294,10 +293,7 @@ def test_uploading_with_base_image_in_docker_image_manifest_v2_format(
def test_best_effort_upload(mutable_database: spack.database.Database, monkeypatch):
"""Failure to upload a blob or manifest should not prevent others from being uploaded -- it
should be a best-effort operation. If any runtime dep fails to upload, it results in a missing
layer for dependents. But we do still create manifests for dependents, so that the build cache
is maximally useful. (The downside is that container images are not runnable)."""
"""Failure to upload a blob or manifest should not prevent others from being uploaded"""
_push_blob = spack.binary_distribution._oci_push_pkg_blob
_push_manifest = spack.binary_distribution._oci_put_manifest
@@ -319,51 +315,32 @@ def put_manifest(base_images, checksums, image_ref, tmpdir, extra_config, annota
monkeypatch.setattr(spack.binary_distribution, "_oci_push_pkg_blob", push_blob)
monkeypatch.setattr(spack.binary_distribution, "_oci_put_manifest", put_manifest)
mirror("add", "oci-test", "oci://example.com/image")
registry = InMemoryOCIRegistry("example.com")
image = ImageReference.from_string("example.com/image")
with oci_servers(registry):
with pytest.raises(spack.error.SpackError, match="The following 2 errors occurred") as e:
mirror("add", "oci-test", "oci://example.com/image")
with pytest.raises(spack.error.SpackError, match="The following 4 errors occurred") as e:
buildcache("push", "--update-index", "oci-test", "mpileaks^mpich")
# mpich's blob failed to upload and libdwarf's manifest failed to upload
assert re.search("mpich.+: Exception: Blob Server Error", e.value)
assert re.search("libdwarf.+: Exception: Manifest Server Error", e.value)
error = str(e.value)
mpileaks: spack.spec.Spec = mutable_database.query_local("mpileaks^mpich")[0]
# mpich's blob failed to upload
assert re.search("mpich.+: Exception: Blob Server Error", error)
without_manifest = ("mpich", "libdwarf")
# libdwarf's manifest failed to upload
assert re.search("libdwarf.+: Exception: Manifest Server Error", error)
# Verify that manifests of mpich/libdwarf are missing due to upload failure.
for name in without_manifest:
tagged_img = image.with_tag(default_tag(mpileaks[name]))
with pytest.raises(urllib.error.HTTPError, match="404"):
get_manifest_and_config(tagged_img)
# since there is no blob for mpich, runtime dependents cannot refer to it in their
# manifests, which is a transitive error.
assert re.search("callpath.+: MissingLayerError: missing layer for mpich", error)
assert re.search("mpileaks.+: MissingLayerError: missing layer for mpich", error)
# Collect the layer digests of successfully uploaded packages. Every package should refer
# to its own tarballs and those of its runtime deps that were uploaded.
pkg_to_all_digests = {}
pkg_to_own_digest = {}
for s in mpileaks.traverse():
if s.name in without_manifest:
continue
# This should not raise a 404.
manifest, _ = get_manifest_and_config(image.with_tag(default_tag(s)))
mpileaks: spack.spec.Spec = mutable_database.query_local("mpileaks^mpich")[0]
# Collect layer digests
pkg_to_all_digests[s.name] = {layer["digest"] for layer in manifest["layers"]}
pkg_to_own_digest[s.name] = manifest["layers"][-1]["digest"]
# ensure that packages not affected by errors were uploaded still.
uploaded_tags = {tag for _, tag in registry.manifests.keys()}
failures = {"mpich", "libdwarf", "callpath", "mpileaks"}
expected_tags = {default_tag(s) for s in mpileaks.traverse() if s.name not in failures}
# Verify that all packages reference blobs of their runtime deps that uploaded fine.
for s in mpileaks.traverse():
if s.name in without_manifest:
continue
expected_digests = {
pkg_to_own_digest[t.name]
for t in s.traverse(deptype=("link", "run"), root=True)
if t.name not in without_manifest
}
# Test with issubset, cause we don't have the blob of libdwarf as it has no manifest.
assert expected_digests and expected_digests.issubset(pkg_to_all_digests[s.name])
assert expected_tags
assert uploaded_tags == expected_tags
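The error message asserted above ("The following N errors occurred") reflects a best-effort strategy: attempt every upload, remember each failure, and raise one aggregated error at the end. A minimal sketch of that control flow (function and task names are illustrative):

.. code-block:: python

   def best_effort(tasks):
       """Run every (name, thunk) task even if some fail, then raise once."""
       errors = []
       for name, task in tasks:
           try:
               task()
           except Exception as exc:
               errors.append(f"{name}: {type(exc).__name__}: {exc}")
       if errors:
           raise RuntimeError(
               f"The following {len(errors)} errors occurred:\n" + "\n".join(errors)
           )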

View File

@@ -259,7 +259,6 @@ def test_git_url_top_level_git_versions(version_str, tag, commit, branch):
assert fetcher.tag == tag
assert fetcher.commit == commit
assert fetcher.branch == branch
assert fetcher.url == pkg_factory("git-url-top-level").git
@pytest.mark.usefixtures("mock_packages", "config")
@@ -320,14 +319,3 @@ def test_package_deprecated_version(mock_packages, mock_fetch, mock_stage):
assert spack.package_base.deprecated_version(pkg_cls, "1.1.0")
assert not spack.package_base.deprecated_version(pkg_cls, "1.0.0")
def test_package_can_have_sparse_checkout_properties(mock_packages, mock_fetch, mock_stage):
spec = Spec("git-sparsepaths-pkg")
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
assert hasattr(pkg_cls, "git_sparse_paths")
fetcher = spack.fetch_strategy.for_package_version(pkg_cls(spec), "1.0")
assert isinstance(fetcher, spack.fetch_strategy.GitFetchStrategy)
assert hasattr(fetcher, "git_sparse_paths")
assert fetcher.git_sparse_paths == pkg_cls.git_sparse_paths

View File

@@ -48,7 +48,7 @@
def test_buildcache(mock_archive, tmp_path, monkeypatch, mutable_config):
# Install a test package
spec = Spec("trivial-install-test-package").concretized()
monkeypatch.setattr(spec.package, "fetcher", URLFetchStrategy(url=mock_archive.url))
monkeypatch.setattr(spec.package, "fetcher", URLFetchStrategy(mock_archive.url))
spec.package.do_install()
pkghash = "/" + str(spec.dag_hash(7))

View File

@@ -3,19 +3,54 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
import spack.config as spack_config
import spack.error
import spack.fetch_strategy as spack_fs
import spack.stage as spack_stage
def test_s3fetchstrategy_downloaded(tmp_path):
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_s3fetchstrategy_sans_url(_fetch_method):
"""Ensure constructor with no URL fails."""
with spack_config.override("config:url_fetch_method", _fetch_method):
with pytest.raises(ValueError):
spack_fs.S3FetchStrategy(None)
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_s3fetchstrategy_bad_url(tmpdir, _fetch_method):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
with spack_config.override("config:url_fetch_method", _fetch_method):
fetcher = spack_fs.S3FetchStrategy(url="file:///does-not-exist")
assert fetcher is not None
with spack_stage.Stage(fetcher, path=testpath) as stage:
assert stage is not None
assert fetcher.archive_file is None
with pytest.raises(spack.error.FetchError):
fetcher.fetch()
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_s3fetchstrategy_downloaded(tmpdir, _fetch_method):
"""Ensure fetch with archive file already downloaded is a noop."""
archive = tmp_path / "s3.tar.gz"
testpath = str(tmpdir)
archive = os.path.join(testpath, "s3.tar.gz")
class Archived_S3FS(spack_fs.S3FetchStrategy):
@property
def archive_file(self):
return archive
with spack_config.override("config:url_fetch_method", _fetch_method):
fetcher = Archived_S3FS(url="s3://example/s3.tar.gz")
with spack_stage.Stage(fetcher, path=str(tmp_path)):
fetcher.fetch()
class Archived_S3FS(spack_fs.S3FetchStrategy):
@property
def archive_file(self):
return archive
url = "s3:///{0}".format(archive)
fetcher = Archived_S3FS(url=url)
with spack_stage.Stage(fetcher, path=testpath):
fetcher.fetch()

Some files were not shown because too many files have changed in this diff.