Compare commits

..

3 Commits

Author          SHA1        Message                                                   Date
Gregory Becker  2cc0e533c0  add test for incompatible version from specific package  2025-02-10 20:41:37 -08:00
Gregory Becker  7a8eaa1657  add test for incompatible repos                           2025-02-10 18:04:42 -08:00
Gregory Becker  d8baa193b3  repo compat: initial draft                                2025-02-10 15:08:51 -08:00
203 changed files with 1672 additions and 1872 deletions

View File

@@ -39,7 +39,7 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
ref: e38bcd0aa46368e30648b61b7f0d8c1ca68aadff
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter

View File

@@ -63,7 +63,3 @@ concretizer:
# Setting this to false yields unreproducible results, so we advise using that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true
# Static analysis may reduce the concretization time by generating smaller ASP problems, in
# cases where there are requirements that prevent part of the search space from being explored.
static_analysis: false

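Note: the option removed above is the same one the deleted input_analysis.py dispatches on (see create_graph_analyzer near the end of this compare). For reference, a sketch of enabling it, mirroring the format of this concretizer.yaml hunk:

concretizer:
  static_analysis: true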
View File

@@ -344,6 +344,26 @@ def close(self):
self.file.close()
@contextmanager
def replace_environment(env):
"""Replace the current environment (`os.environ`) with `env`.
If `env` is empty (or None), this unsets all current environment
variables.
"""
env = env or {}
old_env = os.environ.copy()
try:
os.environ.clear()
for name, val in env.items():
os.environ[name] = val
yield
finally:
os.environ.clear()
for name, val in old_env.items():
os.environ[name] = val
def log_output(*args, **kwargs):
"""Context manager that logs its output to a file.
@@ -427,6 +447,7 @@ def __init__(
self.echo = echo
self.debug = debug
self.buffer = buffer
self.env = env # the environment to use for _writer_daemon
self.filter_fn = filter_fn
self._active = False # used to prevent re-entry
@@ -498,20 +519,21 @@ def __enter__(self):
# just don't forward input if this fails
pass
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
with replace_environment(self.env):
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
finally:
if input_fd:
@@ -707,7 +729,10 @@ class winlog:
Does not support the use of 'v' toggling as nixlog does.
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
def __init__(
self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
):
self.env = env
self.debug = debug
self.echo = echo
self.logfile = file_like
@@ -764,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
reader.close()
self._active = True
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
with replace_environment(self.env):
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):

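Note: the restored replace_environment is what lets log_output (and winlog) spawn the writer daemon under the package's unmodified environment; see the env= argument threaded through installer.py further down. A minimal usage sketch, assuming the module path llnl.util.tty.log:

import os
from llnl.util.tty.log import replace_environment  # assumed import path

os.environ["MARKER"] = "outer"
with replace_environment({"MARKER": "inner"}):
    # os.environ now contains exactly the given mapping, so processes and
    # threads started here inherit it
    assert os.environ["MARKER"] == "inner"
# the previous environment is restored on exit, even if the body raised
assert os.environ["MARKER"] == "outer"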
View File

@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
for dep_name, dep in deps_by_name.items():
def check_virtual_with_variants(spec, msg):
if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
if not spec.virtual or not spec.variants:
return
error = error_cls(
f"{pkg_name}: {msg}",

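Note: many hunks in this compare swap spack.repo.PATH.is_virtual(spec.name) for the Spec.virtual property; this branch restores the older spelling that the spec.py hunk at the end of the diff un-deprecates. A sketch of the equivalence:

import spack.repo
import spack.spec

s = spack.spec.Spec("mpi")
# the property is just the repo lookup on the spec's own name
assert s.virtual == spack.repo.PATH.is_virtual(s.name)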
View File

@@ -2529,10 +2529,10 @@ def install_root_node(
allow_missing: when true, allows installing a node with missing dependencies
"""
# Early termination
if spec.external or not spec.concrete:
warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
if spec.external or spec.virtual:
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
return
elif spec.installed and not force:
elif spec.concrete and spec.installed and not force:
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
return

View File

@@ -16,7 +16,6 @@
from typing import Callable, Dict, List, Set
from urllib.request import Request
import llnl.path
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize
@@ -84,9 +83,6 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
whether or not the stack was changed. Returns True if the environment
manifest changed between the provided revisions (or additionally if the
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
# git returns posix paths always, normalize input to be compatible
# with that
env_path = llnl.path.convert_to_posix_path(env_path)
git = spack.util.git.git()
if git:
with fs.working_dir(spack.paths.prefix):

View File

@@ -9,9 +9,9 @@
import spack.cmd
import spack.environment as ev
import spack.package_base
import spack.store
from spack.cmd.common import arguments
from spack.solver.input_analysis import create_graph_analyzer
description = "show dependencies of a package"
section = "basic"
@@ -68,17 +68,15 @@ def dependencies(parser, args):
else:
spec = specs[0]
dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies(
dependencies = spack.package_base.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,
allowed_deps=args.deptype,
depflag=args.deptype,
)
if not args.expand_virtuals:
dependencies.update(virtuals)
if spec.name in dependencies:
dependencies.remove(spec.name)
del dependencies[spec.name]
if dependencies:
colify(sorted(dependencies))

View File

@@ -125,7 +125,7 @@ def develop(parser, args):
version = spec.versions.concrete_range_as_version
if not version:
# look up the maximum version so infinity versions are preferred for develop
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])

View File

@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
package does not explicitly forbid redistributing source."""
if self.private:
return True
elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x):
elif x.package_class.redistribute_source(x):
return True
else:
tty.debug(

View File

@@ -41,11 +41,7 @@ def providers(parser, args):
specs = spack.cmd.parse_specs(args.virtual_package)
# Check prerequisites
non_virtual = [
str(s)
for s in specs
if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
]
non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
if non_virtual:
msg = "non-virtual specs cannot be part of the query "
msg += "[{0}]\n".format(", ".join(non_virtual))

View File

@@ -6,7 +6,7 @@
import os
import re
import sys
from itertools import islice, zip_longest
from itertools import zip_longest
from typing import Dict, List, Optional
import llnl.util.tty as tty
@@ -423,8 +423,7 @@ def _run_import_check(
continue
for m in is_abs_import.finditer(contents):
# Find at most two occurrences: the first is the import itself, the second is its usage.
if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
if contents.count(m.group(1)) == 1:
to_remove.append(m.group(0))
exit_code = 1
print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
@@ -439,7 +438,7 @@ def _run_import_check(
module = _module_part(root, m.group(0))
if not module or module in to_add:
continue
if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents):
if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
continue
to_add.add(module)
exit_code = 1

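Note: the regex form being dropped here counts whole-token matches only, while plain str.count also hits substrings of longer identifiers. A small illustration with hypothetical module names:

import re

contents = "import spack.repo\nprint(spack.repo_utils.x)\n"
module = "spack.repo"
# substring counting sees two hits (the second is inside "spack.repo_utils"),
# so the import would wrongly look used
assert contents.count(module) == 2
# the lookahead regex from the removed line counts only the true occurrence
assert len(re.findall(rf"{re.escape(module)}(?!\w)", contents)) == 1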
View File

@@ -252,9 +252,7 @@ def has_test_and_tags(pkg_class):
hashes = env.all_hashes() if env else None
specs = spack.store.STORE.db.query(hashes=hashes)
specs = list(
filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
)
specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
spack.cmd.display_specs(specs, long=True)

View File

@@ -220,7 +220,7 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
opt, i, answer = min(result.answers)
name = spec.name
# TODO: Consolidate this code with similar code in solve.py
if spack.repo.PATH.is_virtual(spec.name):
if spec.virtual:
providers = [s.name for s in answer.values() if s.package.provides(name)]
name = providers[0]

View File

@@ -41,8 +41,6 @@
Union,
)
import spack.repo
try:
import uuid
@@ -1558,12 +1556,7 @@ def _query(
# If we did find something, the query spec can't be virtual b/c we matched an actual
# package installation, so skip the virtual check entirely. If we *didn't* find anything,
# check all the deferred specs *if* the query is virtual.
if (
not results
and query_spec is not None
and deferred
and spack.repo.PATH.is_virtual(query_spec.name)
):
if not results and query_spec is not None and deferred and query_spec.virtual:
results = [spec for spec in deferred if spec.satisfies(query_spec)]
return results

View File

@@ -259,8 +259,6 @@ def detect_specs(
)
return []
from spack.repo import PATH as repo_path
result = []
for candidate_path, items_in_prefix in _group_by_prefix(
llnl.util.lang.dedupe(paths)
@@ -307,10 +305,7 @@ def detect_specs(
resolved_specs[spec] = candidate_path
try:
# Validate the spec calling a package specific method
pkg_cls = repo_path.get_pkg_class(spec.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(spec, spec.extra_attributes)
spec.validate_detection()
except Exception as e:
msg = (
f'"{spec}" has been detected on the system but will '

View File

@@ -42,10 +42,10 @@
import llnl.util.tty.color
import spack.deptypes as dt
import spack.repo
import spack.spec
import spack.tengine
import spack.traverse
from spack.solver.input_analysis import create_graph_analyzer
def find(seq, predicate):
@@ -537,11 +537,10 @@ def edge_entry(self, edge):
def _static_edges(specs, depflag):
for spec in specs:
*_, edges = create_graph_analyzer().possible_dependencies(
spec.name, expand_virtuals=True, allowed_deps=depflag
)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
for parent_name, dependencies in edges.items():
for parent_name, dependencies in possible.items():
for dependency_name in dependencies:
yield spack.spec.DependencySpec(
spack.spec.Spec(parent_name),

View File

@@ -566,7 +566,7 @@ def copy_test_files(pkg: Pb, test_spec: spack.spec.Spec):
# copy test data into test stage data dir
try:
pkg_cls = spack.repo.PATH.get_pkg_class(test_spec.fullname)
pkg_cls = test_spec.package_class
except spack.repo.UnknownPackageError:
tty.debug(f"{test_spec.name}: skipping test data copy since no package class found")
return
@@ -623,7 +623,7 @@ def test_functions(
vpkgs = virtuals(pkg)
for vname in vpkgs:
try:
classes.append(spack.repo.PATH.get_pkg_class(vname))
classes.append((Spec(vname)).package_class)
except spack.repo.UnknownPackageError:
tty.debug(f"{vname}: virtual does not appear to have a package file")
@@ -668,7 +668,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
# grab test functions associated with the spec, which may be virtual
try:
tests = test_functions(spack.repo.PATH.get_pkg_class(spec.fullname))
tests = test_functions(spec.package_class)
except spack.repo.UnknownPackageError:
# Some virtuals don't have a package so we don't want to report
# them as not having tests when that isn't appropriate.

View File

@@ -2436,7 +2436,11 @@ def _real_install(self) -> None:
# DEBUGGING TIP - to debug this section, insert an IPython
# embed here, and run the sections below without log capture
log_contextmanager = log_output(
log_file, self.echo, True, filter_fn=self.filter_fn
log_file,
self.echo,
True,
env=self.unmodified_env,
filter_fn=self.filter_fn,
)
with log_contextmanager as logger:

View File

@@ -209,7 +209,7 @@ def provides(self):
# All the other tokens in the hierarchy must be virtual dependencies
for x in self.hierarchy_tokens:
if self.spec.package.provides(x):
provides[x] = self.spec
provides[x] = self.spec[x]
return provides
@property

View File

@@ -22,6 +22,7 @@
import textwrap
import time
import traceback
import typing
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
from typing_extensions import Literal
@@ -30,6 +31,7 @@
import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
import spack
import spack.compilers
import spack.config
import spack.dependency
@@ -59,6 +61,7 @@
from spack.util.package_hash import package_hash
from spack.util.typing import SupportsRichComparison
from spack.version import GitVersion, StandardVersion
from spack.version import ver as version_from_str
FLAG_HANDLER_RETURN_TYPE = Tuple[
Optional[Iterable[str]], Optional[Iterable[str]], Optional[Iterable[str]]
@@ -709,6 +712,19 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Do not include @ here in order not to unnecessarily ping the users.
maintainers: List[str] = []
#: List of attributes to be excluded from a package's hash.
metadata_attrs = [
"homepage",
"url",
"urls",
"list_url",
"extendable",
"parallel",
"make_jobs",
"maintainers",
"tags",
]
#: Set to ``True`` to indicate the stand-alone test requires a compiler.
#: It is used to ensure a compiler and build dependencies like 'cmake'
#: are available to build a custom test code.
@@ -717,10 +733,22 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: TestSuite instance used to manage stand-alone tests for 1+ specs.
test_suite: Optional[Any] = None
#: Compatibility requirements with Spack.
#: If the value is ``None``, the repo-level requirements still apply.
required_spack_version = None
def __init__(self, spec):
# this determines how the package should be built.
self.spec: spack.spec.Spec = spec
# is this package more restrictive in compatibility than the repo is
if self.required_spack_version:
spack_version = version_from_str(spack.spack_version)
required_version = version_from_str(self.required_spack_version)
if not spack_version.satisfies(required_version):
msg = f"Package {self.name} requires Spack version {self.required_spack_version}."
raise PackageError(msg)
# Allow custom staging paths for packages
self.path = None
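Note: a minimal sketch of a package opting into the per-package check added above (hypothetical package; attribute name and failure mode follow the __init__ code in this hunk):

from spack.package import *

class Example(Package):
    """Hypothetical package illustrating required_spack_version."""

    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"

    # stricter than the repo: need Spack 0.23 or newer; under an older
    # Spack, constructing the package raises PackageError
    required_spack_version = "0.23:"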
@@ -808,6 +836,104 @@ def get_variant(self, name: str) -> spack.variant.Variant:
except StopIteration:
raise ValueError(f"No variant '{name}' on spec: {self.spec}")
@classmethod
def possible_dependencies(
cls,
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
visited: Optional[dict] = None,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Return dict of possible dependencies of this package.
Args:
transitive (bool or None): return all transitive dependencies if
True, only direct dependencies if False (default True).
expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True)
depflag: dependency types to consider
visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict or None): dict to populate with packages and their
*missing* dependencies.
virtuals (set): if provided, populate with virtuals seen so far.
Returns:
(dict): dictionary mapping dependency names to *their*
immediate dependencies
Each item in the returned dictionary maps a (potentially
transitive) dependency of this package to its possible
*immediate* dependencies. If ``expand_virtuals`` is ``False``,
virtual package names will be inserted as keys mapped to empty
sets of dependencies. Virtuals, if not expanded, are treated as
though they have no immediate dependencies.
Missing dependencies by default are ignored, but if a
missing dict is provided, it will be populated with package names
mapped to any dependencies they have that are in no
repositories. This is only populated if transitive is True.
Note: the returned dict *includes* the package itself.
"""
visited = {} if visited is None else visited
missing = {} if missing is None else missing
visited.setdefault(cls.name, set())
for name, conditions in cls.dependencies_by_name(when=True).items():
# check whether this dependency could be of the type asked for
depflag_union = 0
for deplist in conditions.values():
for dep in deplist:
depflag_union |= dep.depflag
if not (depflag & depflag_union):
continue
# expand virtuals if enabled, otherwise just stop at virtuals
if spack.repo.PATH.is_virtual(name):
if virtuals is not None:
virtuals.add(name)
if expand_virtuals:
providers = spack.repo.PATH.providers_for(name)
dep_names = [spec.name for spec in providers]
else:
visited.setdefault(cls.name, set()).add(name)
visited.setdefault(name, set())
continue
else:
dep_names = [name]
# add the dependency names to the visited dict
visited.setdefault(cls.name, set()).update(set(dep_names))
# recursively traverse dependencies
for dep_name in dep_names:
if dep_name in visited:
continue
visited.setdefault(dep_name, set())
# skip the rest if not transitive
if not transitive:
continue
try:
dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
except spack.repo.UnknownPackageError:
# log unknown packages
missing.setdefault(cls.name, set()).add(dep_name)
continue
dep_cls.possible_dependencies(
transitive, expand_virtuals, depflag, visited, missing, virtuals
)
return visited
@classproperty
def package_dir(cls):
"""Directory where the package.py file lives."""
@@ -2172,6 +2298,55 @@ def rpath_args(self):
build_system_flags = PackageBase.build_system_flags
def use_cray_compiler_names():
"""Compiler names for builds that rely on cray compiler names."""
os.environ["CC"] = "cc"
os.environ["CXX"] = "CC"
os.environ["FC"] = "ftn"
os.environ["F77"] = "ftn"
def possible_dependencies(
*pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Get the possible dependencies of a number of packages.
See ``PackageBase.possible_dependencies`` for details.
"""
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
packages.append(pos)
continue
if not isinstance(pos, spack.spec.Spec):
pos = spack.spec.Spec(pos)
if spack.repo.PATH.is_virtual(pos.name):
packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
continue
else:
packages.append(pos.package_class)
visited: Dict[str, Set[str]] = {}
for pkg in packages:
pkg.possible_dependencies(
visited=visited,
transitive=transitive,
expand_virtuals=expand_virtuals,
depflag=depflag,
missing=missing,
virtuals=virtuals,
)
return visited
def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
"""Return True iff the version is deprecated.

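Note: a usage sketch for the restored module-level helper (package names illustrative); it accepts package classes, specs, or spec strings, and returns the adjacency dict described in the docstring above:

import spack.deptypes as dt
import spack.package_base

# map each reachable dependency of mpileaks to its immediate dependencies,
# stopping at virtuals instead of expanding them into providers
deps = spack.package_base.possible_dependencies(
    "mpileaks", expand_virtuals=False, depflag=dt.LINK | dt.RUN
)
# the result includes the package itself; unexpanded virtuals map to empty sets
assert "mpileaks" in deps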
View File

@@ -406,8 +406,8 @@ def fixup_macos_rpaths(spec):
entries which makes it harder to adjust with ``install_name_tool
-delete_rpath``.
"""
if spec.external or not spec.concrete:
tty.warn("external/abstract spec cannot be fixed up: {0!s}".format(spec))
if spec.external or spec.virtual:
tty.warn("external or virtual package cannot be fixed up: {0!s}".format(spec))
return False
if "platform=darwin" not in spec:

View File

@@ -32,6 +32,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack
import spack.caches
import spack.config
import spack.error
@@ -40,6 +41,7 @@
import spack.spec
import spack.tag
import spack.tengine
import spack.version
import spack.util.file_cache
import spack.util.git
import spack.util.naming as nm
@@ -49,6 +51,11 @@
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
ROOT_PYTHON_NAMESPACE = "spack.pkg"
_required_repo_version = "0:"
#: Version of the repo interface that this version of Spack is compatible with
required_repo_version = spack.version.ver(_required_repo_version)
def python_package_for_repo(namespace):
"""Returns the full namespace of a repository, given its relative one
@@ -951,7 +958,7 @@ def check(condition, msg):
self.config_file = os.path.join(self.root, repo_config_name)
check(os.path.isfile(self.config_file), f"No {repo_config_name} found in '{root}'")
# Read configuration and validate namespace
# Read configuration and validate
config = self._read_config()
check(
"namespace" in config,
@@ -965,6 +972,19 @@ def check(condition, msg):
"Namespaces must be valid python identifiers separated by '.'",
)
required_version = spack.version.ver(config.get("required_spack_version", ":"))
spack_version = spack.version.ver(spack.spack_version)
check(
spack_version.satisfies(required_version),
f"Repo {self.namespace} requires Spack version {required_version}",
)
repo_version = spack.version.ver(config.get("version", "0"))
check(
repo_version.satisfies(required_repo_version),
f"Spack requires repo version {required_repo_version}",
)
# Set up 'full_namespace' to include the super-namespace
self.full_namespace = python_package_for_repo(self.namespace)

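Note: for reference, a repo.yaml sketch exercising both new checks (key names taken from the code above; assuming, as for namespace, that _read_config returns the mapping under the top-level repo: key):

repo:
  namespace: myrepo
  # this repo needs Spack 0.23 or newer
  required_spack_version: "0.23:"
  # repo interface version; must satisfy Spack's required_repo_version ("0:")
  version: "0"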
View File

@@ -87,7 +87,6 @@
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
"static_analysis": {"type": "boolean"},
"timeout": {"type": "integer", "minimum": 0},
"error_on_timeout": {"type": "boolean"},
"os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},

View File

@@ -6,7 +6,6 @@
import copy
import enum
import functools
import io
import itertools
import os
import pathlib
@@ -63,7 +62,7 @@
parse_files,
parse_term,
)
from .input_analysis import create_counter, create_graph_analyzer
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
from .requirements import RequirementKind, RequirementParser, RequirementRule
from .version_order import concretization_version_order
@@ -74,19 +73,17 @@
#: Enable the addition of a runtime node
WITH_RUNTIME = sys.platform != "win32"
class OutputConfiguration(NamedTuple):
"""Data class that contains configuration on what a clingo solve should output."""
#: Print out coarse timers for different solve phases
timers: bool
#: Whether to output Clingo's internal solver statistics
stats: bool
#: Optional output stream for the generated ASP program
out: Optional[io.IOBase]
#: If True, stop after setup and don't solve
setup_only: bool
#: Data class that contain configuration on what a
#: clingo solve should output.
#:
#: Args:
#: timers (bool): Print out coarse timers for different solve phases.
#: stats (bool): Whether to output Clingo's internal solver statistics.
#: out: Optional output stream for the generated ASP program.
#: setup_only (bool): if True, stop after setup and don't solve (default False).
OutputConfiguration = collections.namedtuple(
"OutputConfiguration", ["timers", "stats", "out", "setup_only"]
)
#: Default output configuration for a solve
DEFAULT_OUTPUT_CONFIGURATION = OutputConfiguration(
@@ -274,6 +271,15 @@ def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunc
return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
def _create_counter(specs: List[spack.spec.Spec], tests: bool):
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
if strategy == "full":
return FullDuplicatesCounter(specs, tests=tests)
if strategy == "minimal":
return MinimalDuplicatesCounter(specs, tests=tests)
return NoDuplicatesCounter(specs, tests=tests)
def all_libcs() -> Set[spack.spec.Spec]:
"""Return a set of all libc specs targeted by any configured compiler. If none, fall back to
libc determined from the current Python process if dynamically linked."""
@@ -501,7 +507,7 @@ def _compute_specs_from_answer_set(self):
# The specs must be unified to get here, so it is safe to associate any satisfying spec
# with the input. Multiple inputs may be matched to the same concrete spec
node = SpecBuilder.make_node(pkg=input_spec.name)
if spack.repo.PATH.is_virtual(input_spec.name):
if input_spec.virtual:
providers = [
spec.name for spec in answer.values() if spec.package.provides(input_spec.name)
]
@@ -1115,8 +1121,6 @@ class SpackSolverSetup:
"""Class to set up and run a Spack concretization solve."""
def __init__(self, tests: bool = False):
self.possible_graph = create_graph_analyzer()
# these are all initialized in setup()
self.gen: "ProblemInstanceBuilder" = ProblemInstanceBuilder()
self.requirement_parser = RequirementParser(spack.config.CONFIG)
@@ -2083,11 +2087,7 @@ def _spec_clauses(
f: Union[Type[_Head], Type[_Body]] = _Body if body else _Head
if spec.name:
clauses.append(
f.node(spec.name)
if not spack.repo.PATH.is_virtual(spec.name)
else f.virtual_node(spec.name)
)
clauses.append(f.node(spec.name) if not spec.virtual else f.virtual_node(spec.name))
if spec.namespace:
clauses.append(f.namespace(spec.name, spec.namespace))
@@ -2114,7 +2114,7 @@ def _spec_clauses(
for value in variant.value_as_tuple:
# ensure that the value *can* be valid for the spec
if spec.name and not spec.concrete and not spack.repo.PATH.is_virtual(spec.name):
if spec.name and not spec.concrete and not spec.virtual:
variant_defs = vt.prevalidate_variant_value(
self.pkg_class(spec.name), variant, spec
)
@@ -2397,20 +2397,38 @@ def keyfun(os):
def target_defaults(self, specs):
"""Add facts about targets and target compatibility."""
self.gen.h2("Default target")
platform = spack.platforms.host()
uarch = archspec.cpu.TARGETS.get(platform.default)
self.gen.h2("Target compatibility")
# Add targets explicitly requested from specs
candidate_targets = []
for x in self.possible_graph.candidate_targets():
if all(
self.possible_graph.unreachable(pkg_name=pkg_name, when_spec=f"target={x}")
for pkg_name in self.pkgs
):
tty.debug(f"[{__name__}] excluding target={x}, cause no package can use it")
continue
candidate_targets.append(x)
# Construct the list of targets which are compatible with the host
candidate_targets = [uarch] + uarch.ancestors
host_compatible = spack.config.CONFIG.get("concretizer:targets:host_compatible")
# Get configuration options
granularity = spack.config.get("concretizer:targets:granularity")
host_compatible = spack.config.get("concretizer:targets:host_compatible")
# Add targets which are not compatible with the current host
if not host_compatible:
additional_targets_in_family = sorted(
[
t
for t in archspec.cpu.TARGETS.values()
if (t.family.name == uarch.family.name and t not in candidate_targets)
],
key=lambda x: len(x.ancestors),
reverse=True,
)
candidate_targets += additional_targets_in_family
# Check if we want only generic architecture
if granularity == "generic":
candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]
# Add targets explicitly requested from specs
for spec in specs:
if not spec.architecture or not spec.architecture.target:
continue
@@ -2426,8 +2444,6 @@ def target_defaults(self, specs):
if ancestor not in candidate_targets:
candidate_targets.append(ancestor)
platform = spack.platforms.host()
uarch = archspec.cpu.TARGETS.get(platform.default)
best_targets = {uarch.family.name}
for compiler_id, known_compiler in enumerate(self.possible_compilers):
if not known_compiler.available:
@@ -2485,6 +2501,7 @@ def target_defaults(self, specs):
self.gen.newline()
self.default_targets = list(sorted(set(self.default_targets)))
self.target_preferences()
def virtual_providers(self):
@@ -2588,14 +2605,7 @@ def define_variant_values(self):
# Tell the concretizer about possible values from specs seen in spec_clauses().
# We might want to order these facts by pkg and name if we are debugging.
for pkg_name, variant_def_id, value in self.variant_values_from_specs:
try:
vid = self.variant_ids_by_def_id[variant_def_id]
except KeyError:
tty.debug(
f"[{__name__}] cannot retrieve id of the {value} variant from {pkg_name}"
)
continue
vid = self.variant_ids_by_def_id[variant_def_id]
self.gen.fact(fn.pkg_fact(pkg_name, fn.variant_possible_value(vid, value)))
def register_concrete_spec(self, spec, possible):
@@ -2666,7 +2676,7 @@ def setup(
"""
check_packages_exist(specs)
node_counter = create_counter(specs, tests=self.tests, possible_graph=self.possible_graph)
node_counter = _create_counter(specs, tests=self.tests)
self.possible_virtuals = node_counter.possible_virtuals()
self.pkgs = node_counter.possible_dependencies()
self.libcs = sorted(all_libcs()) # type: ignore[type-var]
@@ -2674,9 +2684,7 @@ def setup(
# Fail if we already know an unreachable node is requested
for spec in specs:
missing_deps = [
str(d)
for d in spec.traverse()
if d.name not in self.pkgs and not spack.repo.PATH.is_virtual(d.name)
str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
]
if missing_deps:
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
@@ -2893,11 +2901,7 @@ def literal_specs(self, specs):
pkg_name = clause.args[1]
self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
requirements.append(
fn.attr(
"virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
)
)
requirements.append(fn.attr("virtual_root" if spec.virtual else "root", spec.name))
cache[imposed_spec_key] = (effect_id, requirements)
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))
@@ -3485,7 +3489,7 @@ def external_spec_selected(self, node, idx):
self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})
# If this is an extension, update the dependencies to include the extendee
package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
package = self._specs[node].package_class(self._specs[node])
extendee_spec = package.extendee_spec
if extendee_spec:
@@ -4098,10 +4102,10 @@ def _check_input_and_extract_concrete_specs(specs):
reusable = []
for root in specs:
for s in root.traverse():
if s.virtual:
continue
if s.concrete:
reusable.append(s)
elif spack.repo.PATH.is_virtual(s.name):
continue
spack.spec.Spec.ensure_valid_variants(s)
return reusable

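Note: the restored _create_counter replaces create_counter from the deleted input_analysis.py below; the dispatch on concretizer:duplicates:strategy is unchanged apart from dropping the possible_graph argument. A hedged usage sketch:

import spack.spec
from spack.solver.asp import _create_counter  # module shown in this hunk

# which Counter subclass you get depends on concretizer:duplicates:strategy
# ("none" by default -> NoDuplicatesCounter)
counter = _create_counter([spack.spec.Spec("mpileaks")], tests=False)
pkgs = counter.possible_dependencies()   # set of possible package names
virtuals = counter.possible_virtuals()   # set of possible virtuals, e.g. "mpi"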
View File

@@ -265,7 +265,6 @@ error(100, "Cannot select a single version for virtual '{0}'", Virtual)
% If we select a deprecated version, mark the package as deprecated
attr("deprecated", node(ID, Package), Version) :-
attr("version", node(ID, Package), Version),
not external(node(ID, Package)),
pkg_fact(Package, deprecated_version(Version)).
error(100, "Package '{0}' needs the deprecated version '{1}', and this is not allowed", Package, Version)

View File

@@ -0,0 +1,179 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set
from llnl.util import lang
import spack.deptypes as dt
import spack.package_base
import spack.repo
import spack.spec
PossibleDependencies = Set[str]
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
runtime_virtuals = set()
for x in runtime_pkgs:
pkg_class = spack.repo.PATH.get_pkg_class(x)
runtime_virtuals.update(pkg_class.provided_virtual_names())
self.specs = specs + [spack.spec.Spec(x) for x in runtime_pkgs]
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: PossibleDependencies = set()
self._possible_virtuals: Set[str] = (
set(x.name for x in specs if x.virtual) | runtime_virtuals
)
def possible_dependencies(self) -> PossibleDependencies:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self):
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self):
result = spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.all_types
)
self._possible_dependencies = set(result)
def possible_packages_facts(self, gen, fn):
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(self, specs, tests):
super().__init__(specs, tests)
self._link_run: PossibleDependencies = set()
self._direct_build: PossibleDependencies = set()
self._total_build: PossibleDependencies = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self):
self._link_run = set(
spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.link_run_types
)
)
self._link_run_virtuals.update(self._possible_virtuals)
for x in self._link_run:
build_dependencies = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD)
virtuals, reals = lang.stable_partition(
build_dependencies, spack.repo.PATH.is_virtual_safe
)
self._possible_virtuals.update(virtuals)
for virtual_dep in virtuals:
providers = spack.repo.PATH.providers_for(virtual_dep)
self._direct_build.update(str(x) for x in providers)
self._direct_build.update(reals)
self._total_build = set(
spack.package_base.possible_dependencies(
*self._direct_build, virtuals=self._possible_virtuals, depflag=self.all_types
)
)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with multiple possible nodes (build-tools)")
for package_name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.max_dupes(package_name, 2))
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()

View File

@@ -1,526 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes to analyze the input of a solve, and provide information to set up the ASP problem"""
import collections
from typing import Dict, List, NamedTuple, Set, Tuple, Union
import archspec.cpu
from llnl.util import lang, tty
import spack.binary_distribution
import spack.config
import spack.deptypes as dt
import spack.platforms
import spack.repo
import spack.spec
import spack.store
from spack.error import SpackError
RUNTIME_TAG = "runtime"
class PossibleGraph(NamedTuple):
real_pkgs: Set[str]
virtuals: Set[str]
edges: Dict[str, Set[str]]
class PossibleDependencyGraph:
"""Returns information needed to set up an ASP problem"""
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
raise NotImplementedError
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
raise NotImplementedError
def possible_dependencies(
self,
*specs: Union[spack.spec.Spec, str],
allowed_deps: dt.DepFlag,
transitive: bool = True,
strict_depflag: bool = False,
expand_virtuals: bool = True,
) -> PossibleGraph:
"""Returns the set of possible dependencies, and the set of possible virtuals.
Both sets always include runtime packages, which may be injected by compilers.
Args:
transitive: return transitive dependencies if True, only direct dependencies if False
allowed_deps: dependency types to consider
strict_depflag: if True, only the specific dep type is considered, if False any
deptype that intersects with allowed deptype is considered
expand_virtuals: expand virtual dependencies into all possible implementations
"""
raise NotImplementedError
class NoStaticAnalysis(PossibleDependencyGraph):
"""Implementation that tries to minimize the setup time (i.e. defaults to give fast
answers), rather than trying to reduce the ASP problem size with more complex analysis.
"""
def __init__(self, *, configuration: spack.config.Configuration, repo: spack.repo.RepoPath):
self.configuration = configuration
self.repo = repo
self.runtime_pkgs = set(self.repo.packages_with_tags(RUNTIME_TAG))
self.runtime_virtuals = set()
self._platform_condition = spack.spec.Spec(
f"platform={spack.platforms.host()} target={archspec.cpu.host().family}:"
)
for x in self.runtime_pkgs:
pkg_class = self.repo.get_pkg_class(x)
self.runtime_virtuals.update(pkg_class.provided_virtual_names())
try:
self.libc_pkgs = [x.name for x in self.providers_for("libc")]
except spack.repo.UnknownPackageError:
self.libc_pkgs = []
def is_virtual(self, name: str) -> bool:
return self.repo.is_virtual(name)
@lang.memoized
def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
"""Returns true if a package is allowed on the current host"""
pkg_cls = self.repo.get_pkg_class(pkg_name)
for when_spec, conditions in pkg_cls.requirements.items():
if not when_spec.intersects(self._platform_condition):
continue
for requirements, _, _ in conditions:
if not any(x.intersects(self._platform_condition) for x in requirements):
tty.debug(f"[{__name__}] {pkg_name} is not for this platform")
return False
return True
def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
"""Returns a list of possible providers for the virtual string in input."""
return self.repo.providers_for(virtual_str)
def can_be_installed(self, *, pkg_name) -> bool:
"""Returns True if a package can be installed, False otherwise."""
return True
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
return False
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
platform = spack.platforms.host()
default_target = archspec.cpu.TARGETS[platform.default]
# Construct the list of targets which are compatible with the host
candidate_targets = [default_target] + default_target.ancestors
granularity = self.configuration.get("concretizer:targets:granularity")
host_compatible = self.configuration.get("concretizer:targets:host_compatible")
# Add targets which are not compatible with the current host
if not host_compatible:
additional_targets_in_family = sorted(
[
t
for t in archspec.cpu.TARGETS.values()
if (t.family.name == default_target.family.name and t not in candidate_targets)
],
key=lambda x: len(x.ancestors),
reverse=True,
)
candidate_targets += additional_targets_in_family
# Check if we want only generic architecture
if granularity == "generic":
candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]
return candidate_targets
def possible_dependencies(
self,
*specs: Union[spack.spec.Spec, str],
allowed_deps: dt.DepFlag,
transitive: bool = True,
strict_depflag: bool = False,
expand_virtuals: bool = True,
) -> PossibleGraph:
stack = [x for x in self._package_list(specs)]
virtuals: Set[str] = set()
edges: Dict[str, Set[str]] = {}
while stack:
pkg_name = stack.pop()
if pkg_name in edges:
continue
edges[pkg_name] = set()
# Since libc is not buildable, there is no need to extend the
# search space with libc dependencies.
if pkg_name in self.libc_pkgs:
continue
pkg_cls = self.repo.get_pkg_class(pkg_name=pkg_name)
for name, conditions in pkg_cls.dependencies_by_name(when=True).items():
if all(self.unreachable(pkg_name=pkg_name, when_spec=x) for x in conditions):
tty.debug(
f"[{__name__}] Not adding {name} as a dep of {pkg_name}, because "
f"conditions cannot be met"
)
continue
if not self._has_deptypes(
conditions, allowed_deps=allowed_deps, strict=strict_depflag
):
continue
if name in virtuals:
continue
dep_names = set()
if self.is_virtual(name):
virtuals.add(name)
if expand_virtuals:
providers = self.providers_for(name)
dep_names = {spec.name for spec in providers}
else:
dep_names = {name}
edges[pkg_name].update(dep_names)
if not transitive:
continue
for dep_name in dep_names:
if dep_name in edges:
continue
if not self._is_possible(pkg_name=dep_name):
continue
stack.append(dep_name)
real_packages = set(edges)
if not transitive:
# We exit early, so add children from the edges information
for root, children in edges.items():
real_packages.update(x for x in children if self._is_possible(pkg_name=x))
virtuals.update(self.runtime_virtuals)
real_packages = real_packages | self.runtime_pkgs
return PossibleGraph(real_pkgs=real_packages, virtuals=virtuals, edges=edges)
def _package_list(self, specs: Tuple[Union[spack.spec.Spec, str], ...]) -> List[str]:
stack = []
for current_spec in specs:
if isinstance(current_spec, str):
current_spec = spack.spec.Spec(current_spec)
if self.repo.is_virtual(current_spec.name):
stack.extend([p.name for p in self.providers_for(current_spec.name)])
continue
stack.append(current_spec.name)
return sorted(set(stack))
def _has_deptypes(self, dependencies, *, allowed_deps: dt.DepFlag, strict: bool) -> bool:
if strict is True:
return any(
dep.depflag == allowed_deps for deplist in dependencies.values() for dep in deplist
)
return any(
dep.depflag & allowed_deps for deplist in dependencies.values() for dep in deplist
)
def _is_possible(self, *, pkg_name):
try:
return self.is_allowed_on_this_platform(pkg_name=pkg_name) and self.can_be_installed(
pkg_name=pkg_name
)
except spack.repo.UnknownPackageError:
return False
class StaticAnalysis(NoStaticAnalysis):
"""Performs some static analysis of the configuration, store, etc. to provide more precise
answers on whether some packages can be installed, or used as a provider.
It increases the setup time, but might decrease the grounding and solve time considerably,
especially when requirements restrict the possible choices for providers.
"""
def __init__(
self,
*,
configuration: spack.config.Configuration,
repo: spack.repo.RepoPath,
store: spack.store.Store,
binary_index: spack.binary_distribution.BinaryCacheIndex,
):
super().__init__(configuration=configuration, repo=repo)
self.store = store
self.binary_index = binary_index
@lang.memoized
def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
candidates = super().providers_for(virtual_str)
result = []
for spec in candidates:
if not self._is_provider_candidate(pkg_name=spec.name, virtual=virtual_str):
continue
result.append(spec)
return result
@lang.memoized
def buildcache_specs(self) -> List[spack.spec.Spec]:
self.binary_index.update()
return self.binary_index.get_all_built_specs()
@lang.memoized
def can_be_installed(self, *, pkg_name) -> bool:
if self.configuration.get(f"packages:{pkg_name}:buildable", True):
return True
if self.configuration.get(f"packages:{pkg_name}:externals", []):
return True
reuse = self.configuration.get("concretizer:reuse")
if reuse is not False and self.store.db.query(pkg_name):
return True
if reuse is not False and any(x.name == pkg_name for x in self.buildcache_specs()):
return True
tty.debug(f"[{__name__}] {pkg_name} cannot be installed")
return False
@lang.memoized
def _is_provider_candidate(self, *, pkg_name: str, virtual: str) -> bool:
if not self.is_allowed_on_this_platform(pkg_name=pkg_name):
return False
if not self.can_be_installed(pkg_name=pkg_name):
return False
virtual_spec = spack.spec.Spec(virtual)
if self.unreachable(pkg_name=virtual_spec.name, when_spec=pkg_name):
tty.debug(f"[{__name__}] {pkg_name} cannot be a provider for {virtual}")
return False
return True
@lang.memoized
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
candidates = self.configuration.get(f"packages:{pkg_name}:require", [])
if not candidates and pkg_name != "all":
return self.unreachable(pkg_name="all", when_spec=when_spec)
if not candidates:
return False
if isinstance(candidates, str):
candidates = [candidates]
union_requirement = spack.spec.Spec()
for c in candidates:
if not isinstance(c, str):
continue
try:
union_requirement.constrain(c)
except SpackError:
# Less optimized, but shouldn't fail
pass
if not union_requirement.intersects(when_spec):
return True
return False
def create_graph_analyzer() -> PossibleDependencyGraph:
static_analysis = spack.config.CONFIG.get("concretizer:static_analysis", False)
if static_analysis:
return StaticAnalysis(
configuration=spack.config.CONFIG,
repo=spack.repo.PATH,
store=spack.store.STORE,
binary_index=spack.binary_distribution.BINARY_INDEX,
)
return NoStaticAnalysis(configuration=spack.config.CONFIG, repo=spack.repo.PATH)
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(
self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
) -> None:
self.possible_graph = possible_graph
self.specs = specs
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: Set[str] = set()
self._possible_virtuals: Set[str] = {
x.name for x in specs if spack.repo.PATH.is_virtual(x.name)
}
def possible_dependencies(self) -> Set[str]:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self) -> None:
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self) -> None:
self._possible_dependencies, virtuals, _ = self.possible_graph.possible_dependencies(
*self.specs, allowed_deps=self.all_types
)
self._possible_virtuals.update(virtuals)
def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(
self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
) -> None:
super().__init__(specs, tests, possible_graph)
self._link_run: Set[str] = set()
self._direct_build: Set[str] = set()
self._total_build: Set[str] = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self) -> None:
self._link_run, virtuals, _ = self.possible_graph.possible_dependencies(
*self.specs, allowed_deps=self.link_run_types
)
self._possible_virtuals.update(virtuals)
self._link_run_virtuals.update(virtuals)
for x in self._link_run:
reals, virtuals, _ = self.possible_graph.possible_dependencies(
x, allowed_deps=dt.BUILD, transitive=False, strict_depflag=True
)
self._possible_virtuals.update(virtuals)
self._direct_build.update(reals)
self._total_build, virtuals, _ = self.possible_graph.possible_dependencies(
*self._direct_build, allowed_deps=self.all_types
)
self._possible_virtuals.update(virtuals)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with multiple possible nodes (build-tools)")
for package_name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.max_dupes(package_name, 2))
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
def create_counter(
specs: List[spack.spec.Spec], tests: bool, possible_graph: PossibleDependencyGraph
) -> Counter:
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
if strategy == "full":
return FullDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
if strategy == "minimal":
return MinimalDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
return NoDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)

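Note: since input_analysis.py is deleted wholesale, a short sketch of the API the other hunks stop using (signatures as defined above; PossibleGraph unpacks as real_pkgs, virtuals, edges):

import spack.deptypes as dt
from spack.solver.input_analysis import create_graph_analyzer

pkgs, virtuals, edges = create_graph_analyzer().possible_dependencies(
    "mpileaks", allowed_deps=dt.ALL, expand_virtuals=True
)
# edges maps each reachable package name to its possible children's names
assert "mpileaks" in edges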
View File

@@ -1337,20 +1337,14 @@ class SpecBuildInterface(lang.ObjectWrapper):
"command", default_handler=_command_default_handler, _indirect=True
)
def __init__(
self,
spec: "Spec",
name: str,
query_parameters: List[str],
_parent: "Spec",
is_virtual: bool,
):
def __init__(self, spec: "Spec", name: str, query_parameters: List[str], _parent: "Spec"):
super().__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object
original_spec = getattr(spec, "wrapped_obj", spec)
self.wrapped_obj = original_spec
self.token = original_spec, name, query_parameters, _parent, is_virtual
self.token = original_spec, name, query_parameters, _parent
is_virtual = spack.repo.PATH.is_virtual(name)
self.last_query = QueryState(
name=name, extra_parameters=query_parameters, isvirtual=is_virtual
)
@@ -1911,22 +1905,10 @@ def package_class(self):
"""Internal package call gets only the class object for a package.
Use this to just get package metadata.
"""
warnings.warn(
"`Spec.package_class` is deprecated and will be removed in version 1.0.0. Use "
"`spack.repo.PATH.get_pkg_class(spec.fullname) instead.",
category=spack.error.SpackAPIWarning,
stacklevel=2,
)
return spack.repo.PATH.get_pkg_class(self.fullname)
@property
def virtual(self):
warnings.warn(
"`Spec.virtual` is deprecated and will be removed in version 1.0.0. Use "
"`spack.repo.PATH.is_virtual(spec.name)` instead.",
category=spack.error.SpackAPIWarning,
stacklevel=2,
)
return spack.repo.PATH.is_virtual(self.name)
@property
@@ -2826,6 +2808,24 @@ def from_detection(
s.extra_attributes = extra_attributes
return s
def validate_detection(self):
"""Validate the detection of an external spec.
This method is used as part of Spack's detection protocol, and is
not meant for client code use.
"""
# Assert that _extra_attributes is a Mapping and not None,
# which likely means the spec was created with Spec.from_detection
msg = f'cannot validate "{self}" since it was not created using Spec.from_detection'
assert isinstance(self.extra_attributes, collections.abc.Mapping), msg
# Validate the spec calling a package specific method
pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(self, self.extra_attributes)
def _patches_assigned(self):
"""Whether patches have been assigned to this spec by the concretizer."""
# FIXME: _patches_in_order_of_appearance is attached after concretization
@@ -2864,7 +2864,7 @@ def inject_patches_variant(root):
# Add any patches from the package to the spec.
patches = set()
for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items():
for cond, patch_list in s.package_class.patches.items():
if s.satisfies(cond):
for patch in patch_list:
patches.add(patch)
@@ -2877,7 +2877,7 @@ def inject_patches_variant(root):
if dspec.spec.concrete:
continue
pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
pkg_deps = dspec.parent.package_class.dependencies
patches = []
for cond, deps_by_name in pkg_deps.items():
@@ -3084,7 +3084,7 @@ def validate_or_raise(self):
# FIXME: raise just the first one encountered
for spec in self.traverse():
# raise an UnknownPackageError if the spec's package isn't real.
if spec.name and not spack.repo.PATH.is_virtual(spec.name):
if (not spec.virtual) and spec.name:
spack.repo.PATH.get_pkg_class(spec.fullname)
# validate compiler in addition to the package name.
@@ -3093,7 +3093,7 @@ def validate_or_raise(self):
raise UnsupportedCompilerError(spec.compiler.name)
# Ensure correctness of variants (if the spec is not virtual)
if not spack.repo.PATH.is_virtual(spec.name):
if not spec.virtual:
Spec.ensure_valid_variants(spec)
substitute_abstract_variants(spec)
@@ -3111,7 +3111,7 @@ def ensure_valid_variants(spec):
if spec.concrete:
return
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg_cls = spec.package_class
pkg_variants = pkg_cls.variant_names()
# reserved names are variants that may be set on any package
# but are not necessarily recorded by the package's class
@@ -3328,9 +3328,7 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
self_virtual = spack.repo.PATH.is_virtual(self.name)
other_virtual = spack.repo.PATH.is_virtual(other.name)
if self_virtual and other_virtual:
if self.virtual and other.virtual:
# Two virtual specs intersect only if there are providers for both
lhs = spack.repo.PATH.providers_for(str(self))
rhs = spack.repo.PATH.providers_for(str(other))
@@ -3338,8 +3336,8 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
return bool(intersection)
# A provider can satisfy a virtual dependency.
elif self_virtual or other_virtual:
virtual_spec, non_virtual_spec = (self, other) if self_virtual else (other, self)
elif self.virtual or other.virtual:
virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
try:
# Here we might get an abstract spec
pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname)
@@ -3444,9 +3442,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
# A concrete provider can satisfy a virtual dependency.
if not spack.repo.PATH.is_virtual(self.name) and spack.repo.PATH.is_virtual(
other.name
):
if not self.virtual and other.virtual:
try:
# Here we might get an abstract spec
pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname)
@@ -3514,7 +3510,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
# If we are checking for ^mpi we need to verify if there is any edge
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
if rhs_edge.spec.virtual:
rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
if not rhs_edge.virtuals:
@@ -3560,7 +3556,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
def virtual_dependencies(self):
"""Return list of any virtual deps in this spec."""
return [spec for spec in self.traverse() if spack.repo.PATH.is_virtual(spec.name)]
return [spec for spec in self.traverse() if spec.virtual]
@property # type: ignore[misc] # decorated prop not supported in mypy
def patches(self):
@@ -3760,16 +3756,21 @@ def __getitem__(self, name: str):
# Consider runtime dependencies and direct build/test deps before transitive dependencies,
# and prefer matches closest to the root.
try:
edge = next((e for e in order() if e.spec.name == name or name in e.virtuals))
child: Spec = next(
e.spec
for e in itertools.chain(
(e for e in order() if e.spec.name == name or name in e.virtuals),
# for historical reasons
(e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
)
)
except StopIteration:
raise KeyError(f"No spec with name {name} in {self}")
if self._concrete:
return SpecBuildInterface(
edge.spec, name, query_parameters, _parent=self, is_virtual=name in edge.virtuals
)
return SpecBuildInterface(child, name, query_parameters, _parent=self)
return edge.spec
return child
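A hedged usage sketch of the lookup order implemented above, using names from Spack's mock packages (mpileaks depends on the virtual mpi, which mpich provides):

import spack.concretize

spec = spack.concretize.concretize_one("mpileaks ^mpich")
mpi = spec["mpi"]            # a virtual name resolves through the edge's virtuals
assert mpi.name == "mpich"   # ...to the concrete provider
callpath = spec["callpath"]  # named deps match closest to the root first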
def __contains__(self, spec):
"""True if this spec or some dependency satisfies the spec.
@@ -4704,7 +4705,7 @@ def concrete(self):
bool: True or False
"""
return self.spec._concrete or all(
v in self for v in spack.repo.PATH.get_pkg_class(self.spec.fullname).variant_names()
v in self for v in self.spec.package_class.variant_names()
)
def copy(self) -> "VariantMap":
@@ -4764,14 +4765,14 @@ def substitute_abstract_variants(spec: Spec):
elif name in vt.reserved_names:
continue
variant_defs = spack.repo.PATH.get_pkg_class(spec.fullname).variant_definitions(name)
variant_defs = spec.package_class.variant_definitions(name)
valid_defs = []
for when, vdef in variant_defs:
if when.intersects(spec):
valid_defs.append(vdef)
if not valid_defs:
if name not in spack.repo.PATH.get_pkg_class(spec.fullname).variant_names():
if name not in spec.package_class.variant_names():
unknown.append(name)
else:
whens = [str(when) for when, _ in variant_defs]
@@ -4843,9 +4844,7 @@ def reconstruct_virtuals_on_edges(spec):
possible_virtuals = set()
for node in spec.traverse():
try:
possible_virtuals.update(
{x for x in node.package.dependencies if spack.repo.PATH.is_virtual(x)}
)
possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
except Exception as e:
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
continue

View File

@@ -24,24 +24,32 @@
mpi_deps = ["fake"]
@pytest.mark.parametrize(
"cli_args,expected",
[
(["mpileaks"], set(["callpath"] + mpis)),
(
["--transitive", "mpileaks"],
set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps),
),
(["--transitive", "--deptype=link,run", "dtbuild1"], {"dtlink2", "dtrun2"}),
(["--transitive", "--deptype=build", "dtbuild1"], {"dtbuild2", "dtlink2"}),
(["--transitive", "--deptype=link", "dtbuild1"], {"dtlink2"}),
],
)
def test_direct_dependencies(cli_args, expected, mock_runtimes):
out = dependencies(*cli_args)
result = set(re.split(r"\s+", out.strip()))
expected.update(mock_runtimes)
assert expected == result
def test_direct_dependencies(mock_packages):
out = dependencies("mpileaks")
actual = set(re.split(r"\s+", out.strip()))
expected = set(["callpath"] + mpis)
assert expected == actual
def test_transitive_dependencies(mock_packages):
out = dependencies("--transitive", "mpileaks")
actual = set(re.split(r"\s+", out.strip()))
expected = set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps)
assert expected == actual
def test_transitive_dependencies_with_deptypes(mock_packages):
out = dependencies("--transitive", "--deptype=link,run", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtlink2", "dtrun2"]) == deps
out = dependencies("--transitive", "--deptype=build", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtbuild2", "dtlink2"]) == deps
out = dependencies("--transitive", "--deptype=link", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtlink2"]) == deps
@pytest.mark.db

View File

@@ -304,8 +304,6 @@ def test_run_import_check(tmp_path: pathlib.Path):
contents = '''
import spack.cmd
import spack.config # do not drop this import because of this comment
import spack.repo
import spack.repo_utils
# this comment about spack.error should not be removed
class Example(spack.build_systems.autotools.AutotoolsPackage):
@@ -316,7 +314,6 @@ def foo(config: "spack.error.SpackError"):
# the type hint is quoted, so it should not be removed
spack.util.executable.Executable("example")
print(spack.__version__)
print(spack.repo_utils.__file__)
'''
file.write_text(contents)
root = str(tmp_path)
@@ -332,7 +329,6 @@ def foo(config: "spack.error.SpackError"):
output = output_buf.getvalue()
assert "issues.py: redundant import: spack.cmd" in output
assert "issues.py: redundant import: spack.repo" in output
assert "issues.py: redundant import: spack.config" not in output # comment prevents removal
assert "issues.py: missing import: spack" in output # used by spack.__version__
assert "issues.py: missing import: spack.build_systems.autotools" in output

View File

@@ -1243,7 +1243,7 @@ def test_transitive_conditional_virtual_dependency(self, mutable_config):
def test_conditional_provides_or_depends_on(self):
# Check that we can concretize correctly a spec that can either
# provide a virtual or depend on it based on the value of a variant
s = spack.concretize.concretize_one("v1-consumer ^conditional-provider +disable-v1")
s = spack.concretize.concretize_one("conditional-provider +disable-v1")
assert "v1-provider" in s
assert s["v1"].name == "v1-provider"
assert s["v2"].name == "conditional-provider"

View File

@@ -259,7 +259,7 @@ def test_develop(self):
def test_external_mpi(self):
# make sure this doesn't give us an external first.
spec = spack.concretize.concretize_one("mpi")
assert not spec.external and spec.package.provides("mpi")
assert not spec["mpi"].external
# load config
conf = syaml.load_config(
@@ -293,7 +293,7 @@ def mock_module(cmd, module):
monkeypatch.setattr(spack.util.module_cmd, "module", mock_module)
spec = spack.concretize.concretize_one("mpi")
assert not spec.external and spec.package.provides("mpi")
assert not spec["mpi"].external
# load config
conf = syaml.load_config(

View File

@@ -1,6 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib
import pytest
@@ -199,11 +200,11 @@ def test_requirement_adds_version_satisfies(
@pytest.mark.parametrize("require_checksum", (True, False))
def test_requirement_adds_git_hash_version(
require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch
require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch, working_env
):
# A full commit sha is a checksummed version, so this test should pass in both cases
if require_checksum:
monkeypatch.setenv("SPACK_CONCRETIZER_REQUIRE_CHECKSUM", "yes")
os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
repo_path, filename, commits = mock_git_version_info
monkeypatch.setattr(

View File

@@ -2171,8 +2171,3 @@ def getcode(self):
def info(self):
return self.headers
@pytest.fixture()
def mock_runtimes(config, mock_packages):
return mock_packages.packages_with_tags("runtime")

View File

@@ -206,7 +206,7 @@ def test_repo(_create_test_repo, monkeypatch, mock_stage):
)
def test_redistribute_directive(test_repo, spec_str, distribute_src, distribute_bin):
spec = spack.spec.Spec(spec_str)
assert spack.repo.PATH.get_pkg_class(spec.fullname).redistribute_source(spec) == distribute_src
assert spec.package_class.redistribute_source(spec) == distribute_src
concretized_spec = spack.concretize.concretize_one(spec)
assert concretized_spec.package.redistribute_binary == distribute_bin

View File

@@ -1,6 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test class methods on Package objects.
This doesn't include methods on package *instances* (like do_patch(),
@@ -15,7 +16,6 @@
import llnl.util.filesystem as fs
import spack.binary_distribution
import spack.compilers
import spack.concretize
import spack.deptypes as dt
@@ -23,11 +23,15 @@
import spack.install_test
import spack.package
import spack.package_base
import spack.repo
import spack.spec
import spack.store
from spack.build_systems.generic import Package
from spack.error import InstallError
from spack.solver.input_analysis import NoStaticAnalysis, StaticAnalysis
@pytest.fixture(scope="module")
def mpi_names(mock_repo_path):
return [spec.name for spec in mock_repo_path.providers_for("mpi")]
@pytest.fixture()
@@ -49,94 +53,78 @@ def mpileaks_possible_deps(mock_packages, mpi_names):
return possible
@pytest.fixture(params=[NoStaticAnalysis, StaticAnalysis])
def mock_inspector(config, mock_packages, request):
inspector_cls = request.param
if inspector_cls is NoStaticAnalysis:
return inspector_cls(configuration=config, repo=mock_packages)
return inspector_cls(
configuration=config,
repo=mock_packages,
store=spack.store.STORE,
binary_index=spack.binary_distribution.BINARY_INDEX,
)
def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
expanded_possible_deps = pkg_cls.possible_dependencies(expand_virtuals=True)
assert mpileaks_possible_deps == expanded_possible_deps
assert {
"callpath": {"dyninst", "mpi"},
"dyninst": {"libdwarf", "libelf"},
"libdwarf": {"libelf"},
"libelf": set(),
"mpi": set(),
"mpileaks": {"callpath", "mpi"},
} == pkg_cls.possible_dependencies(expand_virtuals=False)
def test_possible_direct_dependencies(mock_packages, mpileaks_possible_deps):
pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
deps = pkg_cls.possible_dependencies(transitive=False, expand_virtuals=False)
assert {"callpath": set(), "mpi": set(), "mpileaks": {"callpath", "mpi"}} == deps
def test_possible_dependencies_virtual(mock_packages, mpi_names):
expected = dict(
(name, set(dep for dep in spack.repo.PATH.get_pkg_class(name).dependencies_by_name()))
for name in mpi_names
)
# only one mock MPI has a dependency
expected["fake"] = set()
assert expected == spack.package_base.possible_dependencies("mpi", transitive=False)
def test_possible_dependencies_missing(mock_packages):
pkg_cls = spack.repo.PATH.get_pkg_class("missing-dependency")
missing = {}
pkg_cls.possible_dependencies(transitive=True, missing=missing)
assert {"this-is-a-missing-dependency"} == missing["missing-dependency"]
def test_possible_dependencies_with_deptypes(mock_packages):
dtbuild1 = spack.repo.PATH.get_pkg_class("dtbuild1")
assert {
"dtbuild1": {"dtrun2", "dtlink2"},
"dtlink2": set(),
"dtrun2": set(),
} == dtbuild1.possible_dependencies(depflag=dt.LINK | dt.RUN)
assert {
"dtbuild1": {"dtbuild2", "dtlink2"},
"dtbuild2": set(),
"dtlink2": set(),
} == dtbuild1.possible_dependencies(depflag=dt.BUILD)
assert {"dtbuild1": {"dtlink2"}, "dtlink2": set()} == dtbuild1.possible_dependencies(
depflag=dt.LINK
)
@pytest.fixture
def mpi_names(mock_inspector):
return [spec.name for spec in mock_inspector.providers_for("mpi")]
@pytest.mark.parametrize(
"pkg_name,fn_kwargs,expected",
[
(
"mpileaks",
{"expand_virtuals": True, "allowed_deps": dt.ALL},
{
"fake",
"mpileaks",
"multi-provider-mpi",
"callpath",
"dyninst",
"mpich2",
"libdwarf",
"zmpi",
"low-priority-provider",
"intel-parallel-studio",
"mpich",
"libelf",
},
),
(
"mpileaks",
{"expand_virtuals": False, "allowed_deps": dt.ALL},
{"callpath", "dyninst", "libdwarf", "libelf", "mpileaks"},
),
(
"mpileaks",
{"expand_virtuals": False, "allowed_deps": dt.ALL, "transitive": False},
{"callpath", "mpileaks"},
),
("dtbuild1", {"allowed_deps": dt.LINK | dt.RUN}, {"dtbuild1", "dtrun2", "dtlink2"}),
("dtbuild1", {"allowed_deps": dt.BUILD}, {"dtbuild1", "dtbuild2", "dtlink2"}),
("dtbuild1", {"allowed_deps": dt.LINK}, {"dtbuild1", "dtlink2"}),
],
)
def test_possible_dependencies(pkg_name, fn_kwargs, expected, mock_runtimes, mock_inspector):
"""Tests possible nodes of mpileaks, under different scenarios."""
expected.update(mock_runtimes)
result, *_ = mock_inspector.possible_dependencies(pkg_name, **fn_kwargs)
assert expected == result
def test_possible_dependencies_virtual(mock_inspector, mock_packages, mock_runtimes, mpi_names):
expected = set(mpi_names)
for name in mpi_names:
expected.update(dep for dep in mock_packages.get_pkg_class(name).dependencies_by_name())
expected.update(mock_runtimes)
real_pkgs, *_ = mock_inspector.possible_dependencies(
"mpi", transitive=False, allowed_deps=dt.ALL
)
assert expected == real_pkgs
def test_possible_dependencies_missing(mock_inspector):
result, *_ = mock_inspector.possible_dependencies("missing-dependency", allowed_deps=dt.ALL)
assert "this-is-a-missing-dependency" not in result
def test_possible_dependencies_with_multiple_classes(
mock_inspector, mock_packages, mpileaks_possible_deps
):
def test_possible_dependencies_with_multiple_classes(mock_packages, mpileaks_possible_deps):
pkgs = ["dt-diamond", "mpileaks"]
expected = set(mpileaks_possible_deps)
expected.update({"dt-diamond", "dt-diamond-left", "dt-diamond-right", "dt-diamond-bottom"})
expected.update(mock_packages.packages_with_tags("runtime"))
expected = mpileaks_possible_deps.copy()
expected.update(
{
"dt-diamond": set(["dt-diamond-left", "dt-diamond-right"]),
"dt-diamond-left": set(["dt-diamond-bottom"]),
"dt-diamond-right": set(["dt-diamond-bottom"]),
"dt-diamond-bottom": set(),
}
)
real_pkgs, *_ = mock_inspector.possible_dependencies(*pkgs, allowed_deps=dt.ALL)
assert set(expected) == real_pkgs
assert expected == spack.package_base.possible_dependencies(*pkgs)
def setup_install_test(source_paths, test_root):

View File

@@ -38,6 +38,27 @@ def extra_repo(tmp_path_factory, request):
return spack.repo.Repo(str(repo_dir), cache=repo_cache), request.param
@pytest.fixture(scope="function")
def versioned_repo(tmp_path_factory, request):
def _execute(spack_version, repo_version):
repo_namespace = "extra_test_repo"
repo_dir = tmp_path_factory.mktemp(repo_namespace)
cache_dir = tmp_path_factory.mktemp("cache")
(repo_dir / "packages").mkdir(parents=True, exist_ok=True)
(repo_dir / "repo.yaml").write_text(
f"""
repo:
  namespace: extra_test_repo
  required_spack_version: '{spack_version}'
  version: '{repo_version}'
"""
)
repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
return spack.repo.Repo(str(repo_dir), cache=repo_cache)
return _execute
def test_repo_getpkg(mutable_mock_repo):
mutable_mock_repo.get_pkg_class("pkg-a")
mutable_mock_repo.get_pkg_class("builtin.mock.pkg-a")
@@ -303,3 +324,24 @@ def test_get_repo(self, mock_test_cache):
# foo is not there, raise
with pytest.raises(spack.repo.UnknownNamespaceError):
repo.get_repo("foo")
def test_incompatible_repo(versioned_repo):
with pytest.raises(spack.repo.BadRepoError, match="requires Spack version"):
# test added after Spack passed version 0.22
versioned_repo(":0.22", ":")
with pytest.raises(spack.repo.BadRepoError, match="requires repo version"):
# ":a" < "0", and all Spack versions require at least "0:"
versioned_repo(":", ":a")
def test_incompatible_package_version(mock_packages, monkeypatch):
spec = spack.concretize.concretize_one("pkg-a")
package = spack.repo.PATH.get(spec)
pkg_class = spec.package_class
monkeypatch.setattr(pkg_class, "required_spack_version", ":0.22")
with pytest.raises(spack.error.PackageError, match="requires Spack version"):
_ = spack.repo.PATH.get(spec)
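For reference, a minimal sketch of the range check these tests exercise, assuming only spack.spack_version and the spack.version API as in current Spack; the helper name is hypothetical:

import spack
from spack.version import Version, ver

def spack_version_compatible(required: str) -> bool:
    # True if the running Spack falls within `required`, a range string
    # such as ":0.22" or "0.23:1.0.0.dev0" as used in the tests above.
    return Version(spack.spack_version).satisfies(ver(required))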

View File

@@ -201,15 +201,3 @@ def test_drop_redundant_rpath(tmpdir, binary_with_rpaths):
new_rpaths = elf.get_rpaths(binary)
assert set(existing_dirs).issubset(new_rpaths)
assert set(non_existing_dirs).isdisjoint(new_rpaths)
def test_elf_invalid_e_shnum(tmp_path):
# from llvm/test/Object/Inputs/invalid-e_shnum.elf
path = tmp_path / "invalid-e_shnum.elf"
with open(path, "wb") as file:
file.write(
b"\x7fELF\x02\x010000000000\x03\x00>\x0000000000000000000000"
b"\x00\x00\x00\x00\x00\x00\x00\x000000000000@\x000000"
)
with open(path, "rb") as file, pytest.raises(elf.ElfParsingError):
elf.parse_elf(file)

View File

@@ -195,10 +195,7 @@ def parse_program_headers(f: BinaryIO, elf: ElfFile) -> None:
elf: ELF file parser data
"""
# Forward to the program header
try:
f.seek(elf.elf_hdr.e_phoff)
except OSError:
raise ElfParsingError("Could not seek to program header")
f.seek(elf.elf_hdr.e_phoff)
# Here we have to make a mapping from virtual address to offset in the file.
ph_fmt = elf.byte_order + ("LLQQQQQQ" if elf.is_64_bit else "LLLLLLLL")
@@ -248,10 +245,7 @@ def parse_pt_interp(f: BinaryIO, elf: ElfFile) -> None:
f: file handle
elf: ELF file parser data
"""
try:
f.seek(elf.pt_interp_p_offset)
except OSError:
raise ElfParsingError("Could not seek to PT_INTERP entry")
f.seek(elf.pt_interp_p_offset)
data = read_exactly(f, elf.pt_interp_p_filesz, "Malformed PT_INTERP entry")
elf.pt_interp_str = parse_c_string(data)
@@ -270,10 +264,7 @@ def find_strtab_size_at_offset(f: BinaryIO, elf: ElfFile, offset: int) -> int:
"""
section_hdr_fmt = elf.byte_order + ("LLQQQQLLQQ" if elf.is_64_bit else "LLLLLLLLLL")
section_hdr_size = calcsize(section_hdr_fmt)
try:
f.seek(elf.elf_hdr.e_shoff)
except OSError:
raise ElfParsingError("Could not seek to section header table")
f.seek(elf.elf_hdr.e_shoff)
for _ in range(elf.elf_hdr.e_shnum):
data = read_exactly(f, section_hdr_size, "Malformed section header")
sh = SectionHeader(*unpack(section_hdr_fmt, data))
@@ -295,10 +286,7 @@ def retrieve_strtab(f: BinaryIO, elf: ElfFile, offset: int) -> bytes:
Returns: the string table contents as bytes
"""
size = find_strtab_size_at_offset(f, elf, offset)
try:
f.seek(offset)
except OSError:
raise ElfParsingError("Could not seek to string table")
f.seek(offset)
return read_exactly(f, size, "Could not read string table")
@@ -331,10 +319,7 @@ def parse_pt_dynamic(f: BinaryIO, elf: ElfFile) -> None:
count_runpath = 0
count_strtab = 0
try:
f.seek(elf.pt_dynamic_p_offset)
except OSError:
raise ElfParsingError("Could not seek to PT_DYNAMIC entry")
f.seek(elf.pt_dynamic_p_offset)
# In case of broken ELF files, don't read beyond the advertised size.
for _ in range(elf.pt_dynamic_p_filesz // dynamic_array_size):
@@ -493,10 +478,7 @@ def get_interpreter(path: str) -> Optional[str]:
def _delete_dynamic_array_entry(
f: BinaryIO, elf: ElfFile, should_delete: Callable[[int, int], bool]
) -> None:
try:
f.seek(elf.pt_dynamic_p_offset)
except OSError:
raise ElfParsingError("Could not seek to PT_DYNAMIC entry")
f.seek(elf.pt_dynamic_p_offset)
dynamic_array_fmt = elf.byte_order + ("qQ" if elf.is_64_bit else "lL")
dynamic_array_size = calcsize(dynamic_array_fmt)
new_offset = elf.pt_dynamic_p_offset # points to the new dynamic array
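The hunks above repeatedly introduce the same guarded-seek pattern; a hedged sketch of it factored into a helper (checked_seek is hypothetical and not part of spack.util.elf; ElfParsingError is the exception class already used in this module):

from typing import BinaryIO

def checked_seek(f: BinaryIO, offset: int, what: str) -> None:
    # Translate OSError from out-of-range offsets in malformed ELF files
    # into the parser's own ElfParsingError, as done inline above.
    try:
        f.seek(offset)
    except OSError:
        raise ElfParsingError(f"Could not seek to {what}")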

View File

@@ -8,6 +8,7 @@
import spack.directives_meta
import spack.error
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.util.hash
@@ -60,18 +61,10 @@ class RemoveDirectives(ast.NodeTransformer):
"""
def __init__(self, spec):
#: List of attributes to be excluded from a package's hash.
self.metadata_attrs = [s.url_attr for s in spack.fetch_strategy.all_strategies] + [
"homepage",
"url",
"urls",
"list_url",
"extendable",
"parallel",
"make_jobs",
"maintainers",
"tags",
]
# list of URL attributes and metadata attributes
# these will be removed from packages.
self.metadata_attrs = [s.url_attr for s in spack.fetch_strategy.all_strategies]
self.metadata_attrs += spack.package_base.PackageBase.metadata_attrs
self.spec = spec
self.in_classdef = False # used to avoid nested classdefs

View File

@@ -2,16 +2,10 @@ spack:
view: false
packages:
all:
require:
- target=x86_64_v3
- ~cuda
- ~rocm
require: target=x86_64_v3
concretizer:
unify: true
reuse: false
static_analysis: true
definitions:
- default_specs:
# editors

View File

@@ -4,32 +4,16 @@ spack:
concretizer:
reuse: false
unify: false
static_analysis: true
packages:
all:
require:
- '%gcc target=x86_64_v3'
require: '%gcc target=x86_64_v3'
providers:
blas: [openblas]
mpi: [mpich]
variants: +mpi
mpi:
require:
- mpich
blas:
require:
- openblas
lapack:
require:
- openblas
binutils:
variants: +ld +gold +headers +libiberty ~nls
cmake:
require:
- "~qtgui"
- '%gcc target=x86_64_v3'
gmake:
require:
- "~guile"
- '%gcc target=x86_64_v3'
hdf5:
variants: +fortran +hl +shared
libfabric:
@@ -43,25 +27,19 @@ spack:
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
mpi:
require: mpich
mpich:
require:
- '~wrapperrpath ~hwloc'
- '%gcc target=x86_64_v3'
require: '~wrapperrpath ~hwloc target=x86_64_v3'
tbb:
require:
- intel-tbb
require: intel-tbb
vtk-m:
require:
- "+examples"
- '%gcc target=x86_64_v3'
require: "+examples target=x86_64_v3"
visit:
require:
- "~gui target=x86_64_v3"
require: "~gui target=x86_64_v3"
paraview:
# Don't build GUI support or GLX rendering for HPC/container deployments
require:
- "+examples ~qt ^[virtuals=gl] osmesa target=x86_64_v3"
- '%gcc target=x86_64_v3'
require: "+examples ~qt ^[virtuals=gl] osmesa target=x86_64_v3"
specs:
# CPU

View File

@@ -19,8 +19,6 @@ spack:
require: +geant4 +hepmc3 +root +shared cxxstd=20
hip:
require: '@5.7.1 +rocm'
rivet:
require: hepmc=3
root:
require: +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +tmva-cpu +unuran +vc +vdt +veccore +webgui +x +xml +xrootd # cxxstd=20
# note: root cxxstd=20 not concretizable within sherpa
@@ -95,7 +93,7 @@ spack:
- py-uproot +lz4 +xrootd +zstd
- py-vector
- pythia8 +evtgen +fastjet +hdf5 +hepmc +hepmc3 +lhapdf ~madgraph5amc +python +rivet ~root # pythia8 and root circularly depend
- rivet
- rivet hepmc=3
- root ~cuda
- sherpa +analysis ~blackhat +gzip +hepmc3 +hepmc3root +lhapdf +lhole +openloops +pythia ~python ~recola ~rivet +root +ufo cxxstd=20
- tauola +hepmc3 +lhapdf cxxstd=20

View File

@@ -12,13 +12,6 @@ spack:
require: ~cuda
mpi:
require: openmpi
py-torch:
require:
- target=aarch64
- ~rocm
- +cuda
- cuda_arch=80
- ~flash_attention
specs:
# Horovod

View File

@@ -12,13 +12,6 @@ spack:
require: ~cuda
mpi:
require: openmpi
py-torch:
require:
- target=x86_64_v3
- ~rocm
- +cuda
- cuda_arch=80
- ~flash_attention
specs:
# Horovod

View File

@@ -11,13 +11,6 @@ spack:
require: "osmesa"
mpi:
require: openmpi
py-torch:
require:
- target=x86_64_v3
- ~cuda
- +rocm
- amdgpu_target=gfx90a
- ~flash_attention
specs:
# Horovod

View File

@@ -36,7 +36,7 @@ bin/spack -h
bin/spack help -a
# Profile and print top 20 lines for a simple call to spack spec
spack -p --lines 20 spec mpileaks%gcc
spack -p --lines 20 spec mpileaks%gcc ^dyninst@10.0.0 ^elfutils@0.170
$coverage_run $(which spack) bootstrap status --dev --optional
# Check that we can import Spack packages directly as a first import

View File

@@ -1,2 +1,4 @@
repo:
  namespace: builtin.mock
  version: 0
  required_spack_version: '0.23:1.0.0.dev0'

View File

@@ -194,8 +194,7 @@ class Acts(CMakePackage, CudaPackage):
version("0.08.1", commit="289bdcc320f0b3ff1d792e29e462ec2d3ea15df6")
version("0.08.0", commit="99eedb38f305e3a1cd99d9b4473241b7cd641fa9")
depends_on("c", type="build", when="+dd4hep") # DD4hep requires C
depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated
# Variants that affect the core Acts library
variant(

View File

@@ -19,7 +19,6 @@ class Amdsmi(CMakePackage):
libraries = ["libamd_smi"]
license("MIT")
version("6.3.2", sha256="1ed452eedfe51ac6e615d7bfe0bd7a0614f21113874ae3cbea7df72343cc2d13")
version("6.3.1", sha256="a3a5a711052e813b9be9304d5e818351d3797f668ec2a455e61253a73429c355")
version("6.3.0", sha256="7234c46648938239385cd5db57516ed53985b8c09d2f0828ae8f446386d8bd1e")
version("6.2.4", sha256="5ebe8d0f176bf4a73b0e7000d9c47cb7f65ecca47011d3f9b08b93047dcf7ac5")

View File

@@ -27,7 +27,6 @@ class Apfel(AutotoolsPackage, CMakePackage):
version("3.0.6", sha256="7063c9eee457e030b97926ac166cdaedd84625b31397e1dfd01ae47371fb9f61")
version("3.0.4", sha256="c7bfae7fe2dc0185981850f2fe6ae4842749339d064c25bf525b4ef412bbb224")
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("fortran", type="build")

View File

@@ -36,7 +36,7 @@ class Apptainer(SingularityBase):
checked_by="tgamblin",
)
version("main", branch="main", get_full_repo=True) # apptainer version uses git describe
version("main", branch="main")
version("1.3.6", sha256="b5343369e7fdf67572f887d81f8d2b938f099fb39c876d96430d747935960d51")
version("1.3.5", sha256="fe1c977da952edf1056915b2df67ae2203ef06065d4e4901a237c902329306b2")
version("1.3.4", sha256="c6ccfdd7c967e5c36dde8711f369c4ac669a16632b79fa0dcaf7e772b7a47397")

View File

@@ -8,20 +8,6 @@
from spack.package import *
_versions = {
"6.3.2": {
"apt": (
"bef302bf344c9297f9fb64a4a93f360721a467185bc4fefbeecb307dd956c504",
"https://repo.radeon.com/rocm/apt/6.3.2/pool/main/h/hsa-amd-aqlprofile/hsa-amd-aqlprofile_1.0.0.60302-66~20.04_amd64.deb",
),
"yum": (
"1e01de060073cb72a97fcddf0f3b637b48cf89a08b34f2447d010031abc0e099",
"https://repo.radeon.com/rocm/rhel8/6.3.2/main/hsa-amd-aqlprofile-1.0.0.60302-66.el8.x86_64.rpm",
),
"zyp": (
"408fb29e09ba59a9e83e8f7d703ba53e1ef3b3acbae1103b2a82d4f87f321752",
"https://repo.radeon.com/rocm/zyp/6.3.2/main/hsa-amd-aqlprofile-1.0.0.60302-sles155.66.x86_64.rpm",
),
},
"6.3.1": {
"apt": (
"76b129345a1a7caa04859fd738e0ba5bfa6f7bc1ad11171f1a7b2d46e0c0b158",
@@ -289,7 +275,6 @@ class Aqlprofile(Package):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"hsa-rocr-dev@{ver}", when=f"@{ver}")

View File

@@ -110,15 +110,22 @@ class Arborx(CMakePackage, CudaPackage, ROCmPackage):
conflicts("~serial", when="+trilinos")
def cmake_args(self):
kokkos_pkg = self["trilinos"] if self.spec.satisfies("+trilinos") else self["kokkos"]
spec = self.spec
if "+trilinos" in spec:
kokkos_spec = spec["trilinos"]
else:
kokkos_spec = spec["kokkos"]
options = [
self.define("Kokkos_ROOT", kokkos_pkg.prefix),
f"-DKokkos_ROOT={kokkos_spec.prefix}",
self.define_from_variant("ARBORX_ENABLE_MPI", "mpi"),
]
if self.spec.satisfies("+cuda"):
options.append(self.define("CMAKE_CXX_COMPILER", kokkos_pkg.kokkos_cxx))
if self.spec.satisfies("+rocm"):
options.append(self.define("CMAKE_CXX_COMPILER", self.spec["hip"].hipcc))
if spec.satisfies("+cuda"):
options.append(f"-DCMAKE_CXX_COMPILER={kokkos_spec.kokkos_cxx}")
if spec.satisfies("+rocm"):
options.append("-DCMAKE_CXX_COMPILER=%s" % spec["hip"].hipcc)
return options

View File

@@ -148,14 +148,6 @@ class Ascent(CMakePackage, CudaPackage):
# https://github.com/Alpine-DAV/ascent/pull/1123
patch("ascent-find-raja-pr1123.patch", when="@0.9.0")
# patch for fix typo in coord_type
# https://github.com/Alpine-DAV/ascent/pull/1408
patch(
"https://github.com/Alpine-DAV/ascent/pull/1408.patch?full_index=1",
when="@0.9.3 %oneapi@2025:",
sha256="7de7f51e57f3d743c39ad80d8783a4eb482be1def51eb2d3f9259246c661f164",
)
##########################################################################
# package dependencies
###########################################################################
@@ -476,9 +468,6 @@ def hostconfig(self):
if cflags:
cfg.write(cmake_cache_entry("CMAKE_C_FLAGS", cflags))
cxxflags = cppflags + " ".join(spec.compiler_flags["cxxflags"])
if spec.satisfies("%oneapi@2025:"):
cxxflags += "-Wno-error=missing-template-arg-list-after-template-kw "
cxxflags += "-Wno-missing-template-arg-list-after-template-kw"
if cxxflags:
cfg.write(cmake_cache_entry("CMAKE_CXX_FLAGS", cxxflags))
fflags = " ".join(spec.compiler_flags["fflags"])

View File

@@ -4,6 +4,7 @@
import spack.store
from spack.package import *
from spack.pkg.builtin.boost import Boost
class CbtfKrell(CMakePackage):
@@ -50,17 +51,9 @@ class CbtfKrell(CMakePackage):
description="build only the FE tool using the runtime_dir to point to target build.",
)
# Fix build errors with gcc >= 10
patch(
"https://github.com/OpenSpeedShop/cbtf-krell/commit/7d47761c6cd9110883bff9ca1e694af1475676f5.patch?full_index=1",
sha256="64ed80d18163ca04a67be4a13ac2d2553243fc24c6274d26981472e6e2050b8a",
)
# Dependencies for cbtf-krell
depends_on("cmake@3.0.2:", type="build")
depends_on("gotcha")
# For rpcgen
depends_on("rpcsvc-proto", type="build")
@@ -71,11 +64,16 @@ class CbtfKrell(CMakePackage):
depends_on("binutils@2.32")
# For boost
depends_on("boost@1.70.0:+filesystem+graph+program_options+python+test+thread")
depends_on("boost@1.70.0:")
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
# For Dyninst
depends_on("dyninst@10.1.0:", when="@develop")
depends_on("dyninst@10.1.0:", when="@1.9.3:9999")
depends_on("dyninst@10.1.0", when="@develop")
depends_on("dyninst@10.1.0", when="@1.9.3:9999")
# For MRNet
depends_on("mrnet@5.0.1-3:+lwthreads", when="@develop", type=("build", "link", "run"))

View File

@@ -72,7 +72,6 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):
depends_on("nlohmann-json")
depends_on("geant4@10.5:", when="@0.4.2: +geant4")
depends_on("geant4@10.5:11.1", when="@0.3.1:0.4.1 +geant4")
depends_on("geant4@:11.2", when="@:0.5.0 +geant4")
depends_on("hepmc3", when="+hepmc3")
depends_on("root", when="+root")
depends_on("swig@4.1:", when="+swig")

View File

@@ -21,9 +21,8 @@ class Cepgen(CMakePackage):
"1.0.2patch1", sha256="333bba0cb1965a98dec127e00c150eab1a515cd348a90f7b1d66d5cd8d206d21"
)
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("fortran", type="build")
depends_on("cxx", type="build") # generated
depends_on("fortran", type="build") # generated
generator("ninja")

View File

@@ -52,8 +52,7 @@ class Clhep(CMakePackage):
version("2.2.0.4", sha256="9bf7fcd9892313c8d1436bc4a4a285a016c4f8e81e1fc65bdf6783207ae57550")
version("2.1.2.3", sha256="4353231be09c134507092161cd3ced27a065ca0ebb31ee0256e60a8163c47c3b")
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated
variant(
"cxxstd",

View File

@@ -92,9 +92,9 @@ def pgo_train(self):
# Set PGO training flags.
generate_mods = EnvironmentModifications()
generate_mods.append_flags("CFLAGS", f"-fprofile-generate={reports}")
generate_mods.append_flags("CXXFLAGS", f"-fprofile-generate={reports}")
generate_mods.append_flags("LDFLAGS", f"-fprofile-generate={reports}")
generate_mods.append_flags("CFLAGS", "-fprofile-generate={}".format(reports))
generate_mods.append_flags("CXXFLAGS", "-fprofile-generate={}".format(reports))
generate_mods.append_flags("LDFLAGS", "-fprofile-generate={} --verbose".format(reports))
with working_dir(self.build_directory, create=True):
cmake(*cmake_options, sources, extra_env=generate_mods)
@@ -118,14 +118,14 @@ def pgo_train(self):
# Clean the build dir.
rmtree(self.build_directory, ignore_errors=True)
if self.spec.satisfies("%clang") or self.spec.satisfies("%apple-clang"):
if self.spec.satisfies("%clang") or self.spec.satisfies("apple-clang"):
# merge reports
use_report = join_path(reports, "merged.prof")
raw_files = glob.glob(join_path(reports, "*.profraw"))
llvm_profdata("merge", f"--output={use_report}", *raw_files)
use_flag = f"-fprofile-instr-use={use_report}"
llvm_profdata("merge", "--output={}".format(use_report), *raw_files)
use_flag = "-fprofile-instr-use={}".format(use_report)
else:
use_flag = f"-fprofile-use={reports}"
use_flag = "-fprofile-use={}".format(reports)
# Set PGO use flags for next cmake phase.
use_mods = EnvironmentModifications()

View File

@@ -125,9 +125,9 @@ class Cmake(Package):
patch("mr-9623.patch", when="@3.22.0:3.30")
depends_on("ninja", when="platform=windows")
depends_on("gmake", type=("build", "run"), when="platform=linux")
depends_on("gmake", type=("build", "run"), when="platform=darwin")
depends_on("gmake", type=("build", "run"), when="platform=freebsd")
depends_on("gmake", when="platform=linux")
depends_on("gmake", when="platform=darwin")
depends_on("gmake", when="platform=freebsd")
depends_on("qt", when="+qtgui")
# Qt depends on libmng, which is a CMake package;

View File

@@ -31,9 +31,7 @@ class Collier(CMakePackage):
version("1.1", sha256="80fd54e2c30029d3d7d646738ae9469ad3a6f5ea7aa1179b951030df048e36bc")
version("1.0", sha256="54f40c1ed07a6829230af400abfe48791e74e56eac2709c0947cec3410a4473d")
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("fortran", type="build")
depends_on("fortran", type="build") # generated
@property
def parallel(self):

View File

@@ -29,7 +29,6 @@ def url_for_version(self, version):
license("NCSA")
version("master", branch="amd-stg-open")
version("6.3.2", sha256="1f52e45660ea508d3fe717a9903fe27020cee96de95a3541434838e0193a4827")
version("6.3.1", sha256="e9c2481cccacdea72c1f8d3970956c447cec47e18dfb9712cbbba76a2820552c")
version("6.3.0", sha256="79580508b039ca6c50dfdfd7c4f6fbcf489fe1931037ca51324818851eea0c1c")
version("6.2.4", sha256="7af782bf5835fcd0928047dbf558f5000e7f0207ca39cf04570969343e789528")
@@ -89,7 +88,6 @@ def url_for_version(self, version):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"master",
]:
# llvm libs are linked statically, so this *could* be a build dep
@@ -117,7 +115,6 @@ def url_for_version(self, version):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"rocm-core@{ver}", when=f"@{ver}")

View File

@@ -66,7 +66,7 @@ def cmake_args(self):
[
"-DKokkosCore_PREFIX={0}".format(kokkos.prefix),
"-DKokkosKernels_PREFIX={0}".format(kokkos_kernels.prefix),
"-DCMAKE_CXX_COMPILER:STRING={0}".format(self["kokkos"].kokkos_cxx),
"-DCMAKE_CXX_COMPILER:STRING={0}".format(spec["kokkos"].kokkos_cxx),
# Compadre_USE_PYTHON is OFF by default
"-DCompadre_USE_PYTHON=OFF",
]

View File

@@ -18,7 +18,6 @@ class ComposableKernel(CMakePackage):
license("MIT")
version("master", branch="develop")
version("6.3.2", sha256="875237fe493ff040f8f63b827cddf2ff30a8d3aa18864f87d0e35323c7d62a2d")
version("6.3.1", sha256="3e8c8c832ca3f9ceb99ab90f654b93b7db876f08d90eda87a70bc629c854052a")
version("6.3.0", sha256="274f87fc27ec2584c76b5bc7ebdbe172923166b6b93e66a24f98475b44be272d")
version("6.2.4", sha256="5598aea4bce57dc95b60f2029831edfdade80b30a56e635412cc02b2a6729aa6")
@@ -61,7 +60,6 @@ class ComposableKernel(CMakePackage):
for ver in [
"master",
"6.3.2",
"6.3.1",
"6.3.0",
"6.2.4",

View File

@@ -4,24 +4,20 @@
import os
import llnl.util.tty as tty
from spack.package import *
from spack.pkg.builtin.mpich import MpichEnvironmentModifications
from spack.util.module_cmd import get_path_args_from_module_line, module
class CrayMpich(MpichEnvironmentModifications, Package, CudaPackage, ROCmPackage):
class CrayMpich(MpichEnvironmentModifications, Package):
"""Cray's MPICH is a high performance and widely portable implementation of
the Message Passing Interface (MPI) standard."""
homepage = "https://docs.nersc.gov/development/compilers/wrappers/"
has_code = False # Skip attempts to fetch source that is not available
maintainers("etiennemlb", "haampie")
maintainers("haampie")
version("8.1.30")
version("8.1.28")
version("8.1.25")
version("8.1.24")
version("8.1.21")
@@ -127,86 +123,3 @@ def libs(self):
libs += find_libraries(libraries, root=self.prefix.lib64, recursive=True)
return libs
@property
def gtl_lib(self):
# GPU transport Layer (GTL) handling background:
# - The cray-mpich module defines an environment variable per supported
# GPU (say, PE_MPICH_GTL_LIBS_amd_gfx942). So we should read the
# appropriate variable.
# In practice loading a module and checking its content is a PITA. We
# simplify by assuming that the GTL for a given vendor (say, AMD), is
# one and the same for all the targets of this vendor (one GTL for all
# Nvidia or one GTL for all AMD devices).
# - Second, except if you have a very weird mpich layout, the GTL are
# located in /opt/cray/pe/mpich/<cray_mpich_version>/gtl/lib when the
# MPI libraries are in
# /opt/cray/pe/mpich/<cray_mpich_version>/ofi/<vendor>/<vendor_version>.
# Example:
# /opt/cray/pe/mpich/8.1.28/gtl/lib
# /opt/cray/pe/mpich/8.1.28/ofi/<vendor>/<vendor_version>
# /opt/cray/pe/mpich/8.1.28/ofi/<vendor>/<vendor_version>/../../../gtl/lib
gtl_kinds = {
"cuda": {
"lib": "libmpi_gtl_cuda",
"variant": "cuda_arch",
"values": {"70", "80", "90"},
},
"rocm": {
"lib": "libmpi_gtl_hsa",
"variant": "amdgpu_target",
"values": {"gfx906", "gfx908", "gfx90a", "gfx940", "gfx942"},
},
}
for variant, gtl_kind in gtl_kinds.items():
arch_variant = gtl_kind["variant"]
arch_values = gtl_kind["values"]
gtl_lib = gtl_kind["lib"]
if self.spec.satisfies(f"+{variant} {arch_variant}=*"):
accelerator_architecture_set = set(self.spec.variants[arch_variant].value)
if len(
accelerator_architecture_set
) >= 1 and not accelerator_architecture_set.issubset(arch_values):
raise InstallError(
f"cray-mpich variant '+{variant} {arch_variant}'"
" was specified but no GTL support could be found for it."
)
mpi_root = os.path.abspath(
os.path.join(self.prefix, os.pardir, os.pardir, os.pardir)
)
gtl_root = os.path.join(mpi_root, "gtl", "lib")
gtl_shared_libraries = find_libraries(
[gtl_lib], root=gtl_root, shared=True, recursive=False
)
if len(gtl_shared_libraries) != 1:
raise InstallError(
f"cray-mpich variant '+{variant} {arch_variant}'"
" was specified and GTL support was found for it but"
f" the '{gtl_lib}' could not be correctly found on disk."
)
gtl_library_fullpath = list(gtl_shared_libraries)[0]
tty.debug(f"Selected GTL: {gtl_library_fullpath}")
gtl_library_directory = os.path.dirname(gtl_library_fullpath)
gtl_library_name = os.path.splitext(
os.path.basename(gtl_library_fullpath).split("lib")[1]
)[0]
# Early break. Only one GTL can be active at a given time.
return {
"ldflags": [
f"-L{gtl_library_directory}",
f"-Wl,-rpath,{gtl_library_directory}",
],
"ldlibs": [f"-l{gtl_library_name}"],
}
return {}

View File

@@ -57,9 +57,6 @@ class Davix(CMakePackage):
description="Use the specified C++ standard when building.",
)
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("pkgconfig", type="build")
depends_on("libxml2")
depends_on("uuid")
@@ -71,10 +68,14 @@ class Davix(CMakePackage):
depends_on("gsoap", when="+thirdparty")
def cmake_args(self):
return [
cmake_args = [
self.define_from_variant("CMAKE_CXX_STANDARD", variant="cxxstd"),
self.define_from_variant("ENABLE_THIRD_PARTY_COPY", variant="thirdparty"),
# Disable the use of embedded packages; use Spack to fetch them instead
self.define("EMBEDDED_LIBCURL", False),
self.define("CMAKE_MACOSX_RPATH", self.spec.satisfies("platform=darwin")),
]
# Disable the use of embedded packages; use Spack to fetch them instead.
cmake_args.append("-DEMBEDDED_LIBCURL=OFF")
if "darwin" in self.spec.architecture:
cmake_args.append("-DCMAKE_MACOSX_RPATH=ON")
return cmake_args

View File

@@ -47,8 +47,7 @@ class Dd4hep(CMakePackage):
version("1.17", sha256="036a9908aaf1e13eaf5f2f43b6f5f4a8bdda8183ddc5befa77a4448dbb485826")
version("1.16.1", sha256="c8b1312aa88283986f89cc008d317b3476027fd146fdb586f9f1fbbb47763f1a")
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated
generator("ninja")

View File

@@ -552,7 +552,7 @@ def cmake_args(self):
)
# Make sure we use the same compiler that Trilinos uses
if spec.satisfies("+trilinos"):
options.extend([self.define("CMAKE_CXX_COMPILER", self["trilinos"].kokkos_cxx)])
options.extend([self.define("CMAKE_CXX_COMPILER", spec["trilinos"].kokkos_cxx)])
# Complex support
options.append(self.define_from_variant("DEAL_II_WITH_COMPLEX_VALUES", "complex"))

View File

@@ -19,7 +19,6 @@ class Detray(CMakePackage):
license("MPL-2.0", checked_by="stephenswat")
version("0.88.0", sha256="bda15501c9c96af961e24ce243982f62051c535b9fe458fb28336a19b54eb47d")
version("0.87.0", sha256="2d4a76432dd6ddbfc00b88b5d482072e471fefc264b60748bb1f9a123963576e")
version("0.86.0", sha256="98350c94e8a2395b8712b7102fd449536857e8158b38a96cc913c79b70301170")
version("0.85.0", sha256="a0121a27fd08243d4a6aab060e8ab379ad5129e96775b45f6a683835767fa8e7")

View File

@@ -17,7 +17,6 @@ class Duckdb(MakefilePackage):
maintainers("glentner", "teaguesterling")
version("master", branch="master")
version("1.2.0", sha256="f22c97e18c071fa8e43b5e150c03c6ab4bcc510cca6e6b50cbe13af8535fa701")
version("1.1.3", sha256="2aea0af898ad753fee82b776fea1bf78ccbc9648986e7f7a87372df5e74cdb98")
version("1.1.2", sha256="a3319a64c390ed0454c869b2e4fc0af2413cd49f55cd0f1400aaed9069cdbc4c")
version("1.1.1", sha256="a764cef80287ccfd8555884d8facbe962154e7c747043c0842cd07873b4d6752")
@@ -93,6 +92,7 @@ class Duckdb(MakefilePackage):
# Extensions
variant("autocomplete", default=True, description="Include autocomplete for CLI in build")
variant("excel", default=True, description="Include Excel formatting extension in build")
variant("fts", default=True, description="Include FTS (full text search) support in build")
variant("httpfs", default=True, description="Include HTTPFS (& S3) support in build")
variant("inet", default=True, description="Include INET (ip address) support in build")
variant("json", default=True, description="Include JSON support in build")
@@ -100,14 +100,6 @@ class Duckdb(MakefilePackage):
variant("tpce", default=False, description="Include TPCE in build")
variant("tpch", default=False, description="Include TPCH in build")
# FTS was moved to an out-of-tree extension after v1.1.3
variant(
"fts",
default=True,
description="Include FTS (full text search) support in build",
when="@:1.1",
)
# APIs
variant("python", default=True, description="Build with Python driver")
extends("python", when="+python")

View File

@@ -22,8 +22,7 @@ class Evtgen(CMakePackage):
version("02.02.00", sha256="0c626e51cb17e799ad0ffd0beea5cb94d7ac8a5f8777b746aa1944dd26071ecf")
version("02.00.00", sha256="02372308e1261b8369d10538a3aa65fe60728ab343fcb64b224dac7313deb719")
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated
variant("pythia8", default=True, description="Build with pythia8")
variant("tauola", default=False, description="Build with tauola")

View File

@@ -121,9 +121,9 @@ def setup_build_environment(self, env):
# Manually turn off device self.defines to solve Kokkos issues in Nalu-Wind headers
env.append_flags("CXXFLAGS", "-U__HIP_DEVICE_COMPILE__ -DDESUL_HIP_RDC")
if self.spec.satisfies("+cuda"):
env.set("OMPI_CXX", self["kokkos-nvcc-wrapper"].kokkos_cxx)
env.set("MPICH_CXX", self["kokkos-nvcc-wrapper"].kokkos_cxx)
env.set("MPICXX_CXX", self["kokkos-nvcc-wrapper"].kokkos_cxx)
env.set("OMPI_CXX", self.spec["kokkos-nvcc-wrapper"].kokkos_cxx)
env.set("MPICH_CXX", self.spec["kokkos-nvcc-wrapper"].kokkos_cxx)
env.set("MPICXX_CXX", self.spec["kokkos-nvcc-wrapper"].kokkos_cxx)
if self.spec.satisfies("+rocm"):
env.set("OMPI_CXX", self.spec["hip"].hipcc)
env.set("MPICH_CXX", self.spec["hip"].hipcc)

View File

@@ -73,6 +73,10 @@ class Extrae(AutotoolsPackage):
depends_on("mpi")
depends_on("libunwind")
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
depends_on("libdwarf")
depends_on("elf", type="link")
depends_on("libxml2")
@@ -88,10 +92,6 @@ class Extrae(AutotoolsPackage):
variant("dyninst", default=False, description="Use dyninst for dynamic code installation")
with when("+dyninst"):
depends_on("dyninst@10.1.0:")
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
depends_on("elfutils", when="@4.1.2:")
depends_on("intel-oneapi-tbb", when="@4.1.2:")
@@ -127,6 +127,7 @@ def configure_args(self):
args = [
"--with-mpi=%s" % mpiroot,
"--with-unwind=%s" % spec["libunwind"].prefix,
"--with-boost=%s" % spec["boost"].prefix,
"--with-dwarf=%s" % spec["libdwarf"].prefix,
"--with-elf=%s" % spec["elf"].prefix,
"--with-xml-prefix=%s" % spec["libxml2"].prefix,
@@ -140,10 +141,7 @@ def configure_args(self):
)
if spec.satisfies("+dyninst"):
args += [
f"--with-dyninst={spec['dyninst'].prefix}",
f"--with-boost={spec['boost'].prefix}",
]
args += ["--with-dyninst={spec['dyninst'].prefix}"]
if spec.satisfies("@4.1.2:"):
args += [

View File

@@ -19,7 +19,6 @@ class Fargparse(CMakePackage):
version("develop", branch="develop")
version("main", branch="main")
version("1.9.0", sha256="c83c13fa90b6b45adf8d84fe00571174acfa118d2a0d1e8c467f74bbd7dec49d")
version("1.8.0", sha256="37108bd3c65d892d8c24611ce4d8e5451767e4afe81445fde67eab652178dd01")
version("1.7.0", sha256="9889e7eca9c020b742787fba2be0ba16edcc3fcf52929261ccb7d09996a35f89")
version("1.6.0", sha256="055a0af44f50c302f8f20a8bcf3d26c5bbeacf5222cdbaa5b19da4cff56eb9c0")

View File

@@ -59,7 +59,6 @@ class Fastjet(AutotoolsPackage):
version("2.3.0", sha256="e452fe4a9716627bcdb726cfb0917f46a7ac31f6006330a6ccc1abc43d9c2d53")
# older version use .tar instead of .tar.gz extension, to be added
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("fortran", type="build", when="plugins=all")
depends_on("fortran", type="build", when="plugins=pxcone")

View File

@@ -147,7 +147,7 @@ def cmake_args(self):
# CMake pulled in via find_package(Legion) won't work without this
options.append(self.define("HIP_PATH", "{0}/hip".format(spec["hip"].prefix)))
elif self.spec.satisfies("^kokkos"):
options.append(self.define("CMAKE_CXX_COMPILER", self["kokkos"].kokkos_cxx))
options.append(self.define("CMAKE_CXX_COMPILER", self.spec["kokkos"].kokkos_cxx))
else:
# kept for supporing version prior to 2.2
options = [

View File

@@ -22,7 +22,6 @@ class FluxSched(CMakePackage, AutotoolsPackage):
license("LGPL-3.0-only")
version("master", branch="master")
version("0.42.1", sha256="ab56b257e4918ad7e26ef6a375d0ea500a4929bf6633937f0c11c06e21db56b9")
version("0.41.0", sha256="c89baf72867031847748c157aa99f3b36755f2801df917aae66010d2112e10fe")
version("0.40.0", sha256="1484befcf8628b0af7833bf550d0bb3864db32b70f2c1bb363c35e30ada1ecc5")
version("0.39.0", sha256="7e87029f8ad17b9286096e4e2d44982b5d6634908aefde3282497bdd3f44f2f8")

View File

@@ -9,15 +9,12 @@ class G4vg(CMakePackage):
"""Generate VecGeom geometry representations from in-memory Geant4 geometry."""
homepage = "https://github.com/celeritas-project/g4vg"
git = "https://github.com/celeritas-project/g4vg.git"
url = "https://github.com/celeritas-project/g4vg/releases/download/v1.0.1/g4vg-1.0.1.tar.gz"
maintainers("sethrj", "drbenmorgan")
license("Apache-2.0", checked_by="sethrj")
version("develop", branch="main", get_full_repo=True)
version("1.0.1", sha256="add7ce4bc37889cac2101323a997cea8574b18da6cbeffdab44a2b714d134e99")
variant("debug", default=False, description="Enable runtime debug assertions")

View File

@@ -11,7 +11,6 @@
import spack.compiler
import spack.platforms
import spack.repo
import spack.util.libc
from spack.operating_systems.mac_os import macos_sdk_path, macos_version
from spack.package import *
@@ -1218,7 +1217,7 @@ def _post_buildcache_install_hook(self):
)
if header_dir and all(
os.path.exists(os.path.join(header_dir, h))
for h in spack.repo.PATH.get_pkg_class(libc.fullname).representative_headers
for h in libc.package_class.representative_headers
):
relocation_args.append(f"-idirafter {header_dir}")
else:

View File

@@ -49,7 +49,6 @@ class Geant4(CMakePackage):
version("10.3.3", sha256="bcd36a453da44de9368d1d61b0144031a58e4b43a6d2d875e19085f2700a89d8")
version("10.0.4", sha256="97f3744366b00143d1eed52f8786823034bbe523f45998106f798af61d83f863")
depends_on("c", type="build")
depends_on("cxx", type="build")
_cxxstd_values = (

View File

@@ -17,7 +17,6 @@ class Geomodel(CMakePackage):
license("Apache-2.0", checked_by="wdconinc")
version("6.9.0", sha256="ea34dad8a0cd392e06794b8a1b7407dd6ad617fefd19fb4cccdf36b154749793")
version("6.8.0", sha256="4dfd5a932955ee2618a880bb210aed9ce7087cfadd31f23f92e5ff009c8384eb")
version("6.7.0", sha256="bfa69062ba191d0844d7099b28c0d6c3c0f87e726dacfaa21dba7a6f593d34bf")
version("6.6.0", sha256="3cefeaa409177d45d3fa63e069b6496ca062991b0d7d71275b1748487659e91b")

View File

@@ -24,7 +24,6 @@ class GftlShared(CMakePackage):
version("main", branch="main")
version("1.10.0", sha256="42158fe75fa6bee336516c7531b4c6c4e7252dee2fed541eec740209a07ceafe")
version("1.9.0", sha256="a3291ce61b512fe88628cc074b02363c2ba3081e7b453371089121988482dd6f")
version("1.8.0", sha256="3450161508c573ea053b2a23cdbf2a1d6fd6fdb78c162d31fc0019da0f8dd03c")
version("1.7.0", sha256="8ba567133fcee6b93bc71f61b3bb2053b4b07c6d78f6ad98a04dfc40aa478de7")

View File

@@ -38,7 +38,6 @@ class Gftl(CMakePackage):
version("develop", branch="develop")
version("main", branch="main")
version("1.15.1", sha256="13b9e17b7ec5e9ba19d0ee3ad1957bfa2015055b654891c6bb0bbe68b7a040d7")
version("1.14.0", sha256="bf8e3ba3f708ea327c7eb1a5bd1afdce41358c6df1a323aba0f73575c25d5fc8")
version("1.13.0", sha256="d8ef4bca5fb67e63dcd69e5377a0cef8336b00178a97450e79010552000d0a52")
version("1.12.0", sha256="b50e17cb2109372819b3ee676e6e61fd3a517dc4c1ea293937c8a83f03b0cbd6")

View File

@@ -99,11 +99,3 @@ def setup_dependent_package(self, module, dspec):
self.spec.prefix.bin.make,
jobs=determine_number_of_jobs(parallel=dspec.package.parallel),
)
@property
def libs(self):
return LibraryList([])
@property
def headers(self):
return HeaderList([])

View File

@@ -38,8 +38,6 @@ class Go(Package):
license("BSD-3-Clause")
version("1.23.6", sha256="039c5b04e65279daceee8a6f71e70bd05cf5b801782b6f77c6e19e2ed0511222")
version("1.23.5", sha256="a6f3f4bbd3e6bdd626f79b668f212fbb5649daf75084fb79b678a0ae4d97423b")
version("1.23.4", sha256="ad345ac421e90814293a9699cca19dd5238251c3f687980bbcae28495b263531")
version("1.23.3", sha256="8d6a77332487557c6afa2421131b50f83db4ae3c579c3bc72e670ee1f6968599")
version("1.23.2", sha256="36930162a93df417d90bd22c6e14daff4705baac2b02418edda671cdfa9cd07f")

View File

@@ -16,9 +16,8 @@ class GosamContrib(AutotoolsPackage):
version("2.0", sha256="c05beceea74324eb51c1049773095e2cb0c09c8c909093ee913d8b0da659048d")
version("1.0", sha256="a29d4232d9190710246abc2ed97fdcd8790ce83580f56a360f3456b0377c40ec")
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("fortran", type="build")
depends_on("cxx", type="build") # generated
depends_on("fortran", type="build") # generated
# whizard checks for .la files ( but does not use them )
install_libtool_archives = True

View File

@@ -25,8 +25,8 @@ class Hepmc(CMakePackage):
version("2.06.06", sha256="8cdff26c10783ed4248220a84a43b7e1f9b59cc2c9a29bd634d024ca469db125")
version("2.06.05", sha256="4c411077cc97522c03b74f973264b8d9fd2b6ccec0efc7ceced2645371c73618")
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated
depends_on("fortran", type="build") # generated
variant("length", default="MM", values=("CM", "MM"), multi=False, description="Unit of length")
variant(

View File

@@ -42,8 +42,7 @@ class Highfive(CMakePackage):
version("1.1", sha256="430fc312fc1961605ffadbfad82b9753a5e59482e9fbc64425fb2c184123d395")
version("1.0", sha256="d867fe73d00817f686d286f3c69a23731c962c3e2496ca1657ea7302cd0bb944")
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("cxx", type="build") # generated
variant("boost", default=False, description="Support Boost")
variant("mpi", default=True, description="Support MPI")
@@ -58,9 +57,10 @@ class Highfive(CMakePackage):
depends_on("hdf5 +mpi", when="+mpi")
def cmake_args(self):
return [
self.define_from_variant("USE_BOOST", "boost"),
self.define_from_variant("HIGHFIVE_PARALLEL_HDF5", "mpi"),
self.define("HIGHFIVE_UNIT_TESTS", False),
self.define("HIGHFIVE_EXAMPLES", False),
args = [
"-DUSE_BOOST:Bool={0}".format(self.spec.satisfies("+boost")),
"-DHIGHFIVE_PARALLEL_HDF5:Bool={0}".format(self.spec.satisfies("+mpi")),
"-DHIGHFIVE_UNIT_TESTS:Bool=false",
"-DHIGHFIVE_EXAMPLES:Bool=false",
]
return args
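For reference, a hedged sketch of what the define helpers used in the replacement code expand to, per Spack's CMakePackage conventions:

# Inside a CMakePackage subclass (sketch, not from this package):
self.define("HIGHFIVE_UNIT_TESTS", False)
#   -> "-DHIGHFIVE_UNIT_TESTS:BOOL=OFF"
self.define_from_variant("USE_BOOST", "boost")
#   -> "-DUSE_BOOST:BOOL=ON" when the spec has +boost, OFF otherwise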

View File

@@ -16,7 +16,6 @@ class HipTensor(CMakePackage, ROCmPackage):
maintainers("srekolam", "afzpatel")
version("master", branch="master")
version("6.3.2", sha256="094db6d759eb32e9d15c36fce7f5b5d46ba81416953a8d9435b2fb9c161d8c83")
version("6.3.1", sha256="142401331526e6da3fa172cce283f1c053056cb59cf431264443da76cee2f168")
version("6.3.0", sha256="9a4acef722e838ec702c6b111ebc1fff9d5686ae5c79a9f5a82e5fac2a5e406a")
version("6.2.4", sha256="54c378b440ede7a07c93b5ed8d16989cc56283a56ea35e41f3666bb05b6bc984")
@@ -46,13 +45,12 @@ class HipTensor(CMakePackage, ROCmPackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"master",
]:
depends_on(f"composable-kernel@{ver}", when=f"@{ver}")
depends_on(f"rocm-cmake@{ver}", when=f"@{ver}")
for ver in ["6.1.0", "6.1.1", "6.1.2", "6.2.0", "6.2.1", "6.2.4", "6.3.0", "6.3.1", "6.3.2"]:
for ver in ["6.1.0", "6.1.1", "6.1.2", "6.2.0", "6.2.1", "6.2.4", "6.3.0", "6.3.1"]:
depends_on(f"hipcc@{ver}", when=f"@{ver}")
def setup_build_environment(self, env):

View File

@@ -25,7 +25,6 @@ class Hip(CMakePackage):
license("MIT")
version("master", branch="master")
version("6.3.2", sha256="2832e21d75369f87beee767949177a93ac113710afae6b73da5548c0047962ec")
version("6.3.1", sha256="ebde9fa80ad1f4ba3fbe04fd36d90548492ebe5828ac459995b5f9d384a29783")
version("6.3.0", sha256="d8dba8cdf05463afb7879de2833983cafa6a006ba719815a35b96d9b92fc7fc4")
version("6.2.4", sha256="76e4583ae3d31786270fd92abbb2e3dc5e665b22fdedb5ceff0093131d4dc0ca")
@@ -118,7 +117,6 @@ class Hip(CMakePackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"hsa-rocr-dev@{ver}", when=f"@{ver}")
depends_on(f"comgr@{ver}", when=f"@{ver}")
@@ -145,7 +143,6 @@ class Hip(CMakePackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"hipify-clang@{ver}", when=f"@{ver}")
@@ -166,7 +163,6 @@ class Hip(CMakePackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"rocm-core@{ver}", when=f"@{ver}")
@@ -184,11 +180,10 @@ class Hip(CMakePackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"hipcc@{ver}", when=f"@{ver}")
for ver in ["6.2.0", "6.2.1", "6.2.4", "6.3.0", "6.3.1", "6.3.2"]:
for ver in ["6.2.0", "6.2.1", "6.2.4", "6.3.0", "6.3.1"]:
depends_on(f"rocprofiler-register@{ver}", when=f"@{ver}")
# roc-obj-ls requirements
@@ -250,7 +245,6 @@ class Hip(CMakePackage):
)
# Add hip-clr sources thru the below
for d_version, d_shasum in [
("6.3.2", "ec13dc4ffe212beee22171cb2825d2b16cdce103c835adddb482b9238cf4f050"),
("6.3.1", "bfb8a4a59e7bd958e2cd4bf6f14c6cdea601d9827ebf6dc7af053a90e963770f"),
("6.3.0", "829e61a5c54d0c8325d02b0191c0c8254b5740e63b8bfdb05eec9e03d48f7d2c"),
("6.2.4", "0a3164af7f997a4111ade634152957378861b95ee72d7060eb01c86c87208c54"),
@@ -309,7 +303,6 @@ class Hip(CMakePackage):
)
# Add hipother sources thru the below
for d_version, d_shasum in [
("6.3.2", "1623d823de49471aae3ecb1fad0e9cdddf9301a4089f1fd44f78ac2ff0c20fb2"),
("6.3.1", "caa69147227bf72fa7b076867f84579456ef55af63efec29914265a80602df42"),
("6.3.0", "a28eb1e8fe239b41e744584d45d676925ca210968ecb21bfa60678cf8e86eeb7"),
("6.2.4", "b7ebcf8a2679e50d27c49ebec0dbea5a67573f8b8c3f4a29108c84b28b5bedee"),


@@ -15,6 +15,5 @@ class HipblasCommon(CMakePackage):
license("MIT")
version("6.3.2", sha256="29aa1ac1a0f684a09fe2ea8a34ae8af3622c27708c7df403a7481e75174e1984")
version("6.3.1", sha256="512e652483b5580713eca14db3fa633d0441cd7c02cdb0d26e631ea605b9231b")
version("6.3.0", sha256="240bb1b0f2e6632447e34deae967df259af1eec085470e58a6d0aa040c8530b0")


@@ -24,7 +24,6 @@ class Hipblas(CMakePackage, CudaPackage, ROCmPackage):
version("develop", branch="develop")
version("master", branch="master")
version("6.3.2", sha256="6e86d4f8657e13665e37fdf3174c3a30f4c7dff2c4e2431d1be110cd7d463971")
version("6.3.1", sha256="77a1845254d738c43a48bc52fa3e94499ed83535b5771408ff476122bc4b7b7c")
version("6.3.0", sha256="72604c1896e42e65ea2b3e905159af6ec5eede6a353678009c47d0a24f462c92")
version("6.2.4", sha256="3137ba35e0663d6cceed70086fc6397d9e74803e1711382be62809b91beb2f32")
@@ -93,7 +92,6 @@ class Hipblas(CMakePackage, CudaPackage, ROCmPackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"rocm-cmake@{ver}", when=f"+rocm @{ver}")
depends_on(f"rocm-openmp-extras@{ver}", type="test", when=f"+rocm @{ver}")
@@ -121,7 +119,6 @@ class Hipblas(CMakePackage, CudaPackage, ROCmPackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"master",
"develop",
]:
@@ -130,7 +127,7 @@ class Hipblas(CMakePackage, CudaPackage, ROCmPackage):
for tgt in ROCmPackage.amdgpu_targets:
depends_on(f"rocblas amdgpu_target={tgt}", when=f"+rocm amdgpu_target={tgt}")
depends_on(f"rocsolver amdgpu_target={tgt}", when=f"+rocm amdgpu_target={tgt}")
for ver in ["6.3.0", "6.3.1", "6.3.2"]:
for ver in ["6.3.0", "6.3.1"]:
depends_on(f"hipblas-common@{ver}", when=f"@{ver}")
@classmethod
@@ -158,7 +155,7 @@ def cmake_args(self):
# FindHIP.cmake is still used for +cuda
if self.spec.satisfies("+cuda"):
args.append(self.define("CMAKE_MODULE_PATH", self.spec["hip"].prefix.lib.cmake.hip))
if self.spec.satisfies("@5.2.0:6.3.1"):
if self.spec.satisfies("@5.2.0:"):
args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True))
if self.spec.satisfies("@5.3.0:"):
args.append("-DCMAKE_INSTALL_LIBDIR=lib")
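
The "@5.2.0:6.3.1" edits in this and the following hunks hinge on Spack's version-range syntax: ranges are inclusive at both ends, and an omitted bound is open-ended. A quick illustration, runnable inside a Spack checkout:

from spack.spec import Spec

spec = Spec("hipblas@6.3.2")
print(spec.satisfies("@5.2.0:"))       # True: no upper bound
print(spec.satisfies("@5.2.0:6.3.1"))  # False: 6.3.2 lies past the closed range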


@@ -16,7 +16,6 @@ class Hipblaslt(CMakePackage):
maintainers("srekolam", "afzpatel", "renjithravindrankannath")
license("MIT")
version("6.3.2", sha256="cc4875b1a5cf1708a7576c42aff6b4cb790cb7337f5dc2df33119a4aadcef027")
version("6.3.1", sha256="9a18a2e44264a21cfe58ed102fd3e34b336f23d6c191ca2da726e8e0883ed663")
version("6.3.0", sha256="e570996037ea42eeca4c9b8b0b77a202d40be1a16068a6245595c551d80bdcad")
version("6.2.4", sha256="b8a72cb1ed4988b0569817c6387fb2faee4782795a0d8f49b827b32b52572cfd")
@@ -52,7 +51,6 @@ class Hipblaslt(CMakePackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"hip@{ver}", when=f"@{ver}")
depends_on(f"llvm-amdgpu@{ver}", when=f"@{ver}")
@@ -61,7 +59,7 @@ class Hipblaslt(CMakePackage):
for ver in ["6.0.0", "6.0.2", "6.1.0", "6.1.1", "6.1.2", "6.2.0", "6.2.1", "6.2.4"]:
depends_on(f"hipblas@{ver}", when=f"@{ver}")
for ver in ["6.3.0", "6.3.1", "6.3.2"]:
for ver in ["6.3.0", "6.3.1"]:
depends_on(f"hipblas-common@{ver}", when=f"@{ver}")
depends_on(f"rocm-smi-lib@{ver}", when=f"@{ver}")


@@ -23,7 +23,6 @@ def url_for_version(self, version):
maintainers("srekolam", "renjithravindrankannath", "afzpatel")
license("MIT")
version("6.3.2", sha256="1f52e45660ea508d3fe717a9903fe27020cee96de95a3541434838e0193a4827")
version("6.3.1", sha256="e9c2481cccacdea72c1f8d3970956c447cec47e18dfb9712cbbba76a2820552c")
version("6.3.0", sha256="79580508b039ca6c50dfdfd7c4f6fbcf489fe1931037ca51324818851eea0c1c")
version("6.2.4", sha256="7af782bf5835fcd0928047dbf558f5000e7f0207ca39cf04570969343e789528")


@@ -17,7 +17,6 @@ class Hipcub(CMakePackage, CudaPackage, ROCmPackage):
license("BSD-3-Clause")
maintainers("srekolam", "renjithravindrankannath", "afzpatel")
version("6.3.2", sha256="4a1443c2ea12c3aa05fb65703eb309ccf8b893f9e6cbebec4ccf5502ba54b940")
version("6.3.1", sha256="e5d100c7b8f95fe6243ad9f22170c136aa34db4e588136bec54ede7cb2e7f12f")
version("6.3.0", sha256="a609cde18cefa90a1970049cc5630f2ec263f12961aa85993897580da2ca0456")
version("6.2.4", sha256="06f3655b110d3d2e2ecf0aca052d3ba3f2ef012c069e5d2d82f2b75d50555f46")
@@ -85,7 +84,6 @@ class Hipcub(CMakePackage, CudaPackage, ROCmPackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"rocprim@{ver}", when=f"+rocm @{ver}")
depends_on(f"rocm-cmake@{ver}:", type="build", when=f"@{ver}")
@@ -109,7 +107,7 @@ def cmake_args(self):
# FindHIP.cmake is still used for +cuda
if self.spec.satisfies("+cuda"):
args.append(self.define("CMAKE_MODULE_PATH", self.spec["hip"].prefix.lib.cmake.hip))
if self.spec.satisfies("@5.2.0:6.3.1"):
if self.spec.satisfies("@5.2.0:"):
args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True))
return args


@@ -24,7 +24,6 @@ class Hipfft(CMakePackage, CudaPackage, ROCmPackage):
license("MIT")
version("master", branch="master")
version("6.3.2", sha256="5d9e662c7d67f4c814cad70476b57651df5ae6b65f371ca6dbb5aa51d9eeb6f5")
version("6.3.1", sha256="b709df2d0115748ed004d0cddce829cb0f9ec3761eb855e61f0097cab04e4806")
version("6.3.0", sha256="08a0c800f531247281b4dbe8de9567a6fde4f432829a451a720d0b0a3c711059")
version("6.2.4", sha256="308b81230498b01046f7fc3299a9e9c2c5456d80fd71a94f490ad97f51ed9de8")
@@ -92,7 +91,6 @@ class Hipfft(CMakePackage, CudaPackage, ROCmPackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"master",
]:
depends_on(f"rocm-cmake@{ver}:", type="build", when=f"@{ver}")
@@ -119,7 +117,7 @@ def cmake_args(self):
if self.spec["hip"].satisfies("@5.2:"):
args.append(self.define("CMAKE_MODULE_PATH", self.spec["hip"].prefix.lib.cmake.hip))
if self.spec.satisfies("@5.2.0:6.3.1"):
if self.spec.satisfies("@5.2.0:"):
args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True))
if self.spec.satisfies("@5.3.0:"):


@@ -16,7 +16,6 @@ class Hipfort(CMakePackage):
license("MIT")
maintainers("cgmb", "srekolam", "renjithravindrankannath", "afzpatel")
version("6.3.2", sha256="d2438971199637eb2e09519c1f2300cdd7a84b4d948034a7cd1ce3e441faf5de")
version("6.3.1", sha256="8141bf3d05ab4f91c561815134707123e3d06486bf775224b9a3a4cc8ee8f56f")
version("6.3.0", sha256="9e7f4420c75430cdb9046c0c4dbe656f22128b0672b2e261d50a6e92e47cc6d3")
version("6.2.4", sha256="32daa4ee52c2d44790bff7a7ddde9d572e4785b2f54766a5e45d10228da0534b")
@@ -69,7 +68,6 @@ class Hipfort(CMakePackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"hip@{ver}", type="build", when=f"@{ver}")


@@ -19,7 +19,6 @@ class HipifyClang(CMakePackage):
license("MIT")
version("master", branch="master")
version("6.3.2", sha256="c0da5118be8207fab6d19803417c0b8d2db5bc766279038527cbd6fa92b25c67")
version("6.3.1", sha256="5f9d9a65545f97b18c6a0d4394dca1bcdee10737a5635b79378ea505081f9315")
version("6.3.0", sha256="9fced04f9e36350bdbabd730c446b55a898e2f4ba82078855bcf5dea3b5e8dc8")
version("6.2.4", sha256="981af55ab4243f084b3e75007e827f7c94ac317fa84fe08d59c5872124a7d3c7")
@@ -77,7 +76,6 @@ class HipifyClang(CMakePackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"master",
]:
depends_on(f"llvm-amdgpu@{ver}", when=f"@{ver}")
@@ -99,7 +97,6 @@ class HipifyClang(CMakePackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
]:
depends_on(f"rocm-core@{ver}", when=f"@{ver}")


@@ -24,7 +24,6 @@ class Hiprand(CMakePackage, CudaPackage, ROCmPackage):
version("develop", branch="develop")
version("master", branch="master")
version("6.3.2", sha256="0a08ed7554c161b095c866cd5e6f0d63cdf063e5b3c1183afa6ac18bad94a575")
version("6.3.1", sha256="ec43bf64eda348cf53c2767e553fd9561540dc50ae3ce95ca916404aa9a3eafb")
version("6.3.0", sha256="7464c1e48f4e1a97a5e5978146641971d068886038810876b60acb5dfb6c4d39")
version("6.2.4", sha256="b6010f5e0c63a139acd92197cc1c0d64a428f7a0ad661bce0cd1e553ad6fd6eb")
@@ -104,7 +103,6 @@ class Hiprand(CMakePackage, CudaPackage, ROCmPackage):
"6.2.4",
"6.3.0",
"6.3.1",
"6.3.2",
"master",
"develop",
]:
@@ -142,7 +140,7 @@ def cmake_args(self):
else:
args.append(self.define("BUILD_WITH_LIB", "ROCM"))
if self.spec.satisfies("@5.2.0:6.3.1"):
if self.spec.satisfies("@5.2.0:"):
args.append(self.define("BUILD_FILE_REORG_BACKWARD_COMPATIBILITY", True))
if self.spec.satisfies("@5.3.0:"):

Some files were not shown because too many files have changed in this diff.