Compare commits
7 Commits
refactor/r ... hep/acts-o
| Author | SHA1 | Date |
|---|---|---|
| | 46432f7bbd | |
| | 145bb5039a | |
| | 61ffb87757 | |
| | 950b4c5847 | |
| | ac078f262d | |
| | fd62f0f3a8 | |
| | ca977ea9e1 | |
```diff
@@ -215,7 +215,6 @@ def create_external_pruner() -> Callable[[spack.spec.Spec], RebuildDecision]:
     """Return a filter that prunes external specs"""

     def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
-        print(s.name, "external:", s.external)
         if not s.external:
             return RebuildDecision(True, "not external")
         return RebuildDecision(False, "external spec")
```
```diff
@@ -16,8 +16,6 @@
 import spack.concretize
 import spack.config
 import spack.environment as ev
-import spack.hooks
-import spack.hooks.report
 import spack.paths
 import spack.report
 import spack.spec
```
```diff
@@ -331,10 +329,13 @@ def install(parser, args):
 
     arguments.sanitize_reporter_options(args)
 
-    # TODO: This is hacky as hell
-    if args.log_format is not None:
-        spack.hooks.report.reporter = args.reporter()
-        spack.hooks.report.report_file = args.log_file
+    def reporter_factory(specs):
+        if args.log_format is None:
+            return lang.nullcontext()
+
+        return spack.report.build_context_manager(
+            reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
+        )
 
     install_kwargs = install_kwargs_from_args(args)
 
```
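The `reporter_factory` closure restores the context-manager approach: reporting wraps the installs in a `with` block instead of flowing through mutable module globals. A minimal standalone sketch of the pattern (not Spack code; `make_reporter_factory` and `fake_report` are hypothetical names):

```python
import contextlib

def make_reporter_factory(log_format, build_context_manager):
    """Build a per-batch reporting context, or a no-op when reporting is off."""

    def reporter_factory(specs):
        if log_format is None:
            return contextlib.nullcontext()  # reporting disabled
        # The real code returns spack.report.build_context_manager(...) here,
        # which writes the report on exit whether the block succeeds or raises.
        return build_context_manager(specs)

    return reporter_factory

@contextlib.contextmanager
def fake_report(specs):
    try:
        yield
    finally:
        print(f"report written for {len(specs)} spec(s)")

with make_reporter_factory("junit", fake_report)(["zlib"]):
    pass  # installs would run here
```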
```diff
@@ -345,9 +346,9 @@ def install(parser, args):
 
     try:
         if env:
-            install_with_active_env(env, args, install_kwargs)
+            install_with_active_env(env, args, install_kwargs, reporter_factory)
         else:
-            install_without_active_env(args, install_kwargs)
+            install_without_active_env(args, install_kwargs, reporter_factory)
     except InstallError as e:
         if args.show_log_on_error:
             _dump_log_on_error(e)
```
```diff
@@ -381,7 +382,7 @@ def _maybe_add_and_concretize(args, env, specs):
     env.write(regenerate=False)
 
 
-def install_with_active_env(env: ev.Environment, args, install_kwargs):
+def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_factory):
     specs = spack.cmd.parse_specs(args.spec)
 
     # The following two commands are equivalent:
```
```diff
@@ -415,7 +416,8 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs):
         install_kwargs["overwrite"] = [spec.dag_hash() for spec in specs_to_install]
 
     try:
-        env.install_specs(specs_to_install, **install_kwargs)
+        with reporter_factory(specs_to_install):
+            env.install_specs(specs_to_install, **install_kwargs)
     finally:
         if env.views:
             with env.write_transaction():
```
```diff
@@ -459,17 +461,18 @@ def concrete_specs_from_file(args):
     return result
 
 
-def install_without_active_env(args, install_kwargs):
+def install_without_active_env(args, install_kwargs, reporter_factory):
     concrete_specs = concrete_specs_from_cli(args, install_kwargs) + concrete_specs_from_file(args)
 
     if len(concrete_specs) == 0:
         tty.die("The `spack install` command requires a spec to install.")
 
-    if args.overwrite:
-        require_user_confirmation_for_overwrite(concrete_specs, args)
-        install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
+    with reporter_factory(concrete_specs):
+        if args.overwrite:
+            require_user_confirmation_for_overwrite(concrete_specs, args)
+            install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
 
-    installs = [s.package for s in concrete_specs]
-    install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
-    builder = PackageInstaller(installs, **install_kwargs)
-    builder.install()
+        installs = [s.package for s in concrete_specs]
+        install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
+        builder = PackageInstaller(installs, **install_kwargs)
+        builder.install()
```
```diff
@@ -27,7 +27,6 @@
 class _HookRunner:
     #: Order in which hooks are executed
     HOOK_ORDER = [
-        "spack.hooks.report",
         "spack.hooks.module_file_generation",
         "spack.hooks.licensing",
         "spack.hooks.sbang",
```
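With `"spack.hooks.report"` gone from `HOOK_ORDER`, the runner simply never dispatches to it. For context, a simplified standalone sketch of the `_HookRunner` dispatch pattern (real Spack resolves the modules in `HOOK_ORDER` lazily by name; the toy modules here are stand-ins):

```python
import types

# Toy stand-ins for hook modules; Spack imports these by module name.
mod_a = types.ModuleType("mod_a")
mod_a.post_install = lambda spec, explicit: print(f"mod_a saw {spec}")
mod_b = types.ModuleType("mod_b")  # defines no post_install, so it is skipped

HOOK_ORDER = [mod_a, mod_b]

class HookRunner:
    def __init__(self, hook_name):
        self.hook_name = hook_name

    def __call__(self, *args, **kwargs):
        # Run the hook on every module that defines it, in HOOK_ORDER.
        for module in HOOK_ORDER:
            hook = getattr(module, self.hook_name, None)
            if hook is not None:
                hook(*args, **kwargs)

post_install = HookRunner("post_install")
post_install("zlib", explicit=True)  # -> "mod_a saw zlib"
```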
```diff
@@ -68,6 +67,3 @@ def __call__(self, *args, **kwargs):
 
 pre_uninstall = _HookRunner("pre_uninstall")
 post_uninstall = _HookRunner("post_uninstall")
-
-pre_installer = _HookRunner("pre_installer")
-post_installer = _HookRunner("post_installer")
```
```diff
@@ -1,263 +0,0 @@
-# Copyright Spack Project Developers. See COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Hooks to produce reports of spec installations"""
-import collections
-import gzip
-import os
-import time
-import traceback
-
-import llnl.util.filesystem as fs
-
-import spack.build_environment
-import spack.util.spack_json as sjson
-
-reporter = None
-report_file = None
-
-Property = collections.namedtuple("Property", ["name", "value"])
-
-
-class Record(dict):
-    def __getattr__(self, name):
-        # only called if no attribute exists
-        if name in self:
-            return self[name]
-        raise AttributeError(f"RequestRecord for {self.name} has no attribute {name}")
-
-    def __setattr__(self, name, value):
-        if name.startswith("_"):
-            super().__setattr__(name, value)
-        else:
-            self[name] = value
-
-
-class RequestRecord(Record):
-    def __init__(self, spec):
-        super().__init__()
-        self.name = spec.name
-        self.errors = None
-        self.nfailures = None
-        self.npackages = None
-        self.time = None
-        self.timestamp = time.strftime("%a, d %b %Y %H:%M:%S", time.gmtime())
-        self.properties = [
-            Property("architecture", spec.architecture),
-            Property("compiler", spec.compiler),
-        ]
-        self.packages = []
-        self._seen = set()
-
-    def append_record(self, record, key):
-        self.packages.append(record)
-        self._seen.add(key)
-
-    def seen(self, key):
-        return key in self._seen
-
-    def summarize(self):
-        self.npackages = len(self.packages)
-        self.nfailures = len([r for r in self.packages if r.result == "failure"])
-        self.nerrors = len([r for r in self.packages if r.result == "error"])
-        self.time = sum(float(r.elapsed_time or 0.0) for r in self.packages)
-
-
-class SpecRecord(Record):
-    pass
-
-
-class InstallRecord(SpecRecord):
-    def __init__(self, spec):
-        super().__init__()
-        self._spec = spec
-        self._package = spec.package
-        self._start_time = time.time()
-        self.name = spec.name
-        self.id = spec.dag_hash()
-        self.elapsed_time = None
-        self.result = None
-        self.message = None
-        self.installed_from_binary_cache = None
-
-    def fetch_log(self):
-        try:
-            if os.path.exists(self._package.install_log_path):
-                stream = gzip.open(self._package.install_log_path, "rt", encoding="utf-8")
-            else:
-                stream = open(self._package.log_path, encoding="utf-8")
-            with stream as f:
-                return f.read()
-        except OSError:
-            return f"Cannot open log for {self._spec.cshort_spec}"
-
-    def fetch_time(self):
-        try:
-            with open(self._package.times_log_path, "r", encoding="utf-8") as f:
-                data = sjson.load(f.read())
-            return data["total"]
-        except Exception:
-            return None
-
-    def skip(self, msg):
-        self.result = "skipped"
-        self.elapsed_time = 0.0
-        self.message = msg
-
-    def succeed(self):
-        self.result = "success"
-        self.stdout = self.fetch_log()
-        self.installed_from_binary_cache = self._package.installed_from_binary_cache
-        self.elapsed_time = self.fetch_time()
-
-    def fail(self, exc):
-        if isinstance(exc, spack.build_environment.InstallError):
-            self.result = "failure"
-            self.message = exc.message or "Installation failure"
-            self.exception = exc.traceback
-        else:
-            self.result = "error"
-            self.message = str(exc) or "Unknown error"
-            self.exception = traceback.format_exc()
-        self.stdout = self.fetch_log() + self.message
-
-
-requests = {}
-
-
-def pre_installer(specs):
-    global requests
-
-    for root in specs:
-        request = RequestRecord(root)
-        requests[root.dag_hash()] = request
-
-        for dep in filter(lambda x: x.installed, root.traverse()):
-            record = InstallRecord(dep)
-            record.skip(msg="Spec already installed")
-            request.append_record(record, dep.dag_hash())
-
-
-def post_installer(specs, hashes_to_failures):
-    global requests
-    global report_file
-    global reporter
-
-    try:
-        for root in specs:
-            request = requests[root.dag_hash()]
-
-            # Associate all dependency jobs with this request
-            for dep in root.traverse():
-                if request.seen(dep.dag_hash()):
-                    continue  # Already handled
-
-                record = InstallRecord(dep)
-                if dep.dag_hash() in hashes_to_failures:
-                    record.fail(hashes_to_failures[dep.dag_hash()])
-                elif dep.installed:
-                    record.succeed()
-                else:
-                    # This package was never reached because of an earlier failure
-                    continue
-                request.append_record(record, dep.dag_hash())
-
-            # Aggregate request-level data
-            request.summarize()
-
-        # Write the actual report
-        if not report_file:
-            basename = specs[0].format("test-{name}-{version}-{hash}.xml")
-            dirname = os.path.join(spack.paths.reports_path, "junit")
-            fs.mkdirp(dirname)
-            report_file = os.path.join(dirname, basename)
-        if reporter:
-            reporter.build_report(report_file, specs=list(requests.values()))
-
-    finally:
-        # Clean up after ourselves
-        requests = {}
-        reporter = None
-        report_file = None
-
-
-# This is not thread safe, but that should be ok
-# We only have one top-level thread launching build requests, and all parallelism
-# is between the jobs of different requests
-# requests: Dict[str, RequestRecord] = {}
-# specs: Dict[str, InstallRecord] = {}
-
-
-# def pre_installer(specs):
-#     global requests
-#     global specs
-
-#     for spec in specs:
-#         record = RequestRecord(spec)
-#         requests[spec.dag_hash()] = record
-
-#         for dep in filter(lambda x: x.installed, spec.traverse()):
-#             spec_record = InstallRecord(dep)
-#             spec_record.elapsed_time = "0.0"
-#             spec_record.result = "skipped"
-#             spec_record.message = "Spec already installed"
-#             specs[dep.dag_hash()] = spec_record
-
-
-# def pre_install(spec):
-#     global specs
-
-#     specs[spec.dag_hash()] = InstallRecord(spec)
-
-
-# def post_install(spec, explicit: bool):
-#     global specs
-
-#     record = specs[spec.dag_hash()]
-#     record.result = "success"
-#     record.stdout = record.fetch_log()
-#     record.installed_from_binary_cache = record._package.installed_from_binary_cache
-#     record.elapsed_time = time.time() - record._start_time
-
-
-# def post_failure(spec, error):
-#     global specs
-
-#     record = specs[spec.dag_hash()]
-#     if isinstance(error, spack.build_environment.InstallError):
-#         record.result = "failure"
-#         record.message = exc.message or "Installation failure"
-#         record.exception = exc.traceback
-#     else:
-#         record.result = "error"
-#         record.message = str(exc) or "Unknown error"
-#         record.exception = traceback.format_exc()
-#     record.stdout = record.fetch_log() + record.message
-#     record.elapsed_time = time.time() - record._start_time
-
-
-# def post_installer(specs):
-#     global requests
-#     global specs
-#     global reporter
-#     global report_file
-
-#     for spec in specs:
-#         # Find all associated spec records
-#         request_record = requests[spec.dag_hash()]
-#         for dep in spec.traverse(root=True):
-#             spec_record = specs[dep.dag_hash()]
-#             request_record.records.append(spec_record)
-
-#         # Aggregate statistics
-#         request_record.npackages = len(request_record.records)
-#         request_record.nfailures = len([r for r in request_record.records if r.result == "failure"])
-#         request_record.errors = len([r for r in request_record.records if r.result == "error"])
-#         request_record.time = sum(float(r.elapsed_time) for r in request_record.records)
-
-#     # Write the actual report
-#     filename = report_file or specs[0].name
-#     reporter.build_report(filename, specs=specs)
-
-#     # Clean up after ourselves
-#     requests = {}
-#     specs = {}
```
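The deleted `Record` base class is a small attribute-dict: public attributes land in the mapping (so they serialize with the report), while underscore-prefixed ones stay ordinary attributes. A slightly hardened standalone sketch of the same pattern:

```python
class Record(dict):
    """dict whose string keys double as attributes, as in the deleted report.py."""

    def __getattr__(self, name):
        # Called only when normal attribute lookup fails.
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name) from None

    def __setattr__(self, name, value):
        if name.startswith("_"):
            super().__setattr__(name, value)  # private state stays out of the dict
        else:
            self[name] = value

r = Record()
r.result = "success"
r._seen = set()  # not part of the mapping, so it never reaches the report
assert r == {"result": "success"} and r.result == "success"
```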
```diff
@@ -2014,13 +2014,11 @@ def _install_action(self, task: Task) -> InstallAction:
 
     def install(self) -> None:
         """Install the requested package(s) and or associated dependencies."""
-        spack.hooks.pre_installer([r.pkg.spec for r in self.build_requests])
 
         self._init_queue()
         fail_fast_err = "Terminating after first install failure"
         single_requested_spec = len(self.build_requests) == 1
         failed_build_requests = []
-        failed_tasks = []  # self.failed tracks dependents of failed tasks, here only failures
 
         install_status = InstallStatus(len(self.build_pq))
 
```
```diff
@@ -2173,24 +2171,13 @@ def install(self) -> None:
             except KeyboardInterrupt as exc:
                 # The build has been terminated with a Ctrl-C so terminate
                 # regardless of the number of remaining specs.
-                failed_tasks.append((pkg, exc))
                 tty.error(
                     f"Failed to install {pkg.name} due to " f"{exc.__class__.__name__}: {str(exc)}"
                 )
-                hashes_to_failures = {pkg.spec.dag_hash(): exc for pkg, exc in failed_tasks}
-                spack.hooks.post_installer(
-                    [r.pkg.spec for r in self.build_requests], hashes_to_failures
-                )
-                print("DDDDDD")
                 raise
 
             except binary_distribution.NoChecksumException as exc:
                 if task.cache_only:
-                    failed_tasks.append((pkg, exc))
-                    hashes_to_failures = {pkg.spec.dag_hash(): exc for pkg, exc in failed_tasks}
-                    spack.hooks.post_installer(
-                        [r.pkg.spec for r in self.build_requests], hashes_to_failures
-                    )
                     raise
 
                 # Checking hash on downloaded binary failed.
```
```diff
@@ -2205,7 +2192,6 @@ def install(self) -> None:
 
             except (Exception, SystemExit) as exc:
                 self._update_failed(task, True, exc)
-                failed_tasks.append((pkg, exc))
 
                 # Best effort installs suppress the exception and mark the
                 # package as a failure.
```
```diff
@@ -2218,14 +2204,8 @@
                         f"Failed to install {pkg.name} due to "
                         f"{exc.__class__.__name__}: {str(exc)}"
                     )
                 # Terminate if requested to do so on the first failure.
                 if self.fail_fast:
-                    hashes_to_failures = {pkg.spec.dag_hash(): exc for pkg, exc in failed_tasks}
-                    spack.hooks.post_installer(
-                        [r.pkg.spec for r in self.build_requests], hashes_to_failures
-                    )
-                    print("AAAAAAA")
-
                     raise spack.error.InstallError(
                         f"{fail_fast_err}: {str(exc)}", pkg=pkg
                     ) from exc
```
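Every failure path above repeated the same `hashes_to_failures` / `post_installer` / debug-print sequence before re-raising, which is exactly what this change deletes. If one wanted to keep an end-of-run callback, a single `try`/`finally` around the loop would cover all exits; a hypothetical sketch (names are illustrative, not Spack API):

```python
def run_installs(tasks, on_finish):
    """tasks: iterable of (name, callable); on_finish: called exactly once."""
    failures = {}
    try:
        for name, action in tasks:
            try:
                action()
            except Exception as exc:
                failures[name] = exc
                raise  # propagate; the finally below still runs
    finally:
        on_finish(failures)

try:
    run_installs([("ok", lambda: None), ("boom", lambda: 1 / 0)],
                 lambda failures: print("reported:", sorted(failures)))
except ZeroDivisionError:
    pass  # -> prints "reported: ['boom']"
```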
```diff
@@ -2233,15 +2213,8 @@
                 # Terminate when a single build request has failed, or summarize errors later.
                 if task.is_build_request:
                     if single_requested_spec:
-                        hashes_to_failures = {
-                            pkg.spec.dag_hash(): exc for pkg, exc in failed_tasks
-                        }
-                        spack.hooks.post_installer(
-                            [r.pkg.spec for r in self.build_requests], hashes_to_failures
-                        )
-                        print("BBBBB")
                         raise
-                    failed_build_requests.append((pkg, pkg_id, exc))
+                    failed_build_requests.append((pkg, pkg_id, str(exc)))
 
             finally:
                 # Remove the install prefix if anything went wrong during
```
```diff
@@ -2265,13 +2238,9 @@ def install(self) -> None:
             if request.install_args.get("install_package") and request.pkg_id not in self.installed
         ]
 
-        hashes_to_failures = {pkg.spec.dag_hash(): exc for pkg, exc in failed_tasks}
-        spack.hooks.post_installer([r.pkg.spec for r in self.build_requests], hashes_to_failures)
-        print("CCCCC", failed_build_requests)
-
         if failed_build_requests or missing:
             for _, pkg_id, err in failed_build_requests:
-                tty.error(f"{pkg_id}: {str(err)}")
+                tty.error(f"{pkg_id}: {err}")
 
             for _, pkg_id in missing:
                 tty.error(f"{pkg_id}: Package was not installed")
```
```diff
@@ -1406,6 +1406,8 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
                 config.write(f"  namespace: '{namespace}'\n")
             if subdir != packages_dir_name:
                 config.write(f"  subdirectory: '{subdir}'\n")
+            x, y = spack.package_api_version
+            config.write(f"  api: v{x}.{y}\n")
 
     except OSError as e:
         # try to clean up.
```
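The two added lines append a package-API entry to the generated `repo.yaml`. A hedged sketch of what the file ends up containing, assuming `spack.package_api_version` is a `(major, minor)` tuple such as `(2, 0)` (the namespace here is illustrative):

```python
import io

def write_repo_config(f, namespace="myrepo", subdir="packages", packages_dir_name="packages"):
    # Mirrors the config.write() calls in create_repo, with (2, 0) standing in
    # for spack.package_api_version.
    f.write("repo:\n")
    f.write(f"  namespace: '{namespace}'\n")
    if subdir != packages_dir_name:
        f.write(f"  subdirectory: '{subdir}'\n")
    x, y = (2, 0)
    f.write(f"  api: v{x}.{y}\n")

buf = io.StringIO()
write_repo_config(buf)
print(buf.getvalue())
# repo:
#   namespace: 'myrepo'
#   api: v2.0
```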
```diff
@@ -217,7 +217,6 @@ def build_report_for_package(self, report_dir, package, duration):
         nerrors = len(errors)
 
         if nerrors > 0:
-            print("NERRORS")
             self.success = False
             if phase == "configure":
                 report_data[phase]["status"] = 1
```
```diff
@@ -411,7 +410,6 @@ def concretization_report(self, report_dir, msg):
         self.current_package_name = self.base_buildname
         self.upload(output_filename)
         self.success = False
-        print("CONCRETIZATION")
         self.finalize_report()
 
     def initialize_report(self, report_dir):
```
```diff
@@ -450,8 +450,6 @@ def just_throw(*args, **kwargs):
 
     content = filename.open().read()
 
-    print(content)
-
     # Only libelf error is reported (through libdwarf root spec). libdwarf
     # install is skipped and it is not an error.
     assert 'tests="1"' in content
```
```diff
@@ -17,7 +17,7 @@ spack:
       tbb: [intel-tbb]
       variants: +mpi
     acts:
-      require: +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python ~svg +tgeo cxxstd=20
+      require: +alignment +analysis +dd4hep +edm4hep +examples +fatras +fatras_geant4 +geant4 +geomodel +hepmc3 +json +odd +onnx +podio +pythia8 +python +svg +tgeo cxxstd=20
     celeritas:
       require: +geant4 +hepmc3 +root +shared cxxstd=20
     hip:
```
```diff
@@ -109,7 +109,7 @@ spack:
   - yoda +root
 
   # CUDA
-  #- acts +cuda +traccc cuda_arch=80
+  - acts +cuda +traccc cuda_arch=80
   #- celeritas +cuda ~openmp +vecgeom cuda_arch=80
   - root +cuda +cudnn +tmva-gpu
   - vecgeom +cuda cuda_arch=80
```
```diff
@@ -360,7 +360,9 @@ class Acts(CMakePackage, CudaPackage):
     depends_on("actsvg@0.4.35:", when="@28:")
     depends_on("actsvg@0.4.39:", when="@32:")
     depends_on("actsvg@0.4.40:", when="@32.1:")
-    depends_on("actsvg@0.4.50:", when="@37:")
+    depends_on(
+        "actsvg@0.4.51:", when="@37:"
+    )  # https://github.com/acts-project/actsvg/issues/94
     depends_on("acts-algebra-plugins @0.24:", when="+traccc")
     depends_on("autodiff @0.6:", when="@17: +autodiff")
     depends_on("autodiff @0.5.11:0.5.99", when="@1.2:16 +autodiff")
```
```diff
@@ -19,6 +19,7 @@ class Actsvg(CMakePackage):
 
     license("MPL-2.0")
 
+    version("0.4.51", sha256="937385f7703c0d2d9c0af03bd5083d3f1fdac17ff573476a5fc5b1f8e3cd98b7")
     version("0.4.50", sha256="c97fb1cc75cbf23caebd3c6fb8716354bdbd0a77ad39dc43dae963692f3256e1")
     version("0.4.48", sha256="0f230c31c64b939e4d311afd997dbaa87a375454cf1595661a449b97943412c9")
     version("0.4.47", sha256="11924fddbdd01f6337875797dc3a97b62be565688e678485e992bcfc9bfb142f")
```
```diff
@@ -282,6 +282,7 @@ class Mfem(Package, CudaPackage, ROCmPackage):
 
     depends_on("mpi", when="+mpi")
     depends_on("hipsparse", when="@4.4.0:+rocm")
+    depends_on("hipblas", when="@4.4.0:+rocm")
 
     with when("+mpi"):
         depends_on("hypre")
```
```diff
@@ -986,9 +987,10 @@ def find_optional_library(name, prefix):
            if "^rocprim" in spec and not spec["hip"].external:
                # rocthrust [via petsc+rocm] has a dependency on rocprim
                hip_headers += spec["rocprim"].headers
-           if "^hipblas" in spec and not spec["hip"].external:
-               # superlu-dist+rocm needs the hipblas header path
-               hip_headers += spec["hipblas"].headers
+           if "^hipblas" in spec:
+               hipblas = spec["hipblas"]
+               hip_headers += hipblas.headers
+               hip_libs += hipblas.libs
            if "%cce" in spec:
                # We assume the proper Cray CCE module (cce) is loaded:
                proc = str(spec.target.family)
```
```diff
@@ -61,6 +61,8 @@ class PyRpy2(PythonPackage):
 
     depends_on("py-backports-zoneinfo", type=("build", "run"), when="@3.5.17: ^python@:3.8")
 
+    depends_on("iconv", type=("link"))
+
     # These are from 2019 and predate the pyproject.toml config that currently exists
     with when("@3.0.0:3.0.4"):
         # Doesn't support post-distutil removal until 3.5.13
```
```diff
@@ -175,6 +175,13 @@ class Raja(CachedCMakePackage, CudaPackage, ROCmPackage):
         when="^hip@6.0",
     )
 
+    # Fix compilation issue reported by Intel from their new compiler version
+    patch(
+        "https://github.com/LLNL/RAJA/pull/1668.patch?full_index=1",
+        sha256="c0548fc5220f24082fb2592d5b4e8b7c8c783b87906d5f0950d53953d25161f6",
+        when="@2024.02.1:2024.02.99 %oneapi@2025:",
+    )
+
     variant("openmp", default=False, description="Build OpenMP backend")
     variant("shared", default=False, description="Build shared libs")
     variant("desul", default=False, description="Build desul atomics backend")
```