Merge branch 'develop' into bugfix/compiler-flag-propagation

This commit is contained in:
Richarda Butler 2023-09-07 13:52:08 -07:00 committed by GitHub
commit 8760d3885a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
55 changed files with 734 additions and 234 deletions

View File

@ -48,6 +48,7 @@
import spack.util.gpg
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
@ -1798,10 +1799,11 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
m.linkname = m.linkname[result.end() :]
def extract_tarball(spec, download_result, unsigned=False, force=False):
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
"""
extract binary tarball for given package into install area
"""
timer.start("extract")
if os.path.exists(spec.prefix):
if force:
shutil.rmtree(spec.prefix)
@ -1881,7 +1883,9 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
os.remove(tarfile_path)
os.remove(specfile_path)
timer.stop("extract")
timer.start("relocate")
try:
relocate_package(spec)
except Exception as e:
@ -1902,6 +1906,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
if os.path.exists(filename):
os.remove(filename)
_delete_staged_downloads(download_result)
timer.stop("relocate")
def _ensure_common_prefix(tar: tarfile.TarFile) -> str:

View File

@ -15,9 +15,9 @@
from llnl.util import tty
import spack.build_environment
import spack.environment
import spack.tengine
import spack.util.cpus
import spack.util.executable
from spack.environment import depfile
@ -137,7 +137,7 @@ def _install_with_depfile(self) -> None:
"-C",
str(self.environment_root()),
"-j",
str(spack.build_environment.determine_number_of_jobs(parallel=True)),
str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
**kwargs,
)

View File

@ -68,7 +68,7 @@
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
from spack.util.cpus import cpus_available
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
SYSTEM_DIRS,
EnvironmentModifications,
@ -537,39 +537,6 @@ def update_compiler_args_for_dep(dep):
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
def determine_number_of_jobs(
    parallel=False, command_line=None, config_default=None, max_cpus=None
):
    """Return how many build jobs to use for a package.

    Sequential builds always get exactly one job. Otherwise the command
    line setting wins; failing that, the configured default is used,
    capped at the number of CPUs available to avoid oversubscription.

    Parameters:
        parallel (bool or None): true when package supports parallel builds
        command_line (int or None): command line override
        config_default (int or None): config default number of jobs
        max_cpus (int or None): maximum number of CPUs available. When None, this
            value is automatically determined.
    """
    # Sequential-only packages short-circuit everything else.
    if not parallel:
        return 1

    if command_line is None and "command_line" in spack.config.scopes():
        command_line = spack.config.get("config:build_jobs", scope="command_line")

    if command_line is not None:
        return command_line

    cap = max_cpus if max_cpus else cpus_available()
    # In some rare cases the _builtin config may not be set, so default to max 16
    fallback = config_default if config_default else spack.config.get("config:build_jobs", 16)
    return min(cap, fallback)
def set_module_variables_for_package(pkg):
"""Populate the Python module of a package with some useful global names.
This makes things easier for package writers.

View File

@ -154,7 +154,7 @@ def cuda_flags(arch_list):
conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")

View File

@ -10,9 +10,10 @@
import llnl.util.tty as tty
import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
from spack.directives import build_system, extends, maintainers
from spack.package_base import PackageBase
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError
@ -92,7 +93,7 @@ def install(self, pkg, spec, prefix):
"--copy",
"-i",
"-j",
str(determine_number_of_jobs(parallel)),
str(determine_number_of_jobs(parallel=parallel)),
"--",
os.getcwd(),
]

View File

@ -19,6 +19,7 @@
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
@ -253,6 +254,8 @@ def ci_rebuild(args):
check a single spec against the remote mirror, and rebuild it from source if the mirror does
not contain the hash
"""
rebuild_timer = timer.Timer()
env = spack.cmd.require_active_env(cmd_name="ci rebuild")
# Make sure the environment is "gitlab-enabled", or else there's nothing
@ -736,6 +739,14 @@ def ci_rebuild(args):
print(reproduce_msg)
rebuild_timer.stop()
try:
with open("install_timers.json", "w") as timelog:
extra_attributes = {"name": ".ci-rebuild"}
rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:
tty.debug(str(e))
# Tie job success/failure to the success/failure of building the spec
return install_exit_code

View File

@ -857,12 +857,12 @@ def add_from_file(filename, scope=None):
def add(fullpath, scope=None):
"""Add the given configuration to the specified config scope.
Add accepts a path. If you want to add from a filename, use add_from_file"""
components = process_config_path(fullpath)
has_existing_value = True
path = ""
override = False
value = syaml.load_config(components[-1])
for idx, name in enumerate(components[:-1]):
# First handle double colons in constructing path
colon = "::" if override else ":" if path else ""
@ -883,14 +883,14 @@ def add(fullpath, scope=None):
existing = get_valid_type(path)
# construct value from this point down
value = syaml.load_config(components[-1])
for component in reversed(components[idx + 1 : -1]):
value = {component: value}
break
if override:
path += "::"
if has_existing_value:
path, _, value = fullpath.rpartition(":")
value = syaml.load_config(value)
existing = get(path, scope=scope)
# append values to lists
@ -1231,11 +1231,17 @@ def they_are(t):
return copy.copy(source)
#
# Process a path argument to config.set() that may contain overrides ('::' or
# trailing ':')
#
def process_config_path(path):
"""Process a path argument to config.set() that may contain overrides ('::' or
trailing ':')
Note: quoted value path components will be processed as a single value (escaping colons)
quoted path components outside of the value will be considered ill formed and will
raise.
e.g. `this:is:a:path:'value:with:colon'` will yield:
[this, is, a, path, value:with:colon]
"""
result = []
if path.startswith(":"):
raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
@ -1263,6 +1269,17 @@ def process_config_path(path):
front.append = True
result.append(front)
quote = "['\"]"
not_quote = "[^'\"]"
if re.match(f"^{quote}", path):
m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
if not m:
raise ValueError("Quotes indicate value, but there are additional path entries")
result.append(m.group(1))
break
return result

View File

@ -1504,7 +1504,7 @@ def _concretize_separately(self, tests=False):
start = time.time()
max_processes = min(
len(arguments), # Number of specs
spack.config.get("config:build_jobs"), # Cap on build jobs
spack.util.cpus.determine_number_of_jobs(parallel=True),
)
# TODO: revisit this print as soon as darwin is parallel too

View File

@ -90,6 +90,16 @@
STATUS_REMOVED = "removed"
def _write_timer_json(pkg, timer, cache):
extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
try:
with open(pkg.times_log_path, "w") as timelog:
timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:
tty.debug(str(e))
return
class InstallAction:
#: Don't perform an install
NONE = 0
@ -399,6 +409,8 @@ def _install_from_cache(
return False
t.stop()
tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
_write_timer_json(pkg, t, True)
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
_print_installed_pkg(pkg.spec.prefix)
spack.hooks.post_install(pkg.spec, explicit)
@ -481,7 +493,7 @@ def _process_binary_cache_tarball(
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(
pkg.spec, download_result, unsigned=unsigned, force=False
pkg.spec, download_result, unsigned=unsigned, force=False, timer=timer
)
pkg.installed_from_binary_cache = True
@ -2093,7 +2105,6 @@ def install(self) -> None:
# another process has a write lock so must be (un)installing
# the spec (or that process is hung).
ltype, lock = self._ensure_locked("read", pkg)
# Requeue the spec if we cannot get at least a read lock so we
# can check the status presumably established by another process
# -- failed, installed, or uninstalled -- on the next pass.
@ -2373,8 +2384,7 @@ def run(self) -> bool:
# Stop the timer and save results
self.timer.stop()
with open(self.pkg.times_log_path, "w") as timelog:
self.timer.write_json(timelog)
_write_timer_json(self.pkg, self.timer, False)
print_install_test_log(self.pkg)
_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)

View File

@ -96,6 +96,7 @@
on_package_attributes,
)
from spack.spec import InvalidSpecDetected, Spec
from spack.util.cpus import determine_number_of_jobs
from spack.util.executable import *
from spack.variant import (
any_combination_of,

View File

@ -139,6 +139,8 @@ def craype_type_and_version(cls):
# If no default version, sort available versions and return latest
versions_available = [spack.version.Version(v) for v in os.listdir(craype_dir)]
versions_available.sort(reverse=True)
if not versions_available:
return (craype_type, None)
return (craype_type, versions_available[0])
@classmethod

View File

@ -16,8 +16,9 @@
import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml
from spack.build_environment import _static_to_shared_library, determine_number_of_jobs, dso_suffix
from spack.build_environment import _static_to_shared_library, dso_suffix
from spack.paths import build_env_path
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.util.path import Path, convert_to_platform_path
@ -442,7 +443,7 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
spack.build_environment.set_module_variables_for_package(s["b"].package)
assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
s["b"].package.parallel
parallel=s["b"].package.parallel
)
@ -474,28 +475,62 @@ def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mo
def test_build_jobs_sequential_is_sequential():
assert (
determine_number_of_jobs(parallel=False, command_line=8, config_default=8, max_cpus=8) == 1
determine_number_of_jobs(
parallel=False,
max_cpus=8,
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 8}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 8}}),
),
)
== 1
)
def test_build_jobs_command_line_overrides():
assert (
determine_number_of_jobs(parallel=True, command_line=10, config_default=1, max_cpus=1)
determine_number_of_jobs(
parallel=True,
max_cpus=1,
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}}),
),
)
== 10
)
assert (
determine_number_of_jobs(parallel=True, command_line=10, config_default=100, max_cpus=100)
determine_number_of_jobs(
parallel=True,
max_cpus=100,
config=spack.config.Configuration(
spack.config.InternalConfigScope("command_line", {"config": {"build_jobs": 10}}),
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}}),
),
)
== 10
)
def test_build_jobs_defaults():
assert (
determine_number_of_jobs(parallel=True, command_line=None, config_default=1, max_cpus=10)
determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.Configuration(
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 1}})
),
)
== 1
)
assert (
determine_number_of_jobs(parallel=True, command_line=None, config_default=100, max_cpus=10)
determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.Configuration(
spack.config.InternalConfigScope("defaults", {"config": {"build_jobs": 100}})
),
)
== 10
)

View File

@ -277,6 +277,25 @@ def test_add_config_path(mutable_config):
compilers = spack.config.get("packages")["all"]["compiler"]
assert "gcc" in compilers
# Try quotes to escape brackets
path = "config:install_tree:projections:cmake:\
'{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
spack.config.add(path)
set_value = spack.config.get("config")["install_tree"]["projections"]["cmake"]
assert set_value == "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
# NOTE:
# The config path: "config:install_tree:root:<path>" is unique in that it can accept multiple
# schemas (such as a dropped "root" component) which is atypical and may lead to passing tests
# when the behavior is in reality incorrect.
# the config path below is such that no subkey accepts a string as a valid entry in our schema
# try quotes to escape colons
path = "config:build_stage:'C:\\path\\to\\config.yaml'"
spack.config.add(path)
set_value = spack.config.get("config")["build_stage"]
assert "C:\\path\\to\\config.yaml" in set_value
@pytest.mark.regression("17543,23259")
def test_add_config_path_with_enumerated_type(mutable_config):

View File

@ -126,8 +126,8 @@ def test_timer_write():
deserialized = json.loads(json_buffer.getvalue())
assert deserialized == {
"phases": [{"name": "timer", "seconds": 1.0}],
"total": {"seconds": 3.0},
"phases": [{"name": "timer", "path": "timer", "seconds": 1.0, "count": 1}],
"total": 3.0,
}

View File

@ -5,6 +5,9 @@
import multiprocessing
import os
from typing import Optional
import spack.config
def cpus_available():
@ -18,3 +21,36 @@ def cpus_available():
return len(os.sched_getaffinity(0)) # novermin
except Exception:
return multiprocessing.cpu_count()
def determine_number_of_jobs(
    *,
    parallel: bool = False,
    max_cpus: Optional[int] = None,
    config: Optional["spack.config.Configuration"] = None,
) -> int:
    """
    Packages that require sequential builds need 1 job. Otherwise we use the
    number of jobs set on the command line. If not set, then we use the config
    defaults (which is usually set through the builtin config scope), but we
    cap to the number of CPUs available to avoid oversubscription.

    Parameters:
        parallel: true when package supports parallel builds
        max_cpus: maximum number of CPUs to use (defaults to cpus_available())
        config: configuration object (defaults to global config)
    """
    if not parallel:
        return 1

    # Evaluate lazily rather than as a ``def``-time default so that changes
    # to CPU availability/affinity after module import are still observed.
    if max_cpus is None:
        max_cpus = cpus_available()

    cfg = config or spack.config.CONFIG

    # Command line overrides all
    try:
        command_line = cfg.get("config:build_jobs", default=None, scope="command_line")
        if command_line is not None:
            return command_line
    except ValueError:
        # The "command_line" scope may not exist in this configuration.
        pass

    return min(max_cpus, cfg.get("config:build_jobs", 16))

View File

@ -13,31 +13,32 @@
import sys
import time
from contextlib import contextmanager
from typing import Dict
from typing import Callable, Dict, List
from llnl.util.lang import pretty_seconds_formatter
import spack.util.spack_json as sjson
Interval = collections.namedtuple("Interval", ("begin", "end"))
TimerEvent = collections.namedtuple("TimerEvent", ("time", "running", "label"))
TimeTracker = collections.namedtuple("TimeTracker", ("total", "start", "count", "path"))
#: name for the global timer (used in start(), stop(), duration() without arguments)
global_timer_name = "_global"
class BaseTimer:
def start(self, name=global_timer_name):
def start(self, name=None):
pass
def stop(self, name=global_timer_name):
def stop(self, name=None):
pass
def duration(self, name=global_timer_name):
def duration(self, name=None):
return 0.0
@contextmanager
def measure(self, name):
yield
yield self
@property
def phases(self):
@ -60,16 +61,18 @@ class NullTimer(BaseTimer):
class Timer(BaseTimer):
"""Simple interval timer"""
def __init__(self, now=time.time):
def __init__(self, now: Callable[[], float] = time.time):
"""
Arguments:
now: function that gives the seconds since e.g. epoch
"""
self._now = now
self._timers: Dict[str, Interval] = collections.OrderedDict()
self._timers: Dict[str, TimeTracker] = {}
self._timer_stack: List[str] = []
# _global is the overal timer since the instance was created
self._timers[global_timer_name] = Interval(self._now(), end=None)
self._events: List[TimerEvent] = []
# Push start event
self._events.append(TimerEvent(self._now(), True, global_timer_name))
def start(self, name=global_timer_name):
"""
@ -79,7 +82,7 @@ def start(self, name=global_timer_name):
name (str): Optional name of the timer. When no name is passed, the
global timer is started.
"""
self._timers[name] = Interval(self._now(), None)
self._events.append(TimerEvent(self._now(), True, name))
def stop(self, name=global_timer_name):
"""
@ -90,10 +93,7 @@ def stop(self, name=global_timer_name):
name (str): Optional name of the timer. When no name is passed, all
timers are stopped.
"""
interval = self._timers.get(name, None)
if not interval:
return
self._timers[name] = Interval(interval.begin, self._now())
self._events.append(TimerEvent(self._now(), False, name))
def duration(self, name=global_timer_name):
"""
@ -107,13 +107,13 @@ def duration(self, name=global_timer_name):
Returns:
float: duration of timer.
"""
try:
interval = self._timers[name]
except KeyError:
self._flatten()
if name in self._timers:
if name in self._timer_stack:
return self._timers[name].total + (self._now() - self._timers[name].start)
return self._timers[name].total
else:
return 0.0
# Take either the interval end, the global timer, or now.
end = interval.end or self._timers[global_timer_name].end or self._now()
return end - interval.begin
@contextmanager
def measure(self, name):
@ -123,23 +123,72 @@ def measure(self, name):
Arguments:
name (str): Name of the timer
"""
begin = self._now()
yield
self._timers[name] = Interval(begin, self._now())
self.start(name)
yield self
self.stop(name)
@property
def phases(self):
"""Get all named timers (excluding the global/total timer)"""
return [k for k in self._timers.keys() if k != global_timer_name]
self._flatten()
return [k for k in self._timers.keys() if not k == global_timer_name]
def write_json(self, out=sys.stdout):
    def _flatten(self):
        """Replay buffered start/stop events into per-phase ``TimeTracker`` totals.

        Events are processed in order. A start event pushes its label onto the
        timer stack; a stop event closes that timer and every nested timer
        started after it. Accumulated totals, counts, and the stack-derived
        path are stored in ``self._timers``; processed events are discarded.
        """
        for event in self._events:
            if event.running:
                if event.label not in self._timer_stack:
                    self._timer_stack.append(event.label)
                # Only start the timer if it is on top of the stack
                # restart doesn't work after a subtimer is started
                if event.label == self._timer_stack[-1]:
                    # Path excludes the global timer (stack element 0).
                    timer_path = "/".join(self._timer_stack[1:])
                    tracker = self._timers.get(
                        event.label, TimeTracker(0.0, event.time, 0, timer_path)
                    )
                    assert tracker.path == timer_path
                    self._timers[event.label] = TimeTracker(
                        tracker.total, event.time, tracker.count, tracker.path
                    )
            else:  # if not event.running:
                if event.label in self._timer_stack:
                    index = self._timer_stack.index(event.label)
                    # Stopping a timer implicitly stops every subtimer
                    # started after it (everything above it on the stack).
                    for label in self._timer_stack[index:]:
                        tracker = self._timers[label]
                        self._timers[label] = TimeTracker(
                            tracker.total + (event.time - tracker.start),
                            None,
                            tracker.count + 1,
                            tracker.path,
                        )
                    self._timer_stack = self._timer_stack[: max(0, index)]
        # clear events
        self._events = []
def write_json(self, out=sys.stdout, extra_attributes={}):
"""Write a json object with times to file"""
phases = [{"name": p, "seconds": self.duration(p)} for p in self.phases]
times = {"phases": phases, "total": {"seconds": self.duration()}}
out.write(sjson.dump(times))
self._flatten()
data = {
"total": self._timers[global_timer_name].total,
"phases": [
{
"name": phase,
"path": self._timers[phase].path,
"seconds": self._timers[phase].total,
"count": self._timers[phase].count,
}
for phase in self.phases
],
}
if extra_attributes:
data.update(extra_attributes)
if out:
out.write(sjson.dump(data))
else:
return data
def write_tty(self, out=sys.stdout):
"""Write a human-readable summary of timings"""
"""Write a human-readable summary of timings (depth is 1)"""
self._flatten()
times = [self.duration(p) for p in self.phases]

View File

@ -20,7 +20,11 @@ ci:
- k=$CI_GPG_KEY_ROOT/intermediate_ci_signing_key.gpg; [[ -r $k ]] && spack gpg trust $k
- k=$CI_GPG_KEY_ROOT/spack_public_key.gpg; [[ -r $k ]] && spack gpg trust $k
script::
- spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
- - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
- - spack python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py
--prefix /home/software/spack:${CI_PROJECT_DIR}
--log install_times.json
${SPACK_ARTIFACTS_ROOT}/user_data/install_times.json
after_script:
- - cat /proc/loadavg || true
variables:

View File

@ -0,0 +1,38 @@
#!/usr/bin/env spack-python
"""
This script is meant to be run using:
`spack python aggregate_logs.spack.py`
"""
import os
def find_logs(prefix, filename):
    """Lazily yield the full path of every file named ``filename`` under ``prefix``."""
    return (
        os.path.join(dirpath, filename)
        for dirpath, _, names in os.walk(prefix)
        if filename in names
    )
if __name__ == "__main__":
    import json
    from argparse import ArgumentParser

    parser = ArgumentParser("aggregate_logs")
    parser.add_argument("output_file")
    parser.add_argument("--log", default="install_times.json")
    parser.add_argument("--prefix", required=True)
    args = parser.parse_args()

    # Only search prefixes that actually exist on this machine.
    search_roots = [path for path in args.prefix.split(":") if os.path.exists(path)]

    # Aggregate the per-package install timers into a single JSON list.
    aggregated = []
    for root in search_roots:
        for log_path in find_logs(root, args.log):
            with open(log_path) as log_file:
                aggregated.append(json.load(log_file))

    with open(args.output_file, "w") as out_file:
        json.dump(aggregated, out_file)

View File

@ -7,7 +7,7 @@
from spack.pkg.builtin.kokkos import Kokkos
class Cabana(CMakePackage):
class Cabana(CMakePackage, CudaPackage, ROCmPackage):
"""The Exascale Co-Design Center for Particle Applications Toolkit"""
homepage = "https://github.com/ECP-copa/Cabana"
@ -53,9 +53,24 @@ class Cabana(CMakePackage):
_kk_spec = "kokkos-legacy+pthreads"
elif _kk_version == "-legacy" and _backend not in ["serial", "openmp", "cuda"]:
continue
# Handled separately by Cuda/ROCmPackage below
elif _backend == "cuda" or _backend == "hip":
continue
else:
_kk_spec = "kokkos{0}+{1}".format(_kk_version, _backend)
depends_on(_kk_spec, when="@{0}+{1}".format(_version, _backend))
for arch in CudaPackage.cuda_arch_values:
cuda_dep = "+cuda cuda_arch={0}".format(arch)
depends_on("kokkos {0}".format(cuda_dep), when=cuda_dep)
for arch in ROCmPackage.amdgpu_targets:
rocm_dep = "+rocm amdgpu_target={0}".format(arch)
depends_on("kokkos {0}".format(rocm_dep), when=rocm_dep)
conflicts("+cuda", when="cuda_arch=none")
depends_on("kokkos+cuda_lambda", when="+cuda")
depends_on("arborx", when="@0.3.0:+arborx")
depends_on("hypre-cmake@2.22.0:", when="@0.4.0:+hypre")
depends_on("hypre-cmake@2.22.1:", when="@0.5.0:+hypre")

View File

@ -46,6 +46,7 @@ class Cgns(CMakePackage):
variant("legacy", default=False, description="Enable legacy options")
variant("mem_debug", default=False, description="Enable memory debugging option")
variant("tools", default=False, description="Enable CGNS tools")
variant("pic", default=False, description="Produce position-independent code")
depends_on("cmake@3.12:", when="@4.3:", type="build")
depends_on("cmake@3.8:", when="@4.2:", type="build")
@ -62,6 +63,8 @@ class Cgns(CMakePackage):
depends_on("libxmu", when="+tools")
depends_on("libsm", when="+tools")
conflicts("~pic", when="+fortran", msg="+pic required when +fortran")
# patch for error undefined reference to `matherr, see
# https://bugs.gentoo.org/662210
patch("no-matherr.patch", when="@:3.3.1 +tools")
@ -83,6 +86,7 @@ def cmake_args(self):
self.define_from_variant("CGNS_ENABLE_BASE_SCOPE", "base_scope"),
self.define_from_variant("CGNS_ENABLE_LEGACY", "legacy"),
self.define_from_variant("CGNS_ENABLE_MEM_DEBUG", "mem_debug"),
self.define_from_variant("CMAKE_POSITION_INDEPENDENT_CODE", "pic"),
]
)

View File

@ -20,11 +20,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage):
periodic, material, crystal, and biological systems
"""
build_system(
conditional("cmake", when="@master:"),
conditional("makefile", when="@:2023.1"),
default="makefile",
)
build_system(conditional("cmake", when="@2023.2:"), "makefile", default="makefile")
homepage = "https://www.cp2k.org"
url = "https://github.com/cp2k/cp2k/releases/download/v3.0.0/cp2k-3.0.tar.bz2"
@ -195,7 +191,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage):
with when("+cosma"):
depends_on("cosma+scalapack")
depends_on("cosma@2.5.1:", when="@9:")
depends_on("cosma@2.6.3:", when="@master:")
depends_on("cosma@2.6.3:", when="@2023.2:")
depends_on("cosma+cuda", when="+cuda")
depends_on("cosma+rocm", when="+rocm")
conflicts("~mpi")
@ -235,7 +231,7 @@ class Cp2k(MakefilePackage, CudaPackage, CMakePackage, ROCmPackage):
depends_on("sirius@7.0.0:7.0", when="@8:8.2")
depends_on("sirius@7.2", when="@8.3:8.9")
depends_on("sirius@7.3:", when="@9.1")
depends_on("sirius@7.4:", when="@master")
depends_on("sirius@7.4:", when="@2023.2")
conflicts("~mpi", msg="SIRIUS requires MPI")
# sirius support was introduced in 7+
conflicts("@:6")
@ -850,8 +846,7 @@ def check(self):
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
"""Use the new cmake build system to build cp2k. It is the default when
building the master branch of cp2k."""
"""Use the new CMake build system to build CP2K."""
def cmake_args(self):
spec = self.spec

View File

@ -16,11 +16,17 @@ class Damaris(CMakePackage):
maintainers("jcbowden")
version("master", branch="master")
version("1.6.0", tag="v1.6.0", commit="1fe4c61cce03babd24315b8e6156f226baac97a2")
version("1.5.0", tag="v1.5.0", commit="68206a696ad430aa8426ca370501aa71914fbc87")
version("1.3.3", tag="v1.3.3", commit="f1c473507c080738f7092f6a7d72deb938ade786")
version("1.3.2", tag="v1.3.2", commit="38b50664523e56900809a19f0cf52fc0ab5dca53")
version("1.3.1", tag="v1.3.1", commit="6cee3690fa7d387acc8f5f650a7b019e13b90284")
version("1.9.0", tag="v1.9.0")
version("1.8.2", tag="v1.8.2")
version("1.8.1", tag="v1.8.1")
version("1.8.0", tag="v1.8.0")
version("1.7.1", tag="v1.7.1")
version("1.7.0", tag="v1.7.0")
version("1.6.0", tag="v1.6.0", deprecated=True)
version("1.5.0", tag="v1.5.0", deprecated=True)
version("1.3.3", tag="v1.3.3", deprecated=True)
version("1.3.2", tag="v1.3.2", deprecated=True)
version("1.3.1", tag="v1.3.1", deprecated=True)
variant("fortran", default=True, description="Enables Fortran support")
variant("hdf5", default=False, description="Enables the HDF5 storage plugin")
@ -36,24 +42,19 @@ class Damaris(CMakePackage):
variant(
"python",
default=False,
description="Enables building of Python enabled Damaris library - "
"boost::python boost::numpy needed",
description="Enables building of Python enabled Damaris library using Boost::python",
)
depends_on("mpi")
depends_on("cmake@3.18.0:", type=("build"))
depends_on(
"boost"
"+exception+locale+system+serialization+chrono+atomic"
"+container+regex+thread+log+filesystem+date_time"
"@1.67:"
)
depends_on("xsd")
depends_on("xerces-c")
depends_on("mpi")
depends_on("cmake@3.18.0:", type=("build"))
depends_on("boost+thread+log+filesystem+date_time" "@1.67:")
depends_on("boost+thread+log+filesystem+date_time+python+numpy" "@1.67:", when="+python")
depends_on("py-mpi4py", when="+python", type=("build", "run"))
depends_on("hdf5@1.8.20:", when="+hdf5")
depends_on("paraview+python", when="+catalyst")
depends_on("paraview+python+mpi+development_files", when="+catalyst")
depends_on("visit+mpi", when="+visit")
depends_on("boost+thread+log+filesystem+date_time+python+numpy @1.67:", when="+python")
def cmake_args(self):
args = []
@ -77,6 +78,7 @@ def cmake_args(self):
if self.spec.variants["catalyst"].value:
args.extend(["-DENABLE_CATALYST:BOOL=ON"])
args.extend(["-DParaView_ROOT:PATH=%s" % self.spec["catalyst"].prefix])
if self.spec.variants["examples"].value:
args.extend(["-DENABLE_EXAMPLES:BOOL=ON"])

View File

@ -11,8 +11,9 @@ class Duckdb(CMakePackage):
"""DuckDB is an in-process SQL OLAP Database Management System."""
homepage = "https://duckdb.org"
url = "https://github.com/duckdb/duckdb/archive/refs/tags/v0.7.1.tar.gz"
url = "https://github.com/duckdb/duckdb/archive/refs/tags/v0.8.1.tar.gz"
git = "https://github.com/duckdb/duckdb.git"
version("master", branch="master")
version("0.8.1", sha256="a0674f7e320dc7ebcf51990d7fc1c0e7f7b2c335c08f5953702b5285e6c30694")
version("0.7.1", sha256="67f840f861e5ffbe137d65a8543642d016f900b89dd035492d562ad11acf0e1e")

View File

@ -43,9 +43,9 @@ class Eigen(CMakePackage):
# there is a bug in 3.3.8 that provokes a compile error in dependent packages, see https://gitlab.com/libeigen/eigen/-/issues/2011
patch(
"https://gitlab.com/libeigen/eigen/-/commit/6d822a1052fc665f06dc51b4729f6a38e0da0546.diff",
"https://gitlab.com/libeigen/eigen/-/commit/ef3cc72cb65e2d500459c178c63e349bacfa834f.diff",
when="@3.3.8",
sha256="62590e9b33a8f72b608a72b87147a306e7cb20766ea53c6b8e0a183fa6cb7635",
sha256="b8877a84c4338f08ab8a6bb8b274c768e93d36ac05b733b078745198919a74bf",
)
# there is a bug in 3.3.4 that provokes a compile error with the xl compiler

View File

@ -23,6 +23,10 @@ class EpicsBase(MakefilePackage):
depends_on("readline")
depends_on("perl", type=("build", "run"))
    def patch(self):
        """Force EPICS' gnuCommon config to use Spack's compiler wrappers.

        Rewrites the ``CC``/``CCC`` assignments in configure/CONFIG.gnuCommon
        so the build does not pick up the system compilers.
        """
        filter_file(r"^\s*CC\s*=.*", "CC = " + spack_cc, "configure/CONFIG.gnuCommon")
        filter_file(r"^\s*CCC\s*=.*", "CCC = " + spack_cxx, "configure/CONFIG.gnuCommon")
@property
def install_targets(self):
return ["INSTALL_LOCATION={0}".format(self.prefix), "install"]

View File

@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
from spack.package import *
@ -11,20 +13,22 @@ class Fftx(CMakePackage, CudaPackage, ROCmPackage):
package for executing the Fast Fourier Transform as well as higher-level
operations composed of linear operations combined with DFT transforms."""
homepage = "https://spiral.net"
url = "https://github.com/spiral-software/fftx/archive/refs/tags/1.0.3.tar.gz"
homepage = "https://spiralgen.com"
url = "https://github.com/spiral-software/fftx/archive/refs/tags/1.1.2.tar.gz"
git = "https://github.com/spiral-software/fftx.git"
maintainers("spiralgen")
version("develop", branch="develop")
version("main", branch="main")
version("1.1.2", sha256="b2c4a7791305481af9e1bd358c1215efa4506c91c943cddca3780a1ccbc27810")
version("1.1.1", sha256="5cbca66ef09eca02ee8f336f58eb45cfac69cfb29cd6eb945852ad74085d8a60")
version("1.1.0", sha256="a6f95605abc11460bbf51839727a456a31488e27e12a970fc29a1b8c42f4e3b5")
version("1.0.3", sha256="b5ff275facce4a2fbabd0aecc65dd55b744794f2e07cd8cfa91363001c664896")
depends_on("spiral-software")
depends_on("spiral-package-fftx")
depends_on("spiral-package-simt")
# depends_on('spiral-package-mpi')
depends_on("spiral-software+fftx+simt+jit+mpi")
# depend only on spiral-software, but spiral-software must be installed with variants:
# +fftx +simt +mpi +jit
conflicts("+rocm", when="+cuda", msg="FFTX only supports one GPU backend at a time")
@ -39,41 +43,37 @@ def create_lib_source_code(self):
backend = "HIP"
self.build_config = "-D_codegen=%s" % backend
# From directory examples/library run the build-lib-code.sh script
with working_dir(join_path(self.stage.source_path, "src", "library")):
# From the root directory run the config-fftx-libs.sh script
with working_dir(self.stage.source_path):
bash = which("bash")
bash("./build-lib-code.sh", backend)
bash("./config-fftx-libs.sh", backend)
def cmake_args(self):
spec = self.spec
args = ["-DSPIRAL_HOME:STRING={0}".format(spec["spiral-software"].prefix)]
args.append("-DCMAKE_INSTALL_PREFIX:PATH={0}".format(self.stage.source_path))
args.append("-DCMAKE_INSTALL_PREFIX:PATH={0}".format(self.prefix))
if "+rocm" in spec:
args.append("-DCMAKE_CXX_COMPILER={0}".format(self.spec["hip"].hipcc))
args.append(self.build_config)
print("Args = " + str(args))
return args
@property
def build_targets(self):
return ["-j1", "install"]
return ["install"]
def install(self, spec, prefix):
mkdirp(prefix.bin)
mkdirp(prefix.CMakeIncludes)
mkdirp(prefix.examples)
mkdirp(prefix.include)
mkdirp(prefix.lib)
with working_dir(self.stage.source_path):
files = ("License.txt", "README.md", "ReleaseNotes.md")
files = ("License.txt", "README.md", "ReleaseNotes.md", "supercomputer-README.md")
for fil in files:
install(fil, prefix)
mkdirp(prefix.cache_jit_files)
with working_dir(self.stage.source_path):
install_tree("bin", prefix.bin)
install_tree("CMakeIncludes", prefix.CMakeIncludes)
install_tree("examples", prefix.examples)
install_tree("include", prefix.include)
install_tree("lib", prefix.lib)
dir = join_path(self.stage.source_path, "cache_jit_files")
if os.path.isdir(dir):
install_tree("cache_jit_files", prefix.cache_jit_files)
def setup_dependent_build_environment(self, env, dependent_spec):
env.set("FFTX_HOME", self.prefix)

View File

@ -0,0 +1,16 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Figcone(CMakePackage):
    """figcone is a C++17 library providing a convenient declarative interface
    for configuration parsers, with built-in support for reading JSON, YAML,
    TOML, XML, INI and shoal config files."""

    homepage = "https://github.com/kamchatka-volcano/figcone"
    url = "https://github.com/kamchatka-volcano/figcone/archive/refs/tags/v2.4.9.tar.gz"

    version("2.4.9", sha256="735399e849621a4923e71a50d5e2ba928d5dfa3b01e54d56e0bac8e5102b7697")

View File

@ -20,6 +20,7 @@ class FluxCore(AutotoolsPackage):
maintainers("grondo")
version("master", branch="master")
version("0.54.0", sha256="721fc3fff64b3b167ae55d0e29379ff3211729248ef97e3b9855816219063b42")
version("0.53.0", sha256="2f14d032a2d54f34e066c8a15c79917089e9f7f8558baa03dbfe63dbf56918b7")
version("0.52.0", sha256="dca434238405e4cae4686c8143f2cc79919bfd9e26b09c980e1e5f69ffd0c448")
version("0.51.0", sha256="e57b71b708482f20d2a2195a000c0c3b9176faa6aaadfad4d2117f8671ca67ce")

View File

@ -20,6 +20,7 @@ class FluxSecurity(AutotoolsPackage):
maintainers("grondo")
version("master", branch="master")
version("0.10.0", sha256="b0f39c5e32322f901454469ffd6154019b6dffafc064b55b3e593f70db6a6f68")
version("0.9.0", sha256="2258120c6f32ca0b5b13b166bae56d9bd82a44c6eeaa6bc6187e4a4419bdbcc0")
version("0.8.0", sha256="9963628063b4abdff6bece03208444c8f23fbfda33c20544c48b21e9f4819ce2")

View File

@ -6,7 +6,6 @@
import os
import re
from spack.build_environment import MakeExecutable, determine_number_of_jobs
from spack.package import *

View File

@ -0,0 +1,44 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Libpostal(AutotoolsPackage):
    """A C library for parsing/normalizing street addresses around the world.
    Powered by statistical NLP and open geo data."""

    homepage = "https://github.com/openvenues/libpostal"
    url = "https://github.com/openvenues/libpostal/archive/refs/tags/v1.1.tar.gz"

    maintainers("jgaeb")

    version("1.1", sha256="8cc473a05126895f183f2578ca234428d8b58ab6fadf550deaacd3bd0ae46032")
    version("1.0.0", sha256="3035af7e15b2894069753975d953fa15a86d968103913dbf8ce4b8aa26231644")
    version("0.3.4", sha256="8b3b95660c5b5d4fe48045b9acb000d1a0eb19d58d0c2d2041e78d9a96d88716")
    version("0.3.3", sha256="dc73de37d7f7b96f329fd213dcbac540f2ae92fbef9c079fd64fbc8daeb87b01")
    version("0.3.2", sha256="9a1590eadf4ebe84979113b71059410413adf239b2999d22d11fe8778945f2c1")
    version("0.3.1", sha256="68c51a5fdae41e1cac474742789ba5a46a38e307a0a2450cb2d3e33b4f17cf4d")
    version("0.3", sha256="28c19e21bab13425a76aa65a8435f4b3909611056c2ff439c39b4e57b2a70150")

    # Full autotools toolchain is required because the build system is
    # regenerated from bootstrap.sh (see autoreconf below).
    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("libtool", type="build")
    depends_on("m4", type="build")
    # NOTE(review): curl is declared build-only; presumably used by the build
    # to fetch the statistical model data -- confirm against upstream docs.
    depends_on("curl", type="build")
    depends_on("pkg-config", type="build")

    def autoreconf(self, spec, prefix):
        """Regenerate the configure script via the project's bootstrap script."""
        which("sh")("bootstrap.sh")

    def configure_args(self):
        """Arguments for ./configure.

        Data files go under this package's own share directory. SSE2 is
        disabled when targeting Apple's M1, since x86 SSE2 instructions are
        unavailable on that ARM-based chip.
        """
        args = ["--datadir={0}".format(self.prefix.share)]
        # Check if the target is Apple's ARM-based M1 chip.
        arch = self.spec.architecture
        if arch.platform == "darwin" and arch.target == "m1":
            args.append("--disable-sse2")
        return args

View File

@ -53,7 +53,7 @@ def lua_share_dir(self):
return os.path.join("share", self.lua_dir_name, self.__verdir())
# luarocks needs unzip for some packages (e.g. lua-luaposix)
depends_on("unzip", type="run")
depends_on("unzip", type=("build", "run"))
# luarocks needs a fetcher (curl/wget), unfortunately I have not found
# how to force a choice for curl or wget, but curl seems the default.

View File

@ -26,6 +26,7 @@ class Musl(MakefilePackage):
homepage = "https://www.musl-libc.org"
url = "https://www.musl-libc.org/releases/musl-1.1.23.tar.gz"
version("1.2.4", sha256="7a35eae33d5372a7c0da1188de798726f68825513b7ae3ebe97aaaa52114f039")
version("1.2.3", sha256="7d5b0b6062521e4627e099e4c9dc8248d32a30285e959b7eecaa780cf8cfd4a4")
version("1.2.2", sha256="9b969322012d796dc23dda27a35866034fa67d8fb67e0e2c45c913c3d43219dd")
version("1.2.1", sha256="68af6e18539f646f9c41a3a2bb25be4a5cfa5a8f65f0bb647fd2bbfdf877e84b")

View File

@ -16,6 +16,7 @@ class Neko(AutotoolsPackage, CudaPackage, ROCmPackage):
url = "https://github.com/ExtremeFLOW/neko/releases/download/v0.3.2/neko-0.3.2.tar.gz"
maintainers("njansson")
version("0.6.1", sha256="6282baaf9c8a201669e274cba23c37922f7ad701ba20ef086442e48f00dabf29")
version("0.6.0", sha256="ce37c7cea1a7bf1bf554c5717aa7fed35bbd079ff68c2fc9d3529facc717e31a")
version("0.5.2", sha256="8873f5ada106f92f21c9bb13ea8164550bccde9301589b9e7f1c1a82a2efe2b8")
version("0.5.1", sha256="8b176bcc9f2d4a6804b68dd93a2f5e02e2dfa986d5c88063bbc72d39e9659cc4")
@ -27,6 +28,7 @@ class Neko(AutotoolsPackage, CudaPackage, ROCmPackage):
version("develop", branch="develop")
variant("parmetis", default=False, description="Build with support for parmetis")
variant("xsmm", default=False, description="Build with support for libxsmm")
variant("gslib", default=False, when="@develop", description="Build with support for gslib")
depends_on("autoconf", type="build")
depends_on("automake", type="build")
@ -38,6 +40,8 @@ class Neko(AutotoolsPackage, CudaPackage, ROCmPackage):
depends_on("mpi")
depends_on("blas")
depends_on("lapack")
depends_on("json-fortran", when="@develop")
depends_on("gslib", when="+gslib")
def configure_args(self):
args = []
@ -46,6 +50,7 @@ def configure_args(self):
args += self.with_or_without("parmetis", variant="parmetis", activation_value="prefix")
args += self.with_or_without("metis", variant="parmetis", activation_value="prefix")
args += self.with_or_without("libxsmm", variant="xsmm")
args += self.with_or_without("gslib", variant="gslib", activation_value="prefix")
args += self.with_or_without("cuda", activation_value="prefix")
rocm_fn = lambda x: self.spec["hip"].prefix
args += self.with_or_without("hip", variant="rocm", activation_value=rocm_fn)

View File

@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.build_environment import MakeExecutable, determine_number_of_jobs
from spack.package import *
from spack.util.executable import which_string

View File

@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
from spack.build_environment import MakeExecutable, determine_number_of_jobs
from spack.package import *
from spack.util.executable import which_string

View File

@ -87,6 +87,7 @@ class OpenfoamOrg(Package):
"source", default=True, description="Install library/application sources and tutorials"
)
variant("metis", default=False, description="With metis decomposition")
variant("scotch", default=True, description="With scotch/ptscotch decomposition")
variant(
"precision",
default="dp",
@ -101,8 +102,8 @@ class OpenfoamOrg(Package):
depends_on("cmake", type="build")
# Require scotch with ptscotch - corresponds to standard OpenFOAM setup
depends_on("scotch~metis+mpi~int64", when="~int64")
depends_on("scotch~metis+mpi+int64", when="+int64")
depends_on("scotch~metis+mpi~int64", when="+scotch~int64")
depends_on("scotch~metis+mpi+int64", when="+scotch+int64")
depends_on("metis@5:", when="+metis")
depends_on("metis+int64", when="+metis+int64")
@ -293,7 +294,7 @@ def configure(self, spec, prefix):
"gperftools": [], # Currently unused
}
if True:
if "+scotch" in spec:
self.etc_config["scotch"] = {
"SCOTCH_ARCH_PATH": spec["scotch"].prefix,
# For src/parallel/decompose/Allwmake

View File

@ -20,7 +20,7 @@ class Pocl(CMakePackage):
git = "https://github.com/pocl/pocl.git"
version("master", branch="master")
version("3.0", sha256="5f6bbc391ba144bc7becc3b90888b25468460d5aa6830f63a3b066137e7bfac3")
version("3.0", sha256="a3fd3889ef7854b90b8e4c7899c5de48b7494bf770e39fba5ad268a5cbcc719d")
version("1.8", sha256="0f63377ae1826e16e90038fc8e7f65029be4ff6f9b059f6907174b5c0d1f8ab2")
version("1.7", sha256="5f6bbc391ba144bc7becc3b90888b25468460d5aa6830f63a3b066137e7bfac3")
version("1.6", sha256="b0a4c0c056371b6f0db726b88fbb76bbee94948fb2abd4dbc8d958f7c42f766c")

View File

@ -0,0 +1,21 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RFunctional(RPackage):
    """Curry, Compose, and other higher-order functions.

    Curry, Compose, and other higher-order functions."""

    cran = "functional"

    maintainers("jgaeb")

    # CRAN release history.
    version("0.6", sha256="19b78e27c27b1081245222c42da4dd1cb65c5643e6da9d6c1a6e997755c21888")
    version("0.4", sha256="05d1a50de6a574d938471c9a615c840871df9f879b2cbbcabc6b25b5809a70a8")
    version("0.2", sha256="1b11d039153a214e89e4903163522d8e15c1fcf84495023d9e463487bde1e8d8")
    version("0.1", sha256="148301d066f9c7e450d809a130d31b0763424f65f177704856d76143ded3db7e")

View File

@ -0,0 +1,48 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RFurrr(RPackage):
    """Apply Mapping Functions in Parallel using Futures.

    Implementations of the family of map() functions from 'purrr' that can be
    resolved using any 'future'-supported backend, e.g. parallel on the local
    machine or distributed on a compute cluster."""

    homepage = "https://furrr.futureverse.org"
    cran = "furrr"

    maintainers("jgaeb")

    # CRAN release history.
    version("0.3.1", sha256="0d91735e2e9be759b1ab148d115c2c7429b79740514778828e5dab631dc0e48b")
    version("0.3.0", sha256="3fe91cc1614f9404c708ea3a15b6a40289fa57f40f3ece54452093408d91fd84")
    version("0.2.3", sha256="0a213422dc0a2e84173f2d3e6c7900dcb677f980c255d6b6ccf666fba1173700")
    version("0.2.2", sha256="e5c10353dc47416eda870d16cf810c576f11bdc9e4c7277f7755581f3824cd4d")
    version("0.2.1", sha256="07b3c98324aeb6a7e77a3d48c54fb90696a6e14efeee391cfc5e05f8dcd3469b")
    version("0.2.0", sha256="9d6483656fdb5b90e998e2c2f1494c721185079a1412316c6d391e1eade89e1b")
    version("0.1.0", sha256="dd2937f7cad1bc69e7a512b2a777f82d6cb7e40fe99afa2049ca360f9352a9d1")

    # Dependency constraints mirror the DESCRIPTION file of each furrr
    # release; the when= clauses select the constraint matching the version.
    depends_on("r@3.2.0:", type=("build", "run"), when="@0.1.0:")
    depends_on("r@3.4.0:", type=("build", "run"), when="@0.3.0:")
    depends_on("r-future@1.6.2:1.19.1", type=("build", "run"), when="@0.1.0")
    depends_on("r-future@1.19.1:1.22.1", type=("build", "run"), when="@0.2.0:0.2.3")
    depends_on("r-future@1.25.0:", type=("build", "run"), when="@0.3.0:")
    depends_on("r-globals@0.10.3:", type=("build", "run"), when="@0.1.0:")
    depends_on("r-globals@0.13.1:", type=("build", "run"), when="@0.2.0:")
    depends_on("r-globals@0.14.0:", type=("build", "run"), when="@0.3.0:")
    depends_on("r-rlang@0.2.0:", type=("build", "run"), when="@0.1.0:")
    depends_on("r-rlang@0.3.0:", type=("build", "run"), when="@0.2.0:")
    depends_on("r-rlang@1.0.2:", type=("build", "run"), when="@0.3.0:")
    depends_on("r-purrr@0.2.4:", type=("build", "run"), when="@0.1.0:")
    depends_on("r-purrr@0.3.0:", type=("build", "run"), when="@0.2.0:")
    depends_on("r-purrr@0.3.4:", type=("build", "run"), when="@0.3.0:")
    depends_on("r-ellipsis", type=("build", "run"), when="@0.2.0:0.3.0")
    depends_on("r-lifecycle@0.2.0:", type=("build", "run"), when="@0.2.0:")
    depends_on("r-lifecycle@1.0.0:", type=("build", "run"), when="@0.2.3:")
    depends_on("r-lifecycle@1.0.1:", type=("build", "run"), when="@0.3.0:")
    depends_on("r-vctrs@0.3.2:", type=("build", "run"), when="@0.2.0:")
    depends_on("r-vctrs@0.4.1:", type=("build", "run"), when="@0.3.0:")

View File

@ -0,0 +1,26 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RHumaniformat(RPackage):
    """A Parser for Human Names.

    Human names are complicated and nonstandard things. Humaniformat, which is
    based on Anthony Ettinger's 'humanparser' project
    (https://github.com/chovy/humanparser) provides functions for parsing human
    names, making a best-guess attempt to distinguish sub-components such as
    prefixes, suffixes, middle names and salutations."""

    homepage = "https://github.com/Ironholds/humaniformat"
    cran = "humaniformat"

    maintainers("jgaeb")

    # CRAN release history.
    version("0.6.0", sha256="861232c66bf6d4ff91b073193506104f4d99eca5e9a9488327f39ef2bfb45e6d")
    version("0.5.0", sha256="02b585e3623a5c5faa7dc3abff92b932d748900be39097c5db8434b8e92709a0")

    depends_on("r-rcpp", type=("build", "run"))

View File

@ -0,0 +1,54 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RSets(RPackage):
    """Sets, Generalized Sets, Customizable Sets and Intervals.

    Data structures and basic operations for ordinary sets, generalizations such
    as fuzzy sets, multisets, and fuzzy multisets, customizable sets, and
    intervals."""

    cran = "sets"

    maintainers("jgaeb")

    # CRAN release history.
    version("1.0-24", sha256="e75733f5c9418eb09fb950a4a94ccf84ddd88231c61ee80d02b7f0917debcac9")
    version("1.0-23", sha256="e5b6bc52060421c572d7f2d99b25909a38eacabd5344a47e1cdb2662c62d690b")
    version("1.0-22", sha256="6fbf9aa6b0113a58e04f803ab35593feabb0fb55d486d54afb59e027008f9ec6")
    version("1.0-21", sha256="5733f0be59189c058c069583f5c4dc1d772bfad5abbfd16081131414d6002ac0")
    version("1.0-20", sha256="234b724d40afcabc57eaf42dd34c6cb846e26803796e7fc80c00d26047c475d6")
    version("1.0-19", sha256="ae93e56bb0b4fd361349faf962772bb5eab965966e9c9bbf8bd4a2426a2e28a0")
    version("1.0-18", sha256="74d1e057e5b84197edb120665831d7b0565e2945e903be56c6701e724131679b")
    version("1.0-17", sha256="17817a386d725a458d95368795e4c31ef5dbc00439df24daf9bda996bfe767c5")
    version("1.0-16", sha256="5d36bc40937283112287d543f86a8fd470ce587420f5690f6a82f9ffa8e5805e")
    version("1.0-15", sha256="6f65ebfda2a94707b98cecdb1d3dcd0c0d1fd2f6a5c36eb128de7c2d5f7c1f8b")
    version("1.0-14", sha256="8fe81fc8d296484ffe9d796820dc259c0e6ab69d65d1f18564f89f3b9827cff1")
    version("1.0-13", sha256="5ffdc1a0e59c2a9e314b652ad72e0af5d138bad3e69190c6b56eca277c3c41fb")
    version("1.0-12", sha256="b1d1868bfba7c22e4bd726d534b1afbe593bde1f8e209ddb76613d1dd9c9954a")
    version("1.0-11", sha256="133d36b6fc3cb75097a829edbc15542f4817e2b6edf2a4d4830004a74308449f")
    version("1.0-10", sha256="5a631056ceb192ca35ecfc1cf10a0cf5a1671a3d5e50f942b0ac2e2098c909d0")
    version("1.0-9", sha256="748b254fedfe710bd295eb99168799c711f6a563b986b4f98e32f6ecc0c6de54")
    version("1.0-8", sha256="fa93e8e44b12cba33e9e1ca71e0d5ea84f3beb5656de9031c78e87ebfc2ee799")
    version("1.0-7", sha256="29f717a4b71fb2e72f3ce04f4cd703cc860c28c0f58a0e3b2adc1bcaa3b742ba")
    version("1.0", sha256="02b85933d9cd55e281c3a87889d827021394b51ba714e87359a36cbf60b50980")
    version("0.7", sha256="f450feaa2df5071c2029367edac867d7dbe435d202e5b1475e48827bc10bdf06")
    version("0.6", sha256="d682e5fe37d7fb2ded11ca702f9af2bf744cc56b3d5b310dba20dda2df6b1dc6")
    version("0.5", sha256="b4d8298e6f169d70b969ef6a49ed0583e2efecbdaa883c45142156bc97263149")
    version("0.4", sha256="f013228cbc3e63eb0a5b6ad8e217099482f275316f78f9bad380da6faa0defc6")
    version("0.3-2", sha256="c971cf712de2503f6605b883e36e1e1639ab8534d7c78ebc5a6a347e09771d37")
    version("0.3-1", sha256="0e6d4cd4eaf29edafbe4dbf4b71bfa3ada0f4ffb4a3becb144c023ef4478c944")
    version("0.3", sha256="a10c390d0571ab50a3c6e128d9157a9afc7c87b7befdc61cf86fa4cc9ef9c36d")
    version("0.2-1", sha256="f59ea29e7d87ba195909ab11102d14bb9b89c8fd76b25d273e79cd9227753aa2")
    version("0.2", sha256="987ff3ec1597a3d25d39ac62853443e43e77d3fdc450bed43f5ed72a30b142cb")
    version("0.1-3", sha256="e91d5b70ddd74ab50c944722b5dab2032cccd5cbfa740dd2b5a744ff0a89ce90")
    version("0.1-2", sha256="87fa3292eca69d358ea615c39240bb2151afc3a64f004b975f1918602ff9c694")
    version("0.1-1", sha256="4e41480757e33897a26974e5234801ff1c15f1a3952c96071787b43141a130de")
    version("0.1", sha256="18dda6c9d526a2f41f2b49a472fb27a7f1bb9ce6ea137b8963e8ad6c378825d0")

    # Minimum R version per upstream DESCRIPTION; the when= clauses select
    # the constraint matching the package version.
    depends_on("r@2.6:", type=("build", "run"), when="@0.1:")
    depends_on("r@2.7:", type=("build", "run"), when="@0.1-2:")

View File

@ -0,0 +1,33 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RTinytiger(RPackage):
    """Lightweight Interface to TIGER/Line Shapefiles.

    Download geographic shapes from the United States Census Bureau TIGER/Line
    Shapefiles
    <https://www.census.gov/geographies/mapping-files/time-series/geo/tiger-line-file.html>.
    Functions support downloading and reading in geographic boundary data. All
    downloads can be set up with a cache to avoid multiple downloads. Data is
    available back to 2000 for most geographies."""

    homepage = "https://alarm-redist.org/tinytiger/"
    cran = "tinytiger"

    maintainers("jgaeb")

    # CRAN release history.
    version("0.0.4", sha256="818328b5095d9e8b302f1a04d004cd3ec6e62d945dbd757fe15e9ab768a7459e")
    version("0.0.3", sha256="841d92dd4185b9bff5eef0d3635805c5a3efb1bc4ff0a1101ef264417e37921c")

    depends_on("r@2.0.0:", type=("build", "run"))
    depends_on("r@2.10:", type=("build", "run"), when="@0.0.4:")
    # R run-time dependencies must be declared type=("build", "run"): Spack's
    # default deptype is ("build", "link"), which would leave them unavailable
    # when the package is loaded. This matches the r@ constraints above and
    # the convention used by the other r-* packages.
    depends_on("r-rlang", type=("build", "run"))
    depends_on("r-cli", type=("build", "run"))
    depends_on("r-glue", type=("build", "run"))
    depends_on("r-curl", type=("build", "run"))
    depends_on("r-sf", type=("build", "run"))

View File

@ -195,6 +195,8 @@ def cmake_args(self):
from_variant("BUILD_SHARED_LIBS", "shared"),
from_variant("SEACASExodus_ENABLE_THREADSAFE", "thread_safe"),
from_variant("SEACASIoss_ENABLE_THREADSAFE", "thread_safe"),
# SEACASExodus_ENABLE_THREADSAFE=ON requires TPL_ENABLE_Pthread=ON
from_variant("TPL_ENABLE_Pthread", "thread_safe"),
from_variant("TPL_ENABLE_X11", "x11"),
from_variant(project_name_base + "_ENABLE_Fortran", "fortran"),
]

View File

@ -240,7 +240,7 @@ class Sirius(CMakePackage, CudaPackage, ROCmPackage):
depends_on("costa+shared", when="@7.3.2:")
with when("@7.5: +memory_pool"):
depends_on("umpire")
depends_on("umpire~cuda~rocm", when="~cuda~rocm")
depends_on("umpire+cuda~device_alloc", when="+cuda")
depends_on("umpire+rocm~device_alloc", when="+rocm")
@ -347,12 +347,6 @@ def cmake_args(self):
if "+rocm" in spec:
archs = ",".join(self.spec.variants["amdgpu_target"].value)
args.extend(
[
self.define("HIP_ROOT_DIR", spec["hip"].prefix),
self.define("HIP_HCC_FLAGS", "--amdgpu-target={0}".format(archs)),
self.define("HIP_CXX_COMPILER", self.spec["hip"].hipcc),
]
)
args.extend([self.define("CMAKE_HIP_ARCHITECTURES", archs)])
return args

View File

@ -29,6 +29,9 @@ class Spectre(CMakePackage):
generator("ninja")
version("develop", branch="develop")
version(
"2023.07.29", sha256="134668b81b8e89e3fd02b8b1415a1198889d7fb90f04ca6556458d3ce4489e43"
)
version(
"2023.06.19", sha256="f1140dfca1a9cf58f04acfe853c5597fa19c463d52b3643428e379496bff1236"
)

View File

@ -12,22 +12,22 @@ class SpiralPackageFftx(Package):
Transform as well as higher-level operations composed of linear operations
combined with DFT transforms."""
homepage = "https://spiral.net"
url = "https://github.com/spiral-software/spiral-package-fftx/archive/refs/tags/1.0.0.tar.gz"
homepage = "https://spiralgen.com"
url = "https://github.com/spiral-software/spiral-package-fftx/archive/refs/tags/1.2.2.tar.gz"
git = "https://github.com/spiral-software/spiral-package-fftx.git"
maintainers("spiralgen")
extends("spiral-software")
# Although this package 'extends("spiral-software")' don't declare it as
# such. If this package is required spiral-software should be installed
# with the +fftx variant active
version("develop", branch="develop")
version("main", branch="main")
version("1.2.2", sha256="18dacc3f974c4bd58295be2ea61f8ae0aada9a239f27b93d7806df564612cf22")
version("1.2.1", sha256="3f15aa5949c1b09eb59257cf1c5f6fcddc6e46f77ae9d5fce8acd8b9f99ce941")
version("1.1.1", sha256="99ec7fab9274d378524b4933917fae23f9590255518c7a124cb46bd5e8d9af37")
version("1.1.0", sha256="979d7e59fc39e7e5423bce64628cea467079667d75ce885febee7c42fa7164aa")
version("1.0.0", sha256="9ed352049fcaab31a1a898149d16438c95a1656a2d24df6dee14e3b61efacb5c")
# FFTX package is an extension for Spiral (spec: spiral-software). Spiral finds
# extensions in the "namespaces/packages" folder. Install the tree in a similarly
# named folder so that when activated it'll get symlinked to the correct place.
# FFTX package is an extension for Spiral (spec: spiral-software).
def install(self, spec, prefix):
spiral_pkgs = join_path(prefix, "namespaces", "packages", "fftx")

View File

@ -10,20 +10,19 @@ class SpiralPackageHcol(Package):
"""This is the SPIRAL package for the Hybrid Control Operator Language
(HCOL)."""
homepage = "https://spiral.net"
homepage = "https://spiralgen.com"
url = "https://github.com/spiral-software/spiral-package-hcol/archive/refs/tags/1.0.0.tar.gz"
git = "https://github.com/spiral-software/spiral-package-hcol.git"
maintainers("spiralgen")
extends("spiral-software")
# Although this package 'extends("spiral-software")' don't declare it as
# such. If this package is required spiral-software should be installed
# with the +hcol variant active
version("master", branch="master")
version("1.0.0", sha256="18ae6f0a090de03723612a6c91ca17cf62971129540936d8c2738bd8f807a511")
# HCOL package is an extension for Spiral (spec: spiral-software). Extensions
# packages for Spiral are intended to be installed in the spiral-software prefix,
# in the "namespaces/packages" folder. Install the tree in that folder under the
# name 'hcol'.
# HCOL package is an extension for Spiral (spec: spiral-software).
def install(self, spec, prefix):
spiral_pkgs = join_path(prefix, "namespaces", "packages", "hcol")

View File

@ -0,0 +1,31 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class SpiralPackageJit(Package):
    """This is the SPIRAL package for Just-In-Time (JIT) or Real-Time
    Compilation (RTC)."""

    homepage = "https://spiralgen.com"
    url = "https://github.com/spiral-software/spiral-package-jit/archive/refs/tags/1.0.2.tar.gz"
    git = "https://github.com/spiral-software/spiral-package-jit.git"

    maintainers("spiralgen")

    # Although this package logically extends spiral-software, it deliberately
    # does not declare extends("spiral-software"). If this package is required,
    # spiral-software should be installed with the +jit variant active.
    version("develop", branch="develop")
    version("main", branch="main")
    version("1.0.2", sha256="d7fac0493ac406a8b1874491223c3a9a1c6727ea1aa39de7ef4694c59aac9d26")
    version("1.0.1", sha256="acf22db04e705276f06642d7f2ebf161f6c347f93bb1bdd6e3ddcfc4b7be5707")

    # JIT package is an extension for Spiral (spec: spiral-software).
    def install(self, spec, prefix):
        """Copy the whole source tree into namespaces/packages/jit, where
        spiral-software discovers its extension packages."""
        spiral_pkgs = join_path(prefix, "namespaces", "packages", "jit")
        install_tree(".", spiral_pkgs)

View File

@ -9,20 +9,21 @@
class SpiralPackageMpi(Package):
"""This is the SPIRAL package for MPI."""
homepage = "https://spiral.net"
url = "https://github.com/spiral-software/spiral-package-mpi/archive/refs/tags/1.0.0.tar.gz"
homepage = "https://spiralgen.com"
url = "https://github.com/spiral-software/spiral-package-mpi/archive/refs/tags/1.1.0.tar.gz"
git = "https://github.com/spiral-software/spiral-package-mpi.git"
maintainers("spiralgen")
extends("spiral-software")
# Although this package 'extends("spiral-software")' don't declare it as
# such. If this package is required spiral-software should be installed
# with the +mpi variant active
version("develop", branch="develop")
version("main", branch="main")
version("1.1.0", sha256="baf3c9dac7fee330e4bb4adbd24cc7e55f27fc27417644c0b216124f9052f1f5")
version("1.0.0", sha256="64896a82aacce9cc8abe88b921e09ba7a5fceb8262e490f60a7088583c2c2151")
# MPI package is an extension for Spiral (spec: spiral-software). Spiral finds
# extensions in the "namespaces/packages" folder. Install the tree in a similarly
# named folder so that when activated it'll get symlinked to the correct place.
# MPI package is an extension for Spiral (spec: spiral-software).
def install(self, spec, prefix):
spiral_pkgs = join_path(prefix, "namespaces", "packages", "mpi")

View File

@ -10,21 +10,21 @@ class SpiralPackageSimt(Package):
"""This is the SPIRAL package for SIMT: SIMT, single instruction multiple
threads, is used to generate code for GPUs and multi-threading applications."""
homepage = "https://spiral.net"
url = "https://github.com/spiral-software/spiral-package-simt/archive/refs/tags/1.0.0.tar.gz"
homepage = "https://spiralgen.com"
url = "https://github.com/spiral-software/spiral-package-simt/archive/refs/tags/1.1.0.tar.gz"
git = "https://github.com/spiral-software/spiral-package-simt.git"
maintainers("spiralgen")
extends("spiral-software")
# Although this package 'extends("spiral-software")' don't declare it as
# such. If this package is required spiral-software should be installed
# with the +simt variant active
version("develop", branch="develop")
version("main", branch="main")
version("1.1.0", sha256="4d6a5e586889b9e000968c99f3068ba86a12cc389665c6deadc4734117ef7a95")
version("1.0.0", sha256="888ca01aa8fd5df80d6ae1bd64eb1b1e70240b6a36bc3437eb48f5a4b59c2d07")
# SIMT package is an extension for Spiral (spec: spiral-software). Spiral finds
# extensions in the "namespaces/packages" folder. Install the tree in a similarly
# named folder so that when activated it'll get symlinked to the correct place.
# SIMT package is an extension for Spiral (spec: spiral-software).
def install(self, spec, prefix):
spiral_pkgs = join_path(prefix, "namespaces", "packages", "simt")

View File

@ -11,31 +11,66 @@ class SpiralSoftware(CMakePackage):
mathematical functions that produces very high performance code for a wide
spectrum of hardware platforms."""
homepage = "https://spiral.net"
url = "https://github.com/spiral-software/spiral-software/archive/refs/tags/8.4.0.tar.gz"
homepage = "https://spiralgen.com"
url = "https://github.com/spiral-software/spiral-software/archive/refs/tags/8.5.0.tar.gz"
git = "https://github.com/spiral-software/spiral-software.git"
maintainers("spiralgen")
version("develop", branch="develop")
version("master", branch="master")
version("8.5.0", sha256="829345b8ca3ab0069a1a6e230f60ab03257060a8f05c021cee022e294eef592d")
version("8.4.0", sha256="d0c58de65c678130eeee6b8b8b48061bbe463468990f66d9b452225ce46dee19")
version("8.3.0", sha256="41cf0e7f14f9497e98353baa1ef4ca6204ce5ca525db8093f5bb44e89992abdf")
version("8.2.1", sha256="78d7bb1c22a5b2d216eac7b6ddedd20b601ba40227e64f743cbb54d4e5a7794d")
version("8.2.0", sha256="983f38d270ae2cb753c88cbce3f412e307c773807ad381acedeb9275afc0be32")
extendable = True
# No dependencies.
# No dependencies. Spiral packages are listed here as variants. If a
# variant (i.e., spiral-package) is enabled then spiral-software depends
# on the package, so dependencies may be added during the install process.
variant("fftx", default=False, description="Install Spiral package FFTX.")
variant(
"simt",
default=False,
description="Install Spiral package for Single Instruction, Multiple Threads"
" (SIMT) to generate code for GPUs.",
)
variant(
"mpi",
default=False,
description="Install Spiral package for Message Passing Interface (MPI).",
)
variant(
"jit",
default=False,
description="Install Spiral supporting Just-In-Time (aka RTC) Compilation.",
)
variant(
"hcol",
default=False,
description="Install Spiral package for the Hybrid Control Operator Language (HCOL).",
)
# Dependencies
for pkg in ["fftx", "simt", "mpi", "jit", "hcol"]:
depends_on(f"spiral-package-{pkg}", when=f"+{pkg}")
def build(self, spec, prefix):
    """Build SPIRAL inside the CMake build directory.

    NOTE(review): 'make install' is run here in build() rather than in the
    install phase -- presumably it stages files that install() then copies
    into the final prefix; confirm against the upstream build system.
    """
    with working_dir(self.build_directory):
        make("all")
        make("install")
def spiral_package_install(self, spec, prefix, pkg):
    """Copy an enabled spiral-package extension into this install prefix.

    The extension's namespaces/packages/<pkg> tree is copied to the same
    relative location under this package's prefix, where SPIRAL looks for
    its extension packages.
    """
    extension_prefix = spec["spiral-package-" + pkg].prefix
    relative_tree = join_path("namespaces", "packages", pkg)
    install_tree(
        join_path(extension_prefix, relative_tree),
        join_path(prefix, relative_tree),
    )
def install(self, spec, prefix):
with working_dir(self.stage.source_path):
files = ("LICENSE", "README.md", "ReleaseNotes.md")
files = ("LICENSE", "README.md", "ReleaseNotes.md", "Contributing.md")
for fil in files:
install(fil, prefix)
@ -61,6 +96,10 @@ def install(self, spec, prefix):
install_tree("grp", prefix.gap.grp)
install_tree("bin", prefix.gap.bin)
for pkg in ["fftx", "simt", "mpi", "jit", "hcol"]:
if f"+{pkg}" in spec:
self.spiral_package_install(spec, prefix, pkg)
def setup_dependent_build_environment(self, env, dependent_spec):
env.set("SPIRAL_HOME", self.prefix)

View File

@ -27,6 +27,7 @@ class Sundials(CMakePackage, CudaPackage, ROCmPackage):
# Versions
# ==========================================================================
version("develop", branch="develop")
version("6.6.1", sha256="5162b0e6d19d7b14078a21441e0795cf38d1d883973be35901e018f9bb30c543")
version("6.6.0", sha256="f90029b8da846c8faff5530fd1fa4847079188d040554f55c1d5d1e04743d29d")
version("6.5.1", sha256="4252303805171e4dbdd19a01e52c1dcfe0dafc599c3cfedb0a5c2ffb045a8a75")
version("6.5.0", sha256="4e0b998dff292a2617e179609b539b511eb80836f5faacf800e688a886288502")

View File

@ -1,34 +0,0 @@
From 8a622dd8204754733c769f0d13b685419e6607db Mon Sep 17 00:00:00 2001
From: Paul Kuberry <pakuber@sandia.gov>
Date: Wed, 12 Apr 2023 13:36:28 -0600
Subject: [PATCH] Remove use of OpenMPI specific struct member
MPITest/testBUG967 could not be built with MPICH because it used
OpenMPI specific struct member variable `_ucount`.
Replaced use of `_ucount` with call to MPI_Get_count, which is
compatible with OpenMPI and MPICH.
---
src/test/MPITest/testBUG967.c | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/src/test/MPITest/testBUG967.c b/src/test/MPITest/testBUG967.c
index cdf7f918f..26e1b5311 100644
--- a/src/test/MPITest/testBUG967.c
+++ b/src/test/MPITest/testBUG967.c
@@ -45,8 +45,10 @@ int main(int narg, char**arg)
for (i=0; i < nrecvs; i++){
MPI_Wait(req + i, &status);
procs_from[i] = status.MPI_SOURCE;
- printf("%d wait source %d count %lu \n",
- my_proc, status.MPI_SOURCE, status._ucount);
+ int count;
+ MPI_Get_count(&status, MPI_INT, &count);
+ printf("%d wait source %d count %d \n",
+ my_proc, status.MPI_SOURCE, count);
}
for (i = 0; i < nrecvs; i++)
--
2.37.1 (Apple Git-137.1)

View File

@ -25,6 +25,7 @@ class Xyce(CMakePackage):
maintainers("kuberry", "tbird2001")
version("master", branch="master")
version("7.7.0", sha256="1b95450e1905c3af3c16b42c41d5ef1f8ab0e640f48086d0cb4d52961a90a175")
version("7.6.0", sha256="fc25557e2edc82adbe0436a15fca2929a2f9ab08ddf91f1a47aab5e8b27ec88c")
version("7.5.0", sha256="854d7d5e19e0ee2138d1f20f10f8f27f2bebb94ec81c157040955cff7250dacd")
version("7.4.0", sha256="2d6bc1b7377834b2e0bf50131e96728c5be83dbb3548e765bb48911067c87c91")
@ -123,9 +124,9 @@ class Xyce(CMakePackage):
# fix MPI issue
patch(
"450-mpich-xyce.patch",
sha256="e91063d22afeeff01e6c572cef2ac2e3abea27b2fcb5a7e6ac5f41e4734a556d",
when="@:7.6,master",
"https://github.com/xyce/xyce/commit/2f95783637a5171a7f65f5d18c24d9a580a7f39e.patch?full_index=1",
sha256="1aeaac78830fbc9ae089a50ef61c6cbd89d29ead54ce7fdca258e194fa05b1a3",
when="@:7.6",
)
# fix RPATH issue on mac
@ -142,12 +143,11 @@ class Xyce(CMakePackage):
when="@7.4:7.6 +pymi",
)
# fix oneapi issue 7.6 and prior
# can switch to github PR once in master
# fix oneapi issue
patch(
"454-oneapi-xyce.patch",
sha256="76a3ff987e43d1657f24d55cfd864b487876a72a9a7c8a37c3151a9b586a21c1",
when="@:7.6",
when="%oneapi",
)
def cmake_args(self):