Compare commits
29 Commits: e4s-24.02 ... develop-20

SHA1
2b51980904
1865e228c4
179a1e423e
bd8de5bf2d
7c8c7eedca
8c1957c03e
803ad69eb1
29d784e5fa
58b2201710
02605d577b
42de252c12
6c3c06a571
6a4573ce5a
e77128dfa2
19df8e45ec
4c7a1f541c
295e36efa3
3f47cc8d00
4006020d78
6d4fa96aad
85def2bfc7
266bbad8cd
1e3b7a6df1
00fe864321
3df720e909
02a6ec7b3c
d3c1f7a872
84568b3454
2721b4c10d
.github/workflows/style/requirements.txt (vendored, 2 changes)
@@ -1,4 +1,4 @@
black==23.12.1
black==24.1.0
clingo==5.6.2
flake8==7.0.0
isort==5.13.2
@@ -244,7 +244,7 @@ def _search_duplicate_specs_in_externals(error_cls):
+ lines
+ ["as they might result in non-deterministic hashes"]
)
except TypeError:
except (TypeError, AttributeError):
details = []

errors.append(error_cls(summary=error_msg, details=details))

@@ -292,12 +292,6 @@ def _avoid_mismatched_variants(error_cls):
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")

def make_error(config_data, summary):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])

for pkg_name in packages_yaml:
# 'all:' must be more forgiving, since it is setting defaults for everything
if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]:

@@ -317,7 +311,7 @@ def make_error(config_data, summary):
f"Setting a preference for the '{pkg_name}' package to the "
f"non-existing variant '{variant.name}'"
)
errors.append(make_error(preferences, summary))
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))
continue

# Variant cannot accept this value

@@ -329,11 +323,41 @@ def make_error(config_data, summary):
f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
f"to the invalid value '{str(variant)}'"
)
errors.append(make_error(preferences, summary))
errors.append(_make_config_error(preferences, summary, error_cls=error_cls))

return errors


@config_packages
def _wrongly_named_spec(error_cls):
"""Warns if the wrong name is used for an external spec"""
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
for pkg_name in packages_yaml:
if pkg_name == "all":
continue

externals = packages_yaml[pkg_name].get("externals", [])
is_virtual = spack.repo.PATH.is_virtual(pkg_name)
for entry in externals:
spec = spack.spec.Spec(entry["spec"])
regular_pkg_is_wrong = not is_virtual and pkg_name != spec.name
virtual_pkg_is_wrong = is_virtual and not any(
p.name == spec.name for p in spack.repo.PATH.providers_for(pkg_name)
)
if regular_pkg_is_wrong or virtual_pkg_is_wrong:
summary = f"Wrong external spec detected for '{pkg_name}': {spec}"
errors.append(_make_config_error(entry, summary, error_cls=error_cls))
return errors


def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
syaml.dump_config(config_data, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])


#: Sanity checks on package directives
package_directives = AuditClass(
group="packages",
@@ -146,7 +146,7 @@ def mypy_root_spec() -> str:

def black_root_spec() -> str:
"""Return the root spec used to bootstrap black"""
return _root_spec("py-black@:23.1.0")
return _root_spec("py-black@:24.1.0")


def flake8_root_spec() -> str:
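The bootstrap pin above uses Spack's open-ended range syntax: "py-black@:24.1.0" means any py-black version up to and including 24.1.0. A rough standalone illustration of that inclusive containment check (real Spack versions are richer objects than tuples; the helper below is ours, not Spack's API):

    def in_range(version: tuple, low: tuple = None, high: tuple = None) -> bool:
        """True if `version` falls inside an inclusive, possibly open-ended range."""
        return (low is None or version >= low) and (high is None or version <= high)

    print(in_range((24, 1, 0), high=(24, 1, 0)))  # True  -> accepted by "@:24.1.0"
    print(in_range((24, 2, 0), high=(24, 1, 0)))  # False -> too new for the pin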
@@ -199,6 +199,8 @@ def initconfig_mpi_entries(self):
mpiexec = "/usr/bin/srun"
else:
mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
elif hasattr(spec["mpi"].package, "mpiexec"):
mpiexec = spec["mpi"].package.mpiexec
else:
mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
if not os.path.exists(mpiexec):
@@ -58,6 +58,44 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
)


def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[str]) -> None:
"""Set a few default defines for CMake, depending on its version."""
cmakes = pkg.spec.dependencies("cmake", dt.BUILD)

if len(cmakes) != 1:
return

cmake = cmakes[0]

# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
try:
ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False

if cmake.satisfies("@3.9:"):
args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

# Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
# find_package may search there. This is not what we want.

# Do not populate CMake User Package Registry
if cmake.satisfies("@3.15:"):
# see https://cmake.org/cmake/help/latest/policy/CMP0090.html
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
elif cmake.satisfies("@3.1:"):
# see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))

# Do not use CMake User/System Package Registry
# https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
if cmake.satisfies("@3.16:"):
args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
elif cmake.satisfies("@3.1:3.15"):
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))


def generator(*names: str, default: Optional[str] = None):
"""The build system generator to use.

@@ -293,11 +331,6 @@ def std_args(pkg, generator=None):
except KeyError:
build_type = "RelWithDebInfo"

try:
ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False

define = CMakeBuilder.define
args = [
"-G",

@@ -306,10 +339,6 @@ def std_args(pkg, generator=None):
define("CMAKE_BUILD_TYPE", build_type),
]

# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
if pkg.spec.satisfies("^cmake@3.9:"):
args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

if primary_generator == "Unix Makefiles":
args.append(define("CMAKE_VERBOSE_MAKEFILE", True))

@@ -318,6 +347,7 @@ def std_args(pkg, generator=None):
[define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
)

_conditional_cmake_defaults(pkg, args)
_maybe_set_python_hints(pkg, args)

# Set up CMake rpath
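The new _conditional_cmake_defaults above gates each define on the version of the CMake that is actually in the build DAG. A minimal standalone sketch of that version-gating idea, using the IPO flag only (plain tuples instead of Spack specs; the helper name is ours):

    def ipo_define(cmake_version: tuple, ipo_enabled: bool) -> list:
        """Only emit CMAKE_INTERPROCEDURAL_OPTIMIZATION when CMake understands it (>= 3.9)."""
        if cmake_version >= (3, 9):
            return [f"-DCMAKE_INTERPROCEDURAL_OPTIMIZATION={'ON' if ipo_enabled else 'OFF'}"]
        return []  # older CMake has no IPO support via this variable, so do not pass it

    print(ipo_define((3, 24, 0), True))  # ['-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=ON']
    print(ipo_define((3, 5, 0), True))   # []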
@@ -218,7 +218,7 @@ def pset_components(self):
"+inspector": " intel-inspector",
"+itac": " intel-itac intel-ta intel-tc" " intel-trace-analyzer intel-trace-collector",
# Trace Analyzer and Collector
"+vtune": " intel-vtune"
"+vtune": " intel-vtune",
# VTune, ..-profiler since 2020, ..-amplifier before
}.items():
if variant in self.spec:
@@ -35,9 +35,9 @@ def _misc_cache():


#: Spack's cache for small data
MISC_CACHE: Union[
spack.util.file_cache.FileCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_misc_cache)
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_misc_cache)
)


def fetch_cache_location():

@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):


#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[
spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_fetch_cache)
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_fetch_cache)
)
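Much of the Python churn in this compare looks like mechanical reformatting that follows the black 24.1.0 pin above: long annotated assignments now keep the annotation on one line and parenthesize the right-hand side instead of splitting the subscript. A self-contained sketch of the shape of that change (plain classes, no Spack imports):

    from typing import Callable, Union

    class FileCache:
        pass

    class Singleton:
        def __init__(self, factory: Callable):
            self.factory = factory

    def _misc_cache() -> FileCache:
        return FileCache()

    # old wrapping:
    # MISC_CACHE: Union[
    #     FileCache, Singleton
    # ] = Singleton(_misc_cache)

    # new wrapping:
    MISC_CACHE: Union[FileCache, Singleton] = (
        Singleton(_misc_cache)
    )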
@@ -7,9 +7,7 @@
get_job_name = lambda needs_entry: (
needs_entry.get("job")
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
else needs_entry
if isinstance(needs_entry, str)
else None
else needs_entry if isinstance(needs_entry, str) else None
)
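The needs_entry lambda above only changes shape (the chained conditional expression is collapsed onto one line); its behaviour is unchanged. A self-contained check of what it returns (the sample inputs are ours):

    import collections.abc

    get_job_name = lambda needs_entry: (
        needs_entry.get("job")
        if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
        else needs_entry if isinstance(needs_entry, str) else None
    )

    print(get_job_name({"job": "build-stage", "artifacts": True}))  # build-stage
    print(get_job_name("generate"))                                 # generate
    print(get_job_name(42))                                         # None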
@@ -292,9 +292,11 @@ def head(n, span_id, title, anchor=None):
out.write("<dd>\n")
out.write(
", ".join(
d
if d not in pkg_names
else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
(
d
if d not in pkg_names
else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
)
for d in deps
)
)
@@ -826,7 +826,6 @@ def __init__(self, spec):


class InsufficientArchitectureInfoError(spack.error.SpackError):

"""Raised when details on architecture cannot be collected from the
system"""
@@ -697,7 +697,6 @@ def __str__(self):

@fetcher
class GitFetchStrategy(VCSFetchStrategy):

"""
Fetch strategy that gets source code from a git repository.
Use like this in a package:

@@ -1089,7 +1088,6 @@ def __str__(self):

@fetcher
class SvnFetchStrategy(VCSFetchStrategy):

"""Fetch strategy that gets source code from a subversion repository.
Use like this in a package:

@@ -1184,7 +1182,6 @@ def __str__(self):

@fetcher
class HgFetchStrategy(VCSFetchStrategy):

"""
Fetch strategy that gets source code from a Mercurial repository.
Use like this in a package:
@@ -91,9 +91,9 @@ def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
prefix_to_projection[spack.store.STORE.layout.root] = view._root

# This is vestigial code for the *old* location of sbang.
prefix_to_projection[
"#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)
] = sbang.sbang_shebang_line()
prefix_to_projection["#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)] = (
sbang.sbang_shebang_line()
)

spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
@@ -1768,15 +1768,12 @@ def external_packages(self):
for local_idx, spec in enumerate(external_specs):
msg = "%s available as external when satisfying %s" % (spec.name, spec)

def external_imposition(input_spec, _):
return [fn.attr("external_conditions_hold", input_spec.name, local_idx)]
def external_imposition(input_spec, requirements):
return requirements + [
fn.attr("external_conditions_hold", input_spec.name, local_idx)
]

self.condition(
spec,
spack.spec.Spec(spec.name),
msg=msg,
transform_imposed=external_imposition,
)
self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
self.possible_versions[spec.name].add(spec.version)
self.gen.newline()
@@ -1501,7 +1501,9 @@ def edge_attributes(self) -> str:
result = f"{deptypes_str} {virtuals_str}".strip()
return f"[{result}]"

def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
def dependencies(
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
) -> List["Spec"]:
"""Return a list of direct dependencies (nodes in the DAG).

Args:

@@ -1512,7 +1514,9 @@ def dependencies(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.A
deptype = dt.canonicalize(deptype)
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]

def dependents(self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL):
def dependents(
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
) -> List["Spec"]:
"""Return a list of direct dependents (nodes in the DAG).

Args:

@@ -1636,23 +1640,23 @@ def _add_dependency(self, spec: "Spec", *, depflag: dt.DepFlag, virtuals: Tuple[
self.add_dependency_edge(spec, depflag=depflag, virtuals=virtuals)
return

# Keep the intersection of constraints when a dependency is added
# multiple times. Currently, we only allow identical edge types.
# Keep the intersection of constraints when a dependency is added multiple times.
# The only restriction, currently, is keeping the same dependency type
orig = self._dependencies[spec.name]
try:
dspec = next(dspec for dspec in orig if depflag == dspec.depflag)
except StopIteration:
current_deps = ", ".join(
dt.flag_to_chars(x.depflag) + " " + x.spec.short_spec for x in orig
)
edge_attrs = f"deptypes={dt.flag_to_chars(depflag).strip()}"
required_dep_str = f"^[{edge_attrs}] {str(spec)}"

raise DuplicateDependencyError(
f"{self.short_spec} cannot depend on '{spec.short_spec}' multiple times.\n"
f"\tRequired: {dt.flag_to_chars(depflag)}\n"
f"\tDependency: {current_deps}"
f"{spec.name} is a duplicate dependency, with conflicting dependency types\n"
f"\t'{str(self)}' cannot depend on '{required_dep_str}'"
)

try:
dspec.spec.constrain(spec)
dspec.update_virtuals(virtuals=virtuals)
except spack.error.UnsatisfiableSpecError:
raise DuplicateDependencyError(
f"Cannot depend on incompatible specs '{dspec.spec}' and '{spec}'"
@@ -199,9 +199,11 @@ def get_stage_root():
def _mirror_roots():
mirrors = spack.config.get("mirrors")
return [
sup.substitute_path_variables(root)
if root.endswith(os.sep)
else sup.substitute_path_variables(root) + os.sep
(
sup.substitute_path_variables(root)
if root.endswith(os.sep)
else sup.substitute_path_variables(root) + os.sep
)
for root in mirrors.values()
]
@@ -98,13 +98,9 @@ def test_url_list(mock_packages):
def test_url_summary(mock_packages):
"""Test the URL summary command."""
# test url_summary, the internal function that does the work
(
total_urls,
correct_names,
correct_versions,
name_count_dict,
version_count_dict,
) = url_summary(None)
(total_urls, correct_names, correct_versions, name_count_dict, version_count_dict) = (
url_summary(None)
)

assert 0 < correct_names <= sum(name_count_dict.values()) <= total_urls
assert 0 < correct_versions <= sum(version_count_dict.values()) <= total_urls
@@ -103,9 +103,9 @@ def hello_world_with_module_in_root(extension_creator):

@contextlib.contextmanager
def _hwwmir(extension_name=None):
with extension_creator(
extension_name
) if extension_name else extension_creator() as extension:
with (
extension_creator(extension_name) if extension_name else extension_creator()
) as extension:
# Note that the namespace of the extension is derived from the
# fixture.
extension.add_command(
@@ -422,7 +422,7 @@ def test_xl_version_detection(version_str, expected_version):
("pgi", "19.1"),
("pgi", "19.1a"),
("intel", "9.0.0"),
("intel", "0.0.0-foobar")
("intel", "0.0.0-foobar"),
# ('oneapi', '2021.1'),
# ('oneapi', '2021.1-foobar')
],
@@ -1522,6 +1522,30 @@ def test_sticky_variant_in_package(self):
s = Spec("sticky-variant %clang").concretized()
assert s.satisfies("%clang") and s.satisfies("~allow-gcc")

@pytest.mark.regression("42172")
@pytest.mark.only_clingo("Original concretizer cannot use sticky variants")
@pytest.mark.parametrize(
"spec,allow_gcc",
[
("sticky-variant@1.0+allow-gcc", True),
("sticky-variant@1.0~allow-gcc", False),
("sticky-variant@1.0", False),
],
)
def test_sticky_variant_in_external(self, spec, allow_gcc):
# setup external for sticky-variant+allow-gcc
config = {"externals": [{"spec": spec, "prefix": "/fake/path"}], "buildable": False}
spack.config.set("packages:sticky-variant", config)

maybe = llnl.util.lang.nullcontext if allow_gcc else pytest.raises
with maybe(spack.error.SpackError):
s = Spec("sticky-variant-dependent%gcc").concretized()

if allow_gcc:
assert s.satisfies("%gcc")
assert s["sticky-variant"].satisfies("+allow-gcc")
assert s["sticky-variant"].external

@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_do_not_invent_new_concrete_versions_unless_necessary(self):
# ensure we select a known satisfying version rather than creating
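The new external test above leans on a small idiom: pick the context manager up front, so a single `with` block either expects a SpackError or expects none at all. The same pattern with standard-library pieces only (the test function below is ours, written as a generic illustration):

    import contextlib
    import pytest

    @pytest.mark.parametrize("value,should_fail", [(1, False), (-1, True)])
    def test_positive_only(value, should_fail):
        # nullcontext accepts and ignores the argument, so both branches share one call shape
        maybe = pytest.raises if should_fail else contextlib.nullcontext
        with maybe(ValueError):
            if value < 0:
                raise ValueError("negative input")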
@@ -60,13 +60,9 @@ def test_spec_installed_upstream(
upstream_and_downstream_db, mock_custom_repository, config, monkeypatch
):
"""Test whether Spec.installed_upstream() works."""
(
upstream_write_db,
upstream_db,
upstream_layout,
downstream_db,
downstream_layout,
) = upstream_and_downstream_db
(upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
upstream_and_downstream_db
)

# a known installed spec should say that it's installed
with spack.repo.use_repositories(mock_custom_repository):

@@ -90,13 +86,9 @@ def test_spec_installed_upstream(

@pytest.mark.usefixtures("config")
def test_installed_upstream(upstream_and_downstream_db, tmpdir):
(
upstream_write_db,
upstream_db,
upstream_layout,
downstream_db,
downstream_layout,
) = upstream_and_downstream_db
(upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
upstream_and_downstream_db
)

builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
builder.add_package("x")

@@ -132,13 +124,9 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):

@pytest.mark.usefixtures("config")
def test_removed_upstream_dep(upstream_and_downstream_db, tmpdir):
(
upstream_write_db,
upstream_db,
upstream_layout,
downstream_db,
downstream_layout,
) = upstream_and_downstream_db
(upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
upstream_and_downstream_db
)

builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
builder.add_package("z")

@@ -168,13 +156,9 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db, tmpdir):
DB. When a package is recorded as installed in both, the results should
refer to the downstream DB.
"""
(
upstream_write_db,
upstream_db,
upstream_layout,
downstream_db,
downstream_layout,
) = upstream_and_downstream_db
(upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout) = (
upstream_and_downstream_db
)

builder = spack.repo.MockRepositoryBuilder(tmpdir.mkdir("mock.repo"))
builder.add_package("x")
@@ -523,6 +523,23 @@ def _specfile_for(spec_str, filename):
],
"^[virtuals=mpi] openmpi",
),
# Allow merging attributes, if deptypes match
(
"^[virtuals=mpi] openmpi+foo ^[virtuals=lapack] openmpi+bar",
[
Token(TokenType.START_EDGE_PROPERTIES, value="^["),
Token(TokenType.KEY_VALUE_PAIR, value="virtuals=mpi"),
Token(TokenType.END_EDGE_PROPERTIES, value="]"),
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
Token(TokenType.BOOL_VARIANT, value="+foo"),
Token(TokenType.START_EDGE_PROPERTIES, value="^["),
Token(TokenType.KEY_VALUE_PAIR, value="virtuals=lapack"),
Token(TokenType.END_EDGE_PROPERTIES, value="]"),
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, value="openmpi"),
Token(TokenType.BOOL_VARIANT, value="+bar"),
],
"^[virtuals=lapack,mpi] openmpi+bar+foo",
),
(
"^[deptypes=link,build] zlib",
[
@@ -444,12 +444,9 @@ def test_composite_stage_with_noexpand_resource(

@pytest.mark.disable_clean_stage_check
def test_composite_stage_with_expand_resource(self, composite_stage_with_expanding_resource):
(
composite_stage,
root_stage,
resource_stage,
mock_resource,
) = composite_stage_with_expanding_resource
(composite_stage, root_stage, resource_stage, mock_resource) = (
composite_stage_with_expanding_resource
)

composite_stage.create()
composite_stage.fetch()

@@ -474,12 +471,9 @@ def test_composite_stage_with_expand_resource_default_placement(
directory.
"""

(
composite_stage,
root_stage,
resource_stage,
mock_resource,
) = composite_stage_with_expanding_resource
(composite_stage, root_stage, resource_stage, mock_resource) = (
composite_stage_with_expanding_resource
)

resource_stage.resource.placement = None
@@ -50,8 +50,8 @@ def test_gzip_compressed_tarball_is_reproducible(tmpdir):

# Expected mode for non-dirs is 644 if not executable, 755 if executable. Better to compute
# that as we don't know the umask of the user running the test.
expected_mode = (
lambda name: 0o755 if Path(*name.split("/")).lstat().st_mode & 0o100 else 0o644
expected_mode = lambda name: (
0o755 if Path(*name.split("/")).lstat().st_mode & 0o100 else 0o644
)

# Verify the tarball contents
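expected_mode above is also only re-wrapped; the logic (derive the archive entry's expected mode from the executable bit of the file on disk) can be exercised standalone, assuming a scratch file of our own:

    import os
    import stat
    import tempfile
    from pathlib import Path

    # stat.S_IXUSR is the same 0o100 owner-execute bit used in the test
    expected_mode = lambda path: 0o755 if Path(path).lstat().st_mode & stat.S_IXUSR else 0o644

    with tempfile.NamedTemporaryFile(delete=False) as f:
        name = f.name
    os.chmod(name, 0o644)
    print(oct(expected_mode(name)))  # 0o644
    os.chmod(name, 0o755)
    print(oct(expected_mode(name)))  # 0o755
    os.unlink(name)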
@@ -91,8 +91,6 @@ default:
SPACK_PIPELINE_TYPE: "spack_protected_branch"
SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}"
SPACK_REQUIRE_SIGNING: "True"
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
OIDC_TOKEN_AUDIENCE: "protected_binary_mirror"
- if: $CI_COMMIT_REF_NAME =~ /^releases\/v.*/
# Pipelines on release branches always rebuild everything

@@ -103,8 +101,6 @@ default:
SPACK_PRUNE_UNTOUCHED: "False"
SPACK_PRUNE_UP_TO_DATE: "False"
SPACK_REQUIRE_SIGNING: "True"
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
OIDC_TOKEN_AUDIENCE: "protected_binary_mirror"
- if: $CI_COMMIT_TAG =~ /^develop-[\d]{4}-[\d]{2}-[\d]{2}$/ || $CI_COMMIT_TAG =~ /^v.*/
# Pipelines on tags (release or dev snapshots) only copy binaries from one mirror to another

@@ -113,8 +109,6 @@ default:
SPACK_PIPELINE_TYPE: "spack_copy_only"
SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}"
PIPELINE_MIRROR_TEMPLATE: "copy-only-protected-mirrors.yaml.in"
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
OIDC_TOKEN_AUDIENCE: "protected_binary_mirror"
- if: $CI_COMMIT_REF_NAME =~ /^pr[\d]+_.*$/
# Pipelines on PR branches rebuild only what's missing, and do extra pruning

@@ -131,8 +125,6 @@ default:
# TODO: far gitlab doesn't support that.
PR_TARGET_REF_NAME: "develop"
PIPELINE_MIRROR_TEMPLATE: "multi-src-mirrors.yaml.in"
AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
OIDC_TOKEN_AUDIENCE: "pr_binary_mirror"

.generate-common:

@@ -154,6 +146,12 @@ default:
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
${CI_STACK_CONFIG_SCOPES}
compiler find
- spack
--config-scope "${SPACK_CI_CONFIG_ROOT}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
${CI_STACK_CONFIG_SCOPES}
audit configs
- spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))"
< "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
- spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"

@@ -266,8 +264,6 @@ protected-publish:
variables:
SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}"
SPACK_PIPELINE_TYPE: "spack_protected_branch"
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
KUBERNETES_CPU_REQUEST: 4000m
KUBERNETES_MEMORY_REQUEST: 16G
script:

@@ -353,7 +349,7 @@ e4s-build:

e4s-neoverse-v2-generate:
extends: [ ".e4s-neoverse-v2", ".generate-neoverse-v2" ]
image: ecpe4s/ubuntu22.04-runner-arm64-gcc-11.4:2024.01.01
image: ghcr.io/spack/ubuntu22.04-runner-arm64-gcc-11.4:2024.01.01

e4s-neoverse-v2-build:
extends: [ ".e4s-neoverse-v2", ".build" ]

@@ -445,7 +441,7 @@ e4s-rocm-external-build:

e4s-oneapi-generate:
extends: [ ".e4s-oneapi", ".generate-x86_64"]
image: ecpe4s/ubuntu22.04-runner-amd64-oneapi-2024.0.0:2024.01.16b
image: ghcr.io/spack/ubuntu22.04-runner-amd64-oneapi-2024.0.0:2024.01.16b

e4s-oneapi-build:
extends: [ ".e4s-oneapi", ".build" ]
@@ -21,7 +21,7 @@ spack:
version:
- 1.8.20
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:

@@ -21,7 +21,7 @@ spack:
version:
- 1.8.20
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:

@@ -31,7 +31,7 @@ spack:
cuda:
version: [11.7.0]
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
require: "%gcc"
gcc-runtime:
require: "%gcc"

@@ -29,7 +29,7 @@ spack:
cuda:
version: [11.7.0]
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
require: "%gcc"
hdf5:
variants: +fortran +hl +shared

@@ -15,7 +15,7 @@ spack:
binutils:
variants: +ld +gold +headers +libiberty ~nls
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:

@@ -199,6 +199,7 @@ spack:
- chai ~benchmarks ~tests +cuda cuda_arch=75 ^umpire ~shared
- flecsi +cuda cuda_arch=75
- ginkgo +cuda cuda_arch=75
- gromacs +cuda cuda_arch=75
- heffte +cuda cuda_arch=75
- hpx +cuda cuda_arch=75
- hypre +cuda cuda_arch=75

@@ -244,6 +245,7 @@ spack:
- chai ~benchmarks ~tests +cuda cuda_arch=80 ^umpire ~shared
- flecsi +cuda cuda_arch=80
- ginkgo +cuda cuda_arch=80
- gromacs +cuda cuda_arch=80
- heffte +cuda cuda_arch=80
- hpx +cuda cuda_arch=80
- hypre +cuda cuda_arch=80

@@ -289,6 +291,7 @@ spack:
- chai ~benchmarks ~tests +cuda cuda_arch=90 ^umpire ~shared
- flecsi +cuda cuda_arch=90
- ginkgo +cuda cuda_arch=90
- gromacs +cuda cuda_arch=90
- heffte +cuda cuda_arch=90
- hpx +cuda cuda_arch=90
- kokkos +wrapper +cuda cuda_arch=90

@@ -329,7 +332,7 @@ spack:
ci:
pipeline-gen:
- build-job:
image: "ecpe4s/ubuntu22.04-runner-arm64-gcc-11.4:2024.01.01"
image: "ghcr.io/spack/ubuntu22.04-runner-arm64-gcc-11.4:2024.01.01"

cdash:
build-group: E4S ARM Neoverse V2

@@ -15,7 +15,7 @@ spack:
binutils:
variants: +ld +gold +headers +libiberty ~nls
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:

@@ -197,6 +197,7 @@ spack:
- chai ~benchmarks ~tests +cuda cuda_arch=75 ^umpire ~shared
- flecsi +cuda cuda_arch=75
- ginkgo +cuda cuda_arch=75
- gromacs +cuda cuda_arch=75
- heffte +cuda cuda_arch=75
- hpx +cuda cuda_arch=75
- hypre +cuda cuda_arch=75

@@ -242,6 +243,7 @@ spack:
- chai ~benchmarks ~tests +cuda cuda_arch=80 ^umpire ~shared
- flecsi +cuda cuda_arch=80
- ginkgo +cuda cuda_arch=80
- gromacs +cuda cuda_arch=80
- heffte +cuda cuda_arch=80
- hpx +cuda cuda_arch=80
- hypre +cuda cuda_arch=80

@@ -287,6 +289,7 @@ spack:
- chai ~benchmarks ~tests +cuda cuda_arch=90 ^umpire ~shared
- flecsi +cuda cuda_arch=90
- ginkgo +cuda cuda_arch=90
- gromacs +cuda cuda_arch=90
- heffte +cuda cuda_arch=90
- hpx +cuda cuda_arch=90
- kokkos +wrapper +cuda cuda_arch=90

@@ -17,7 +17,7 @@ spack:
tbb: [intel-tbb]
variants: +mpi
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
gcc-runtime:
require: "%gcc"
hdf5:

@@ -98,6 +98,7 @@ spack:
- gmp
- gotcha
- gptune ~mpispawn
- gromacs
- h5bench
- hdf5-vol-async
- hdf5-vol-cache

@@ -221,7 +222,7 @@ spack:
ci:
pipeline-gen:
- build-job:
image: ecpe4s/ubuntu22.04-runner-amd64-oneapi-2024.0.0:2024.01.16b
image: ghcr.io/spack/ubuntu22.04-runner-amd64-oneapi-2024.0.0:2024.01.16b

cdash:
build-group: E4S OneAPI

@@ -16,7 +16,7 @@ spack:
binutils:
variants: +ld +gold +headers +libiberty ~nls
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:

@@ -202,6 +202,7 @@ spack:
- exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=70 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=70 #^raja@0.14.0
- flecsi +cuda cuda_arch=70
- ginkgo +cuda cuda_arch=70
- gromacs +cuda cuda_arch=70
- heffte +cuda cuda_arch=70
- hpx +cuda cuda_arch=70
- hypre +cuda cuda_arch=70

@@ -15,7 +15,7 @@ spack:
binutils:
variants: +ld +gold +headers +libiberty ~nls
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:

@@ -91,7 +91,7 @@ spack:
miopen-hip:
buildable: false
externals:
- spec: hip-rocclr@5.4.3
- spec: miopen-hip@5.4.3
prefix: /opt/rocm-5.4.3/
miopengemm:
buildable: false

@@ -15,7 +15,7 @@ spack:
binutils:
variants: +ld +gold +headers +libiberty ~nls
elfutils:
variants: +bzip2 ~nls +xz
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:

@@ -211,6 +211,7 @@ spack:
- exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=80 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=80 #^raja@0.14.0
- flecsi +cuda cuda_arch=80
- ginkgo +cuda cuda_arch=80
- gromacs +cuda cuda_arch=80
- heffte +cuda cuda_arch=80
- hpx +cuda cuda_arch=80
- hypre +cuda cuda_arch=80

@@ -254,6 +255,7 @@ spack:
- cusz +cuda cuda_arch=90
- flecsi +cuda cuda_arch=90
- ginkgo +cuda cuda_arch=90
- gromacs +cuda cuda_arch=90
- heffte +cuda cuda_arch=90
- hpx +cuda cuda_arch=90
- kokkos +wrapper +cuda cuda_arch=90
@@ -0,0 +1,17 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *


class StickyVariantDependent(AutotoolsPackage):
"""Package with a sticky variant and a conflict"""

homepage = "http://www.example.com"
url = "http://www.example.com/a-1.0.tar.gz"

version("1.0", md5="0123456789abcdef0123456789abcdef")

depends_on("sticky-variant")
conflicts("%gcc", when="^sticky-variant~allow-gcc")
@@ -144,9 +144,11 @@ def test_run_ctest(self):
f"-DCMAKE_CXX_COMPILER={os.environ['CXX']}",
self.define(
"Kokkos_ROOT",
self.spec["kokkos"].prefix
if "~trilinos" in self.spec
else self.spec["trilinos"].prefix,
(
self.spec["kokkos"].prefix
if "~trilinos" in self.spec
else self.spec["trilinos"].prefix
),
),
]
cmake = which(self.spec["cmake"].prefix.bin.cmake)
@@ -8,6 +8,7 @@
from os.path import join as pjoin

from spack.package import *
from spack.util.executable import which_string


def get_spec_path(spec, package_name, path_replacements={}, use_bin=False):

@@ -59,6 +60,7 @@ class Axom(CachedCMakePackage, CudaPackage, ROCmPackage):
patch("examples-oneapi.patch", when="@0.6.1 +examples %oneapi")

patch("scr_examples_gtest.patch", when="@0.6.0:0.6.1")
patch("umpire_camp_blt_targets.patch", when="@=0.8.0 ^umpire@2023.06.0")

root_cmakelists_dir = "src"

@@ -96,10 +98,11 @@ class Axom(CachedCMakePackage, CudaPackage, ROCmPackage):
# -----------------------------------------------------------------------
# Basics
depends_on("cmake@3.14:", type="build")
depends_on("cmake@3.18:", type="build", when="@0.7.0:")
depends_on("cmake@3.21:", type="build", when="+rocm")

depends_on("blt", type="build")
depends_on("blt@0.5.1:", type="build", when="@0.6.2:")
depends_on("blt@0.5.1:", type="build", when="@0.6.1:")

depends_on("mpi", when="+mpi")

@@ -141,8 +144,12 @@ class Axom(CachedCMakePackage, CudaPackage, ROCmPackage):
depends_on(raja_rocm, when="+{0}".format(raja_rocm))
depends_on(umpire_rocm, when="+{0}".format(umpire_rocm))

depends_on("mfem", when="+mfem")
depends_on("mfem~mpi", when="+mfem~mpi")
depends_on("rocprim", when="+rocm")

with when("+mfem"):
depends_on("mfem+mpi", when="+mpi")
depends_on("mfem~mpi", when="~mpi")
depends_on("mfem@4.5.0:", when="@0.7.0:")

depends_on("python", when="+python")

@@ -153,6 +160,7 @@ class Axom(CachedCMakePackage, CudaPackage, ROCmPackage):
depends_on("python", when="+devtools")
depends_on("py-sphinx", when="+devtools")
depends_on("py-shroud", when="+devtools")
depends_on("py-jsonschema", when="+devtools")
depends_on("llvm+clang@10.0.0", when="+devtools", type="build")

# Hard requirement after Axom 0.6.1

@@ -221,7 +229,10 @@ def initconfig_compiler_entries(self):
flags = ""
for _libpath in [libdir, libdir + "64"]:
if os.path.exists(_libpath):
flags += " -Wl,-rpath,{0}".format(_libpath)
if spec.satisfies("^cuda"):
flags += " -Xlinker -rpath -Xlinker {0}".format(_libpath)
else:
flags += " -Wl,-rpath,{0}".format(_libpath)
description = "Adds a missing libstdc++ rpath"
if flags:
entries.append(cmake_cache_string("BLT_EXE_LINKER_FLAGS", flags, description))

@@ -231,6 +242,12 @@ def initconfig_compiler_entries(self):
if "+cpp14" in spec and spec.satisfies("@:0.6.1"):
entries.append(cmake_cache_string("BLT_CXX_STD", "c++14", ""))

# Add optimization flag workaround for Debug builds with
# cray compiler or newer HIP
if "+rocm" in spec:
if spec.satisfies("%cce") or spec.satisfies("%clang@16"):
entries.append(cmake_cache_string("CMAKE_CXX_FLAGS_DEBUG", "-O1 -g -DNDEBUG"))

return entries

def initconfig_hardware_entries(self):

@@ -239,21 +256,24 @@ def initconfig_hardware_entries(self):

if "+cuda" in spec:
entries.append(cmake_cache_option("ENABLE_CUDA", True))
entries.append(cmake_cache_option("CUDA_SEPARABLE_COMPILATION", True))
entries.append(cmake_cache_option("CMAKE_CUDA_SEPARABLE_COMPILATION", True))

entries.append(cmake_cache_option("AXOM_ENABLE_ANNOTATIONS", True))

# CUDA_FLAGS
cudaflags = "-restrict --expt-extended-lambda "

# Pass through any cxxflags to the host compiler via nvcc's Xcompiler flag
host_cxx_flags = spec.compiler_flags["cxxflags"]
cudaflags += " ".join(["-Xcompiler=%s " % flag for flag in host_cxx_flags])

if not spec.satisfies("cuda_arch=none"):
cuda_arch = spec.variants["cuda_arch"].value[0]
entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", cuda_arch))
cudaflags += "-arch sm_${CMAKE_CUDA_ARCHITECTURES} "
else:
entries.append("# cuda_arch could not be determined\n\n")

if spec.satisfies("^blt@:0.6.1"):
if spec.satisfies("^blt@:0.5.1"):
# This is handled internally by BLT now
if "+cpp14" in spec:
cudaflags += " -std=c++14"

@@ -266,7 +286,7 @@ def initconfig_hardware_entries(self):

if "+rocm" in spec:
entries.append("#------------------{0}\n".format("-" * 60))
entries.append("# HIP\n")
entries.append("# Axom ROCm specifics\n")
entries.append("#------------------{0}\n\n".format("-" * 60))

entries.append(cmake_cache_option("ENABLE_HIP", True))

@@ -274,13 +294,20 @@ def initconfig_hardware_entries(self):
hip_root = spec["hip"].prefix
rocm_root = hip_root + "/.."

entries.append(cmake_cache_string("HIP_ROOT_DIR", hip_root))
entries.append(cmake_cache_string("HIP_CLANG_PATH", rocm_root + "/llvm/bin"))

archs = self.spec.variants["amdgpu_target"].value
if archs != "none":
arch_str = ",".join(archs)
entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
# Fix blt_hip getting HIP_CLANG_INCLUDE_PATH-NOTFOUND bad include directory
# TODO: verify that this is still needed and is indeed specific to LC
if (
self.spec.satisfies("%cce") or self.spec.satisfies("%clang")
) and "toss_4" in self._get_sys_type(spec):
# Set the patch version to 0 if not already
clang_version = str(self.compiler.version)[:-1] + "0"
hip_clang_include_path = (
rocm_root + "/llvm/lib/clang/" + clang_version + "/include"
)
if os.path.isdir(hip_clang_include_path):
entries.append(
cmake_cache_path("HIP_CLANG_INCLUDE_PATH", hip_clang_include_path)
)

# Fixes for mpi for rocm until wrapper paths are fixed
# These flags are already part of the wrapped compilers on TOSS4 systems

@@ -291,9 +318,16 @@ def initconfig_hardware_entries(self):
hip_link_flags += "-Wl,-rpath,{0}/../llvm/lib:{0}/lib ".format(hip_root)
hip_link_flags += "-lpgmath -lflang -lflangrti -lompstub -lamdhip64 "

# Remove extra link library for crayftn
if "+fortran" in spec and self.is_fortran_compiler("crayftn"):
entries.append(
cmake_cache_string("BLT_CMAKE_IMPLICIT_LINK_LIBRARIES_EXCLUDE", "unwind")
)

# Additional libraries for TOSS4
hip_link_flags += " -L{0}/../lib64 -Wl,-rpath,{0}/../lib64 ".format(hip_root)
hip_link_flags += "-lhsakmt -lamd_comgr "
hip_link_flags += " -L{0}/../lib -Wl,-rpath,{0}/../lib ".format(hip_root)
hip_link_flags += "-lamd_comgr -lhsa-runtime64 "

entries.append(cmake_cache_string("CMAKE_EXE_LINKER_FLAGS", hip_link_flags))

@@ -328,13 +362,30 @@ def initconfig_hardware_entries(self):
cmake_cache_string("CMAKE_SHARED_LINKER_FLAGS", linker_flags, description)
)

description = "Converts C-style comments to Fortran style " "in preprocessed files"
description = "Converts C-style comments to Fortran style in preprocessed files"
entries.append(
cmake_cache_string(
"BLT_FORTRAN_FLAGS", "-WF,-C! -qxlf2003=polymorphic", description
)
)

if (
"+openmp" in spec
and "clang" in self.compiler.cxx
and "+fortran" in spec
and self.is_fortran_compiler("xlf")
):
openmp_gen_exp = (
"$<$<NOT:$<COMPILE_LANGUAGE:Fortran>>:"
"-fopenmp=libomp>;$<$<COMPILE_LANGUAGE:"
"Fortran>:-qsmp=omp>"
)

description = "Different OpenMP linker flag between CXX and Fortran"
entries.append(
cmake_cache_string("BLT_OPENMP_LINK_FLAGS", openmp_gen_exp, description)
)

if spec.satisfies("target=ppc64le:"):
# Fix for working around CMake adding implicit link directories
# returned by the BlueOS compilers to link executables with

@@ -367,28 +418,43 @@ def initconfig_mpi_entries(self):
else:
entries.append(cmake_cache_option("ENABLE_MPI", False))

# Replace /usr/bin/srun path with srun flux wrapper path on TOSS 4
# TODO: Remove this logic by adding `using_flux` case in
# spack/lib/spack/spack/build_systems/cached_cmake.py:196 and remove hard-coded
# path to srun in same file.
if "toss_4" in self._get_sys_type(spec):
srun_wrapper = which_string("srun")
mpi_exec_index = [
index for index, entry in enumerate(entries) if "MPIEXEC_EXECUTABLE" in entry
]
del entries[mpi_exec_index[0]]
entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", srun_wrapper))

return entries

def find_path_replacement(self, path1, path2, path_replacements, name, entries):
root = os.path.commonprefix([path1, path2])
if root.endswith(os.path.sep):
root = root[: -len(os.path.sep)]
if root:
path_replacements[root] = "${" + name + "}"
entries.append(cmake_cache_path(name, root))

def initconfig_package_entries(self):
spec = self.spec
entries = []
path_replacements = {}

# TPL locations
entries.append("#------------------{0}".format("-" * 60))
entries.append("# TPLs")
entries.append("#------------------{0}\n".format("-" * 60))

# Try to find the common prefix of the TPL directory, including the
# compiler. If found, we will use this in the TPL paths
compiler_str = str(spec.compiler).replace("@", "-")
prefix_paths = prefix.split(compiler_str)
path_replacements = {}

if len(prefix_paths) == 2:
tpl_root = os.path.realpath(pjoin(prefix_paths[0], compiler_str))
path_replacements[tpl_root] = "${TPL_ROOT}"
entries.append("# Root directory for generated TPLs\n")
entries.append(cmake_cache_path("TPL_ROOT", tpl_root))
# Try to find the common prefix of the TPL directory.
# If found, we will use this in the TPL paths
path1 = os.path.realpath(spec["conduit"].prefix)
path2 = os.path.realpath(self.prefix)
self.find_path_replacement(path1, path2, path_replacements, "TPL_ROOT", entries)

conduit_dir = get_spec_path(spec, "conduit", path_replacements)
entries.append(cmake_cache_path("CONDUIT_DIR", conduit_dir))

@@ -443,17 +509,13 @@ def initconfig_package_entries(self):
# Grab common devtools root and strip the trailing slash
path1 = os.path.realpath(spec["cppcheck"].prefix)
path2 = os.path.realpath(spec["doxygen"].prefix)
devtools_root = os.path.commonprefix([path1, path2])[:-1]
path_replacements[devtools_root] = "${DEVTOOLS_ROOT}"
entries.append("# Root directory for generated developer tools\n")
entries.append(cmake_cache_path("DEVTOOLS_ROOT", devtools_root))
self.find_path_replacement(path1, path2, path_replacements, "DEVTOOLS_ROOT", entries)

if "+devtools" in spec and "toss_4" not in self._get_sys_type(spec):
# Only turn on clangformat support if devtools is on and not TOSS4
if "+devtools" in spec and spec.satisfies("^llvm"):
clang_fmt_path = spec["llvm"].prefix.bin.join("clang-format")
entries.append(cmake_cache_path("CLANGFORMAT_EXECUTABLE", clang_fmt_path))
else:
entries.append("# ClangFormat disabled due to disabled devtools\n")
entries.append("# ClangFormat disabled due to llvm and devtools not in spec\n")
entries.append(cmake_cache_option("ENABLE_CLANGFORMAT", False))

if "+python" in spec or "+devtools" in spec:

@@ -462,6 +524,11 @@ def initconfig_package_entries(self):
python_path = python_path.replace(key, path_replacements[key])
entries.append(cmake_cache_path("PYTHON_EXECUTABLE", python_path))

if spec.satisfies("^py-jsonschema"):
jsonschema_dir = get_spec_path(spec, "py-jsonschema", path_replacements, use_bin=True)
jsonschema_path = os.path.join(jsonschema_dir, "jsonschema")
entries.append(cmake_cache_path("JSONSCHEMA_EXECUTABLE", jsonschema_path))

enable_docs = spec.satisfies("^doxygen") or spec.satisfies("^py-sphinx")
entries.append(cmake_cache_option("ENABLE_DOCS", enable_docs))
@@ -0,0 +1,38 @@
diff --git a/src/cmake/thirdparty/SetupAxomThirdParty.cmake b/src/cmake/thirdparty/SetupAxomThirdParty.cmake
index d4d25d50e..a7ba6e59d 100644
--- a/src/cmake/thirdparty/SetupAxomThirdParty.cmake
+++ b/src/cmake/thirdparty/SetupAxomThirdParty.cmake
@@ -32,31 +32,8 @@ if ((RAJA_DIR OR UMPIRE_DIR) AND NOT CAMP_DIR)
message(FATAL_ERROR "CAMP_DIR is required if RAJA_DIR or UMPIRE_DIR is provided.")
endif()

-if (CAMP_DIR)
- if (NOT EXISTS "${CAMP_DIR}")
- message(FATAL_ERROR "Given CAMP_DIR does not exist: ${CAMP_DIR}")
- endif()
-
- if (NOT IS_DIRECTORY "${CAMP_DIR}")
- message(FATAL_ERROR "Given CAMP_DIR is not a directory: ${CAMP_DIR}")
- endif()
-
- find_package(camp REQUIRED PATHS ${CAMP_DIR})
-
- message(STATUS "Checking for expected Camp target 'camp'")
- if (NOT TARGET camp)
- message(FATAL_ERROR "Camp failed to load: ${CAMP_DIR}")
- else()
- message(STATUS "Camp loaded: ${CAMP_DIR}")
- set(CAMP_FOUND TRUE CACHE BOOL "")
- endif()
-
- # Note: camp sets a compile feature that is not available on XL
- set_target_properties(camp PROPERTIES INTERFACE_COMPILE_FEATURES "")
-else()
- message(STATUS "Camp support is OFF")
- set(CAMP_FOUND FALSE CACHE BOOL "")
-endif()
+# Note: Let Umpire find Camp via camp_DIR, don't find it ourselves
+set(camp_DIR ${CAMP_DIR})

#------------------------------------------------------------------------------
# UMPIRE
@@ -9,7 +9,6 @@


class Bricks(CMakePackage):

"""Bricks is a data layout and code generation framework,
enabling performance-portable stencil computations across
a multitude of architectures."""
@@ -17,6 +17,7 @@ class Cgal(CMakePackage):
homepage = "https://www.cgal.org/"
url = "https://github.com/CGAL/cgal/releases/download/v5.4.1/CGAL-5.4.1.tar.xz"

version("5.6", sha256="dcab9b08a50a06a7cc2cc69a8a12200f8d8f391b9b8013ae476965c10b45161f")
version("5.5.2", sha256="b2b05d5616ecc69facdc24417cce0b04fb4321491d107db45103add520e3d8c3")
version("5.4.1", sha256="4c3dd7ee4d36d237111a4d72b6e14170093271595d5b695148532daa95323d76")
version("5.1.5", sha256="b1bb8a6053aa12baa5981aef20a542cd3e617a86826963fb8fb6852b1a0da97c")
var/spack/repos/builtin/packages/countdown/package.py (new file, 87 lines)

@@ -0,0 +1,87 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class Countdown(CMakePackage, CudaPackage):
"""COUNTDOWN is a tool for identifying and automatically reducing the power
consumption of the computing elements, during communication and
synchronization primitives, filtering out phases which would detriment the
time to solution of the application."""

homepage = "https://github.com/EEESlab/countdown"
url = "https://github.com/EEESlab/countdown/archive/refs/tags/v1.1.1.tar.gz"

license("BSD-3-Clause")

maintainers("f-tesser", "danielecesarini")

version("1.1.1", sha256="ee7f00ffc047f000a21a7a71f6ea6f4049afb1a8407608adc04993929ceba917")

variant(
"acc_mpi",
default=True,
description="Enable the instrumentation of all accessory MPI functions",
)
variant(
"coll_mpi",
default=True,
description="Enable the instrumentation of all collective MPI functions",
)
variant("debug_mpi", default=False, description="Enable the debug prints on MPI functions")
variant(
"excl_all_mpi",
default=False,
description="Disable the instrumentation of all MPI functions, "
"except for initialization and finalization",
)
variant("hwp_auto_discovery", default=True, description="Autodiscovery of hwp-states")
variant("mosquitto", default=False, description="Enable MQTT message passing")
variant("no_profiling", default=False, description="Disable MPI profiling")
variant("use_cpufreq", default=True, description="Manual set of cpufreq interface")
variant("use_hwp", default=False, description="Manual set if hwp-states are available")
variant(
"p2p_mpi",
default=True,
description="Enable the instrumentation of all point-to-point MPI functions",
)
variant("shared", default=True, description="Build shared libraries")

conflicts("+acc_mpi", when="+excl_all_mpi")
conflicts("+coll_mpi", when="+excl_all_mpi")
conflicts("+p2p_mpi", when="+excl_all_mpi")
conflicts("+excl_all_mpi", when="+acc_mpi")
conflicts("+excl_all_mpi", when="+coll_mpi")
conflicts("+excl_all_mpi", when="+p2p_mpi")
conflicts("+hwp_auto_discovery", when="+use_cpufreq")
conflicts("+hwp_auto_discovery", when="+use_hwp")
conflicts("+use_cpufreq", when="+hwp_auto_discovery")
conflicts("+use_cpufreq", when="+use_hwp")
conflicts("+use_hwp", when="+hwp_auto_discovery")
conflicts("+use_hwp", when="+use_cpufreq")

depends_on("cmake@3.0.0:", type="build")
depends_on("hwloc", type="link")
depends_on("mpi@3.0.0:", type="link")
depends_on("mosquitto", when="+mosquitto", type="link")

def cmake_args(self):
args = [
self.define_from_variant("CNTD_ENABLE_CUDA", "cuda"),
self.define_from_variant("CNTD_DISABLE_ACCESSORY_MPI", "acc_mpi"),
self.define_from_variant("CNTD_ENABLE_COLLECTIVE_MPI", "coll_mpi"),
self.define_from_variant("CNTD_ENABLE_DEBUG_MPI", "debug_mpi"),
self.define_from_variant("CNTD_DISABLE_ALL_MPI_EXCEPT_INI_FIN", "excl_all_mpi"),
self.define_from_variant("CNTD_HWP_AUTO_DISCOVER", "hwp_auto_discovery"),
self.define_from_variant("CNTD_ENABLE_MOSQUITTO", "mosquitto"),
self.define_from_variant("CNTD_DISABLE_PROFILING_MPI", "no_profiling"),
self.define_from_variant("CNTD_USE_CPUFREQ", "use_cpufreq"),
self.define_from_variant("CNTD_HWP_DISCOVERED", "use_hwp"),
self.define_from_variant("CNTD_ENABLE_P2P_MPI", "p2p_mpi"),
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
]

return args
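Countdown's cmake_args is built almost entirely from define_from_variant, which maps a boolean variant onto a -D<VAR>=ON/OFF cache entry. A minimal standalone sketch of that mapping (plain dict instead of a Spec; the helper below is ours, not Spack's API):

    def define_from_variant(cmake_var: str, variants: dict, name: str) -> str:
        """Turn a boolean variant value into a CMake cache define."""
        return f"-D{cmake_var}={'ON' if variants[name] else 'OFF'}"

    variants = {"cuda": False, "mosquitto": True, "shared": True}
    args = [
        define_from_variant("CNTD_ENABLE_CUDA", variants, "cuda"),
        define_from_variant("CNTD_ENABLE_MOSQUITTO", variants, "mosquitto"),
        define_from_variant("BUILD_SHARED_LIBS", variants, "shared"),
    ]
    print(args)  # ['-DCNTD_ENABLE_CUDA=OFF', '-DCNTD_ENABLE_MOSQUITTO=ON', '-DBUILD_SHARED_LIBS=ON']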
@@ -169,9 +169,11 @@ def cmake_args(self):
]
elif mkl_provider == "intel-mkl":
args += [
self.define("DLAF_WITH_MKL", True)
if spec.version <= Version("0.3")
else self.define("DLAF_WITH_MKL_LEGACY", True),
(
self.define("DLAF_WITH_MKL", True)
if spec.version <= Version("0.3")
else self.define("DLAF_WITH_MKL_LEGACY", True)
),
self.define("MKL_LAPACK_TARGET", f"mkl::mkl_intel_32bit_{mkl_threads}_dyn"),
]
@@ -50,6 +50,8 @@ class Eccodes(CMakePackage):
license("Apache-2.0")

version("develop", branch="develop")
version("2.33.0", sha256="bdcec8ce63654ec6803400c507f01220a9aa403a45fa6b5bdff7fdcc44fd7daf")
version("2.32.1", sha256="ad2ac1bf36577b1d35c4a771b4d174a06f522a1e5ef6c1f5e53a795fb624863e")
version("2.32.0", sha256="b57e8eeb0eba0c05d66fda5527c4ffa84b5ab35c46bcbc9a2227142973ccb8e6")
version("2.31.0", sha256="808ecd2c11fbf2c3f9fc7a36f8c2965b343f3151011b58a1d6e7cc2e6b3cac5d")
version("2.25.0", sha256="8975131aac54d406e5457706fd4e6ba46a8cc9c7dd817a41f2aa64ce1193c04e")
@@ -9,7 +9,6 @@


class Elbencho(MakefilePackage):

"""
Elbencho storage benchmark
"""
@@ -171,8 +171,14 @@ def configure_args(self):

options += self.enable_or_disable("openmp")

# if using mkl with openmp support, link with openmp
mkl_openmp_flag = (
self.compiler.openmp_flag
if self.spec.satisfies("^intel-oneapi-mkl threads=openmp")
else ""
)
options += [
"LDFLAGS={0}".format(spec["lapack"].libs.search_flags),
"LDFLAGS={0} {1}".format(mkl_openmp_flag, spec["lapack"].libs.search_flags),
"LIBS={0} {1}".format(spec["lapack"].libs.link_flags, spec["blas"].libs.link_flags),
]
@@ -20,6 +20,7 @@ class Fpart(AutotoolsPackage):
license("BSD-2-Clause")

version("master", branch="master")
version("1.6.0", sha256="ed1fac2853fc421071b72e4c5d8455a231bc30e50034db14af8b0485ece6e097")
version("1.5.1", sha256="c353a28f48e4c08f597304cb4ebb88b382f66b7fabfc8d0328ccbb0ceae9220c")

variant("embfts", default=False, description="Build with embedded fts functions")
@@ -17,6 +17,7 @@ class Gnupg(AutotoolsPackage):

license("GPL-3.0-or-later")

version("2.4.4", sha256="67ebe016ca90fa7688ce67a387ebd82c6261e95897db7b23df24ff335be85bc6")
version("2.4.3", sha256="a271ae6d732f6f4d80c258ad9ee88dd9c94c8fdc33c3e45328c4d7c126bd219d")
version("2.4.2", sha256="97eb47df8ae5a3ff744f868005a090da5ab45cb48ee9836dbf5ee739a4e5cf49")
version("2.4.1", sha256="76b71e5aeb443bfd910ce9cbc8281b617c8341687afb67bae455877972b59de8")
@@ -293,9 +293,13 @@ def flag_handler(self, name, flags):
cmake_flags.append(self.compiler.cc_pic_flag)
if spec.satisfies("@1.8.21 %oneapi@2023.0.0"):
cmake_flags.append("-Wno-error=int-conversion")
if spec.satisfies("%apple-clang@15:"):
cmake_flags.append("-Wl,-ld_classic")
elif name == "cxxflags":
if spec.satisfies("@:1.8.12+cxx~shared"):
cmake_flags.append(self.compiler.cxx_pic_flag)
if spec.satisfies("%apple-clang@15:"):
cmake_flags.append("-Wl,-ld_classic")
elif name == "fflags":
if spec.satisfies("%cce+fortran"):
# Cray compiler generates module files with uppercase names by
@@ -0,0 +1,12 @@
diff -ruN spack-src/CMakeLists.txt spack-src-patched/CMakeLists.txt
--- spack-src/CMakeLists.txt 2023-11-07 21:54:14.000000000 +0000
+++ spack-src-patched/CMakeLists.txt 2024-01-26 19:32:52.140539356 +0000
@@ -515,7 +515,7 @@
# docs (which has the advantage that preprocessing will take
# "{,hydrogen_}config.h" into consideration).
configure_file("${PROJECT_SOURCE_DIR}/cmake/configure_files/config.h.in"
- "${PROJECT_BINARY_DIR}/include/El/config.h")
+ "${PROJECT_BINARY_DIR}/include/El/config.h" ESCAPE_QUOTES)
configure_file("${PROJECT_SOURCE_DIR}/cmake/configure_files/hydrogen_config.h.in"
"${PROJECT_BINARY_DIR}/include/El/hydrogen_config.h")
configure_file("${PROJECT_SOURCE_DIR}/doxy/Doxyfile.in"
@@ -130,6 +130,10 @@ class Hydrogen(CachedCMakePackage, CudaPackage, ROCmPackage):

depends_on("llvm-openmp", when="%apple-clang +openmp")

# Fixes https://github.com/spack/spack/issues/42286
# https://github.com/LLNL/Elemental/pull/177
patch("cmake-intel-mpi-escape-quotes-pr177.patch", when="@1.5.3")

@property
def libs(self):
shared = True if "+shared" in self.spec else False

@@ -179,7 +179,6 @@ def setup_dependent_build_environment(self, env, dependent_spec):

def _find_mkl_libs(self, shared):
libs = []
threading_libs = []

if self.spec.satisfies("+cluster"):
libs.extend([self._xlp64_lib("libmkl_scalapack"), "libmkl_cdft_core"])
@@ -192,12 +191,6 @@ def _find_mkl_libs(self, shared):
libs.append("libmkl_intel_thread")
else:
libs.append("libmkl_gnu_thread")

# this is slightly different than what link-line advisor suggests.
# here it uses what the compiler suggests to use to enable openmp,
# instead of being explicit about in which path openmp libraries
# are located (e.g. intel libiomp5, gcc libgomp, clang libomp).
threading_libs += [self.compiler.openmp_flag]
else:
libs.append("libmkl_sequential")

@@ -248,8 +241,6 @@ def _find_mkl_libs(self, shared):
except spack.error.NoLibrariesError:
pass

resolved_libs += threading_libs

return resolved_libs

def _xlp64_lib(self, lib):

@@ -113,6 +113,10 @@ class IntelOneapiMpi(IntelOneApiLibraryPackage):

provides("mpi@:3.1")

@property
def mpiexec(self):
return self.component_prefix.bin.mpiexec

@property
def v2_layout_versions(self):
return "@2021.11:"

@@ -169,6 +169,7 @@ class Julia(MakefilePackage):
depends_on("patchelf@0.13:0.17", type="build")
depends_on("perl", type="build")
depends_on("libwhich", type="build")
depends_on("which", type="build") # for detecting 7z, lld, dsymutil
depends_on("python", type="build")

depends_on("blas") # note: for now openblas is fixed...
@@ -197,6 +198,13 @@ class Julia(MakefilePackage):
patch("use-add-rpath.patch", when="@:1.8.0")
patch("use-add-rpath-2.patch", when="@1.8.1:1.8")

# Fix the path to Spack llvm's lld and dsymutil
patch(
"https://github.com/JuliaLang/julia/commit/55c13d234c1523861b278f7989b1af105ef0e88f.patch?full_index=1",
sha256="00569f40e1845329060a714813e509677949e633a0e833c40a3c70dcf9269cc1",
when="@1.9:1.10",
)

# Fix libstdc++ not being found (https://github.com/JuliaLang/julia/issues/47987)
patch(
"https://github.com/JuliaLang/julia/pull/48342.patch?full_index=1",

@@ -23,6 +23,9 @@ class LinaroForge(Package):
maintainers("kenche-linaro")

if platform.machine() in ["aarch64", "arm64"]:
version(
"23.1.1", sha256="6e95a9c9f894caad073e58590733c4ce4489aec0d8db6553050e71a59e41e6f8"
)
version("23.1", sha256="c9889b95729f97bcffaf0f15b930efbd27081b7cf2ebc958eede3a186cc4d93a")
version(
"23.0.4", sha256="a19e6b247badaa52f78815761f71fb95a565024b7f79bdfb2f602f18b47a881c"
@@ -44,6 +47,9 @@ class LinaroForge(Package):
"21.1.3", sha256="4a4ff7372aad5a31fc9e18b7b6c493691ab37d8d44a3158584e62d1ab82b0eeb"
)
elif platform.machine() == "ppc64le":
version(
"23.1.1", sha256="9d4dfa440ef1cc9c6a7cb4f7eeec49fc77f0b6b75864fbe018a41783ac5fc5df"
)
version("23.1", sha256="39a522c1d9a29f0a35bba5201f3e23c56d87543410505df30c85128816dd455b")
version(
"23.0.4", sha256="927c1ba733cf63027243060586b196f8262e545d898712044c359a6af6fc5795"
@@ -68,6 +74,9 @@ class LinaroForge(Package):
"21.1.3", sha256="eecbc5686d60994c5468b2d7cd37bebe5d9ac0ba37bd1f98fbfc69b071db541e"
)
elif platform.machine() == "x86_64":
version(
"23.1.1", sha256="6dcd39fc582088eb4b13233ae1e9b38e12bfa07babf77d89b869473a3c2b66e6"
)
version("23.1", sha256="31185d5f9855fd03701089907cdf7b38eb72c484ee730f8341decbbd8f9b5930")
version(
"23.0.4", sha256="41a81840a273ea9a232efb4f031149867c5eff7a6381d787e18195f1171caac4"

@@ -841,6 +841,12 @@ def cmake_args(self):

cmake_args.append(from_variant("LIBOMPTARGET_ENABLE_DEBUG", "libomptarget_debug"))

if spec.satisfies("@14:"):
# The hsa-rocr-dev package may be pulled in through hwloc, which can lead to cmake
# finding libhsa and enabling the AMDGPU plugin. Since we don't support this yet,
# disable explicitly. See commit a05a0c3c2f8eefc80d84b7a87a23a4452d4a3087.
cmake_args.append(define("LIBOMPTARGET_BUILD_AMDGPU_PLUGIN", False))

if "+lldb" in spec:
projects.append("lldb")
cmake_args.extend(
@@ -961,9 +967,9 @@ def cmake_args(self):
"openmp",
]
runtimes.sort(
key=lambda x: runtimes_order.index(x)
if x in runtimes_order
else len(runtimes_order)
key=lambda x: (
runtimes_order.index(x) if x in runtimes_order else len(runtimes_order)
)
)
cmake_args.extend(
[

@@ -7,7 +7,6 @@


class Mpip(AutotoolsPackage):

"""mpiP: Lightweight, Scalable MPI Profiling"""

homepage = "https://software.llnl.gov/mpiP/"

@@ -135,16 +135,18 @@ def write_makefile_inc(self):
[
"IMETIS = -I%s" % self.spec["parmetis"].prefix.include,
(
"LMETIS = -L%s -l%s -L%s -l%s"
% (
self.spec["parmetis"].prefix.lib,
"parmetis",
self.spec["metis"].prefix.lib,
"metis",
(
"LMETIS = -L%s -l%s -L%s -l%s"
% (
self.spec["parmetis"].prefix.lib,
"parmetis",
self.spec["metis"].prefix.lib,
"metis",
)
)
)
if not shared
else "LMETIS =",
if not shared
else "LMETIS ="
),
]
)

@@ -153,9 +155,11 @@ def write_makefile_inc(self):
makefile_conf.extend(
[
"IMETIS = -I%s" % self.spec["metis"].prefix.include,
("LMETIS = -L%s -l%s" % (self.spec["metis"].prefix.lib, "metis"))
if not shared
else "LMETIS =",
(
("LMETIS = -L%s -l%s" % (self.spec["metis"].prefix.lib, "metis"))
if not shared
else "LMETIS ="
),
]
)

@@ -58,7 +58,7 @@ class Npb(MakefilePackage):
# ~4X size increase going from one class to the next
"D",
"E",
"F" # large test problems
"F", # large test problems
# ~16X size increase from each of the previous classes
)


@@ -10,7 +10,6 @@


class Openvdb(CMakePackage):

"""OpenVDB - a sparse volume data format."""

homepage = "https://github.com/AcademySoftwareFoundation/openvdb"

@@ -26,6 +26,7 @@ class Pdt(AutotoolsPackage):

license("GPL-2.0-only")

version("3.25.2", sha256="01c2d403bc6672b2b264a182c325806541066c5ed5713878eb598f5506428cbe")
version("3.25.1", sha256="0b6f8a6b8769c181b2ae6cae7298f04b8e3e3d68066f598ed24574e19500bc97")
version("3.25", sha256="1037628d854edfeded3d847150d3e8fbd3774e8146407ce32f5021c80f6299be")
version("3.24", sha256="4a2bb31f3f7f2e52ed49d9b7189ade05170a4386ef76771280a06e8b3ca97ab2")
@@ -50,8 +51,13 @@ def configure(self, spec, prefix):
options = ["-prefix=%s" % prefix]
if self.compiler.name == "xl":
options.append("-XLC")
elif self.compiler.name == "intel" or self.compiler.name == "oneapi":
elif self.compiler.name == "intel":
options.append("-icpc")
elif self.compiler.name == "oneapi":
if spec.satisfies("@3.25.2:"):
options.append("-icpx")
else:
options.append("-icpc")
elif self.compiler.name == "pgi":
options.append("-pgCC")
elif self.compiler.name == "gcc":

@@ -0,0 +1,11 @@
diff -Nur spack-src.org/fortran/CMakeLists.txt spack-src/fortran/CMakeLists.txt
--- spack-src.org/fortran/CMakeLists.txt 2023-06-06 17:55:44.000000000 +0900
+++ spack-src/fortran/CMakeLists.txt 2023-06-06 15:50:05.000000000 +0900
@@ -47,6 +47,7 @@

endmacro()

+set (CMAKE_EXE_LINKER_FLAGS "--linkfortran")

add_pexsi_f_example_exe( f_driver_ksdft )
add_pexsi_f_example_exe( f_driver_pselinv_real )
@@ -38,6 +38,8 @@ class Pexsi(MakefilePackage, CMakePackage):
version("0.10.2", sha256="8714c71b76542e096211b537a9cb1ffb2c28f53eea4f5a92f94cc1ca1e7b499f")
version("0.9.0", sha256="e5efe0c129013392cdac3234e37f1f4fea641c139b1fbea47618b4b839d05029")

patch("fujitsu-add-link-flags.patch", when="%fj")

depends_on("parmetis")
depends_on("superlu-dist@5.1.2:5.3", when="@0.10.2:0")
depends_on("superlu-dist@:6.1.0", when="@1") # Upper limit from CP2K toolchain
@@ -123,4 +125,8 @@ def cmake_args(self):
self.define_from_variant("PEXSI_ENABLE_FORTRAN", "fortran"),
self.define_from_variant("PEXSI_ENABLE_OPENMP ", "openmp"),
]

if self.spec.satisfies("%fj"):
args.append(self.define("BLAS_LIBRARIES", self.spec["blas"].libs.link_flags))

return args

@@ -22,6 +22,7 @@ class Pflogger(CMakePackage):
version("develop", branch="develop")
version("main", branch="main")

version("1.12.0", sha256="ff29b0ce4baf50675edb69c3c7493be5410839b5f81e3ce5405f04925503fb0d")
version("1.11.0", sha256="bf197b6f223a75c7d3eee23888cdde204b5aea053c308852a3f8f677784b8899")
version("1.10.0", sha256="8e25564699c0adcbe9a23fded6637668ce659480b39420be5a4c8181cd44ad53")
version("1.9.5", sha256="baa3ebb83962f1b6c8c5b0413fe9d02411d3e379c76b8c190112e158c10ac0ac")
@@ -56,6 +57,9 @@ class Pflogger(CMakePackage):

depends_on("mpi", when="+mpi")

# Using pFlogger with MPICH 4 is only supported from version 1.11
conflicts("^mpich@4:", when="@:1.10")

depends_on("cmake@3.12:", type="build")

def cmake_args(self):
@@ -65,4 +69,10 @@ def cmake_args(self):
if spec.satisfies("+mpi"):
args.extend(["-DCMAKE_Fortran_COMPILER=%s" % spec["mpi"].mpifc])

# From version 1.12 on, there is an `ENABLE_MPI` option that
# defaults to `ON`. If we don't want MPI, we need to set it to
# `OFF`
if spec.satisfies("@1.12: ~mpi"):
args.append("-DENABLE_MPI=OFF")

return args

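Aside (a sketch under assumptions, not what this commit does): on pFlogger 1.12 and newer the same switch could also be derived from the variant with the builder helper used elsewhere in this compare, assuming the upstream CMake option really is named ENABLE_MPI as the comment above states.

# Hedged sketch: let the helper emit -DENABLE_MPI:BOOL=ON/OFF from the +mpi/~mpi variant.
if spec.satisfies("@1.12:"):
    args.append(self.define_from_variant("ENABLE_MPI", "mpi"))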
@@ -19,6 +19,7 @@ class PyBlack(PythonPackage):

license("MIT", checked_by="tgamblin")

version("24.1.0", sha256="30fbf768cd4f4576598b1db0202413fafea9a227ef808d1a12230c643cefe9fc")
version("23.12.1", sha256="4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5")
version("23.12.0", sha256="330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a")
version("23.11.0", sha256="4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05")

@@ -17,6 +17,7 @@ class PyLightly(PythonPackage):

license("MIT")

version("1.4.26", sha256="7bbcf0a358f23659eb4089043c559c4584ef339266b1c0a9a2598c3100f2f3b8")
version("1.4.18", sha256="41794f6815db178b031236793b379e5573e074fdf730506872b73766396a6bdf")
version("1.4.17", sha256="1533ddf28c8a08b3eafd404964d03f9a62fe76405fcf8dc7206ca4093725285e")
version("1.4.16", sha256="9bd2af53e144e4f9823409cd33b39651f579ed671ff242a1445640c9df504d92")
@@ -38,6 +39,8 @@ class PyLightly(PythonPackage):

# setup.py
depends_on("py-setuptools@21:", when="@1.4.2:", type="build")
depends_on("py-setuptools@21:", when="@1.4.8,1.4.15:1.4.25", type=("build", "run"))
depends_on("py-setuptools@21:65.5.1", when="@:1.4.1", type=("build", "run"))

# requirements/base.txt
depends_on("py-certifi@14.05.14:", type=("build", "run"))
@@ -48,12 +51,8 @@ class PyLightly(PythonPackage):
depends_on("py-requests@2.23:", type=("build", "run"))
depends_on("py-six@1.10:", type=("build", "run"))
depends_on("py-tqdm@4.44:", type=("build", "run"))
depends_on("py-urllib3@1.15.1:", type=("build", "run"))

# requirements/openapi.txt
depends_on("py-python-dateutil@2.5.3:", when="@1.4.8:", type=("build", "run"))
depends_on("py-setuptools@21:", when="@1.4.15:", type=("build", "run"))
depends_on("py-urllib3@1.25.3:", when="@1.4.8:", type=("build", "run"))
depends_on("py-urllib3@1.15.1:", type=("build", "run"))
depends_on("py-pydantic@1.10.5:1", when="@1.4.8:", type=("build", "run"))
depends_on("py-aenum@3.1.11:", when="@1.4.8:", type=("build", "run"))

@@ -66,7 +65,3 @@ class PyLightly(PythonPackage):

# https://github.com/lightly-ai/lightly/issues/1153
depends_on("py-torch+distributed", when="@:1.4.4", type=("build", "run"))

# Historical dependencies
depends_on("py-setuptools@21:", when="@1.4.8", type=("build", "run"))
depends_on("py-setuptools@21:65.5.1", when="@:1.4.1", type=("build", "run"))

@@ -28,6 +28,7 @@ class Tau(Package):
license("MIT")

version("master", branch="master")
version("2.33.1", sha256="0ad17d46319c57b5a5e07d153da9acde66bc67f875f0ac626fe229968484021b")
version("2.33", sha256="04d9d67adb495bc1ea56561f33c5ce5ba44f51cc7f64996f65bd446fac5483d9")
version("2.32.1", sha256="0eec3de46b0873846dfc639270c5e30a226b463dd6cb41aa12e975b7563f0eeb")
version("2.32", sha256="ee774a06e30ce0ef0f053635a52229152c39aba4f4933bed92da55e5e13466f3")

@@ -649,9 +649,11 @@ def define_enable(suffix, value=None):
options.append(
define(
"Trilinos_CXX11_FLAGS",
self.compiler.cxx14_flag
if spec.variants["cxxstd"].value == "14"
else self.compiler.cxx11_flag,
(
self.compiler.cxx14_flag
if spec.variants["cxxstd"].value == "14"
else self.compiler.cxx11_flag
),
)
)

@@ -138,8 +138,8 @@ class Xyce(CMakePackage):

depends_on("armpl-gcc~shared", when="^armpl-gcc")
depends_on("atlas~shared", when="^atlas")
depends_on("blis libs=static", when="^blis+cblas")
depends_on("blis libs=static", when="^blis+blas")
depends_on("blis libs=static", when="^[virtuals=blas] blis+cblas")
depends_on("blis libs=static", when="^[virtuals=blas] blis+blas")
depends_on("clblast~shared", when="^clblast+netlib")
depends_on("intel-mkl~shared", when="^intel-mkl")
depends_on("intel-oneapi-mkl~shared", when="^intel-oneapi-mkl")

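For context on the ^[virtuals=blas] syntax above (a hedged sketch with a hypothetical package, not part of the commit): as I read the edge-attribute form, ^blis+cblas holds whenever a matching blis appears anywhere in the dependency DAG, while ^[virtuals=blas] blis+cblas holds only when blis is actually the blas provider for the spec.

from spack.package import *


class Demo(Package):
    """Hypothetical package, used only to illustrate the two when-conditions."""

    # Old-style condition: true whenever blis appears in the dependency DAG,
    # even if another package is the blas provider.
    depends_on("blis libs=static", when="^blis")

    # Edge-attribute condition: true only when blis satisfies the blas virtual
    # for this spec, which is the distinction the Xyce hunk above relies on.
    depends_on("blis libs=static", when="^[virtuals=blas] blis")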