Compare commits


16 Commits

Author SHA1 Message Date
Adrien Bernede
b2e526678a Merge branch 'develop' into woptim/extend-commit-fetch 2023-02-27 16:15:46 +01:00
Adrien M. BERNEDE
ecdde4a7fb Remove shallow clone fetch 2023-02-27 11:48:58 +01:00
Adrien M. BERNEDE
cbf2cb1a49 Fix wrong syntax 2023-02-27 11:36:06 +01:00
Adrien M. BERNEDE
5de57e6450 Missing repository arg 2023-02-27 11:29:09 +01:00
Adrien M. BERNEDE
a5d71af83a Attempt at getting the commit with a fetch 2023-02-27 11:27:23 +01:00
Harmen Stoppels
773fd5ad84 hpctoolkit: fix broken patches (#35711)
The patches don't have a stable checksum.
2023-02-27 10:50:48 +01:00
Seth R. Johnson
9b46e92e13 Celeritas: new versions 0.2.1 and 0.1.5 (#35704)
* celeritas: new versions 0.1.5 and 0.2.1

* celeritas: deprecate old versions
2023-02-27 09:36:28 +00:00
Howard Pritchard
f004311611 OpenMPI: add the 4.1.5 release (#35677)
Signed-off-by: Howard Pritchard <howardp@lanl.gov>
2023-02-27 00:57:36 -08:00
Glenn Johnson
a4b949492b r-twosamplemr: add new package and dependencies (#35683) 2023-02-27 07:38:27 +01:00
Larry Knox
6ab792fb03 hdf5-vol-cache: add v1.1 (#35685) 2023-02-27 07:35:30 +01:00
Alex Richert
313c7386c4 go: set GOMAXPROCS to limit number of build processes (#35703) 2023-02-27 07:26:50 +01:00
Adam J. Stewart
b0b4a05d44 py-nbqa: add new package (#35707) 2023-02-27 07:22:48 +01:00
Alberto Invernizzi
4e13b5374f fix dump problem (#35673)
If the dump file already existed, it was not truncated, so the result kept its
old size: the new content sat at the beginning, "padded" with the tail of the
old content, because the new content was not long enough to overwrite it.
2023-02-24 21:32:33 -08:00
Vinícius
07897900eb ompss-2 dependencies (#35642) 2023-02-24 21:22:17 -08:00
Axel Huebl
d286146c64 WarpX 23.02 (#35633)
Update `warpx` & `py-warpx` to the latest release.
2023-02-23 17:09:28 -08:00
Adrien M. BERNEDE
9331d47808 Add a step that forces the fetch of the specific commit
This will fetch even if the commit is on a PR from a fork
2023-02-23 16:38:58 +01:00
42 changed files with 808 additions and 604 deletions

View File

@@ -420,13 +420,6 @@ def actions():
#
def env_view_setup_parser(subparser):
"""manage a view associated with the environment"""
subparser.add_argument(
"-f",
"--force",
action="store_true",
dest="force",
help="regenerate even if regeneration cannot be done atomically",
)
subparser.add_argument(
"action", choices=ViewAction.actions(), help="action to take for the environment's view"
)
@@ -440,7 +433,7 @@ def env_view(args):
if env:
if args.action == ViewAction.regenerate:
env.regenerate_views(force=args.force)
env.regenerate_views()
elif args.action == ViewAction.enable:
if args.view_path:
view_path = args.view_path

View File

@@ -38,7 +38,6 @@
import spack.subprocess_context
import spack.traverse
import spack.user_environment as uenv
import spack.util.atomic_update
import spack.util.cpus
import spack.util.environment
import spack.util.hash
@@ -139,8 +138,6 @@ def default_manifest_yaml():
default_view_name = "default"
# Default behavior to link all packages into views (vs. only root packages)
default_view_link = "all"
# Default behavior to use exchange if possible and otherwise symlink for view updates
default_update_method = ["exchange", "symlink"]
def installed_specs():
@@ -409,7 +406,6 @@ def __init__(
exclude=[],
link=default_view_link,
link_type="symlink",
update_method=default_update_method,
):
self.base = base_path
self.raw_root = root
@@ -419,7 +415,6 @@ def __init__(
self.exclude = exclude
self.link_type = view_func_parser(link_type)
self.link = link
self.update_method = update_method
def select_fn(self, spec):
return any(spec.satisfies(s) for s in self.select)
@@ -436,7 +431,6 @@ def __eq__(self, other):
self.exclude == other.exclude,
self.link == other.link,
self.link_type == other.link_type,
self.update_method == other.update_method,
]
)
@@ -457,8 +451,6 @@ def to_dict(self):
ret["link_type"] = inverse_view_func_parser(self.link_type)
if self.link != default_view_link:
ret["link"] = self.link
if self.update_method != default_update_method:
ret["update_method"] = self.update_method
return ret
@staticmethod
@@ -471,39 +463,13 @@ def from_dict(base_path, d):
d.get("exclude", []),
d.get("link", default_view_link),
d.get("link_type", "symlink"),
d.get("update_method", default_update_method),
)
@property
def _current_root(self):
"""
Return the directory in which the view has been constructed.
Query the view if it stores metadata on where it was constructed.
If the view is using symlinks for atomic updates, self.root is a link and we read the link
to find the real root directory.
If self.root is not a view with metadata and is not a link, the view has not been
constructed.
"""
# Get the view as self.root even if it is actually a symlink
# We will not operate on this view object, only query metadata
# We don't want to pass a created_path to this view, so that we can read where it says it
# was created.
if not os.path.exists(self.root):
return None
view = self.view()
orig_path = view.metadata.get("created_path", None)
if orig_path:
return orig_path
# Backwards compat only applies for symlinked views
if not os.path.islink(self.root):
return None
# For backwards compat, check link for symlink views if no "created_path"
root = os.readlink(self.root)
if os.path.isabs(root):
return root
@@ -535,9 +501,11 @@ def get_projection_for_spec(self, spec):
symlink.
"""
view = self.view()
return view.get_projection_for_spec(spec)
view_path = view.get_projection_for_spec(spec)
rel_path = os.path.relpath(view_path, self._current_root)
return os.path.join(self.root, rel_path)
def view(self, new=None, created_path=False):
def view(self, new=None):
"""
Generate the FilesystemView object for this ViewDescriptor
@@ -550,13 +518,8 @@ def view(self, new=None, created_path=False):
new (str or None): If a string, create a FilesystemView
rooted at that path. Default None. This should only be used to
regenerate the view, and cannot be used to access specs.
created_path (bool): Pass metadata on the path the view was created in to the
underlying view implementation. Default False. This option should be enabled
when creating a view to add packages to, but not when creating a view object
to query an existing view.
"""
root = new if new else self.root
root = new if new else self._current_root
if not root:
# This can only be hit if we write a future bug
msg = (
@@ -564,17 +527,13 @@ def view(self, new=None, created_path=False):
"View root is at %s" % self.root
)
raise SpackEnvironmentViewError(msg)
kwargs = {
"ignore_conflicts": True,
"projections": self.projections,
"link": self.link_type,
"final_destination": self.root,
}
if created_path:
kwargs["metadata"] = {"created_path": root}
return SimpleFilesystemView(root, spack.store.layout, **kwargs)
return SimpleFilesystemView(
root,
spack.store.layout,
ignore_conflicts=True,
projections=self.projections,
link=self.link_type,
)
def __contains__(self, spec):
"""Is the spec described by the view descriptor
@@ -617,69 +576,7 @@ def specs_for_view(self, concretized_root_specs):
return specs
def update_method_error_msg(self, methods):
"""When methods are already determined invalid, construct error message for methods."""
msg = "View cannot be updated using specified update methods:"
if "exchange" in methods:
if not spack.util.atomic_update.renameat2():
msg += "\n Operating system does not support 'exchange' atomic update method."
msg += f"\n If the view {self.root} does not already exist on the filesystem,"
msg += " change its update_method to 'symlink' or 'auto'."
msg += f"\n If the view at {self.root} exists already, either change the"
msg += " update_method and run `spack env view regenerate --force`"
msg += " or run on a newer OS."
else:
msg += f"\n The view {self.root} cannot be updated with 'exchange' update method"
msg += " because it was originally constructed with the 'symlink' method."
msg += "\n Either change the update method to 'symlink' or"
msg += " run `spack env view regenerate --force` for a non-atomic update."
if "symlink" in methods:
msg += f"\n The view {self.root} cannot be updated with 'symlink' update method"
msg += " because it was originally constructed with the 'exchange' method."
msg += "\n Either change the update method to 'exchange' or"
msg += " run `spack env view regenerate --force` for a non-atomic update."
return msg
def valid_update_method(self, method, force):
return getattr(self, f"valid_update_method_{method}")(force)
def valid_update_method_exchange(self, force):
if not spack.util.atomic_update.renameat2():
return False
# Ensure we don't swap symlink -> exchange if we have a symlink and symlink is an
# acceptable method. This is to avoid problems switching between OSs.
if os.path.islink(self.root):
if force:
os.unlink(self.root)
elif "symlink" in self.update_method:
return False
return True
def valid_update_method_symlink(self, force):
if os.path.exists(self.root):
if not os.path.islink(self.root):
if force:
shutil.rmtree(self.root)
return True
else:
return False
return True
def update_method_to_use(self, force=False):
update_methods = self.update_method
if isinstance(update_methods, str):
update_methods = [update_methods]
for method in update_methods:
# Check whether we can use this method and return if we can
if self.valid_update_method(method, force):
return method
raise RuntimeError(self.update_method_error_msg(update_methods))
def regenerate(self, concretized_root_specs, force=False):
def regenerate(self, concretized_root_specs):
specs = self.specs_for_view(concretized_root_specs)
# To ensure there are no conflicts with packages being installed
@@ -700,39 +597,34 @@ def regenerate(self, concretized_root_specs, force=False):
tty.debug("View at %s does not need regeneration." % self.root)
return
# Check which atomic update method we need
update_method = self.update_method_to_use(force)
if update_method == "exchange" and os.path.isdir(new_root):
shutil.rmtree(new_root)
_error_on_nonempty_view_dir(new_root)
# construct view at new_root
if specs:
tty.msg("Updating view at {0}".format(self.root))
view = self.view(new=new_root, created_path=True)
view = self.view(new=new_root)
root_dirname = os.path.dirname(self.root)
tmp_symlink_name = os.path.join(root_dirname, "._view_link")
# Create a new view
try:
fs.mkdirp(new_root)
view.add_specs(*specs, with_dependencies=False)
if update_method == "exchange":
# Swap the view to the directory of the previous view if one exists so that
# the view that is swapped out will be named appropriately
if old_root:
os.rename(new_root, old_root)
exchange_location = old_root
else:
exchange_location = new_root
spack.util.atomic_update.atomic_update_renameat2(exchange_location, self.root)
else:
spack.util.atomic_update.atomic_update_symlink(new_root, self.root)
# create symlink from tmp_symlink_name to new_root
if os.path.exists(tmp_symlink_name):
os.unlink(tmp_symlink_name)
symlink(new_root, tmp_symlink_name)
# mv symlink atomically over root symlink to old_root
fs.rename(tmp_symlink_name, self.root)
except Exception as e:
# Clean up new view and temporary symlink on any failure.
try:
shutil.rmtree(new_root, ignore_errors=True)
os.unlink(tmp_symlink_name)
except (IOError, OSError):
pass
@@ -1657,14 +1549,14 @@ def update_default_view(self, viewpath):
else:
self.views.pop(name, None)
def regenerate_views(self, force=False):
def regenerate_views(self):
if not self.views:
tty.debug("Skip view update, this environment does not" " maintain a view")
return
concretized_root_specs = [s for _, s in self.concretized_specs()]
for view in self.views.values():
view.regenerate(concretized_root_specs, force)
view.regenerate(concretized_root_specs)
def check_views(self):
"""Checks if the environments default view can be activated."""

View File

@@ -862,6 +862,9 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
)
with working_dir(dest):
# By default, not all references are fetched by the clone
fetch_args = ["fetch", "origin", commit]
git(*fetch_args)
checkout_args = ["checkout", commit]
if not debug:
checkout_args.insert(1, "--quiet")
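This hunk makes the same change as the lookup_ref fix further down: fetch the commit hash explicitly before checking it out, because a default clone does not fetch commits that are only reachable from a fork's pull-request refs. A minimal standalone sketch of the sequence, assuming the remote permits fetching by full commit hash (GitHub and GitLab generally do):

import subprocess

def checkout_commit(repo_dir, commit):
    # Fetching the hash directly pulls the object even when it is only
    # reachable from a PR ref on a fork; a plain clone would miss it.
    subprocess.run(["git", "fetch", "origin", commit], cwd=repo_dir, check=True)
    subprocess.run(["git", "checkout", "--quiet", commit], cwd=repo_dir, check=True)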

View File

@@ -27,7 +27,7 @@
SingleMergeConflictError,
SourceMergeVisitor,
)
from llnl.util.symlink import islink, symlink
from llnl.util.symlink import symlink
from llnl.util.tty.color import colorize
import spack.config
@@ -43,7 +43,6 @@
_projections_path = ".spack/projections.yaml"
_metadata_path = ".spack/metadata.yaml"
def view_symlink(src, dst, **kwargs):
@@ -64,7 +63,7 @@ def view_copy(src, dst, view, spec=None):
Use spec and view to generate relocations
"""
shutil.copy2(src, dst, follow_symlinks=False)
shutil.copy2(src, dst)
if spec and not spec.external:
# Not metadata, we have to relocate it
@@ -83,23 +82,18 @@ def view_copy(src, dst, view, spec=None):
orig_sbang = "#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)
new_sbang = sbang.sbang_shebang_line()
root = view.final_destination
prefix_to_projection = collections.OrderedDict(
{spec.prefix: os.path.join(root, view.get_relative_projection_for_spec(spec))}
{spec.prefix: view.get_projection_for_spec(spec)}
)
for dep in spec.traverse():
if not dep.external:
prefix_to_projection[dep.prefix] = os.path.join(
root, view.get_relative_projection_for_spec(dep)
)
prefix_to_projection[dep.prefix] = view.get_projection_for_spec(dep)
if spack.relocate.is_binary(dst):
spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
elif islink(dst):
spack.relocate.relocate_links(links=[dst], prefix_to_prefix=prefix_to_projection)
else:
prefix_to_projection[spack.store.layout.root] = root
prefix_to_projection[spack.store.layout.root] = view._root
prefix_to_projection[orig_sbang] = new_sbang
spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
try:
@@ -156,13 +150,10 @@ def __init__(self, root, layout, **kwargs):
self.layout = layout
self.projections = kwargs.get("projections", {})
self.metadata = kwargs.get("metadata", {})
self.ignore_conflicts = kwargs.get("ignore_conflicts", False)
self.verbose = kwargs.get("verbose", False)
self.final_destination = kwargs.get("final_destination", self._root)
# Setup link function to include view
link_func = kwargs.get("link", view_symlink)
self.link = ft.partial(link_func, view=self)
@@ -219,15 +210,12 @@ def remove_standalone(self, spec):
"""
raise NotImplementedError
def get_relative_projection_for_spec(self, spec):
def get_projection_for_spec(self, spec):
"""
Get the relative projection in this view for a spec.
Get the projection in this view for a spec.
"""
raise NotImplementedError
def get_projection_for_spec(self, spec):
return os.path.join(self._root, self.get_relative_projection_for_spec(spec))
def get_all_specs(self):
"""
Get all specs currently active in this view.
@@ -286,34 +274,8 @@ def __init__(self, root, layout, **kwargs):
msg += " which does not match projections passed manually."
raise ConflictingProjectionsError(msg)
self.metadata_path = os.path.join(self._root, _metadata_path)
if not self.metadata:
self.metadata = self.read_metadata()
elif not os.path.exists(self.metadata_path):
self.write_metadata()
else:
if self.metadata != self.read_metadata():
msg = f"View at {self._root} has metadata file"
msg += " which does not match metadata passed manually."
raise ConflictingMetadataError(msg)
self._croot = colorize_root(self._root) + " "
def write_metadata(self):
if self.metadata:
mkdirp(os.path.dirname(self.metadata_path))
with open(self.metadata_path, "w") as f:
f.write(s_yaml.dump_config({"metadata": self.metadata}))
def read_metadata(self):
if os.path.exists(self.metadata_path):
with open(self.metadata_path, "r") as f:
# no schema as this is not user modified
metadata_data = s_yaml.load(f)
return metadata_data["metadata"]
else:
return {}
def write_projections(self):
if self.projections:
mkdirp(os.path.dirname(self.projections_path))
@@ -524,9 +486,9 @@ def remove_standalone(self, spec):
if self.verbose:
tty.info(self._croot + "Removed package: %s" % colorize_spec(spec))
def get_relative_projection_for_spec(self, spec):
def get_projection_for_spec(self, spec):
"""
Return the relative projection for a spec in this view.
Return the projection for a spec in this view.
Relies on the ordering of projections to avoid ambiguity.
"""
@@ -537,7 +499,9 @@ def get_relative_projection_for_spec(self, spec):
locator_spec = spec.package.extendee_spec
proj = spack.projections.get_projection(self.projections, locator_spec)
return spec.format(proj) if proj else ""
if proj:
return os.path.join(self._root, locator_spec.format(proj))
return self._root
def get_all_specs(self):
md_dirs = []
@@ -675,32 +639,6 @@ class SimpleFilesystemView(FilesystemView):
def __init__(self, root, layout, **kwargs):
super(SimpleFilesystemView, self).__init__(root, layout, **kwargs)
self.metadata_path = os.path.join(self._root, _metadata_path)
if not self.metadata:
self.metadata = self.read_metadata()
elif not os.path.exists(self.metadata_path):
self.write_metadata()
else:
if self.metadata != self.read_metadata():
msg = f"View at {self._root} has metadata file"
msg += " which does not match metadata passed manually."
raise ConflictingMetadataError(msg)
def write_metadata(self):
if self.metadata:
mkdirp(os.path.dirname(self.metadata_path))
with open(self.metadata_path, "w") as f:
f.write(s_yaml.dump_config({"metadata": self.metadata}))
def read_metadata(self):
if os.path.exists(self.metadata_path):
with open(self.metadata_path, "r") as f:
# no schema as this is not user modified
metadata_data = s_yaml.load(f)
return metadata_data["metadata"]
else:
return {}
def _sanity_check_view_projection(self, specs):
"""A very common issue is that we end up with two specs of the same
package, that project to the same prefix. We want to catch that as
@@ -827,13 +765,6 @@ def link_metadata(self, specs):
self.link(os.path.join(src_root, src_relpath), os.path.join(self._root, dst_relpath))
def get_relative_projection_for_spec(self, spec):
"""
Return the relative projection for a spec in this view.
Relies on the ordering of projections to avoid ambiguity.
"""
spec = spack.spec.Spec(spec)
# Extensions are placed by their extendee, not by their own spec
if spec.package.extendee_spec:
spec = spec.package.extendee_spec
@@ -841,6 +772,22 @@ def get_relative_projection_for_spec(self, spec):
p = spack.projections.get_projection(self.projections, spec)
return spec.format(p) if p else ""
def get_projection_for_spec(self, spec):
"""
Return the projection for a spec in this view.
Relies on the ordering of projections to avoid ambiguity.
"""
spec = spack.spec.Spec(spec)
if spec.package.extendee_spec:
spec = spec.package.extendee_spec
proj = spack.projections.get_projection(self.projections, spec)
if proj:
return os.path.join(self._root, spec.format(proj))
return self._root
#####################
# utility functions #
@@ -902,7 +849,3 @@ def get_dependencies(specs):
class ConflictingProjectionsError(SpackError):
"""Raised when a view has a projections file and is given one manually."""
class ConflictingMetadataError(SpackError):
"""Raised when a view has a metadata file and is given one manually."""

View File

@@ -30,10 +30,6 @@
"type": "array",
"items": {"type": "array", "items": {"type": "string"}},
},
"broadcast": {
"type": "array",
"items": {"type": "array", "items": {"type": "string"}},
},
"exclude": {"type": "array", "items": {"type": "string"}},
},
},
@@ -111,21 +107,6 @@
"type": "array",
"items": {"type": "string"},
},
"update_method": {
"anyOf": [
{
"type": "string",
"pattern": "(symlink|exchange)",
},
{
"type": "array",
"items": {
"type": "string",
"pattern": "(symlink|exchange)",
},
},
]
},
"projections": projections_scheme,
},
}
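The removed schema accepted update_method as either a single string or a list of strings drawn from "symlink" and "exchange". A sketch of what it admitted, validated with the jsonschema package:

import jsonschema

update_method_schema = {
    "anyOf": [
        {"type": "string", "pattern": "(symlink|exchange)"},
        {"type": "array", "items": {"type": "string", "pattern": "(symlink|exchange)"}},
    ]
}

jsonschema.validate("exchange", update_method_schema)               # accepted
jsonschema.validate(["exchange", "symlink"], update_method_schema)  # accepted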

View File

@@ -186,64 +186,41 @@ def _expand_matrix_constraints(matrix_config):
new_row.append([r])
expanded_rows.append(new_row)
# TODO someday: allow matrices inside `broadcast`
broadcast_rows = matrix_config.get("broadcast", [])
excludes = matrix_config.get("exclude", []) # only compute once
sigil = matrix_config.get("sigil", "")
broadcast_constraints = list(itertools.product(*broadcast_rows))
results = []
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
flat_combo = [Spec(x) for x in flat_combo]
# If no broadcast, this is [(,)].
# It will run once, as required, and apply no constraints
for broadcast_combo in broadcast_constraints:
final_combo = [_apply_broadcast(spec.copy(), broadcast_combo) for spec in flat_combo]
test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
test_spec.constrain(constraint)
# Check whether final spec is excluded
# requires constructing a spec from constraints
test_spec = final_combo[0].copy()
for constraint in final_combo[1:]:
test_spec.constrain(constraint)
# Abstract variants don't have normal satisfaction semantics
# Convert all variants to concrete types.
# This method is best effort, so all existing variants will be
# converted before any error is raised.
# Catch exceptions because we want to be able to operate on
# abstract specs without needing package information
try:
spack.variant.substitute_abstract_variants(test_spec)
except spack.variant.UnknownVariantError:
pass
if any(test_spec.satisfies(x) for x in excludes):
continue
# Abstract variants don't have normal satisfaction semantics
# Convert all variants to concrete types.
# This method is best effort, so all existing variants will be
# converted before any error is raised.
# Catch exceptions because we want to be able to operate on
# abstract specs without needing package information
try:
spack.variant.substitute_abstract_variants(test_spec)
except spack.variant.UnknownVariantError:
pass
if sigil:
flat_combo[0] = Spec(sigil + str(flat_combo[0]))
# actual exclusion check is here
if any(test_spec.satisfies(e) for e in excludes):
continue
# Apply sigil if applicable
if sigil:
final_combo[0] = Spec(sigil + str(final_combo[0]))
# Add to list of constraints
results.append(final_combo)
# Add to list of constraints
results.append(flat_combo)
return results
def _apply_broadcast(spec, constraints):
if constraints:
for node in spec.traverse():
if node.name:
for constraint in constraints:
node.constrain(constraint)
return spec
def _sigilify(item, sigil):
if isinstance(item, dict):
if sigil:

View File

@@ -52,19 +52,6 @@
sep = os.sep
supports_renameat2 = bool(spack.util.atomic_update.renameat2())
if supports_renameat2:
use_renameat2 = [True, False]
else:
use_renameat2 = [False]
@pytest.fixture(params=use_renameat2)
def atomic_update_implementations(request, monkeypatch):
if request.param is False:
monkeypatch.setattr(spack.util.atomic_update, "_renameat2", None)
yield
def check_mpileaks_and_deps_in_view(viewdir):
"""Check that the expected install directories exist."""
@@ -74,11 +61,9 @@ def check_mpileaks_and_deps_in_view(viewdir):
def check_viewdir_removal(viewdir):
"""Check that the uninstall/removal worked."""
view_spack_dir = str(viewdir.join(".spack"))
if not os.path.exists(view_spack_dir):
return
assert all(f in ["projections.yaml", "metadata.yaml"] for f in os.listdir(view_spack_dir))
assert not os.path.exists(str(viewdir.join(".spack"))) or os.listdir(
str(viewdir.join(".spack"))
) == ["projections.yaml"]
def test_add():
@@ -612,9 +597,7 @@ def test_init_from_yaml(tmpdir):
@pytest.mark.usefixtures("config")
def test_env_view_external_prefix(
tmpdir_factory, mutable_database, mock_packages, atomic_update_implementations
):
def test_env_view_external_prefix(tmpdir_factory, mutable_database, mock_packages):
fake_prefix = tmpdir_factory.mktemp("a-prefix")
fake_bin = fake_prefix.join("bin")
fake_bin.ensure(dir=True)
@@ -1195,9 +1178,7 @@ def test_store_different_build_deps(tmpdir):
assert x_read["y"].dag_hash() != y_read.dag_hash()
def test_env_updates_view_install(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_updates_view_install(tmpdir, mock_stage, mock_fetch, install_mockery):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
@@ -1207,9 +1188,7 @@ def test_env_updates_view_install(
check_mpileaks_and_deps_in_view(view_dir)
def test_env_view_fails(
tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_view_fails(tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
# We currently ignore file-file conflicts for the prefix merge,
# so in principle there will be no errors in this test. But
# the .spack metadata dir is handled separately and is more strict.
@@ -1226,9 +1205,7 @@ def test_env_view_fails(
install("--fake")
def test_env_view_fails_dir_file(
tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_view_fails_dir_file(tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
# This environment view fails to be created because a file
# and a dir are in the same path. Test that it mentions the problematic path.
view_dir = tmpdir.join("view")
@@ -1243,7 +1220,7 @@ def test_env_view_fails_dir_file(
def test_env_view_succeeds_symlinked_dir_file(
tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery
):
# A symlinked dir and an ordinary dir merge happily
view_dir = tmpdir.join("view")
@@ -1257,9 +1234,7 @@ def test_env_view_succeeds_symlinked_dir_file(
assert os.path.exists(os.path.join(x_dir, "file_in_symlinked_dir"))
def test_env_without_view_install(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_without_view_install(tmpdir, mock_stage, mock_fetch, install_mockery):
# Test enabling a view after installing specs
env("create", "--without-view", "test")
@@ -1280,9 +1255,7 @@ def test_env_without_view_install(
check_mpileaks_and_deps_in_view(view_dir)
def test_env_config_view_default(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_config_view_default(tmpdir, mock_stage, mock_fetch, install_mockery):
# This config doesn't mention whether a view is enabled
test_config = """\
env:
@@ -1300,9 +1273,7 @@ def test_env_config_view_default(
assert os.path.isdir(os.path.join(e.default_view.view()._root, ".spack", "mpileaks"))
def test_env_updates_view_install_package(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_updates_view_install_package(tmpdir, mock_stage, mock_fetch, install_mockery):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
@@ -1311,9 +1282,7 @@ def test_env_updates_view_install_package(
assert os.path.exists(str(view_dir.join(".spack/mpileaks")))
def test_env_updates_view_add_concretize(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_updates_view_add_concretize(tmpdir, mock_stage, mock_fetch, install_mockery):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
install("--fake", "mpileaks")
@@ -1324,24 +1293,22 @@ def test_env_updates_view_add_concretize(
check_mpileaks_and_deps_in_view(view_dir)
def test_env_updates_view_uninstall(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_updates_view_uninstall(tmpdir, mock_stage, mock_fetch, install_mockery):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
print(install("--fake", "--add", "mpileaks"))
install("--fake", "--add", "mpileaks")
check_mpileaks_and_deps_in_view(view_dir)
with ev.read("test"):
print(uninstall("-ay"))
uninstall("-ay")
check_viewdir_removal(view_dir)
def test_env_updates_view_uninstall_referenced_elsewhere(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
tmpdir, mock_stage, mock_fetch, install_mockery
):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
@@ -1358,14 +1325,10 @@ def test_env_updates_view_uninstall_referenced_elsewhere(
check_viewdir_removal(view_dir)
def test_env_updates_view_remove_concretize(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_updates_view_remove_concretize(tmpdir, mock_stage, mock_fetch, install_mockery):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
install("--fake", "mpileaks")
with ev.read("test"):
add("mpileaks")
concretize()
@@ -1379,9 +1342,7 @@ def test_env_updates_view_remove_concretize(
check_viewdir_removal(view_dir)
def test_env_updates_view_force_remove(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
def test_env_updates_view_force_remove(tmpdir, mock_stage, mock_fetch, install_mockery):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
@@ -1928,7 +1889,7 @@ def test_stack_definition_conditional_add_write(tmpdir):
def test_stack_combinatorial_view(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
@@ -1963,9 +1924,7 @@ def test_stack_combinatorial_view(
)
def test_stack_view_select(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
def test_stack_view_select(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
@@ -2005,9 +1964,7 @@ def test_stack_view_select(
)
def test_stack_view_exclude(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
def test_stack_view_exclude(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
@@ -2048,7 +2005,7 @@ def test_stack_view_exclude(
def test_stack_view_select_and_exclude(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
@@ -2090,9 +2047,7 @@ def test_stack_view_select_and_exclude(
)
def test_view_link_roots(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
def test_view_link_roots(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
@@ -2136,9 +2091,7 @@ def test_view_link_roots(
)
def test_view_link_run(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
def test_view_link_run(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
yaml = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
envdir = str(tmpdir)
@@ -2180,13 +2133,7 @@ def test_view_link_run(
@pytest.mark.parametrize("link_type", ["hardlink", "copy", "symlink"])
def test_view_link_type(
link_type,
tmpdir,
mock_fetch,
mock_packages,
mock_archive,
install_mockery,
atomic_update_implementations,
link_type, tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
@@ -2216,9 +2163,7 @@ def test_view_link_type(
assert os.path.islink(file_to_test) == (link_type == "symlink")
def test_view_link_all(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
def test_view_link_all(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
@@ -2329,7 +2274,7 @@ def test_stack_view_no_activate_without_default(
def test_stack_view_multiple_views(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
):
filename = str(tmpdir.join("spack.yaml"))
default_viewdir = str(tmpdir.join("default-view"))
@@ -2898,14 +2843,10 @@ def test_failed_view_cleanup(tmpdir, mock_stage, mock_fetch, install_mockery):
assert os.path.samefile(resolved_view, view)
def test_environment_view_target_already_exists(
tmpdir, mock_stage, mock_fetch, install_mockery, monkeypatch
):
def test_environment_view_target_already_exists(tmpdir, mock_stage, mock_fetch, install_mockery):
"""When creating a new view, Spack should check whether
the new view dir already exists. If so, it should not be
removed or modified."""
# Only works for symlinked atomic views
monkeypatch.setattr(spack.util.atomic_update, "_renameat2", None)
# Create a new environment
view = str(tmpdir.join("view"))
@@ -3298,103 +3239,3 @@ def test_environment_created_in_users_location(mutable_config, tmpdir):
assert dir_name in out
assert env_dir in ev.root(dir_name)
assert os.path.isdir(os.path.join(env_dir, dir_name))
@pytest.mark.parametrize("update_method", ["symlink", "exchange"])
def test_view_update_mismatch(update_method, tmpdir, install_mockery, mock_fetch, monkeypatch):
root = str(tmpdir.join("root"))
if update_method == "symlink":
os.makedirs(root)
checker = "cannot be updated with 'symlink' update method"
forceable = True
else:
monkeypatch.setattr(spack.util.atomic_update, "_renameat2", None)
checker = "does not support 'exchange' atomic update method"
forceable = False
view = ev.environment.ViewDescriptor(
base_path=str(tmpdir), root=root, update_method=update_method
)
spec = spack.spec.Spec("libelf").concretized()
install("libelf")
with pytest.raises(RuntimeError, match=checker):
view.regenerate([spec])
if forceable:
view.regenerate([spec], force=True)
assert os.path.exists(view.root)
@pytest.mark.parametrize("update_method", ["symlink", "exchange"])
def test_view_update_fails(update_method, tmpdir, install_mockery, mock_fetch, monkeypatch):
root = str(tmpdir.join("root"))
view = ev.environment.ViewDescriptor(
base_path=str(tmpdir), root=root, update_method=update_method
)
spec = spack.spec.Spec("libelf").concretized()
install("libelf")
def raises(*args, **kwargs):
raise OSError
# The python symlink code fails by raising an error
monkeypatch.setattr(fs, "rename", raises)
# The c library call fails by a non-zero return code
monkeypatch.setattr(spack.util.atomic_update, "_renameat2", lambda x, y, z, v, w: 1)
with pytest.raises(OSError):
view.regenerate([spec])
assert not os.path.exists(view.root)
if update_method == "symlink":
link = os.path.join(str(tmpdir), "._root", "._tmp_symlink")
assert not os.path.lexists(link)
@pytest.mark.parametrize("update_method", ["symlink", "exchange"])
def test_view_update_unnecessary(update_method, tmpdir, install_mockery, mock_fetch, monkeypatch):
if update_method == "exchange" and not supports_renameat2:
pytest.skip("Testing on an OS that does not support the exchange update method")
root = str(tmpdir.join("root"))
view = ev.environment.ViewDescriptor(
base_path=str(tmpdir), root=root, update_method=update_method
)
libelf = spack.spec.Spec("libelf").concretized()
install("libelf")
libdwarf = spack.spec.Spec("libdwarf").concretized()
install("libdwarf")
# Create a "previous" view
# Wait after each view regeneration to ensure timestamps are different
view.regenerate([libelf])
# monkeypatch so that any attempt to actually regenerate the view fails
def raises(*args, **kwargs):
raise AssertionError
old_view = view.update_method_to_use
monkeypatch.setattr(view, "update_method_to_use", raises)
# regenerating the view is a no-op, so doesn't raise
# will raise if the view isn't identical
view.regenerate([libelf])
with pytest.raises(AssertionError):
view.regenerate([libelf, libdwarf])
# Create another view so there are multiple old views around
monkeypatch.setattr(view, "update_method_to_use", old_view)
view.regenerate([libelf, libdwarf])
# Redo the monkeypatch
monkeypatch.setattr(view, "update_method_to_use", raises)
# no raise for no-op regeneration
# raise when it's not a no-op
view.regenerate([libelf, libdwarf])
with pytest.raises(AssertionError):
view.regenerate([libelf])

View File

@@ -214,17 +214,3 @@ def test_spec_list_constraints_with_structure(
speclist = SpecList("specs", [matrix])
assert len(speclist.specs) == 1
assert libdwarf_spec in speclist.specs[0]
def test_spec_list_broadcast(self, mock_packages):
matrix = {
"matrix": [["mpileaks"], ["^callpath"]],
"broadcast": [["%gcc", "%clang"], ["+debug", "~debug"]],
"exclude": ["+debug%clang"],
}
speclist = SpecList("specs", [matrix])
assert len(speclist) == 3
for spec in speclist:
for node in spec.traverse():
assert node.compiler.name == spec.compiler.name
assert node.variants["debug"].value == spec.variants["debug"].value
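The removed broadcast feature, exercised by this deleted test, duplicated every expanded matrix row once per combination of broadcast constraints and applied each constraint to every node of the spec. A small sketch of the combinatorics, independent of Spack's Spec machinery:

import itertools

matrix_rows = ["mpileaks ^callpath"]                    # one expanded matrix result
broadcast = [["%gcc", "%clang"], ["+debug", "~debug"]]  # broadcast rows

combos = list(itertools.product(*broadcast))
# [('%gcc', '+debug'), ('%gcc', '~debug'), ('%clang', '+debug'), ('%clang', '~debug')]
candidates = [" ".join((row,) + combo) for row in matrix_rows for combo in combos]

# String check approximates Spec.satisfies("+debug%clang") for this toy case.
kept = [s for s in candidates if not ("%clang" in s and "+debug" in s)]
assert len(kept) == 3  # matches `assert len(speclist) == 3` in the deleted test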

View File

@@ -1,75 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import ctypes
import os
from typing import Optional
import llnl.util.filesystem as fs
from llnl.util.symlink import symlink
# Magic numbers from linux headers
RENAME_EXCHANGE = 2
AT_FDCWD = -100
# object for renameat2 not set
# We use None for not found and notset for not set so that boolean checks work
# properly in client code
notset = object()
_renameat2 = notset
def set_renameat2():
libc: Optional[ctypes.CDLL] = None
try:
# CDLL(None) returns the python process
# python links against libc, so we can treat this as a libc handle
# we could also use CDLL("libc.so.6") but this is (irrelevantly) more future proof
libc = ctypes.CDLL(None)
except (OSError, TypeError):
# OSError if the call fails,
# TypeError on Windows
return None
return getattr(libc, "renameat2", None)
def renameat2():
global _renameat2
if _renameat2 is notset:
_renameat2 = set_renameat2()
return _renameat2
def atomic_update_renameat2(src, dest):
# Ensure a directory that is a symlink will not be read as symlink in libc
src = src.rstrip(os.path.sep)
dest = dest.rstrip(os.path.sep)
dest_exists = os.path.lexists(dest)
if not dest_exists:
fs.mkdirp(dest)
try:
rc = renameat2()(AT_FDCWD, src.encode(), AT_FDCWD, dest.encode(), RENAME_EXCHANGE)
if rc:
raise OSError(f"renameat2 failed to exchange {src} and {dest}")
except OSError:
if not dest_exists:
os.rmdir(dest)
raise
def atomic_update_symlink(src, dest):
# Create temporary symlink to point to src
tmp_symlink_name = os.path.join(os.path.dirname(dest), "._tmp_symlink")
if os.path.exists(tmp_symlink_name):
os.unlink(tmp_symlink_name)
symlink(src, tmp_symlink_name)
# atomically mv the symlink to destpath (still points to srcpath)
try:
fs.rename(tmp_symlink_name, dest)
except OSError:
os.unlink(tmp_symlink_name)
raise
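This deletes spack/util/atomic_update.py outright, along with the "exchange" update method. For reference, a minimal sketch of how the view code above drove these helpers, assuming a Linux glibc that exposes renameat2:

import os
import spack.util.atomic_update as atomic_update  # the module deleted here

os.makedirs("new_view", exist_ok=True)
if atomic_update.renameat2():
    # RENAME_EXCHANGE swaps "new_view" and "current_view" in one syscall.
    atomic_update.atomic_update_renameat2("new_view", "current_view")
else:
    # Otherwise retarget the "current_view" symlink via an atomic rename.
    atomic_update.atomic_update_symlink("new_view", "current_view")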

View File

@@ -138,7 +138,7 @@ def dump_environment(path, environment=None):
use_env = environment or os.environ
hidden_vars = set(["PS1", "PWD", "OLDPWD", "TERM_SESSION_ID"])
fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o600)
fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
with os.fdopen(fd, "w") as env_file:
for var, val in sorted(use_env.items()):
env_file.write(
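This is the fix for the dump problem described in commit 4e13b5374f: without O_TRUNC, a shorter dump written over an existing file leaves the tail of the old content in place. A quick demonstration of the failure mode:

import os
import tempfile

path = os.path.join(tempfile.gettempdir(), "dump_demo.txt")
with open(path, "w") as f:
    f.write("OLD-CONTENT-THAT-IS-LONG")

fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o600)  # no O_TRUNC
with os.fdopen(fd, "w") as f:
    f.write("new")

print(open(path).read())  # "new-CONTENT-THAT-IS-LONG": the stale tail survives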

View File

@@ -1336,6 +1336,10 @@ def lookup_ref(self, ref):
# won't properly update the local rev-list)
self.fetcher.git("fetch", "--tags", output=os.devnull, error=os.devnull)
# We need to do an attempt at fetching the commit in order to
# be sure to get it in case it comes from a PR in a fork.
self.fetcher.git("fetch", "origin", "%s" % ref, output=os.devnull, error=os.devnull)
# Ensure ref is a commit object known to git
# Note the brackets are literals, the ref replaces the format string
try:

View File

@@ -977,7 +977,7 @@ _spack_env_loads() {
_spack_env_view() {
if $list_options
then
SPACK_COMPREPLY="-h --help -f --force"
SPACK_COMPREPLY="-h --help"
else
SPACK_COMPREPLY=""
fi

View File

@@ -723,17 +723,14 @@ def setup_dependent_package(self, module, dependent_spec):
# Disable find package's config mode for versions of Boost that
# didn't provide it. See https://github.com/spack/spack/issues/20169
# and https://cmake.org/cmake/help/latest/module/FindBoost.html
#is_cmake = isinstance(dependent_spec.package, CMakePackage)
#if self.spec.satisfies("boost@:1.69.0") and is_cmake:
# args_fn = type(dependent_spec.package).cmake_args
if self.spec.satisfies("boost@:1.69.0") and dependent_spec.satisfies("build_system=cmake"):
args_fn = type(dependent_spec.package.builder).cmake_args
is_cmake = isinstance(dependent_spec.package, CMakePackage)
if self.spec.satisfies("boost@:1.69.0") and is_cmake:
args_fn = type(dependent_spec.package).cmake_args
def _cmake_args(self):
return ["-DBoost_NO_BOOST_CMAKE=ON"] + args_fn(self)
#type(dependent_spec.package).cmake_args = _cmake_args
type(dependent_spec.package.builder).cmake_args = _cmake_args
type(dependent_spec.package).cmake_args = _cmake_args
def setup_dependent_build_environment(self, env, dependent_spec):
if "+context" in self.spec and "context-impl" in self.spec.variants:

View File

@@ -17,12 +17,38 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):
maintainers("sethrj")
version("0.2.0", sha256="12af28fda0e482a9eba89781b4ead445cf6f170bc1b8d88cc814e49b1ec09e9f")
version("0.1.4", sha256="ea82a03fc750a2a805f87afd9ac944109dd7537edb5c0c370f93d332d4cd47db")
version("0.1.3", sha256="992c49a48adba884fe3933c9624da5bf480ef0694809430ae98903f2c28cc881")
version("0.1.2", sha256="d123ea2e34267adba387d46bae8c9a1146a2e047f87f2ea5f823878c1684678d")
version("0.1.1", sha256="a1d58e29226e89a2330d69c40049d61e7c885cf991824e60ff8c9ccc95fc5ec6")
version("0.1.0", sha256="46692977b9b31d73662252cc122d7f016f94139475788bca7fdcb97279b93af8")
version("0.2.1", sha256="b3717b43f70dd0da848139da4171ca7a887bb6777908845b6d953d47b1f4db41")
version(
"0.2.0",
sha256="12af28fda0e482a9eba89781b4ead445cf6f170bc1b8d88cc814e49b1ec09e9f",
deprecated=True,
)
version("0.1.5", sha256="5e63b9ce7fcfe34a8938565b84453bce51fa6639d1ede13bb59d41de6431cef4")
version(
"0.1.4",
sha256="ea82a03fc750a2a805f87afd9ac944109dd7537edb5c0c370f93d332d4cd47db",
deprecated=True,
)
version(
"0.1.3",
sha256="992c49a48adba884fe3933c9624da5bf480ef0694809430ae98903f2c28cc881",
deprecated=True,
)
version(
"0.1.2",
sha256="d123ea2e34267adba387d46bae8c9a1146a2e047f87f2ea5f823878c1684678d",
deprecated=True,
)
version(
"0.1.1",
sha256="a1d58e29226e89a2330d69c40049d61e7c885cf991824e60ff8c9ccc95fc5ec6",
deprecated=True,
)
version(
"0.1.0",
sha256="46692977b9b31d73662252cc122d7f016f94139475788bca7fdcb97279b93af8",
deprecated=True,
)
_cxxstd_values = ("14", "17")
@@ -49,7 +75,8 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):
depends_on("cmake@3.22:", type="build", when="+rocm")
depends_on("nlohmann-json")
depends_on("geant4@10.6:", when="+geant4")
depends_on("geant4@10.7:11.0", when="@:0.2.0 +geant4")
depends_on("geant4@10.6:11.0", when="@0.2.1: +geant4")
depends_on("hepmc3", when="+hepmc3")
depends_on("root", when="+root")
depends_on("swig", when="+swig")
@@ -68,6 +95,7 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):
depends_on("vecgeom +gdml@1.1.17:", when="+vecgeom")
depends_on("vecgeom +cuda", when="+vecgeom +cuda")
conflicts("cxxstd=14", when="@0.3:")
conflicts("+rocm", when="+cuda", msg="AMD and NVIDIA accelerators are incompatible")
conflicts("+rocm", when="+vecgeom", msg="HIP support is only available with ORANGE")
conflicts("^vecgeom+shared@1.2.0", when="+vecgeom +cuda")

View File

@@ -53,7 +53,6 @@ class Dyninst(CMakePackage):
depends_on("boost@1.61.0:" + boost_libs, when="@10.1.0:")
depends_on("boost@1.61.0:1.69" + boost_libs, when="@:10.0")
depends_on("boost@1.67.0:" + boost_libs, when="@11.0.0:")
depends_on("boost@1.70.0:" + boost_libs, when="@12:")
depends_on("libiberty+pic")
@@ -117,7 +116,7 @@ def cmake_args(self):
"-DBoost_ROOT_DIR=%s" % spec["boost"].prefix,
"-DElfUtils_ROOT_DIR=%s" % spec["elf"].prefix,
"-DLibIberty_ROOT_DIR=%s" % spec["libiberty"].prefix,
"-DTBB_ROOT_DIR=%s" % spec["tbb"].prefix.tbb,
"-DTBB_ROOT_DIR=%s" % spec["tbb"].prefix,
self.define("LibIberty_LIBRARIES", spec["libiberty"].libs),
]

View File

@@ -199,8 +199,8 @@ def setup_build_environment(self, env):
env.set("ESMF_CXX", spec["mpi"].mpicxx)
env.set("ESMF_F90", spec["mpi"].mpifc)
else:
os.environ["ESMF_CXX"] = os.environ["CXX"]
os.environ["ESMF_F90"] = os.environ["FC"]
env.set("ESMF_CXX", env["CXX"])
env.set("ESMF_F90", env["FC"])
# This environment variable controls the build option.
if "+debug" in spec:

View File

@@ -116,6 +116,7 @@ def setup_build_environment(self, env):
# internal Spack wrappers and fail.
env.set("CC_FOR_TARGET", self.compiler.cc)
env.set("CXX_FOR_TARGET", self.compiler.cxx)
env.set("GOMAXPROCS", make_jobs)
def setup_dependent_package(self, module, dependent_spec):
"""Called before go modules' install() methods.

View File

@@ -14,6 +14,7 @@ class Hdf5VolCache(CMakePackage):
maintainers("hyoklee", "lrknox")
version("default", branch="develop")
version("v1.1", tag="v1.1")
version("v1.0", tag="v1.0")
depends_on("hdf5-vol-async")

View File

@@ -0,0 +1,91 @@
From 411d62544717873432c49ef45c7cb99cc5de2fb8 Mon Sep 17 00:00:00 2001
From: "Mark W. Krentel" <krentel@rice.edu>
Date: Thu, 15 Dec 2022 16:43:43 -0600
Subject: [PATCH] Add a temporary hack to allow both ROCM 5.2/5.3 to build
cleanly.
There were some corner cases (build 5.3 pieces from spack and feed
into autotools) that didn't work. After the next release, I will want
to rework ROCM configure more extensively.
---
configure | 21 +++++++++++++--------
configure.ac | 17 +++++++++++------
2 files changed, 24 insertions(+), 14 deletions(-)
diff --git a/configure b/configure
index 1760e678e8..814376b3bd 100755
--- a/configure
+++ b/configure
@@ -23891,10 +23891,13 @@ $as_echo "$as_me: found $ROCM/rocprofiler/lib/librocprofiler64.so" >&6;}
fi
# HSA
- if test -f "$ROCM/include/hsa/hsa.h" ; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: found $ROCM/include/hsa/hsa.h" >&5
-$as_echo "$as_me: found $ROCM/include/hsa/hsa.h" >&6;}
- ROCM_HSA_IFLAGS="-I$ROCM/include/hsa"
+ # FIXME: as of rocm 5.2/5.3, this was not fully switched over,
+ # so temporarily use both paths.
+ if test -f "$ROCM/include/hsa/hsa.h" || test -f "$ROCM/include/hsa.h"
+ then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: found $ROCM: hsa.h" >&5
+$as_echo "$as_me: found $ROCM: hsa.h" >&6;}
+ ROCM_HSA_IFLAGS="-I$ROCM/include -I$ROCM/include/hsa"
ROCM_HSA_INC_MESG="$ROCM/hsa"
found=yes
fi
@@ -24020,10 +24023,12 @@ case "$ROCM_HSA" in
require_rocm=yes
found=no
- if test -f "$ROCM_HSA/include/hsa/hsa.h" ; then
- { $as_echo "$as_me:${as_lineno-$LINENO}: found $ROCM_HSA/include/hsa/hsa.h" >&5
-$as_echo "$as_me: found $ROCM_HSA/include/hsa/hsa.h" >&6;}
- ROCM_HSA_IFLAGS="-I$ROCM_HSA/include/hsa"
+ # FIXME: again, temporarily use both paths
+ if test -f "$ROCM_HSA/include/hsa/hsa.h" || test -f "$ROCM_HSA/include/hsa.h"
+ then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: found $ROCM_HSA: hsa.h" >&5
+$as_echo "$as_me: found $ROCM_HSA: hsa.h" >&6;}
+ ROCM_HSA_IFLAGS="-I$ROCM_HSA/include -I$ROCM_HSA/include/hsa"
ROCM_HSA_INC_MESG="$ROCM_HSA"
found=yes
fi
diff --git a/configure.ac b/configure.ac
index a14b15835f..9d5ed46134 100644
--- a/configure.ac
+++ b/configure.ac
@@ -4885,9 +4885,12 @@ case "$ROCM" in
fi
# HSA
- if test -f "$ROCM/include/hsa/hsa.h" ; then
- AC_MSG_NOTICE([found $ROCM/include/hsa/hsa.h])
- ROCM_HSA_IFLAGS="-I$ROCM/include/hsa"
+ # FIXME: as of rocm 5.2/5.3, this was not fully switched over,
+ # so temporarily use both paths.
+ if test -f "$ROCM/include/hsa/hsa.h" || test -f "$ROCM/include/hsa.h"
+ then
+ AC_MSG_NOTICE([found $ROCM: hsa.h])
+ ROCM_HSA_IFLAGS="-I$ROCM/include -I$ROCM/include/hsa"
ROCM_HSA_INC_MESG="$ROCM/hsa"
found=yes
fi
@@ -5002,9 +5005,11 @@ case "$ROCM_HSA" in
require_rocm=yes
found=no
- if test -f "$ROCM_HSA/include/hsa/hsa.h" ; then
- AC_MSG_NOTICE([found $ROCM_HSA/include/hsa/hsa.h])
- ROCM_HSA_IFLAGS="-I$ROCM_HSA/include/hsa"
+ # FIXME: again, temporarily use both paths
+ if test -f "$ROCM_HSA/include/hsa/hsa.h" || test -f "$ROCM_HSA/include/hsa.h"
+ then
+ AC_MSG_NOTICE([found $ROCM_HSA: hsa.h])
+ ROCM_HSA_IFLAGS="-I$ROCM_HSA/include -I$ROCM_HSA/include/hsa"
ROCM_HSA_INC_MESG="$ROCM_HSA"
found=yes
fi
--
GitLab

View File

@@ -0,0 +1,130 @@
From 511afd95b01d743edc5940c84e0079f462b2c23e Mon Sep 17 00:00:00 2001
From: "Mark W. Krentel" <krentel@rice.edu>
Date: Tue, 18 May 2021 14:54:41 -0500
Subject: [PATCH] Cleanup some usage for gcc/g++ 11.x (#413)
1. Change epsilon to hpc_epsilon in prof/Metric header files. This
conflicted with using epsilon in some STL template libraries.
2. Add const to some comparison operators that are used in some STL
maps.
---
src/lib/banal/Struct-Inline.hpp | 6 +++---
src/lib/prof/Metric-AExpr.cpp | 4 ++--
src/lib/prof/Metric-AExpr.hpp | 2 +-
src/lib/prof/Metric-AExprIncr.hpp | 6 +++---
src/lib/support/StringTable.hpp | 2 +-
5 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/src/lib/banal/Struct-Inline.hpp b/src/lib/banal/Struct-Inline.hpp
index ffb93355fd..0099ad112d 100644
--- a/src/lib/banal/Struct-Inline.hpp
+++ b/src/lib/banal/Struct-Inline.hpp
@@ -150,14 +150,14 @@ public:
pretty_index = strTab.str2index(node.getPrettyName());
}
- bool operator == (const FLPIndex rhs)
+ bool operator == (const FLPIndex rhs) const
{
return file_index == rhs.file_index
&& line_num == rhs.line_num
&& pretty_index == rhs.pretty_index;
}
- bool operator != (const FLPIndex rhs)
+ bool operator != (const FLPIndex rhs) const
{
return ! (*this == rhs);
}
@@ -167,7 +167,7 @@ public:
// Compare (file, line, proc) indices lexigraphically.
class FLPCompare {
public:
- bool operator() (const FLPIndex t1, const FLPIndex t2)
+ bool operator() (const FLPIndex t1, const FLPIndex t2) const
{
if (t1.file_index < t2.file_index) { return true; }
if (t1.file_index > t2.file_index) { return false; }
diff --git a/src/lib/prof/Metric-AExpr.cpp b/src/lib/prof/Metric-AExpr.cpp
index 2ce43e6d39..5b32ff67d1 100644
--- a/src/lib/prof/Metric-AExpr.cpp
+++ b/src/lib/prof/Metric-AExpr.cpp
@@ -483,7 +483,7 @@ CoefVar::eval(const Metric::IData& mdata) const
double sdev = sqrt(v_m.first); // always non-negative
double mean = v_m.second;
double z = 0.0;
- if (mean > epsilon) {
+ if (mean > hpc_epsilon) {
z = sdev / mean;
}
@@ -522,7 +522,7 @@ RStdDev::eval(const Metric::IData& mdata) const
double sdev = sqrt(v_m.first); // always non-negative
double mean = v_m.second;
double z = 0.0;
- if (mean > epsilon) {
+ if (mean > hpc_epsilon) {
z = (sdev / mean) * 100;
}
diff --git a/src/lib/prof/Metric-AExpr.hpp b/src/lib/prof/Metric-AExpr.hpp
index 56359cc9df..d75189f763 100644
--- a/src/lib/prof/Metric-AExpr.hpp
+++ b/src/lib/prof/Metric-AExpr.hpp
@@ -97,7 +97,7 @@
//****************************************************************************
-#define epsilon (0.000001)
+#define hpc_epsilon (0.000001)
namespace Prof {
diff --git a/src/lib/prof/Metric-AExprIncr.hpp b/src/lib/prof/Metric-AExprIncr.hpp
index f1b38d7f74..d0c0feb7e6 100644
--- a/src/lib/prof/Metric-AExprIncr.hpp
+++ b/src/lib/prof/Metric-AExprIncr.hpp
@@ -97,7 +97,7 @@
//****************************************************************************
-#define epsilon (0.000001)
+#define hpc_epsilon (0.000001)
namespace Prof {
@@ -841,7 +841,7 @@ public:
double sdev = finalizeStdDev(mdata);
double mean = accumVar(1, mdata);
double z = 0.0;
- if (mean > epsilon) {
+ if (mean > hpc_epsilon) {
z = sdev / mean;
}
accumVar(0, mdata) = z;
@@ -927,7 +927,7 @@ public:
double sdev = finalizeStdDev(mdata);
double mean = accumVar(1, mdata);
double z = 0.0;
- if (mean > epsilon) {
+ if (mean > hpc_epsilon) {
z = (sdev / mean) * 100;
}
accumVar(0, mdata) = z;
diff --git a/src/lib/support/StringTable.hpp b/src/lib/support/StringTable.hpp
index 9930bc5649..36ce5b7fa9 100644
--- a/src/lib/support/StringTable.hpp
+++ b/src/lib/support/StringTable.hpp
@@ -75,7 +75,7 @@ namespace HPC {
// compare the strings, not the pointers
class StringCompare {
public:
- bool operator() (const std::string *s1, const std::string *s2)
+ bool operator() (const std::string *s1, const std::string *s2) const
{
return *s1 < *s2;
}
--
GitLab

View File

@@ -162,17 +162,11 @@ class Hpctoolkit(AutotoolsPackage):
# Fix the build for old revs with gcc 10.x.
patch("gcc10-enum.patch", when="@2020.01.01:2020.08 %gcc@10.0:")
patch(
"https://gitlab.com/hpctoolkit/hpctoolkit/-/commit/511afd95b01d743edc5940c84e0079f462b2c23e.patch",
sha256="8da18df88a80847c092da8d0892de51ea2bf2523124148b6305ab8717707d897",
when="@2019.08.01:2021.03 %gcc@11.0:",
)
patch("511afd95b01d743edc5940c84e0079f462b2c23e.patch", when="@2019.08.01:2021.03 %gcc@11.0:")
# Update configure for rocm 5.3.0
patch(
"https://gitlab.com/hpctoolkit/hpctoolkit/-/commit/411d62544717873432c49ef45c7cb99cc5de2fb8.patch",
sha256="484045891a665cdba3b0f141540c89f0d691ed32c5912ef62a93670d44c2786c",
when="@2022.04:2022.10 +rocm ^hip@5.3.0:",
"411d62544717873432c49ef45c7cb99cc5de2fb8.patch", when="@2022.04:2022.10 +rocm ^hip@5.3.0:"
)
# Change python to python3 for some old revs that use a script

View File

@@ -740,8 +740,7 @@ def post_install(self):
install_tree("clang/bindings/python", python_platlib)
with working_dir(self.build_directory):
if not os.path.exists(join_path(self.prefix, 'libexec', 'llvm')):
install_tree("bin", join_path(self.prefix, "libexec", "llvm"))
install_tree("bin", join_path(self.prefix, "libexec", "llvm"))
def llvm_config(self, *args, **kwargs):
lc = Executable(self.prefix.bin.join("llvm-config"))

View File

@@ -124,10 +124,7 @@ class NetcdfC(AutotoolsPackage):
# Starting version 4.4.0, it became possible to disable parallel I/O even
# if HDF5 supports it. For previous versions of the library we need
# HDF5 without mpi support to disable parallel I/O:
#depends_on("hdf5~mpi", when="@:4.3~mpi")
# While it may be possible to do this, it messes with concretizing large environments with
# both ~mpi and +mpi builds of hdf5
depends_on("hdf5~mpi", when="~mpi")
depends_on("hdf5~mpi", when="@:4.3~mpi")
# We need HDF5 with mpi support to enable parallel I/O.
depends_on("hdf5+mpi", when="+mpi")

View File

@@ -31,6 +31,8 @@ class Ompss2(Package):
depends_on("python", type="build")
depends_on("cmake", type="build")
depends_on("extrae", when="+extrae")
depends_on("boost@1.59.0:")
depends_on("numactl")
resource(
name="jemalloc",
@@ -105,6 +107,8 @@ def install_nanos6(self, spec, prefix):
"--prefix=%s" % prefix,
"--with-jemalloc=%s" % prefix,
"--with-hwloc=%s" % spec["hwloc"].prefix,
"--with-boost=%s" % spec["boost"].prefix,
"--with-libnuma=%s" % spec["numactl"].prefix,
"--disable-stats-instrumentation",
"--disable-verbose-instrumentation",
"--disable-lint-instrumentation",

View File

@@ -42,10 +42,13 @@ class Openmpi(AutotoolsPackage, CudaPackage):
# Current
version(
"4.1.4", sha256="92912e175fd1234368c8730c03f4996fe5942e7479bb1d10059405e7f2b3930d"
) # libmpi.so.40.30.4
"4.1.5", sha256="a640986bc257389dd379886fdae6264c8cfa56bc98b71ce3ae3dfbd8ce61dbe3"
) # libmpi.so.40.30.5
# Still supported
version(
"4.1.4", sha256="92912e175fd1234368c8730c03f4996fe5942e7479bb1d10059405e7f2b3930d"
) # libmpi.so.40.30.4
version(
"4.1.3", sha256="3d81d04c54efb55d3871a465ffb098d8d72c1f48ff1cbaf2580eb058567c0a3b"
) # libmpi.so.40.30.3

View File

@@ -0,0 +1,21 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyNbqa(PythonPackage):
"""Run any standard Python code quality tool on a Jupyter Notebook."""
homepage = "https://github.com/nbQA-dev/nbQA"
pypi = "nbqa/nbqa-1.6.3.tar.gz"
version("1.6.3", sha256="5394a29fc6d27b9a950c0a36d2d9de25de980be9acfe2a3f3aea0d27b5f7fec1")
depends_on("python@3.8:", type=("build", "run"))
depends_on("py-setuptools", type="build")
depends_on("py-ipython@7.8:", type=("build", "run"))
depends_on("py-tokenize-rt@3.2:", type=("build", "run"))
depends_on("py-tomli", type=("build", "run"))

View File

@@ -18,7 +18,7 @@ class PyWarpx(PythonPackage):
"""
homepage = "https://ecp-warpx.github.io"
url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/23.01.tar.gz"
url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/23.02.tar.gz"
git = "https://github.com/ECP-WarpX/WarpX.git"
maintainers("ax3l", "dpgrote", "RemiLehe")
@@ -27,6 +27,7 @@ class PyWarpx(PythonPackage):
# NOTE: if you update the versions here, also see warpx
version("develop", branch="development")
version("23.02", sha256="a6c63ebc38cbd224422259a814be501ac79a3b734dab7f59500b6957cddaaac1")
version("23.01", sha256="e853d01c20ea00c8ddedfa82a31a11d9d91a7f418d37d7f064cf8a241ea4da0c")
version("22.12", sha256="96019902cd6ea444a1ae515e8853048e9074822c168021e4ec1687adc72ef062")
version("22.11", sha256="528f65958f2f9e60a094e54eede698e871ccefc89fa103fe2a6f22e4a059515e")
@@ -53,6 +54,7 @@ class PyWarpx(PythonPackage):
variant("mpi", default=True, description="Enable MPI support")
for v in [
"23.02",
"23.01",
"22.12",
"22.11",

View File

@@ -0,0 +1,27 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RArrangements(RPackage):
"""Fast Generators and Iterators for Permutations, Combinations, Integer
Partitions and Compositions.
Fast generators and iterators for permutations, combinations, integer
partitions and compositions. The arrangements are in lexicographical order
and generated iteratively in a memory efficient manner. It has been
demonstrated that 'arrangements' outperforms most existing packages of
similar kind. Benchmarks can be found at
<https://randy3k.github.io/arrangements/articles/benchmark.html>."""
cran = "arrangements"
version("1.1.9", sha256="e9b5dcb185ec9b28201b196384b04a8d5a15f4ddb9e0b0b2a0c718635ff7345b")
depends_on("r@3.4.0:", type=("build", "run"))
depends_on("r-gmp", type=("build", "run"))
depends_on("r-r6", type=("build", "run"))
depends_on("gmp@4.2.3:")

View File

@@ -0,0 +1,28 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RGoogleauthr(RPackage):
"""Authenticate and Create Google APIs.
Create R functions that interact with OAuth2 Google APIs
<https://developers.google.com/apis-explorer/> easily, with auto-refresh
and Shiny compatibility."""
cran = "googleAuthR"
version("2.0.0", sha256="ba504baf3bde2e1b3e988bee7602df5765cc6ca542cf0ab76a782c4e60966feb")
depends_on("r@3.3.0:", type=("build", "run"))
depends_on("r-assertthat", type=("build", "run"))
depends_on("r-cli", type=("build", "run"))
depends_on("r-digest", type=("build", "run"))
depends_on("r-gargle@1.2.0:", type=("build", "run"))
depends_on("r-httr@1.4.0:", type=("build", "run"))
depends_on("r-jsonlite@1.6:", type=("build", "run"))
depends_on("r-memoise@1.1.0:", type=("build", "run"))
depends_on("r-rlang", type=("build", "run"))

View File

@@ -0,0 +1,26 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RIeugwasr(RPackage):
"""R Interface to the OpenGWAS Database API.
R interface to the OpenGWAS database API. Includes a wrapper
to make generic calls to the API, plus convenience functions for
specific queries."""
homepage = "https://github.com/MRCIEU/ieugwasr"
url = "https://github.com/MRCIEU/ieugwasr/archive/refs/tags/0.1.5.tar.gz"
version("0.1.5", sha256="8d900d5a780f23836c80191f9635fbf48a0ca94f828452948c0f445e3217f422")
depends_on("r@3.6.0:", type=("build", "run"))
depends_on("r-magrittr", type=("build", "run"))
depends_on("r-googleauthr", type=("build", "run"))
depends_on("r-dplyr", type=("build", "run"))
depends_on("r-httr", type=("build", "run"))
depends_on("r-jsonlite", type=("build", "run"))

View File

@@ -0,0 +1,29 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RIterpc(RPackage):
"""Efficient Iterator for Permutations and Combinations.
Iterator for generating permutations and combinations. They can be either
drawn with or without replacement, or with distinct/ non-distinct items
(multiset). The generated sequences are in lexicographical order
(dictionary order). The algorithms to generate permutations and
combinations are memory efficient. These iterative algorithms enable users
to process all sequences without putting all results in the memory at the
same time. The algorithms are written in C/C++ for faster performance.
Note: 'iterpc' is no longer being maintained. Users are recommended to
switch to 'arrangements'."""
cran = "iterpc"
version("0.4.2", sha256="38bd464042a27536f676e889263eb2c257a431b59083f58cb54473f42ba2071b")
depends_on("r@3.0.0:", type=("build", "run"))
depends_on("r-iterators", type=("build", "run"))
depends_on("r-gmp@0.5-12:", type=("build", "run"))
depends_on("r-arrangements@1.0.0:", type=("build", "run"))

View File

@@ -0,0 +1,32 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMendelianrandomization(RPackage):
"""Mendelian Randomization Package.
Encodes several methods for performing Mendelian randomization analyses
with summarized data. Summarized data on genetic associations with the
exposure and with the outcome can be obtained from large consortia. These
data can be used for obtaining causal estimates using instrumental variable
methods."""
cran = "MendelianRandomization"
version("0.7.0", sha256="cad7cc1b6964fc7d299864378694c5fd947caa83796a1958e581299796b854c7")
depends_on("r@3.0.1:", type=("build", "run"))
depends_on("r-knitr", type=("build", "run"))
depends_on("r-rmarkdown", type=("build", "run"))
depends_on("r-plotly@3.6.0:", type=("build", "run"))
depends_on("r-ggplot2@1.0.1:", type=("build", "run"))
depends_on("r-robustbase@0.92-6:", type=("build", "run"))
depends_on("r-matrix@1.2:", type=("build", "run"))
depends_on("r-iterpc@0.3:", type=("build", "run"))
depends_on("r-quantreg@5.01:", type=("build", "run"))
depends_on("r-rjson", type=("build", "run"))
depends_on("r-glmnet", type=("build", "run"))

View File

@@ -0,0 +1,33 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMeta(RPackage):
"""General Package for Meta-Analysis.
User-friendly general package providing standard methods for meta-analysis
and supporting Schwarzer, Carpenter, and Rücker
<doi:10.1007/978-3-319-21416-0>, "Meta-Analysis with R" (2015): - common
effect and random effects meta-analysis; - several plots (forest, funnel,
Galbraith / radial, L'Abbe, Baujat, bubble); - three-level meta-analysis
model; - generalised linear mixed model; - Hartung-Knapp method for random
effects model; - Kenward-Roger method for random effects model; -
prediction interval; - statistical tests for funnel plot asymmetry; -
trim-and-fill method to evaluate bias in meta-analysis; - meta-regression;
- cumulative meta-analysis and leave-one-out meta-analysis; - import data
from 'RevMan 5'; - produce forest plot summarising several (subgroup)
meta-analyses."""
cran = "meta"
version("6.2-0", sha256="8ec8fb412996bbe17d3ca073f15c191a77bad486b08f39d7b8c2d07360ad5781")
depends_on("r@4.0.0:", type=("build", "run"))
depends_on("r-metafor@3.0-0:", type=("build", "run"))
depends_on("r-lme4", type=("build", "run"))
depends_on("r-compquadform", type=("build", "run"))
depends_on("r-xml2", type=("build", "run"))

View File

@@ -0,0 +1,21 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMetadat(RPackage):
"""Meta-Analysis Datasets.
A collection of meta-analysis datasets for teaching purposes,
illustrating/testing meta-analytic methods, and validating published
analyses."""
cran = "metadat"
version("1.2-0", sha256="f0cce5e30c3d256eaf5a41e4f52ffc7108e195016a4b99409e0ab4c2ef58f5b8")
depends_on("r@4.0.0:", type=("build", "run"))
depends_on("r-mathjaxr", type=("build", "run"))

View File

@@ -0,0 +1,39 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMetafor(RPackage):
"""Meta-Analysis Package for R.
A comprehensive collection of functions for conducting meta-analyses in R.
The package includes functions to calculate various effect sizes or outcome
measures, fit equal-, fixed-, random-, and mixed-effects models to such
data, carry out moderator and meta-regression analyses, and create various
types of meta-analytical plots (e.g., forest, funnel, radial, L'Abbe,
Baujat, bubble, and GOSH plots). For meta-analyses of binomial and
person-time data, the package also provides functions that implement
specialized methods, including the Mantel-Haenszel method, Peto's method,
and a variety of suitable generalized linear (mixed-effects) models (i.e.,
mixed-effects logistic and Poisson regression models). Finally, the package
provides functionality for fitting meta-analytic multivariate/multilevel
models that account for non-independent sampling errors and/or true effects
(e.g., due to the inclusion of multiple treatment studies, multiple
endpoints, or other forms of clustering). Network meta-analyses and
meta-analyses accounting for known correlation structures (e.g., due to
phylogenetic relatedness) can also be conducted. An introduction to the
package can be found in Viechtbauer (2010) <doi:10.18637/jss.v036.i03>."""
cran = "metafor"
version("3.8-1", sha256="d694577f954144d8a5eeab6521fe1c87e68ddf9ecfd7ccc915d01533371b0514")
depends_on("r@4.0.0:", type=("build", "run"))
depends_on("r-matrix", type=("build", "run"))
depends_on("r-metadat", type=("build", "run"))
depends_on("r-nlme", type=("build", "run"))
depends_on("r-mathjaxr", type=("build", "run"))
depends_on("r-pbapply", type=("build", "run"))

View File

@@ -0,0 +1,24 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMrRaps(RPackage):
"""Two Sample Mendelian Randomization using Robust Adjusted Profile Score.
Mendelian randomization is a method of identifying and estimating a
confounded causal effect using genetic instrumental variables. This
package implements methods for two-sample Mendelian randomization with
summary statistics by using Robust Adjusted Profile Score (RAPS).
References: Qingyuan Zhao, Jingshu Wang, Jack Bowden, Dylan S. Small.
Statistical inference in two-sample summary-data Mendelian randomization
using robust adjusted profile score. <arXiv:1801.09652>."""
cran = "mr.raps"
version("0.2", sha256="c899f6143dac99e1232ff0a8d9f5fe099d4f69960782e6843db5b0d7f4f63b19")
depends_on("r-nortest", type=("build", "run"))

View File

@@ -0,0 +1,19 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMrinstruments(RPackage):
"""Data sources for genetic instruments to be used in MR.
Datasets of eQTLs, GWAS catalogs, etc."""
homepage = "https://github.com/MRCIEU/MRInstruments"
url = "https://github.com/MRCIEU/MRInstruments/archive/refs/tags/0.3.3.tar.gz"
version("0.3.3", sha256="4ddbaf6335133e8f7baef469d6bc1f89212462b9f4062c9e4ddda37b12eb3486")
depends_on("r@2.10:", type=("build", "run"))

View File

@@ -0,0 +1,18 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMrmix(RPackage):
"""Mendelian Randomization Analysis Using Mixture Models (MRMix).
This package gives robust estimation of causal effects by conducting
Mendelian randomization analysis using a mixture model approach."""
homepage = "https://github.com/gqi/MRMix"
git = "https://github.com/gqi/MRMix"
version("0.1.0", commit="56afdb2bc96760842405396f5d3f02e60e305039")

View File

@@ -0,0 +1,21 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RMrpresso(RPackage):
"""Performs the Mendelian Randomization Pleiotropy RESidual Sum and Outlier
(MR-PRESSO) test.
MR-PRESSO (Mendelian Randomization Pleiotropy RESidual Sum and Outlier) is
a framework that allows for the evaluation of pleiotropy in
multi-instrument Mendelian Randomization utilizing genome-wide summary
association statistics."""
homepage = "https://github.com/rondolab/MR-PRESSO"
git = "https://github.com/rondolab/MR-PRESSO"
version("1.0", commit="cece763b47e59763a7916974de43c7cb93843e41")

View File

@@ -0,0 +1,23 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RRadialmr(RPackage):
"""RadialMR.
A package for implementing radial inverse variance weighted and MR-Egger
methods."""
homepage = "https://github.com/WSpiller/RadialMR"
git = "https://github.com/WSpiller/RadialMR"
version("1.0", commit="d63d3fc8270836ab441b9e14a5ba3eeb2795d7cb")
depends_on("r@3.5.0:", type=("build", "run"))
depends_on("r-ggplot2", type=("build", "run"))
depends_on("r-magrittr", type=("build", "run"))
depends_on("r-plotly", type=("build", "run"))

View File

@@ -0,0 +1,51 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RTwosamplemr(RPackage):
"""Two Sample MR functions and interface to MR Base database.
A package for performing Mendelian randomization using GWAS summary data.
It uses the IEU GWAS database to obtain data automatically, and a wide
range of methods to run the analysis. You can use the MR-Base web app to
try out a limited range of the functionality in this package, but for any
serious work we strongly recommend using this R package."""
homepage = "https://mrcieu.github.io/TwoSampleMR/"
url = "https://github.com/MRCIEU/TwoSampleMR/archive/refs/tags/v0.5.6.tar.gz"
version("0.5.6", sha256="c63eb008ab7ed08a6f30ccbf0c299beb31b2f5835e5e2aa1b59c5e4fe284a30c")
depends_on("r@3.6.0:", type=("build", "run"))
depends_on("r-ieugwasr@0.1.5:", type=("build", "run"))
depends_on("r-ggplot2", type=("build", "run"))
depends_on("r-gridextra", type=("build", "run"))
depends_on("r-cowplot", type=("build", "run"))
depends_on("r-plyr", type=("build", "run"))
depends_on("r-reshape2", type=("build", "run"))
depends_on("r-stringr", type=("build", "run"))
depends_on("r-knitr", type=("build", "run"))
depends_on("r-markdown", type=("build", "run"))
depends_on("r-gtable", type=("build", "run"))
depends_on("r-rmarkdown", type=("build", "run"))
depends_on("r-mendelianrandomization", type=("build", "run"))
depends_on("r-dplyr", type=("build", "run"))
depends_on("r-mr-raps", type=("build", "run"))
depends_on("r-psych", type=("build", "run"))
depends_on("r-magrittr", type=("build", "run"))
depends_on("r-car", type=("build", "run"))
depends_on("r-randomforest", type=("build", "run"))
depends_on("r-meta", type=("build", "run"))
depends_on("r-data-table", type=("build", "run"))
depends_on("r-mrpresso", type=("build", "run"))
depends_on("r-mrinstruments", type=("build", "run"))
depends_on("r-radialmr", type=("build", "run"))
depends_on("r-mrmix", type=("build", "run"))
depends_on("r-glmnet", type=("build", "run"))
depends_on("r-lattice", type=("build", "run"))
depends_on("r-pbapply", type=("build", "run"))
depends_on("r-mass", type=("build", "run"))

View File

@@ -17,7 +17,7 @@ class Warpx(CMakePackage):
"""
homepage = "https://ecp-warpx.github.io"
url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/23.01.tar.gz"
url = "https://github.com/ECP-WarpX/WarpX/archive/refs/tags/23.02.tar.gz"
git = "https://github.com/ECP-WarpX/WarpX.git"
maintainers("ax3l", "dpgrote", "MaxThevenet", "RemiLehe")
@@ -25,6 +25,7 @@ class Warpx(CMakePackage):
# NOTE: if you update the versions here, also see py-warpx
version("develop", branch="development")
version("23.02", sha256="a6c63ebc38cbd224422259a814be501ac79a3b734dab7f59500b6957cddaaac1")
version("23.01", sha256="e853d01c20ea00c8ddedfa82a31a11d9d91a7f418d37d7f064cf8a241ea4da0c")
version("22.12", sha256="96019902cd6ea444a1ae515e8853048e9074822c168021e4ec1687adc72ef062")
version("22.11", sha256="528f65958f2f9e60a094e54eede698e871ccefc89fa103fe2a6f22e4a059515e")