Compare commits

...

56 Commits

Author SHA1 Message Date
Gregory
265c479984 test/cmd/env.py: fix check_viewdir_removal 2023-04-05 18:44:07 -07:00
Gregory
686ece1eb7 fixup 2023-04-05 18:44:07 -07:00
Gregory
63c6df152b fixup 2023-04-05 18:44:07 -07:00
Gregory Becker
c9bb1a937b refactor for views to store metadata 2023-04-05 18:44:07 -07:00
Gregory Becker
8d8efa074d debug 2023-04-05 18:44:06 -07:00
becker33
a1b94653b0 [@spackbot] updating style on behalf of becker33 2023-04-05 18:44:06 -07:00
Gregory
e3da8956c8 debug 2023-04-05 18:44:06 -07:00
becker33
adf5a46313 [@spackbot] updating style on behalf of becker33 2023-04-05 18:44:06 -07:00
Gregory
4514b7c737 debugging 2023-04-05 18:44:06 -07:00
Gregory
f1c831cf6e ensure timestamps are different in tests 2023-04-05 18:44:06 -07:00
Gregory Becker
c20a4d6ad1 fix bug in recreating new exchange view; test 2023-04-05 18:44:05 -07:00
Gregory Becker
4fbb23b89e fix relocation broken symlink, discovered in testing 2023-04-03 23:51:08 -07:00
Gregory Becker
7904d2f13a improved way to find oldest file in dir 2023-04-03 23:51:08 -07:00
Gregory Becker
fec8a6dace address review: refactor for clarity 2023-04-03 23:51:08 -07:00
Greg Becker
4bc30a40a3 Apply suggestions from code review
Co-authored-by: Todd Gamblin <tgamblin@llnl.gov>
2023-04-03 23:51:07 -07:00
Greg Becker
2fba1a58ef Update lib/spack/spack/environment/environment.py
Co-authored-by: Todd Gamblin <tgamblin@llnl.gov>
2023-04-03 23:51:07 -07:00
becker33
11139a9e39 [@spackbot] updating style on behalf of becker33 2023-04-03 23:51:07 -07:00
Gregory Becker
8ce1d9c05a test for not writing the same view multiple times 2023-04-03 23:51:06 -07:00
Gregory Becker
f4865334ae avoid writing the same view multiple times in exchange mode 2023-04-03 23:51:06 -07:00
Gregory Becker
c39610e5a0 update test to match new behavior 2023-04-03 23:51:06 -07:00
Gregory Becker
3146dbee05 typo 2023-04-03 23:51:06 -07:00
becker33
e6c34d39ba [@spackbot] updating style on behalf of becker33 2023-04-03 23:51:05 -07:00
Gregory Becker
9a1e9574aa remove auto replace with [exchange, symlink] 2023-04-03 23:51:05 -07:00
Gregory Becker
b97107af54 increase coverage 2023-04-03 23:51:05 -07:00
Gregory Becker
2e96f86df2 update completion 2023-04-03 23:51:05 -07:00
Gregory Becker
b48dfe6589 style 2023-04-03 23:51:04 -07:00
Gregory Becker
b8e58e9bd8 tinker with error message 2023-04-03 23:51:04 -07:00
Gregory Becker
bd305d48a9 enable spack env view regenerate -f 2023-04-03 23:50:53 -07:00
Gregory Becker
ae3a5f3848 additional testing 2023-04-03 23:49:58 -07:00
Gregory Becker
24dad071d4 remove vestigial code 2023-04-03 23:49:57 -07:00
Gregory Becker
5fc3dd7a26 Revert "test"
This reverts commit 23e65646d5.
2023-04-03 23:49:57 -07:00
Gregory Becker
14f0ad4dd3 indentation typo 2023-04-03 23:49:57 -07:00
Gregory Becker
e156679b5a test 2023-04-03 23:49:57 -07:00
Gregory Becker
6f959badcb refactor for review 2023-04-03 23:49:56 -07:00
Gregory Becker
6bad473e4e ensure renameat2 checks only run when required 2023-04-03 23:49:56 -07:00
Gregory Becker
9baea936f2 fix CDLL failure on windows 2023-04-03 23:49:56 -07:00
Gregory Becker
a30b07dd36 address review comments 2023-04-03 23:49:56 -07:00
Gregory Becker
553a9036eb typo 2023-04-03 23:49:55 -07:00
Gregory Becker
d9637751cb flake 2023-04-03 23:49:55 -07:00
Gregory Becker
f6c895ce10 more robust switching between atomic update methods 2023-04-03 23:49:55 -07:00
Gregory
17381f0281 fixup 2023-04-03 23:49:55 -07:00
Gregory
0d4acde1bf clean up code/address review 2023-04-03 23:49:54 -07:00
Gregory
d3000d540e remove vestigial code 2023-04-03 23:49:54 -07:00
Gregory
9df9bfa4b7 attempt at fixing typing info 2023-04-03 23:49:54 -07:00
becker33
5af37b7059 [@spackbot] updating style on behalf of becker33 2023-04-03 23:49:54 -07:00
Gregory
d31efd67ff flake 2023-04-03 23:49:53 -07:00
Gregory
db3ee62dc0 use renameat2 for atomic view updates. 2023-04-03 23:49:53 -07:00
Nicholas Cameron Sly
cd0121fd20 Include fix for boost/dyninst. Fix syntax in esmf. Fix file existence check in llvm. Fix hdf5 mpi dependency in netcdf-c. 2023-03-31 14:39:41 -07:00
Nicholas Cameron Sly
9150f47af0 Merge branch 'features/matrix-broadcast' of https://github.com/spack/spack into 2022-12-06 2023-03-24 13:13:03 -07:00
Gregory Becker
5a23819165 style 2023-03-23 12:48:02 -07:00
Gregory Becker
9df3b57f1f update broadcast test to test excludes as well 2023-03-23 11:24:47 -07:00
Gregory Becker
788ad561bd fix excludes with bcast 2023-03-23 11:24:06 -07:00
becker33
c8c025215d [@spackbot] updating style on behalf of becker33 2023-03-13 17:14:28 +00:00
Gregory Becker
1df6a3196a matrix broadcast: more robust test 2023-02-23 15:47:11 -08:00
Gregory Becker
7014eb3236 matrices: broadcast key combinatorially applies to all nodes in matrix 2023-02-23 15:40:34 -08:00
Nicholas Cameron Sly
8042184842 Fix dyninst tbb dependency path. Probably too specific. 2022-12-12 12:09:03 -08:00
14 changed files with 584 additions and 114 deletions

View File

@ -420,6 +420,13 @@ def actions():
#
def env_view_setup_parser(subparser):
"""manage a view associated with the environment"""
subparser.add_argument(
"-f",
"--force",
action="store_true",
dest="force",
help="regenerate even if regeneration cannot be done atomically",
)
subparser.add_argument(
"action", choices=ViewAction.actions(), help="action to take for the environment's view"
)
@ -433,7 +440,7 @@ def env_view(args):
if env:
if args.action == ViewAction.regenerate:
env.regenerate_views()
env.regenerate_views(force=args.force)
elif args.action == ViewAction.enable:
if args.view_path:
view_path = args.view_path
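For orientation, a minimal sketch (environment name is hypothetical, not from the diff) of the call path the new flag drives: "spack env view regenerate --force" goes through env_view() above into the force-aware regenerate_views() in environment.py below.

import spack.environment as ev

env = ev.read("myenv")            # hypothetical environment name
env.regenerate_views(force=True)  # what env_view() calls for `regenerate --force`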

View File

@ -38,6 +38,7 @@
import spack.subprocess_context
import spack.traverse
import spack.user_environment as uenv
import spack.util.atomic_update
import spack.util.cpus
import spack.util.environment
import spack.util.hash
@ -138,6 +139,8 @@ def default_manifest_yaml():
default_view_name = "default"
# Default behavior to link all packages into views (vs. only root packages)
default_view_link = "all"
# Default behavior to use exchange if possible and otherwise symlink for view updates
default_update_method = ["exchange", "symlink"]
def installed_specs():
@ -406,6 +409,7 @@ def __init__(
exclude=[],
link=default_view_link,
link_type="symlink",
update_method=default_update_method,
):
self.base = base_path
self.raw_root = root
@ -415,6 +419,7 @@ def __init__(
self.exclude = exclude
self.link_type = view_func_parser(link_type)
self.link = link
self.update_method = update_method
def select_fn(self, spec):
return any(spec.satisfies(s) for s in self.select)
@ -431,6 +436,7 @@ def __eq__(self, other):
self.exclude == other.exclude,
self.link == other.link,
self.link_type == other.link_type,
self.update_method == other.update_method,
]
)
@ -451,6 +457,8 @@ def to_dict(self):
ret["link_type"] = inverse_view_func_parser(self.link_type)
if self.link != default_view_link:
ret["link"] = self.link
if self.update_method != default_update_method:
ret["update_method"] = self.update_method
return ret
@staticmethod
@ -463,13 +471,39 @@ def from_dict(base_path, d):
d.get("exclude", []),
d.get("link", default_view_link),
d.get("link_type", "symlink"),
d.get("update_method", default_update_method),
)
@property
def _current_root(self):
"""
Return the directory in which the view has been constructed.
Query the view if it stores metadata on where it was constructed.
If the view is using symlinks for atomic updates, self.root is a link and we read the link
to find the real root directory.
If self.root is not a view with metadata and is not a link, the view has not been
constructed.
"""
# Get the view as self.root even if it is actually a symlink
# We will not operate on this view object, only query metadata
# We don't want to pass a created_path to this view, so that we can read where it says it
# was created.
if not os.path.exists(self.root):
return None
view = self.view()
orig_path = view.metadata.get("created_path", None)
if orig_path:
return orig_path
# Backwards compat only applies for symlinked views
if not os.path.islink(self.root):
return None
# For backwards compat, check link for symlink views if no "created_path"
root = os.readlink(self.root)
if os.path.isabs(root):
return root
@ -501,11 +535,9 @@ def get_projection_for_spec(self, spec):
symlink.
"""
view = self.view()
view_path = view.get_projection_for_spec(spec)
rel_path = os.path.relpath(view_path, self._current_root)
return os.path.join(self.root, rel_path)
return view.get_projection_for_spec(spec)
def view(self, new=None):
def view(self, new=None, created_path=False):
"""
Generate the FilesystemView object for this ViewDescriptor
@ -518,8 +550,13 @@ def view(self, new=None):
new (str or None): If a string, create a FilesystemView
rooted at that path. Default None. This should only be used to
regenerate the view, and cannot be used to access specs.
created_path (bool): Pass metadata on the path the view was created in to the
underlying view implementation. Default False. This option should be enabled
when creating a view to add packages to, but not when creating a view object
to query an existing view.
"""
root = new if new else self._current_root
root = new if new else self.root
if not root:
# This can only be hit if we write a future bug
msg = (
@ -527,13 +564,17 @@ def view(self, new=None):
"View root is at %s" % self.root
)
raise SpackEnvironmentViewError(msg)
return SimpleFilesystemView(
root,
spack.store.layout,
ignore_conflicts=True,
projections=self.projections,
link=self.link_type,
)
kwargs = {
"ignore_conflicts": True,
"projections": self.projections,
"link": self.link_type,
"final_destination": self.root,
}
if created_path:
kwargs["metadata"] = {"created_path": root}
return SimpleFilesystemView(root, spack.store.layout, **kwargs)
def __contains__(self, spec):
"""Is the spec described by the view descriptor
@ -576,7 +617,69 @@ def specs_for_view(self, concretized_root_specs):
return specs
def regenerate(self, concretized_root_specs):
def update_method_error_msg(self, methods):
"""When methods are already determined invalid, construct error message for methods."""
msg = "View cannot be updated using specified update methods:"
if "exchange" in methods:
if not spack.util.atomic_update.renameat2():
msg += "\n Operating system does not support 'exchange' atomic update method."
msg += f"\n If the view {self.root} does not already exist on the filesystem,"
msg += " change its update_method to 'symlink' or 'auto'."
msg += f"\n If the view at {self.root} exists already, either change the"
msg += " update_method and run `spack env view regenerate --force`"
msg += " or run on a newer OS."
else:
msg += f"\n The view {self.root} cannot be updated with 'exchange' update method"
msg += " because it was originally constructed with the 'symlink' method."
msg += "\n Either change the update method to 'symlink' or"
msg += " run `spack env view regenerate --force` for a non-atomic update."
if "symlink" in methods:
msg += f"\n The view {self.root} cannot be updated with 'symlink' update method"
msg += " because it was originally constructed with the 'exchange' method."
msg += "\n Either change the update method to 'exchange' or"
msg += " run `spack env view regenerate --force` for a non-atomic update."
return msg
def valid_update_method(self, method, force):
return getattr(self, f"valid_update_method_{method}")(force)
def valid_update_method_exchange(self, force):
if not spack.util.atomic_update.renameat2():
return False
# Ensure we don't swap symlink -> exchange if we have a symlink and symlink is an
# acceptable method. This is to avoid problems switching between OSs.
if os.path.islink(self.root):
if force:
os.unlink(self.root)
elif "symlink" in self.update_method:
return False
return True
def valid_update_method_symlink(self, force):
if os.path.exists(self.root):
if not os.path.islink(self.root):
if force:
shutil.rmtree(self.root)
return True
else:
return False
return True
def update_method_to_use(self, force=False):
update_methods = self.update_method
if isinstance(update_methods, str):
update_methods = [update_methods]
for method in update_methods:
# Check whether we can use this method and return if we can
if self.valid_update_method(method, force):
return method
raise RuntimeError(self.update_method_error_msg(update_methods))
def regenerate(self, concretized_root_specs, force=False):
specs = self.specs_for_view(concretized_root_specs)
# To ensure there are no conflicts with packages being installed
@ -597,34 +700,39 @@ def regenerate(self, concretized_root_specs):
tty.debug("View at %s does not need regeneration." % self.root)
return
# Check which atomic update method we need
update_method = self.update_method_to_use(force)
if update_method == "exchange" and os.path.isdir(new_root):
shutil.rmtree(new_root)
_error_on_nonempty_view_dir(new_root)
# construct view at new_root
if specs:
tty.msg("Updating view at {0}".format(self.root))
view = self.view(new=new_root)
root_dirname = os.path.dirname(self.root)
tmp_symlink_name = os.path.join(root_dirname, "._view_link")
view = self.view(new=new_root, created_path=True)
# Create a new view
try:
fs.mkdirp(new_root)
view.add_specs(*specs, with_dependencies=False)
# create symlink from tmp_symlink_name to new_root
if os.path.exists(tmp_symlink_name):
os.unlink(tmp_symlink_name)
symlink(new_root, tmp_symlink_name)
# mv symlink atomically over root symlink to old_root
fs.rename(tmp_symlink_name, self.root)
if update_method == "exchange":
# Swap the view to the directory of the previous view if one exists so that
# the view that is swapped out will be named appropriately
if old_root:
os.rename(new_root, old_root)
exchange_location = old_root
else:
exchange_location = new_root
spack.util.atomic_update.atomic_update_renameat2(exchange_location, self.root)
else:
spack.util.atomic_update.atomic_update_symlink(new_root, self.root)
except Exception as e:
# Clean up new view and temporary symlink on any failure.
try:
shutil.rmtree(new_root, ignore_errors=True)
os.unlink(tmp_symlink_name)
except (IOError, OSError):
pass
@ -1549,14 +1657,14 @@ def update_default_view(self, viewpath):
else:
self.views.pop(name, None)
def regenerate_views(self):
def regenerate_views(self, force=False):
if not self.views:
tty.debug("Skip view update, this environment does not" " maintain a view")
return
concretized_root_specs = [s for _, s in self.concretized_specs()]
for view in self.views.values():
view.regenerate(concretized_root_specs)
view.regenerate(concretized_root_specs, force)
def check_views(self):
"""Checks if the environments default view can be activated."""

View File

@ -27,7 +27,7 @@
SingleMergeConflictError,
SourceMergeVisitor,
)
from llnl.util.symlink import symlink
from llnl.util.symlink import islink, symlink
from llnl.util.tty.color import colorize
import spack.config
@ -43,6 +43,7 @@
_projections_path = ".spack/projections.yaml"
_metadata_path = ".spack/metadata.yaml"
def view_symlink(src, dst, **kwargs):
@ -63,7 +64,7 @@ def view_copy(src, dst, view, spec=None):
Use spec and view to generate relocations
"""
shutil.copy2(src, dst)
shutil.copy2(src, dst, follow_symlinks=False)
if spec and not spec.external:
# Not metadata, we have to relocate it
@ -82,18 +83,23 @@ def view_copy(src, dst, view, spec=None):
orig_sbang = "#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)
new_sbang = sbang.sbang_shebang_line()
root = view.final_destination
prefix_to_projection = collections.OrderedDict(
{spec.prefix: view.get_projection_for_spec(spec)}
{spec.prefix: os.path.join(root, view.get_relative_projection_for_spec(spec))}
)
for dep in spec.traverse():
if not dep.external:
prefix_to_projection[dep.prefix] = view.get_projection_for_spec(dep)
prefix_to_projection[dep.prefix] = os.path.join(
root, view.get_relative_projection_for_spec(dep)
)
if spack.relocate.is_binary(dst):
spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
elif islink(dst):
spack.relocate.relocate_links(links=[dst], prefix_to_prefix=prefix_to_projection)
else:
prefix_to_projection[spack.store.layout.root] = view._root
prefix_to_projection[spack.store.layout.root] = root
prefix_to_projection[orig_sbang] = new_sbang
spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
try:
@ -150,10 +156,13 @@ def __init__(self, root, layout, **kwargs):
self.layout = layout
self.projections = kwargs.get("projections", {})
self.metadata = kwargs.get("metadata", {})
self.ignore_conflicts = kwargs.get("ignore_conflicts", False)
self.verbose = kwargs.get("verbose", False)
self.final_destination = kwargs.get("final_destination", self._root)
# Setup link function to include view
link_func = kwargs.get("link", view_symlink)
self.link = ft.partial(link_func, view=self)
@ -210,12 +219,15 @@ def remove_standalone(self, spec):
"""
raise NotImplementedError
def get_projection_for_spec(self, spec):
def get_relative_projection_for_spec(self, spec):
"""
Get the projection in this view for a spec.
Get the relative projection in this view for a spec.
"""
raise NotImplementedError
def get_projection_for_spec(self, spec):
return os.path.join(self._root, self.get_relative_projection_for_spec(spec))
def get_all_specs(self):
"""
Get all specs currently active in this view.
@ -274,8 +286,34 @@ def __init__(self, root, layout, **kwargs):
msg += " which does not match projections passed manually."
raise ConflictingProjectionsError(msg)
self.metadata_path = os.path.join(self._root, _metadata_path)
if not self.metadata:
self.metadata = self.read_metadata()
elif not os.path.exists(self.metadata_path):
self.write_metadata()
else:
if self.metadata != self.read_metadata():
msg = f"View at {self._root} has metadata file"
msg += " which does not match metadata passed manually."
raise ConflictingMetadataError(msg)
self._croot = colorize_root(self._root) + " "
def write_metadata(self):
if self.metadata:
mkdirp(os.path.dirname(self.metadata_path))
with open(self.metadata_path, "w") as f:
f.write(s_yaml.dump_config({"metadata": self.metadata}))
def read_metadata(self):
if os.path.exists(self.metadata_path):
with open(self.metadata_path, "r") as f:
# no schema as this is not user modified
metadata_data = s_yaml.load(f)
return metadata_data["metadata"]
else:
return {}
def write_projections(self):
if self.projections:
mkdirp(os.path.dirname(self.projections_path))
@ -486,9 +524,9 @@ def remove_standalone(self, spec):
if self.verbose:
tty.info(self._croot + "Removed package: %s" % colorize_spec(spec))
def get_projection_for_spec(self, spec):
def get_relative_projection_for_spec(self, spec):
"""
Return the projection for a spec in this view.
Return the relative projection for a spec in this view.
Relies on the ordering of projections to avoid ambiguity.
"""
@ -499,9 +537,7 @@ def get_projection_for_spec(self, spec):
locator_spec = spec.package.extendee_spec
proj = spack.projections.get_projection(self.projections, locator_spec)
if proj:
return os.path.join(self._root, locator_spec.format(proj))
return self._root
return spec.format(proj) if proj else ""
def get_all_specs(self):
md_dirs = []
@ -639,6 +675,32 @@ class SimpleFilesystemView(FilesystemView):
def __init__(self, root, layout, **kwargs):
super(SimpleFilesystemView, self).__init__(root, layout, **kwargs)
self.metadata_path = os.path.join(self._root, _metadata_path)
if not self.metadata:
self.metadata = self.read_metadata()
elif not os.path.exists(self.metadata_path):
self.write_metadata()
else:
if self.metadata != self.read_metadata():
msg = f"View at {self._root} has metadata file"
msg += " which does not match metadata passed manually."
raise ConflictingMetadataError(msg)
def write_metadata(self):
if self.metadata:
mkdirp(os.path.dirname(self.metadata_path))
with open(self.metadata_path, "w") as f:
f.write(s_yaml.dump_config({"metadata": self.metadata}))
def read_metadata(self):
if os.path.exists(self.metadata_path):
with open(self.metadata_path, "r") as f:
# no schema as this is not user modified
metadata_data = s_yaml.load(f)
return metadata_data["metadata"]
else:
return {}
def _sanity_check_view_projection(self, specs):
"""A very common issue is that we end up with two specs of the same
package, that project to the same prefix. We want to catch that as
@ -765,6 +827,13 @@ def link_metadata(self, specs):
self.link(os.path.join(src_root, src_relpath), os.path.join(self._root, dst_relpath))
def get_relative_projection_for_spec(self, spec):
"""
Return the relative projection for a spec in this view.
Relies on the ordering of projections to avoid ambiguity.
"""
spec = spack.spec.Spec(spec)
# Extensions are placed by their extendee, not by their own spec
if spec.package.extendee_spec:
spec = spec.package.extendee_spec
@ -772,22 +841,6 @@ def get_relative_projection_for_spec(self, spec):
p = spack.projections.get_projection(self.projections, spec)
return spec.format(p) if p else ""
def get_projection_for_spec(self, spec):
"""
Return the projection for a spec in this view.
Relies on the ordering of projections to avoid ambiguity.
"""
spec = spack.spec.Spec(spec)
if spec.package.extendee_spec:
spec = spec.package.extendee_spec
proj = spack.projections.get_projection(self.projections, spec)
if proj:
return os.path.join(self._root, spec.format(proj))
return self._root
#####################
# utility functions #
@ -849,3 +902,7 @@ def get_dependencies(specs):
class ConflictingProjectionsError(SpackError):
"""Raised when a view has a projections file and is given one manually."""
class ConflictingMetadataError(SpackError):
"""Raised when a view has a metadata file and is given one manually."""

View File

@ -30,6 +30,10 @@
"type": "array",
"items": {"type": "array", "items": {"type": "string"}},
},
"broadcast": {
"type": "array",
"items": {"type": "array", "items": {"type": "string"}},
},
"exclude": {"type": "array", "items": {"type": "string"}},
},
},
@ -107,6 +111,21 @@
"type": "array",
"items": {"type": "string"},
},
"update_method": {
"anyOf": [
{
"type": "string",
"pattern": "(symlink|exchange)",
},
{
"type": "array",
"items": {
"type": "string",
"pattern": "(symlink|exchange)",
},
},
]
},
"projections": projections_scheme,
},
}
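As a reading aid, a hedged sketch of how a view entry carrying the new update_method key would be parsed by ViewDescriptor.from_dict shown earlier; the manifest dictionary below is an assumed typical entry, not taken from the diff.

import spack.environment.environment as ev_env

# Hypothetical view entry from a spack.yaml manifest; per the schema above,
# update_method may be a single string or an ordered preference list.
view_dict = {
    "root": "/path/to/view",
    "link_type": "symlink",
    "update_method": ["exchange", "symlink"],
}
view = ev_env.ViewDescriptor.from_dict("/path/to/env", view_dict)
# view.update_method now carries the ordered preference list consulted by
# update_method_to_use() during regeneration.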

View File

@ -186,17 +186,28 @@ def _expand_matrix_constraints(matrix_config):
new_row.append([r])
expanded_rows.append(new_row)
# TODO someday: allow matrices inside `broadcast`
broadcast_rows = matrix_config.get("broadcast", [])
excludes = matrix_config.get("exclude", []) # only compute once
sigil = matrix_config.get("sigil", "")
broadcast_constraints = list(itertools.product(*broadcast_rows))
results = []
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
flat_combo = [Spec(x) for x in flat_combo]
test_spec = flat_combo[0].copy()
for constraint in flat_combo[1:]:
# If there is no broadcast, this is [()]: the loop below runs exactly once,
# as required, and applies no constraints
for broadcast_combo in broadcast_constraints:
final_combo = [_apply_broadcast(spec.copy(), broadcast_combo) for spec in flat_combo]
# Check whether final spec is excluded
# requires constructing a spec from constraints
test_spec = final_combo[0].copy()
for constraint in final_combo[1:]:
test_spec.constrain(constraint)
# Abstract variants don't have normal satisfaction semantics
@ -209,18 +220,30 @@ def _expand_matrix_constraints(matrix_config):
spack.variant.substitute_abstract_variants(test_spec)
except spack.variant.UnknownVariantError:
pass
if any(test_spec.satisfies(x) for x in excludes):
# actual exclusion check is here
if any(test_spec.satisfies(e) for e in excludes):
continue
# Apply sigil if applicable
if sigil:
flat_combo[0] = Spec(sigil + str(flat_combo[0]))
final_combo[0] = Spec(sigil + str(final_combo[0]))
# Add to list of constraints
results.append(flat_combo)
results.append(final_combo)
return results
def _apply_broadcast(spec, constraints):
if constraints:
for node in spec.traverse():
if node.name:
for constraint in constraints:
node.constrain(constraint)
return spec
def _sigilify(item, sigil):
if isinstance(item, dict):
if sigil:

View File

@ -52,6 +52,19 @@
sep = os.sep
supports_renameat2 = bool(spack.util.atomic_update.renameat2())
if supports_renameat2:
use_renameat2 = [True, False]
else:
use_renameat2 = [False]
@pytest.fixture(params=use_renameat2)
def atomic_update_implementations(request, monkeypatch):
if request.param is False:
monkeypatch.setattr(spack.util.atomic_update, "_renameat2", None)
yield
def check_mpileaks_and_deps_in_view(viewdir):
"""Check that the expected install directories exist."""
@ -61,9 +74,11 @@ def check_mpileaks_and_deps_in_view(viewdir):
def check_viewdir_removal(viewdir):
"""Check that the uninstall/removal worked."""
assert not os.path.exists(str(viewdir.join(".spack"))) or os.listdir(
str(viewdir.join(".spack"))
) == ["projections.yaml"]
view_spack_dir = str(viewdir.join(".spack"))
if not os.path.exists(view_spack_dir):
return
assert all(f in ["projections.yaml", "metadata.yaml"] for f in os.listdir(view_spack_dir))
def test_add():
@ -597,7 +612,9 @@ def test_init_from_yaml(tmpdir):
@pytest.mark.usefixtures("config")
def test_env_view_external_prefix(tmpdir_factory, mutable_database, mock_packages):
def test_env_view_external_prefix(
tmpdir_factory, mutable_database, mock_packages, atomic_update_implementations
):
fake_prefix = tmpdir_factory.mktemp("a-prefix")
fake_bin = fake_prefix.join("bin")
fake_bin.ensure(dir=True)
@ -1178,7 +1195,9 @@ def test_store_different_build_deps(tmpdir):
assert x_read["y"].dag_hash() != y_read.dag_hash()
def test_env_updates_view_install(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_env_updates_view_install(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
@ -1188,7 +1207,9 @@ def test_env_updates_view_install(tmpdir, mock_stage, mock_fetch, install_mocker
check_mpileaks_and_deps_in_view(view_dir)
def test_env_view_fails(tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
def test_env_view_fails(
tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
# We currently ignore file-file conflicts for the prefix merge,
# so in principle there will be no errors in this test. But
# the .spack metadata dir is handled separately and is more strict.
@ -1205,7 +1226,9 @@ def test_env_view_fails(tmpdir, mock_packages, mock_stage, mock_fetch, install_m
install("--fake")
def test_env_view_fails_dir_file(tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
def test_env_view_fails_dir_file(
tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
# This environment view fails to be created because a file
# and a dir are in the same path. Test that it mentions the problematic path.
view_dir = tmpdir.join("view")
@ -1220,7 +1243,7 @@ def test_env_view_fails_dir_file(tmpdir, mock_packages, mock_stage, mock_fetch,
def test_env_view_succeeds_symlinked_dir_file(
tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery
tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
# A symlinked dir and an ordinary dir merge happily
view_dir = tmpdir.join("view")
@ -1234,7 +1257,9 @@ def test_env_view_succeeds_symlinked_dir_file(
assert os.path.exists(os.path.join(x_dir, "file_in_symlinked_dir"))
def test_env_without_view_install(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_env_without_view_install(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
# Test enabling a view after installing specs
env("create", "--without-view", "test")
@ -1255,7 +1280,9 @@ def test_env_without_view_install(tmpdir, mock_stage, mock_fetch, install_mocker
check_mpileaks_and_deps_in_view(view_dir)
def test_env_config_view_default(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_env_config_view_default(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
# This config doesn't mention whether a view is enabled
test_config = """\
env:
@ -1273,7 +1300,9 @@ def test_env_config_view_default(tmpdir, mock_stage, mock_fetch, install_mockery
assert os.path.isdir(os.path.join(e.default_view.view()._root, ".spack", "mpileaks"))
def test_env_updates_view_install_package(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_env_updates_view_install_package(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
@ -1282,7 +1311,9 @@ def test_env_updates_view_install_package(tmpdir, mock_stage, mock_fetch, instal
assert os.path.exists(str(view_dir.join(".spack/mpileaks")))
def test_env_updates_view_add_concretize(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_env_updates_view_add_concretize(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
install("--fake", "mpileaks")
@ -1293,22 +1324,24 @@ def test_env_updates_view_add_concretize(tmpdir, mock_stage, mock_fetch, install
check_mpileaks_and_deps_in_view(view_dir)
def test_env_updates_view_uninstall(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_env_updates_view_uninstall(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
install("--fake", "--add", "mpileaks")
print(install("--fake", "--add", "mpileaks"))
check_mpileaks_and_deps_in_view(view_dir)
with ev.read("test"):
uninstall("-ay")
print(uninstall("-ay"))
check_viewdir_removal(view_dir)
def test_env_updates_view_uninstall_referenced_elsewhere(
tmpdir, mock_stage, mock_fetch, install_mockery
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
@ -1325,10 +1358,14 @@ def test_env_updates_view_uninstall_referenced_elsewhere(
check_viewdir_removal(view_dir)
def test_env_updates_view_remove_concretize(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_env_updates_view_remove_concretize(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
install("--fake", "mpileaks")
with ev.read("test"):
add("mpileaks")
concretize()
@ -1342,7 +1379,9 @@ def test_env_updates_view_remove_concretize(tmpdir, mock_stage, mock_fetch, inst
check_viewdir_removal(view_dir)
def test_env_updates_view_force_remove(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_env_updates_view_force_remove(
tmpdir, mock_stage, mock_fetch, install_mockery, atomic_update_implementations
):
view_dir = tmpdir.join("view")
env("create", "--with-view=%s" % view_dir, "test")
with ev.read("test"):
@ -1889,7 +1928,7 @@ def test_stack_definition_conditional_add_write(tmpdir):
def test_stack_combinatorial_view(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
@ -1924,7 +1963,9 @@ def test_stack_combinatorial_view(
)
def test_stack_view_select(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
def test_stack_view_select(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
@ -1964,7 +2005,9 @@ def test_stack_view_select(tmpdir, mock_fetch, mock_packages, mock_archive, inst
)
def test_stack_view_exclude(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
def test_stack_view_exclude(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
@ -2005,7 +2048,7 @@ def test_stack_view_exclude(tmpdir, mock_fetch, mock_packages, mock_archive, ins
def test_stack_view_select_and_exclude(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
@ -2047,7 +2090,9 @@ def test_stack_view_select_and_exclude(
)
def test_view_link_roots(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
def test_view_link_roots(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
@ -2091,7 +2136,9 @@ def test_view_link_roots(tmpdir, mock_fetch, mock_packages, mock_archive, instal
)
def test_view_link_run(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
def test_view_link_run(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
yaml = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
envdir = str(tmpdir)
@ -2133,7 +2180,13 @@ def test_view_link_run(tmpdir, mock_fetch, mock_packages, mock_archive, install_
@pytest.mark.parametrize("link_type", ["hardlink", "copy", "symlink"])
def test_view_link_type(
link_type, tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
link_type,
tmpdir,
mock_fetch,
mock_packages,
mock_archive,
install_mockery,
atomic_update_implementations,
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
@ -2163,7 +2216,9 @@ def test_view_link_type(
assert os.path.islink(file_to_test) == (link_type == "symlink")
def test_view_link_all(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
def test_view_link_all(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
filename = str(tmpdir.join("spack.yaml"))
viewdir = str(tmpdir.join("view"))
with open(filename, "w") as f:
@ -2274,7 +2329,7 @@ def test_stack_view_no_activate_without_default(
def test_stack_view_multiple_views(
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery, atomic_update_implementations
):
filename = str(tmpdir.join("spack.yaml"))
default_viewdir = str(tmpdir.join("default-view"))
@ -2843,10 +2898,14 @@ def test_failed_view_cleanup(tmpdir, mock_stage, mock_fetch, install_mockery):
assert os.path.samefile(resolved_view, view)
def test_environment_view_target_already_exists(tmpdir, mock_stage, mock_fetch, install_mockery):
def test_environment_view_target_already_exists(
tmpdir, mock_stage, mock_fetch, install_mockery, monkeypatch
):
"""When creating a new view, Spack should check whether
the new view dir already exists. If so, it should not be
removed or modified."""
# Only works for symlinked atomic views
monkeypatch.setattr(spack.util.atomic_update, "_renameat2", None)
# Create a new environment
view = str(tmpdir.join("view"))
@ -3239,3 +3298,103 @@ def test_environment_created_in_users_location(mutable_config, tmpdir):
assert dir_name in out
assert env_dir in ev.root(dir_name)
assert os.path.isdir(os.path.join(env_dir, dir_name))
@pytest.mark.parametrize("update_method", ["symlink", "exchange"])
def test_view_update_mismatch(update_method, tmpdir, install_mockery, mock_fetch, monkeypatch):
root = str(tmpdir.join("root"))
if update_method == "symlink":
os.makedirs(root)
checker = "cannot be updated with 'symlink' update method"
forceable = True
else:
monkeypatch.setattr(spack.util.atomic_update, "_renameat2", None)
checker = "does not support 'exchange' atomic update method"
forceable = False
view = ev.environment.ViewDescriptor(
base_path=str(tmpdir), root=root, update_method=update_method
)
spec = spack.spec.Spec("libelf").concretized()
install("libelf")
with pytest.raises(RuntimeError, match=checker):
view.regenerate([spec])
if forceable:
view.regenerate([spec], force=True)
assert os.path.exists(view.root)
@pytest.mark.parametrize("update_method", ["symlink", "exchange"])
def test_view_update_fails(update_method, tmpdir, install_mockery, mock_fetch, monkeypatch):
root = str(tmpdir.join("root"))
view = ev.environment.ViewDescriptor(
base_path=str(tmpdir), root=root, update_method=update_method
)
spec = spack.spec.Spec("libelf").concretized()
install("libelf")
def raises(*args, **kwargs):
raise OSError
# The python symlink code fails by raising an error
monkeypatch.setattr(fs, "rename", raises)
# The C library call fails with a non-zero return code
monkeypatch.setattr(spack.util.atomic_update, "_renameat2", lambda x, y, z, v, w: 1)
with pytest.raises(OSError):
view.regenerate([spec])
assert not os.path.exists(view.root)
if update_method == "symlink":
link = os.path.join(str(tmpdir), "._root", "._tmp_symlink")
assert not os.path.lexists(link)
@pytest.mark.parametrize("update_method", ["symlink", "exchange"])
def test_view_update_unnecessary(update_method, tmpdir, install_mockery, mock_fetch, monkeypatch):
if update_method == "exchange" and not supports_renameat2:
pytest.skip("Testing on an OS that does not support the exchange update method")
root = str(tmpdir.join("root"))
view = ev.environment.ViewDescriptor(
base_path=str(tmpdir), root=root, update_method=update_method
)
libelf = spack.spec.Spec("libelf").concretized()
install("libelf")
libdwarf = spack.spec.Spec("libdwarf").concretized()
install("libdwarf")
# Create a "previous" view
# Wait after each view regeneration to ensure timestamps are different
view.regenerate([libelf])
# monkeypatch so that any attempt to actually regenerate the view fails
def raises(*args, **kwargs):
raise AssertionError
old_view = view.update_method_to_use
monkeypatch.setattr(view, "update_method_to_use", raises)
# regenerating the view is a no-op, so doesn't raise
# will raise if the view isn't identical
view.regenerate([libelf])
with pytest.raises(AssertionError):
view.regenerate([libelf, libdwarf])
# Create another view so there are multiple old views around
monkeypatch.setattr(view, "update_method_to_use", old_view)
view.regenerate([libelf, libdwarf])
# Redo the monkeypatch
monkeypatch.setattr(view, "update_method_to_use", raises)
# no raise for no-op regeneration
# raise when it's not a no-op
view.regenerate([libelf, libdwarf])
with pytest.raises(AssertionError):
view.regenerate([libelf])

View File

@ -214,3 +214,17 @@ def test_spec_list_constraints_with_structure(
speclist = SpecList("specs", [matrix])
assert len(speclist.specs) == 1
assert libdwarf_spec in speclist.specs[0]
def test_spec_list_broadcast(self, mock_packages):
matrix = {
"matrix": [["mpileaks"], ["^callpath"]],
"broadcast": [["%gcc", "%clang"], ["+debug", "~debug"]],
"exclude": ["+debug%clang"],
}
speclist = SpecList("specs", [matrix])
assert len(speclist) == 3
for spec in speclist:
for node in spec.traverse():
assert node.compiler.name == spec.compiler.name
assert node.variants["debug"].value == spec.variants["debug"].value

View File

@ -0,0 +1,75 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import ctypes
import os
from typing import Optional
import llnl.util.filesystem as fs
from llnl.util.symlink import symlink
# Magic numbers from linux headers
RENAME_EXCHANGE = 2
AT_FDCWD = -100
# Sentinel object used while the renameat2 lookup has not been performed yet
# We use None for not found and notset for not set so that boolean checks work
# properly in client code
notset = object()
_renameat2 = notset
def set_renameat2():
libc: Optional[ctypes.CDLL] = None
try:
# CDLL(None) returns a handle to the running python process
# python links against libc, so we can treat this as a libc handle
# we could also use CDLL("libc.so.6"), but CDLL(None) is (marginally) more future proof
libc = ctypes.CDLL(None)
except (OSError, TypeError):
# OSError if the call fails,
# TypeError on Windows
return None
return getattr(libc, "renameat2", None)
def renameat2():
global _renameat2
if _renameat2 is notset:
_renameat2 = set_renameat2()
return _renameat2
def atomic_update_renameat2(src, dest):
# Ensure a directory that is a symlink will not be read as a symlink in libc
src = src.rstrip(os.path.sep)
dest = dest.rstrip(os.path.sep)
dest_exists = os.path.lexists(dest)
if not dest_exists:
fs.mkdirp(dest)
try:
rc = renameat2()(AT_FDCWD, src.encode(), AT_FDCWD, dest.encode(), RENAME_EXCHANGE)
if rc:
raise OSError(f"renameat2 failed to exchange {src} and {dest}")
except OSError:
if not dest_exists:
os.rmdir(dest)
raise
def atomic_update_symlink(src, dest):
# Create temporary symlink to point to src
tmp_symlink_name = os.path.join(os.path.dirname(dest), "._tmp_symlink")
if os.path.exists(tmp_symlink_name):
os.unlink(tmp_symlink_name)
symlink(src, tmp_symlink_name)
# atomically mv the symlink to destpath (still points to srcpath)
try:
fs.rename(tmp_symlink_name, dest)
except OSError:
os.unlink(tmp_symlink_name)
raise
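A minimal usage sketch (assuming the module is importable as spack.util.atomic_update, as in environment.py above) of the intended fallback between the two update paths; swap_view is a hypothetical helper, not part of the diff.

import spack.util.atomic_update as atomic_update

def swap_view(new_root, view_root):
    # Mirrors ViewDescriptor.regenerate() above: prefer the renameat2(RENAME_EXCHANGE)
    # path when libc exposes renameat2, otherwise move a symlink into place atomically.
    if atomic_update.renameat2():
        atomic_update.atomic_update_renameat2(new_root, view_root)
    else:
        atomic_update.atomic_update_symlink(new_root, view_root)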

View File

@ -977,7 +977,7 @@ _spack_env_loads() {
_spack_env_view() {
if $list_options
then
SPACK_COMPREPLY="-h --help"
SPACK_COMPREPLY="-h --help -f --force"
else
SPACK_COMPREPLY=""
fi

View File

@ -723,14 +723,17 @@ def setup_dependent_package(self, module, dependent_spec):
# Disable find package's config mode for versions of Boost that
# didn't provide it. See https://github.com/spack/spack/issues/20169
# and https://cmake.org/cmake/help/latest/module/FindBoost.html
is_cmake = isinstance(dependent_spec.package, CMakePackage)
if self.spec.satisfies("boost@:1.69.0") and is_cmake:
args_fn = type(dependent_spec.package).cmake_args
#is_cmake = isinstance(dependent_spec.package, CMakePackage)
#if self.spec.satisfies("boost@:1.69.0") and is_cmake:
# args_fn = type(dependent_spec.package).cmake_args
if self.spec.satisfies("boost@:1.69.0") and dependent_spec.satisfies("build_system=cmake"):
args_fn = type(dependent_spec.package.builder).cmake_args
def _cmake_args(self):
return ["-DBoost_NO_BOOST_CMAKE=ON"] + args_fn(self)
type(dependent_spec.package).cmake_args = _cmake_args
#type(dependent_spec.package).cmake_args = _cmake_args
type(dependent_spec.package.builder).cmake_args = _cmake_args
def setup_dependent_build_environment(self, env, dependent_spec):
if "+context" in self.spec and "context-impl" in self.spec.variants:

View File

@ -53,6 +53,7 @@ class Dyninst(CMakePackage):
depends_on("boost@1.61.0:" + boost_libs, when="@10.1.0:")
depends_on("boost@1.61.0:1.69" + boost_libs, when="@:10.0")
depends_on("boost@1.67.0:" + boost_libs, when="@11.0.0:")
depends_on("boost@1.70.0:" + boost_libs, when="@12:")
depends_on("libiberty+pic")
@ -116,7 +117,7 @@ def cmake_args(self):
"-DBoost_ROOT_DIR=%s" % spec["boost"].prefix,
"-DElfUtils_ROOT_DIR=%s" % spec["elf"].prefix,
"-DLibIberty_ROOT_DIR=%s" % spec["libiberty"].prefix,
"-DTBB_ROOT_DIR=%s" % spec["tbb"].prefix,
"-DTBB_ROOT_DIR=%s" % spec["tbb"].prefix.tbb,
self.define("LibIberty_LIBRARIES", spec["libiberty"].libs),
]

View File

@ -199,8 +199,8 @@ def setup_build_environment(self, env):
env.set("ESMF_CXX", spec["mpi"].mpicxx)
env.set("ESMF_F90", spec["mpi"].mpifc)
else:
env.set("ESMF_CXX", env["CXX"])
env.set("ESMF_F90", env["FC"])
os.environ["ESMF_CXX"] = os.environ["CXX"]
os.environ["ESMF_F90"] = os.environ["FC"]
# This environment variable controls the build option.
if "+debug" in spec:

View File

@ -740,6 +740,7 @@ def post_install(self):
install_tree("clang/bindings/python", python_platlib)
with working_dir(self.build_directory):
if not os.path.exists(join_path(self.prefix, 'libexec', 'llvm')):
install_tree("bin", join_path(self.prefix, "libexec", "llvm"))
def llvm_config(self, *args, **kwargs):

View File

@ -124,7 +124,10 @@ class NetcdfC(AutotoolsPackage):
# Starting version 4.4.0, it became possible to disable parallel I/O even
# if HDF5 supports it. For previous versions of the library we need
# HDF5 without mpi support to disable parallel I/O:
depends_on("hdf5~mpi", when="@:4.3~mpi")
#depends_on("hdf5~mpi", when="@:4.3~mpi")
# While it may be possible to do this, it messes with concretizing large environments with
# both ~mpi and +mpi builds of hdf5
depends_on("hdf5~mpi", when="~mpi")
# We need HDF5 with mpi support to enable parallel I/O.
depends_on("hdf5+mpi", when="+mpi")