From 4e7a5e9362790bc9d8e9c5c853ab434ca77368f9 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Mon, 24 Feb 2025 11:28:06 +0100 Subject: [PATCH] spack verify libraries: verify dependencies of installed packages can be resolved (#49124) Currently, we have `config:shared_linking:missing_library_policy` to error or warn when shared libraries cannot be resolved upon install. The new `spack verify libraries` command allows users to run this post install hook at any point in time to check whether their current installations can resolve shared libs in rpaths. --- lib/spack/docs/basic_usage.rst | 33 ++- lib/spack/spack/cmd/verify.py | 93 ++++++-- .../spack/hooks/resolve_shared_libraries.py | 219 +----------------- lib/spack/spack/test/cmd/verify.py | 76 +++++- lib/spack/spack/verify_libraries.py | 212 +++++++++++++++++ share/spack/spack-completion.bash | 18 ++ share/spack/spack-completion.fish | 39 ++-- 7 files changed, 441 insertions(+), 249 deletions(-) create mode 100644 lib/spack/spack/verify_libraries.py diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 72dad746b48..ea19703c1d4 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -1761,19 +1761,24 @@ Verifying installations The ``spack verify`` command can be used to verify the validity of Spack-installed packages any time after installation. + +^^^^^^^^^^^^^^^^^^^^^^^^^ +``spack verify manifest`` +^^^^^^^^^^^^^^^^^^^^^^^^^ + At installation time, Spack creates a manifest of every file in the installation prefix. For links, Spack tracks the mode, ownership, and destination. For directories, Spack tracks the mode, and ownership. For files, Spack tracks the mode, ownership, modification -time, hash, and size. The Spack verify command will check, for every -file in each package, whether any of those attributes have changed. It -will also check for newly added files or deleted files from the -installation prefix. 
Spack can either check all installed packages +time, hash, and size. The ``spack verify manifest`` command will check, +for every file in each package, whether any of those attributes have +changed. It will also check for newly added files or deleted files from +the installation prefix. Spack can either check all installed packages using the `-a,--all` or accept specs listed on the command line to verify. -The ``spack verify`` command can also verify for individual files that -they haven't been altered since installation time. If the given file +The ``spack verify manifest`` command can also verify for individual files +that they haven't been altered since installation time. If the given file is not in a Spack installation prefix, Spack will report that it is not owned by any package. To check individual files instead of specs, use the ``-f,--files`` option. @@ -1788,6 +1793,22 @@ check only local packages (as opposed to those used transparently from ``upstream`` spack instances) and the ``-j,--json`` option to output machine-readable json data for any errors. +^^^^^^^^^^^^^^^^^^^^^^^^^^ +``spack verify libraries`` +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The ``spack verify libraries`` command can be used to verify that packages +do not have accidental system dependencies. This command scans the install +prefixes of packages for executables and shared libraries, and resolves +their needed libraries in their RPATHs. When needed libraries cannot be +located, an error is reported. This typically indicates that a package +was linked against a system library, instead of a library provided by +a Spack package. + +This verification can also be enabled as a post-install hook by setting +``config:shared_linking:missing_library_policy`` to ``error`` or ``warn`` +in :ref:`config.yaml `. 
+ ----------------------- Filesystem requirements ----------------------- diff --git a/lib/spack/spack/cmd/verify.py b/lib/spack/spack/cmd/verify.py index c2c4b599bb8..8f23168677c 100644 --- a/lib/spack/spack/cmd/verify.py +++ b/lib/spack/spack/cmd/verify.py @@ -2,35 +2,48 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import argparse +import io +from typing import List, Optional import llnl.util.tty as tty +from llnl.string import plural +from llnl.util.filesystem import visit_directory_tree import spack.cmd import spack.environment as ev +import spack.spec import spack.store import spack.verify +import spack.verify_libraries +from spack.cmd.common import arguments -description = "check that all spack packages are on disk as installed" +description = "verify spack installations on disk" section = "admin" level = "long" +MANIFEST_SUBPARSER: Optional[argparse.ArgumentParser] = None -def setup_parser(subparser): - setup_parser.parser = subparser - subparser.add_argument( +def setup_parser(subparser: argparse.ArgumentParser): + global MANIFEST_SUBPARSER + sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="verify_command") + + MANIFEST_SUBPARSER = sp.add_parser( + "manifest", help=verify_manifest.__doc__, description=verify_manifest.__doc__ + ) + MANIFEST_SUBPARSER.add_argument( "-l", "--local", action="store_true", help="verify only locally installed packages" ) - subparser.add_argument( + MANIFEST_SUBPARSER.add_argument( "-j", "--json", action="store_true", help="ouptut json-formatted errors" ) - subparser.add_argument("-a", "--all", action="store_true", help="verify all packages") - subparser.add_argument( + MANIFEST_SUBPARSER.add_argument("-a", "--all", action="store_true", help="verify all packages") + MANIFEST_SUBPARSER.add_argument( "specs_or_files", nargs=argparse.REMAINDER, help="specs or files to verify" ) - type = subparser.add_mutually_exclusive_group() - type.add_argument( + manifest_sp_type = MANIFEST_SUBPARSER.add_mutually_exclusive_group() + 
manifest_sp_type.add_argument(
         "-s",
         "--specs",
         action="store_const",
@@ -39,7 +52,7 @@ def setup_parser(subparser):
         default="specs",
         help="treat entries as specs (default)",
     )
-    type.add_argument(
+    manifest_sp_type.add_argument(
         "-f",
         "--files",
         action="store_const",
@@ -49,14 +62,68 @@ def setup_parser(subparser):
         help="treat entries as absolute filenames\n\ncannot be used with '-a'",
     )
 
+    libraries_subparser = sp.add_parser(
+        "libraries", help=verify_libraries.__doc__, description=verify_libraries.__doc__
+    )
+
+    arguments.add_common_arguments(libraries_subparser, ["constraint"])
+
 
 def verify(parser, args):
+    cmd = args.verify_command
+    if cmd == "libraries":
+        return verify_libraries(args)
+    elif cmd == "manifest":
+        return verify_manifest(args)
+    parser.error("invalid verify subcommand")
+
+
+def verify_libraries(args):
+    """verify that shared libraries of install packages can be located in rpaths (Linux only)"""
+    specs_from_db = [s for s in args.specs(installed=True) if not s.external]
+
+    tty.info(f"Checking {len(specs_from_db)} packages for shared library resolution")
+
+    errors = 0
+    for spec in specs_from_db:
+        try:
+            pkg = spec.package
+        except Exception:
+            tty.warn(f"Skipping {spec.cformat('{name}{@version}{/hash}')} due to missing package")
+            continue
+        error_msg = _verify_libraries(spec, pkg.unresolved_libraries)
+        if error_msg is not None:
+            errors += 1
+            tty.error(error_msg)
+
+    if errors:
+        tty.error(f"Cannot resolve shared libraries in {plural(errors, 'package')}")
+        return 1
+
+
+def _verify_libraries(spec: spack.spec.Spec, unresolved_libraries: List[str]) -> Optional[str]:
+    """Go over the prefix of the installed spec and verify its shared libraries can be resolved."""
+    visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor(
+        [*spack.verify_libraries.ALLOW_UNRESOLVED, *unresolved_libraries]
+    )
+    visit_directory_tree(spec.prefix, visitor)
+
+    if not visitor.problems:
+        return None
+
+    output = io.StringIO()
+    visitor.write(output, indent=4, 
brief=True) + message = output.getvalue().rstrip() + return f"{spec.cformat('{name}{@version}{/hash}')}: {spec.prefix}:\n{message}" + + +def verify_manifest(args): + """verify that install directories have not been modified since installation""" local = args.local if args.type == "files": if args.all: - setup_parser.parser.print_help() - return 1 + MANIFEST_SUBPARSER.error("cannot use --all with --files") for file in args.specs_or_files: results = spack.verify.check_file_manifest(file) @@ -87,8 +153,7 @@ def verify(parser, args): env = ev.active_environment() specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args)) else: - setup_parser.parser.print_help() - return 1 + MANIFEST_SUBPARSER.error("use --all or specify specs to verify") for spec in specs: tty.debug("Verifying package %s") diff --git a/lib/spack/spack/hooks/resolve_shared_libraries.py b/lib/spack/spack/hooks/resolve_shared_libraries.py index 4eef63d338d..62500cead18 100644 --- a/lib/spack/spack/hooks/resolve_shared_libraries.py +++ b/lib/spack/spack/hooks/resolve_shared_libraries.py @@ -2,200 +2,14 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -import fnmatch import io -import os -import re -from typing import Dict, List, Union import llnl.util.tty as tty -from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree -from llnl.util.lang import stable_partition +from llnl.util.filesystem import visit_directory_tree import spack.config import spack.error -import spack.util.elf as elf - -#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs -#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in -#: default search paths of the dynamic linker. 
-ALLOW_UNRESOLVED = [ - # kernel - "linux-vdso.so.*", - "libselinux.so.*", - # musl libc - "ld-musl-*.so.*", - # glibc - "ld-linux*.so.*", - "ld64.so.*", - "libanl.so.*", - "libc.so.*", - "libdl.so.*", - "libm.so.*", - "libmemusage.so.*", - "libmvec.so.*", - "libnsl.so.*", - "libnss_compat.so.*", - "libnss_db.so.*", - "libnss_dns.so.*", - "libnss_files.so.*", - "libnss_hesiod.so.*", - "libpcprofile.so.*", - "libpthread.so.*", - "libresolv.so.*", - "librt.so.*", - "libSegFault.so.*", - "libthread_db.so.*", - "libutil.so.*", - # gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s, - # but the binaries we copy from the compiler don't have an $ORIGIN rpath. - "libasan.so.*", - "libatomic.so.*", - "libcc1.so.*", - "libgcc_s.so.*", - "libgfortran.so.*", - "libgomp.so.*", - "libitm.so.*", - "liblsan.so.*", - "libquadmath.so.*", - "libssp.so.*", - "libstdc++.so.*", - "libtsan.so.*", - "libubsan.so.*", - # systemd - "libudev.so.*", - # cuda driver - "libcuda.so.*", - # intel-oneapi-runtime - "libur_loader.so.*", -] - - -def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool: - return ( - child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN - and parent.is_little_endian == child.is_little_endian - and parent.is_64_bit == child.is_64_bit - and parent.elf_hdr.e_machine == child.elf_hdr.e_machine - ) - - -def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool: - try: - with open(candidate_path, "rb") as g: - return is_compatible(current_elf, elf.parse_elf(g)) - except (OSError, elf.ElfParsingError): - return False - - -class Problem: - def __init__( - self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes] - ) -> None: - self.resolved = resolved - self.unresolved = unresolved - self.relative_rpaths = relative_rpaths - - -class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor): - def __init__(self, allow_unresolved_patterns: List[str]) -> None: - self.problems: Dict[str, 
Problem] = {} - self._allow_unresolved_regex = re.compile( - "|".join(fnmatch.translate(x) for x in allow_unresolved_patterns) - ) - - def allow_unresolved(self, needed: bytes) -> bool: - try: - name = needed.decode("utf-8") - except UnicodeDecodeError: - return False - return bool(self._allow_unresolved_regex.match(name)) - - def visit_file(self, root: str, rel_path: str, depth: int) -> None: - # We work with byte strings for paths. - path = os.path.join(root, rel_path).encode("utf-8") - - # For $ORIGIN interpolation: should not have trailing dir seperator. - origin = os.path.dirname(path) - - # Retrieve the needed libs + rpaths. - try: - with open(path, "rb") as f: - parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True) - except (OSError, elf.ElfParsingError): - # Not dealing with an invalid ELF file. - return - - # If there's no needed libs all is good - if not parsed_elf.has_needed: - return - - # Get the needed libs and rpaths (notice: byte strings) - # Don't force an encoding cause paths are just a bag of bytes. - needed_libs = parsed_elf.dt_needed_strs - - rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else [] - - # We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really - # supported in general. Also remove empty paths. - rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x] - - # Do not allow relative rpaths (they are relative to the current working directory) - rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs) - - # If there's a / in the needed lib, it's opened directly, otherwise it needs - # a search. 
- direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x) - - # Do not allow relative paths in direct libs (they are relative to the current working - # directory) - direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs) - - resolved: Dict[bytes, bytes] = {} - - for lib in search_libs: - if self.allow_unresolved(lib): - continue - for rpath in rpaths: - candidate = os.path.join(rpath, lib) - if candidate_matches(parsed_elf, candidate): - resolved[lib] = candidate - break - else: - unresolved.append(lib) - - # Check if directly opened libs are compatible - for lib in direct_libs: - if candidate_matches(parsed_elf, lib): - resolved[lib] = lib - else: - unresolved.append(lib) - - if unresolved or relative_rpaths: - self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths) - - def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None: - pass - - def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool: - # There can be binaries in .spack/test which shouldn't be checked. - if rel_path == ".spack": - return False - return True - - def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool: - return False - - -class CannotLocateSharedLibraries(spack.error.SpackError): - pass - - -def maybe_decode(byte_str: bytes) -> Union[str, bytes]: - try: - return byte_str.decode("utf-8") - except UnicodeDecodeError: - return byte_str +import spack.verify_libraries def post_install(spec, explicit): @@ -206,36 +20,23 @@ def post_install(spec, explicit): if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"): return - visitor = ResolveSharedElfLibDepsVisitor( - [*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries] + visitor = spack.verify_libraries.ResolveSharedElfLibDepsVisitor( + [*spack.verify_libraries.ALLOW_UNRESOLVED, *spec.package.unresolved_libraries] ) visit_directory_tree(spec.prefix, visitor) - # All good? 
if not visitor.problems:
         return
 
-    # For now just list the issues (print it in ldd style, except we don't recurse)
-    output = io.StringIO()
-    output.write("not all executables and libraries can resolve their dependencies:\n")
-    for path, problem in visitor.problems.items():
-        output.write(path)
-        output.write("\n")
-        for needed, full_path in problem.resolved.items():
-            output.write("    ")
-            if needed == full_path:
-                output.write(maybe_decode(needed))
-            else:
-                output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
-            output.write("\n")
-        for not_found in problem.unresolved:
-            output.write(f"    {maybe_decode(not_found)} => not found\n")
-        for relative_rpath in problem.relative_rpaths:
-            output.write(f"    {maybe_decode(relative_rpath)} => relative rpath\n")
-
+    output = io.StringIO()
+    output.write("not all executables and libraries can resolve their dependencies:\n"); visitor.write(output)
     message = output.getvalue().strip()
 
     if policy == "error":
         raise CannotLocateSharedLibraries(message)
 
     tty.warn(message)
+
+
+class CannotLocateSharedLibraries(spack.error.SpackError):
+    pass
diff --git a/lib/spack/spack/test/cmd/verify.py b/lib/spack/spack/test/cmd/verify.py
index e9886e55657..5ada2fe9f8e 100644
--- a/lib/spack/spack/test/cmd/verify.py
+++ b/lib/spack/spack/test/cmd/verify.py
@@ -4,19 +4,31 @@
 """Tests for the `spack verify` command"""
 import os
+import platform
+
+import pytest
 
 import llnl.util.filesystem as fs
 
+import spack.cmd.verify
 import spack.concretize
+import spack.installer
 import spack.store
+import spack.util.executable
 import spack.util.spack_json as sjson
 import spack.verify
-from spack.main import SpackCommand
+from spack.main import SpackCommand, SpackCommandError
 
 verify = SpackCommand("verify")
 install = SpackCommand("install")
 
 
+def skip_unless_linux(f):
+    return pytest.mark.skipif(
+        str(platform.system()) != "Linux", reason="only tested on linux for now"
+    )(f)
+
+
 def test_single_file_verify_cmd(tmpdir):
     # Test the verify command interface to verifying a 
single file. filedir = os.path.join(str(tmpdir), "a", "b", "c", "d") @@ -36,7 +48,7 @@ def test_single_file_verify_cmd(tmpdir): with open(manifest_file, "w", encoding="utf-8") as f: sjson.dump({filepath: data}, f) - results = verify("-f", filepath, fail_on_error=False) + results = verify("manifest", "-f", filepath, fail_on_error=False) print(results) assert not results @@ -44,7 +56,7 @@ def test_single_file_verify_cmd(tmpdir): with open(filepath, "w", encoding="utf-8") as f: f.write("I changed.") - results = verify("-f", filepath, fail_on_error=False) + results = verify("manifest", "-f", filepath, fail_on_error=False) expected = ["hash"] mtime = os.stat(filepath).st_mtime @@ -55,7 +67,7 @@ def test_single_file_verify_cmd(tmpdir): assert filepath in results assert all(x in results for x in expected) - results = verify("-fj", filepath, fail_on_error=False) + results = verify("manifest", "-fj", filepath, fail_on_error=False) res = sjson.load(results) assert len(res) == 1 errors = res.pop(filepath) @@ -69,18 +81,68 @@ def test_single_spec_verify_cmd(tmpdir, mock_packages, mock_archive, mock_fetch, prefix = s.prefix hash = s.dag_hash() - results = verify("/%s" % hash, fail_on_error=False) + results = verify("manifest", "/%s" % hash, fail_on_error=False) assert not results new_file = os.path.join(prefix, "new_file_for_verify_test") with open(new_file, "w", encoding="utf-8") as f: f.write("New file") - results = verify("/%s" % hash, fail_on_error=False) + results = verify("manifest", "/%s" % hash, fail_on_error=False) assert new_file in results assert "added" in results - results = verify("-j", "/%s" % hash, fail_on_error=False) + results = verify("manifest", "-j", "/%s" % hash, fail_on_error=False) res = sjson.load(results) assert len(res) == 1 assert res[new_file] == ["added"] + + +@pytest.mark.requires_executables("gcc") +@skip_unless_linux +def test_libraries(tmp_path, install_mockery, mock_fetch): + gcc = spack.util.executable.which("gcc", required=True) + s = 
spack.concretize.concretize_one("libelf") + spack.installer.PackageInstaller([s.package]).install() + os.mkdir(s.prefix.bin) + + # There are no ELF files so the verification should pass + verify("libraries", f"/{s.dag_hash()}") + + # Now put main_with_rpath linking to libf.so inside the prefix and verify again. This should + # work because libf.so can be located in the rpath. + (tmp_path / "f.c").write_text("void f(void){return;}") + (tmp_path / "main.c").write_text("void f(void); int main(void){f();return 0;}") + + gcc("-shared", "-fPIC", "-o", str(tmp_path / "libf.so"), str(tmp_path / "f.c")) + gcc( + "-o", + str(s.prefix.bin.main_with_rpath), + str(tmp_path / "main.c"), + "-L", + str(tmp_path), + f"-Wl,-rpath,{tmp_path}", + "-lf", + ) + verify("libraries", f"/{s.dag_hash()}") + + # Now put main_without_rpath linking to libf.so inside the prefix and verify again. This should + # fail because libf.so cannot be located in the rpath. + gcc( + "-o", + str(s.prefix.bin.main_without_rpath), + str(tmp_path / "main.c"), + "-L", + str(tmp_path), + "-lf", + ) + + with pytest.raises(SpackCommandError): + verify("libraries", f"/{s.dag_hash()}") + + # Check the error message + msg = spack.cmd.verify._verify_libraries(s, []) + assert msg is not None and "libf.so => not found" in msg + + # And check that we can make it pass by ignoring it. + assert spack.cmd.verify._verify_libraries(s, ["libf.so"]) is None diff --git a/lib/spack/spack/verify_libraries.py b/lib/spack/spack/verify_libraries.py new file mode 100644 index 00000000000..aab38cff2bc --- /dev/null +++ b/lib/spack/spack/verify_libraries.py @@ -0,0 +1,212 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import fnmatch +import os +import re +from typing import IO, Dict, List + +from llnl.util.filesystem import BaseDirectoryVisitor +from llnl.util.lang import stable_partition + +import spack.util.elf as elf + +#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs +#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in +#: default search paths of the dynamic linker. +ALLOW_UNRESOLVED = [ + # kernel + "linux-vdso.so.*", + "libselinux.so.*", + # musl libc + "ld-musl-*.so.*", + # glibc + "ld-linux*.so.*", + "ld64.so.*", + "libanl.so.*", + "libc.so.*", + "libdl.so.*", + "libm.so.*", + "libmemusage.so.*", + "libmvec.so.*", + "libnsl.so.*", + "libnss_compat.so.*", + "libnss_db.so.*", + "libnss_dns.so.*", + "libnss_files.so.*", + "libnss_hesiod.so.*", + "libpcprofile.so.*", + "libpthread.so.*", + "libresolv.so.*", + "librt.so.*", + "libSegFault.so.*", + "libthread_db.so.*", + "libutil.so.*", + # gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s, + # but the binaries we copy from the compiler don't have an $ORIGIN rpath. 
+ "libasan.so.*", + "libatomic.so.*", + "libcc1.so.*", + "libgcc_s.so.*", + "libgfortran.so.*", + "libgomp.so.*", + "libitm.so.*", + "liblsan.so.*", + "libquadmath.so.*", + "libssp.so.*", + "libstdc++.so.*", + "libtsan.so.*", + "libubsan.so.*", + # systemd + "libudev.so.*", + # cuda driver + "libcuda.so.*", + # intel-oneapi-runtime + "libur_loader.so.*", +] + + +def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool: + return ( + child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN + and parent.is_little_endian == child.is_little_endian + and parent.is_64_bit == child.is_64_bit + and parent.elf_hdr.e_machine == child.elf_hdr.e_machine + ) + + +def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool: + try: + with open(candidate_path, "rb") as g: + return is_compatible(current_elf, elf.parse_elf(g)) + except (OSError, elf.ElfParsingError): + return False + + +class Problem: + def __init__( + self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes] + ) -> None: + self.resolved = resolved + self.unresolved = unresolved + self.relative_rpaths = relative_rpaths + + +class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor): + def __init__(self, allow_unresolved_patterns: List[str]) -> None: + self.problems: Dict[str, Problem] = {} + self._allow_unresolved_regex = re.compile( + "|".join(fnmatch.translate(x) for x in allow_unresolved_patterns) + ) + + def allow_unresolved(self, needed: bytes) -> bool: + try: + name = needed.decode("utf-8") + except UnicodeDecodeError: + return False + return bool(self._allow_unresolved_regex.match(name)) + + def visit_file(self, root: str, rel_path: str, depth: int) -> None: + # We work with byte strings for paths. + path = os.path.join(root, rel_path).encode("utf-8") + + # For $ORIGIN interpolation: should not have trailing dir seperator. + origin = os.path.dirname(path) + + # Retrieve the needed libs + rpaths. 
+ try: + with open(path, "rb") as f: + parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True) + except (OSError, elf.ElfParsingError): + # Not dealing with an invalid ELF file. + return + + # If there's no needed libs all is good + if not parsed_elf.has_needed: + return + + # Get the needed libs and rpaths (notice: byte strings) + # Don't force an encoding cause paths are just a bag of bytes. + needed_libs = parsed_elf.dt_needed_strs + + rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else [] + + # We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really + # supported in general. Also remove empty paths. + rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x] + + # Do not allow relative rpaths (they are relative to the current working directory) + rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs) + + # If there's a / in the needed lib, it's opened directly, otherwise it needs + # a search. + direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x) + + # Do not allow relative paths in direct libs (they are relative to the current working + # directory) + direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs) + + resolved: Dict[bytes, bytes] = {} + + for lib in search_libs: + if self.allow_unresolved(lib): + continue + for rpath in rpaths: + candidate = os.path.join(rpath, lib) + if candidate_matches(parsed_elf, candidate): + resolved[lib] = candidate + break + else: + unresolved.append(lib) + + # Check if directly opened libs are compatible + for lib in direct_libs: + if candidate_matches(parsed_elf, lib): + resolved[lib] = lib + else: + unresolved.append(lib) + + if unresolved or relative_rpaths: + self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths) + + def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None: + pass + + def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool: + # There can be 
binaries in .spack/test which shouldn't be checked. + if rel_path == ".spack": + return False + return True + + def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool: + return False + + def write(self, output: IO[str], *, indent=0, brief: bool = False) -> None: + indent_str = " " * indent + for path, problem in self.problems.items(): + output.write(indent_str) + output.write(path) + output.write("\n") + if not brief: + for needed, full_path in problem.resolved.items(): + output.write(indent_str) + output.write(" ") + if needed == full_path: + output.write(_decode_or_raw(needed)) + else: + output.write(f"{_decode_or_raw(needed)} => {_decode_or_raw(full_path)}") + output.write("\n") + for not_found in problem.unresolved: + output.write(indent_str) + output.write(f" {_decode_or_raw(not_found)} => not found\n") + for relative_rpath in problem.relative_rpaths: + output.write(indent_str) + output.write(f" {_decode_or_raw(relative_rpath)} => relative rpath\n") + + +def _decode_or_raw(byte_str: bytes) -> str: + try: + return byte_str.decode("utf-8") + except UnicodeDecodeError: + return f"{byte_str!r}" diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 6f15771790d..11be14c935a 100644 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -2025,6 +2025,15 @@ _spack_url_stats() { } _spack_verify() { + if $list_options + then + SPACK_COMPREPLY="-h --help" + else + SPACK_COMPREPLY="manifest libraries" + fi +} + +_spack_verify_manifest() { if $list_options then SPACK_COMPREPLY="-h --help -l --local -j --json -a --all -s --specs -f --files" @@ -2033,6 +2042,15 @@ _spack_verify() { fi } +_spack_verify_libraries() { + if $list_options + then + SPACK_COMPREPLY="-h --help" + else + _installed_packages + fi +} + _spack_versions() { if $list_options then diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish index 9689173431c..95cb6d1d50e 100644 --- 
a/share/spack/spack-completion.fish +++ b/share/spack/spack-completion.fish @@ -425,7 +425,7 @@ complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a uninstall -d 'rem complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a unit-test -d 'run spack'"'"'s unit tests (wrapper around pytest)' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a unload -d 'remove package from the user environment' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a url -d 'debugging tool for url parsing' -complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a verify -d 'check that all spack packages are on disk as installed' +complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a verify -d 'verify spack installations on disk' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a versions -d 'list available versions of a package' complete -c spack -n '__fish_spack_using_command_pos 0 ' -f -a view -d 'project packages to a compact naming scheme on the filesystem' complete -c spack -n '__fish_spack_using_command ' -s h -l help -f -a help @@ -3153,20 +3153,33 @@ complete -c spack -n '__fish_spack_using_command url stats' -l show-issues -f -a complete -c spack -n '__fish_spack_using_command url stats' -l show-issues -d 'show packages with issues (md5 hashes, http urls)' # spack verify -set -g __fish_spack_optspecs_spack_verify h/help l/local j/json a/all s/specs f/files -complete -c spack -n '__fish_spack_using_command_pos_remainder 0 verify' $__fish_spack_force_files -a '(__fish_spack_installed_specs)' +set -g __fish_spack_optspecs_spack_verify h/help +complete -c spack -n '__fish_spack_using_command_pos 0 verify' -f -a manifest -d 'verify that install directories have not been modified since installation' +complete -c spack -n '__fish_spack_using_command_pos 0 verify' -f -a libraries -d 'verify that shared libraries of install packages can be located in rpaths (Linux only)' complete -c spack -n '__fish_spack_using_command 
verify' -s h -l help -f -a help complete -c spack -n '__fish_spack_using_command verify' -s h -l help -d 'show this help message and exit' -complete -c spack -n '__fish_spack_using_command verify' -s l -l local -f -a local -complete -c spack -n '__fish_spack_using_command verify' -s l -l local -d 'verify only locally installed packages' -complete -c spack -n '__fish_spack_using_command verify' -s j -l json -f -a json -complete -c spack -n '__fish_spack_using_command verify' -s j -l json -d 'ouptut json-formatted errors' -complete -c spack -n '__fish_spack_using_command verify' -s a -l all -f -a all -complete -c spack -n '__fish_spack_using_command verify' -s a -l all -d 'verify all packages' -complete -c spack -n '__fish_spack_using_command verify' -s s -l specs -f -a type -complete -c spack -n '__fish_spack_using_command verify' -s s -l specs -d 'treat entries as specs (default)' -complete -c spack -n '__fish_spack_using_command verify' -s f -l files -f -a type -complete -c spack -n '__fish_spack_using_command verify' -s f -l files -d 'treat entries as absolute filenames' + +# spack verify manifest +set -g __fish_spack_optspecs_spack_verify_manifest h/help l/local j/json a/all s/specs f/files +complete -c spack -n '__fish_spack_using_command_pos_remainder 0 verify manifest' $__fish_spack_force_files -a '(__fish_spack_installed_specs)' +complete -c spack -n '__fish_spack_using_command verify manifest' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command verify manifest' -s h -l help -d 'show this help message and exit' +complete -c spack -n '__fish_spack_using_command verify manifest' -s l -l local -f -a local +complete -c spack -n '__fish_spack_using_command verify manifest' -s l -l local -d 'verify only locally installed packages' +complete -c spack -n '__fish_spack_using_command verify manifest' -s j -l json -f -a json +complete -c spack -n '__fish_spack_using_command verify manifest' -s j -l json -d 'ouptut json-formatted errors' +complete 
-c spack -n '__fish_spack_using_command verify manifest' -s a -l all -f -a all +complete -c spack -n '__fish_spack_using_command verify manifest' -s a -l all -d 'verify all packages' +complete -c spack -n '__fish_spack_using_command verify manifest' -s s -l specs -f -a type +complete -c spack -n '__fish_spack_using_command verify manifest' -s s -l specs -d 'treat entries as specs (default)' +complete -c spack -n '__fish_spack_using_command verify manifest' -s f -l files -f -a type +complete -c spack -n '__fish_spack_using_command verify manifest' -s f -l files -d 'treat entries as absolute filenames' + +# spack verify libraries +set -g __fish_spack_optspecs_spack_verify_libraries h/help +complete -c spack -n '__fish_spack_using_command_pos_remainder 0 verify libraries' -f -a '(__fish_spack_installed_specs)' +complete -c spack -n '__fish_spack_using_command verify libraries' -s h -l help -f -a help +complete -c spack -n '__fish_spack_using_command verify libraries' -s h -l help -d 'show this help message and exit' # spack versions set -g __fish_spack_optspecs_spack_versions h/help s/safe r/remote n/new j/jobs=