diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml
index b9c4aee64ee..14560372e62 100644
--- a/etc/spack/defaults/config.yaml
+++ b/etc/spack/defaults/config.yaml
@@ -194,6 +194,12 @@ config:
     # executables with many dependencies, in particular on slow filesystems.
     bind: false
 
+    # Controls the handling of missing dynamic libraries after installation.
+    # Options are ignore (default), warn, or error. If set to error, the
+    # installation fails if installed binaries reference dynamic libraries that
+    # are not found in their specified rpaths.
+    missing_library_policy: ignore
+
   # Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
   # manipulation by unprivileged user (e.g. AFS)
diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py
index 9082d4f8ab0..8559158cc2b 100644
--- a/lib/spack/spack/build_systems/oneapi.py
+++ b/lib/spack/spack/build_systems/oneapi.py
@@ -32,6 +32,9 @@ class IntelOneApiPackage(Package):
     # organization (e.g. University/Company).
     redistribute(source=False, binary=False)
 
+    # contains precompiled binaries without rpaths
+    unresolved_libraries = ["*"]
+
     for c in [
         "target=ppc64:",
         "target=ppc64le:",
diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py
index 73fad62d6ad..51d964ed731 100644
--- a/lib/spack/spack/hooks/__init__.py
+++ b/lib/spack/spack/hooks/__init__.py
@@ -35,6 +35,7 @@ class _HookRunner:
         "spack.hooks.drop_redundant_rpaths",
         "spack.hooks.absolutify_elf_sonames",
         "spack.hooks.permissions_setters",
+        "spack.hooks.resolve_shared_libraries",
         # after all mutations to the install prefix, write metadata
         "spack.hooks.write_install_manifest",
         # after all metadata is written
diff --git a/lib/spack/spack/hooks/resolve_shared_libraries.py b/lib/spack/spack/hooks/resolve_shared_libraries.py
new file mode 100644
index 00000000000..d055f0d3b62
--- /dev/null
+++ b/lib/spack/spack/hooks/resolve_shared_libraries.py
@@ -0,0 +1,239 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import fnmatch
+import io
+import os
+import re
+from typing import Dict, List, Union
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
+from llnl.util.lang import stable_partition
+
+import spack.config
+import spack.error
+import spack.util.elf as elf
+
+#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
+#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
+#: default search paths of the dynamic linker.
+ALLOW_UNRESOLVED = [
+    # kernel
+    "linux-vdso.so.*",
+    "libselinux.so.*",
+    # musl libc
+    "ld-musl-*.so.*",
+    # glibc
+    "ld-linux*.so.*",
+    "ld64.so.*",
+    "libc.so.*",
+    "libdl.so.*",
+    "libm.so.*",
+    "libmemusage.so.*",
+    "libmvec.so.*",
+    "libnsl.so.*",
+    "libnss_compat.so.*",
+    "libnss_db.so.*",
+    "libnss_dns.so.*",
+    "libnss_files.so.*",
+    "libnss_hesiod.so.*",
+    "libpcprofile.so.*",
+    "libpthread.so.*",
+    "libresolv.so.*",
+    "librt.so.*",
+    "libSegFault.so.*",
+    "libthread_db.so.*",
+    "libutil.so.*",
+    # gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
+    # but the binaries we copy from the compiler don't have an $ORIGIN rpath.
+    "libasan.so.*",
+    "libatomic.so.*",
+    "libcc1.so.*",
+    "libgcc_s.so.*",
+    "libgfortran.so.*",
+    "libgomp.so.*",
+    "libitm.so.*",
+    "liblsan.so.*",
+    "libquadmath.so.*",
+    "libssp.so.*",
+    "libstdc++.so.*",
+    "libtsan.so.*",
+    "libubsan.so.*",
+    # systemd
+    "libudev.so.*",
+    # cuda driver
+    "libcuda.so.*",
+]
+
+
+def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
+    return (
+        child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
+        and parent.is_little_endian == child.is_little_endian
+        and parent.is_64_bit == child.is_64_bit
+        and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
+    )
+
+
+def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
+    try:
+        with open(candidate_path, "rb") as g:
+            return is_compatible(current_elf, elf.parse_elf(g))
+    except (OSError, elf.ElfParsingError):
+        return False
+
+
+class Problem:
+    def __init__(
+        self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
+    ) -> None:
+        self.resolved = resolved
+        self.unresolved = unresolved
+        self.relative_rpaths = relative_rpaths
+
+
+class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
+    def __init__(self, allow_unresolved_patterns: List[str]) -> None:
+        self.problems: Dict[str, Problem] = {}
+        self._allow_unresolved_regex = re.compile(
+            "|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
+        )
+
+    def allow_unresolved(self, needed: bytes) -> bool:
+        try:
+            name = needed.decode("utf-8")
+        except UnicodeDecodeError:
+            return False
+        return bool(self._allow_unresolved_regex.match(name))
+
+    def visit_file(self, root: str, rel_path: str, depth: int) -> None:
+        # We work with byte strings for paths.
+        path = os.path.join(root, rel_path).encode("utf-8")
+
+        # For $ORIGIN interpolation: should not have a trailing dir separator.
+        origin = os.path.dirname(path)
+
+        # Retrieve the needed libs + rpaths.
+        try:
+            with open(path, "rb") as f:
+                parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
+        except (OSError, elf.ElfParsingError):
+            # Skip files that cannot be opened or parsed as ELF.
+            return
+
+        # If there are no needed libs, all is good.
+        if not parsed_elf.has_needed:
+            return
+
+        # Get the needed libs and rpaths (notice: byte strings).
+        # Don't force an encoding, since paths are just a bag of bytes.
+        needed_libs = parsed_elf.dt_needed_strs
+
+        rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []
+
+        # We only interpolate $ORIGIN, not $LIB and $PLATFORM; they're not really
+        # supported in general. Also remove empty paths.
+        rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]
+
+        # Do not allow relative rpaths (they are relative to the current working directory).
+        rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)
+
+        # If there's a / in the needed lib, it's opened directly, otherwise it needs
+        # a search.
+        direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)
+
+        # Do not allow relative paths in direct libs (they are relative to the current working
+        # directory).
+        direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)
+
+        resolved: Dict[bytes, bytes] = {}
+
+        for lib in search_libs:
+            if self.allow_unresolved(lib):
+                continue
+            for rpath in rpaths:
+                candidate = os.path.join(rpath, lib)
+                if candidate_matches(parsed_elf, candidate):
+                    resolved[lib] = candidate
+                    break
+            else:
+                unresolved.append(lib)
+
+        # Check if directly opened libs are compatible.
+        for lib in direct_libs:
+            if candidate_matches(parsed_elf, lib):
+                resolved[lib] = lib
+            else:
+                unresolved.append(lib)
+
+        if unresolved or relative_rpaths:
+            self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)
+
+    def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
+        pass
+
+    def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
+        # There can be binaries in .spack/test which shouldn't be checked.
+        if rel_path == ".spack":
+            return False
+        return True
+
+    def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
+        return False
+
+
+class CannotLocateSharedLibraries(spack.error.SpackError):
+    pass
+
+
+def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
+    try:
+        return byte_str.decode("utf-8")
+    except UnicodeDecodeError:
+        return byte_str
+
+
+def post_install(spec, explicit):
+    """Check whether shared libraries can be resolved in RPATHs."""
+    policy = spack.config.get("config:shared_linking:missing_library_policy", "ignore")
+
+    # Currently only supported for ELF files.
+    if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
+        return
+
+    visitor = ResolveSharedElfLibDepsVisitor(
+        [*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
+    )
+    visit_directory_tree(spec.prefix, visitor)
+
+    # All good?
+    if not visitor.problems:
+        return
+
+    # For now just list the issues (print it in ldd style, except we don't recurse).
+    output = io.StringIO()
+    output.write("not all executables and libraries can resolve their dependencies:\n")
+    for path, problem in visitor.problems.items():
+        output.write(path)
+        output.write("\n")
+        for needed, full_path in problem.resolved.items():
+            output.write(" ")
+            if needed == full_path:
+                output.write(maybe_decode(needed))
+            else:
+                output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
+            output.write("\n")
+        for not_found in problem.unresolved:
+            output.write(f" {maybe_decode(not_found)} => not found\n")
+        for relative_rpath in problem.relative_rpaths:
+            output.write(f" {maybe_decode(relative_rpath)} => relative rpath\n")
+
+    message = output.getvalue().strip()
+
+    if policy == "error":
+        raise CannotLocateSharedLibraries(message)
+
+    tty.warn(message)
diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py
index f0efa6b4090..305af5cb8c1 100644
--- a/lib/spack/spack/package_base.py
+++ b/lib/spack/spack/package_base.py
@@ -633,6 +633,14 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: stubs directory are not bound by path."""
     non_bindable_shared_objects: List[str] = []
 
+    #: List of fnmatch patterns of library file names (specifically DT_NEEDED entries) that are
+    #: not expected to be locatable in RPATHs. Generally this is a problem, and a Spack install
+    #: with config:shared_linking:missing_library_policy set to error will fail if such
+    #: libraries are found. However, in certain cases it can be hard, if not impossible, to avoid
+    #: accidental linking against system libraries; until that is resolved, this attribute can be
+    #: used to suppress errors.
+    unresolved_libraries: List[str] = []
+
     #: List of prefix-relative file paths (or a single path). If these do
     #: not exist after install, or if they exist but are not files,
     #: sanity checks fail.
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py
index b42c1e27ffa..86f453f943f 100644
--- a/lib/spack/spack/schema/config.py
+++ b/lib/spack/spack/schema/config.py
@@ -34,6 +34,7 @@
                     "properties": {
                         "type": {"type": "string", "enum": ["rpath", "runpath"]},
                         "bind": {"type": "boolean"},
+                        "missing_library_policy": {"enum": ["error", "warn", "ignore"]},
                     },
                 },
             ]
diff --git a/share/spack/gitlab/cloud_pipelines/configs/config.yaml b/share/spack/gitlab/cloud_pipelines/configs/config.yaml
index 590641e7467..97c909cc335 100644
--- a/share/spack/gitlab/cloud_pipelines/configs/config.yaml
+++ b/share/spack/gitlab/cloud_pipelines/configs/config.yaml
@@ -1,5 +1,7 @@
 config:
   db_lock_timeout: 120
+  shared_linking:
+    missing_library_policy: error
   install_tree:
     root: /home/software/spack
     padded_length: 256
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml
index 8bcef4d46f4..2835301fc81 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-rhel/spack.yaml
@@ -8,6 +8,10 @@ spack:
     reuse: false
     unify: false
 
+  config:
+    shared_linking:
+      missing_library_policy: ignore  # due to use of externals
+
   packages:
     all:
       prefer:
diff --git a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml
index e06d634a2c3..6a0c7745e9b 100644
--- a/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml
+++ b/share/spack/gitlab/cloud_pipelines/stacks/e4s-cray-sles/spack.yaml
@@ -8,6 +8,10 @@ spack:
     reuse: false
     unify: false
 
+  config:
+    shared_linking:
+      missing_library_policy: ignore  # due to use of externals
+
   packages:
     all:
       require: '%gcc'
diff --git a/var/spack/repos/builtin/packages/aqlprofile/package.py b/var/spack/repos/builtin/packages/aqlprofile/package.py
index f6975256857..981a72ceb8f 100644
--- a/var/spack/repos/builtin/packages/aqlprofile/package.py
+++ b/var/spack/repos/builtin/packages/aqlprofile/package.py
@@ -265,3 +265,6 @@ def install(self, spec, prefix):
 
     def setup_run_environment(self, env):
         env.prepend_path("LD_LIBRARY_PATH", self.spec["hsa-rocr-dev"].prefix.lib)
+
+    # This package is installed from binaries, and we haven't patched rpaths.
+    unresolved_libraries = ["*"]
diff --git a/var/spack/repos/builtin/packages/charliecloud/package.py b/var/spack/repos/builtin/packages/charliecloud/package.py
index 3d2b4dabc3a..671d1b676eb 100644
--- a/var/spack/repos/builtin/packages/charliecloud/package.py
+++ b/var/spack/repos/builtin/packages/charliecloud/package.py
@@ -174,3 +174,6 @@ def configure_args(self):
            args.append("--with-libsquashfuse={0}".format(squashfuse_prefix))
 
        return args
+
+    # libexec/charliecloud/sotest/bin/sotest is missing an rpath, but this shouldn't be problematic.
+    unresolved_libraries = ["libsotest.so.*"]
diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py
index ea7b314418d..264ce08e41b 100644
--- a/var/spack/repos/builtin/packages/cuda/package.py
+++ b/var/spack/repos/builtin/packages/cuda/package.py
@@ -818,3 +818,6 @@ def libs(self):
 
     # Avoid binding stub libraries by absolute path
     non_bindable_shared_objects = ["stubs"]
+
+    # contains precompiled binaries without rpaths
+    unresolved_libraries = ["*"]
diff --git a/var/spack/repos/builtin/packages/cudnn/package.py b/var/spack/repos/builtin/packages/cudnn/package.py
index 2d1a7b3b95e..507671442e9 100644
--- a/var/spack/repos/builtin/packages/cudnn/package.py
+++ b/var/spack/repos/builtin/packages/cudnn/package.py
@@ -399,3 +399,6 @@ def install(self, spec, prefix):
         target_include = os.path.join(prefix, "targets", "ppc64le-linux", "include")
         if os.path.isdir(target_include) and not os.path.isdir(prefix.include):
             symlink(target_include, prefix.include)
+
+    # contains precompiled binaries without rpaths
+    unresolved_libraries = ["*"]
diff --git a/var/spack/repos/builtin/packages/intel-oneapi-runtime/package.py b/var/spack/repos/builtin/packages/intel-oneapi-runtime/package.py
index 961a8e301cf..52e40a4ef6a 100644
--- a/var/spack/repos/builtin/packages/intel-oneapi-runtime/package.py
+++ b/var/spack/repos/builtin/packages/intel-oneapi-runtime/package.py
@@ -66,3 +66,9 @@ def libs(self):
     @property
     def headers(self):
         return HeaderList([])
+
+    # We expect dependencies between runtime libraries themselves to be resolved by rpaths in the
+    # dependent binaries. This means RUNPATH is currently unsupported. Supporting this is hard,
+    # because the only way to register the rpath is through patchelf, which itself depends on C++
+    # runtime libraries.
+    unresolved_libraries = ["libimf.so*", "libintlc.so*", "libsvml.so*"]
diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py
index 6495d8ae6fe..587ebe18bd5 100644
--- a/var/spack/repos/builtin/packages/julia/package.py
+++ b/var/spack/repos/builtin/packages/julia/package.py
@@ -420,3 +420,7 @@ def edit(self, spec, prefix):
 
         with open("Make.user", "w") as f:
             f.write("\n".join(options) + "\n")
+
+    # julia's sys/package images lack rpaths, but this is fine because julia dlopens them,
+    # at which point their dependencies are already loaded. ccalllazyfoo.so is from tests.
+    unresolved_libraries = ["libjulia.so.*", "libjulia-internal.so.*", "ccalllazyfoo.so"]
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
index 4aaac3446fc..6eb78c56dcc 100644
--- a/var/spack/repos/builtin/packages/llvm/package.py
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -1154,6 +1154,17 @@ def llvm_config(self, *args, **kwargs):
         else:
             return ret
+
+    @property
+    def unresolved_libraries(self):
+        # libomptarget at 14 and older has a hard-coded rpath that lacks hwloc's path
+        # https://github.com/llvm/llvm-project/commit/dc52712a063241bd0d3a0473b4e7ed870e41921f
+        if self.spec.satisfies("@:14 +libomptarget"):
+            return ["*"]
+
+        # TODO: for newer llvm there are still issues with runtimes for omp; we
+        # have to add rpaths to `bin/llvm-omp-*` and `share/gdb/python/ompd/ompdModule.so`.
+        return ["libpython*.so.*", "libomp.so*", "libomptarget*.so*", "libunwind.so.*"]
 
 
 def get_gcc_install_dir_flag(spec: Spec, compiler) -> Optional[str]:
     """Get the --gcc-install-dir=... flag, so that clang does not do a system scan for GCC."""
diff --git a/var/spack/repos/builtin/packages/openfoam/package.py b/var/spack/repos/builtin/packages/openfoam/package.py
index d3de3823f62..2edb1474b5b 100644
--- a/var/spack/repos/builtin/packages/openfoam/package.py
+++ b/var/spack/repos/builtin/packages/openfoam/package.py
@@ -894,6 +894,18 @@ def install_links(self):
         ]:
             os.symlink(f, os.path.basename(f))
 
+    # Executables like decomposePar require interface libraries for optional dependencies, but if
+    # the dependency is missing, a dummy library is used and put in lib/dummy. Allow this until
+    # https://develop.openfoam.com/Development/openfoam/-/issues/3283 is resolved.
+    unresolved_libraries = [
+        "libkahipDecomp.so",
+        "libmetisDecomp.so",
+        "libMGridGen.so",
+        "libPstream.so",
+        "libptscotchDecomp.so",
+        "libscotchDecomp.so",
+    ]
+
 
 # -----------------------------------------------------------------------------
diff --git a/var/spack/repos/builtin/packages/openjdk/package.py b/var/spack/repos/builtin/packages/openjdk/package.py
index bcaec85a861..4f9d15f29fa 100644
--- a/var/spack/repos/builtin/packages/openjdk/package.py
+++ b/var/spack/repos/builtin/packages/openjdk/package.py
@@ -555,3 +555,6 @@ def setup_dependent_run_environment(self, env, dependent_spec):
     # fix that prevents us from modifying the soname of libjvm.so. If we move
     # to source builds this should be possible.
     non_bindable_shared_objects = ["libjvm.so"]
+
+    # contains precompiled binaries without rpaths
+    unresolved_libraries = ["*"]
diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py
index 8b608e04d21..5ce66723c3a 100644
--- a/var/spack/repos/builtin/packages/perl/package.py
+++ b/var/spack/repos/builtin/packages/perl/package.py
@@ -31,6 +31,9 @@ class Perl(Package):
     # Perl doesn't use Autotools, it should subclass Package
     executables = [r"^perl(-?\d+.*)?$"]
 
+    # TODO: resolve the circular dependency between perl and libxcrypt.
+    unresolved_libraries = ["libcrypt.so.*"]
+
     # see https://www.cpan.org/src/README.html for
     # explanation of version numbering scheme
diff --git a/var/spack/repos/builtin/packages/py-pyzmq/package.py b/var/spack/repos/builtin/packages/py-pyzmq/package.py
index 2946cf86719..c60d6f570dd 100644
--- a/var/spack/repos/builtin/packages/py-pyzmq/package.py
+++ b/var/spack/repos/builtin/packages/py-pyzmq/package.py
@@ -12,16 +12,6 @@ class PyPyzmq(PythonPackage):
     homepage = "https://github.com/zeromq/pyzmq"
     pypi = "pyzmq/pyzmq-22.3.0.tar.gz"
 
-    skip_modules = [
-        # Requires zmq.backend.cffi._cffi
-        "zmq.backend.cffi",
-        # Requires tornado
-        "zmq.eventloop",
-        "zmq.green.eventloop",
-        # Requires pytest
-        "zmq.tests",
-    ]
-
     license("BSD-3-Clause")
 
     version("26.2.0", sha256="070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f")
@@ -98,12 +88,9 @@ def setup(self):
             )
         )
 
-    def setup_build_environment(self, env):
-        # Needed for `spack install --test=root py-pyzmq`
-        # Fixes import failure for zmq.backend.cffi
-        # https://github.com/zeromq/pyzmq/issues/395#issuecomment-22041019
-        env.prepend_path("C_INCLUDE_PATH", self.spec["libzmq"].headers.directories[0])
-        env.prepend_path("LIBRARY_PATH", self.spec["libzmq"].libs.directories[0])
-
-    # Needed for `spack test run py-pyzmq`
-    setup_run_environment = setup_build_environment
+    @property
+    def import_modules(self):
+        # Importing zmq mutates the install prefix, meaning spack install --test=root py-pyzmq
+        # would result in a different install prefix than spack install py-pyzmq. Therefore do not
+        # run any import tests.
+        return [] if self.spec.satisfies("@:20") else ["zmq"]
diff --git a/var/spack/repos/builtin/packages/py-torch-nvidia-apex/package.py b/var/spack/repos/builtin/packages/py-torch-nvidia-apex/package.py
index a145e1b3ad4..c24ce306c3b 100644
--- a/var/spack/repos/builtin/packages/py-torch-nvidia-apex/package.py
+++ b/var/spack/repos/builtin/packages/py-torch-nvidia-apex/package.py
@@ -64,7 +64,7 @@ class PyTorchNvidiaApex(PythonPackage, CudaPackage):
     depends_on("py-setuptools")
     depends_on("py-packaging")
     depends_on("py-pip")
-    with default_args(type=("build", "run")):
+    with default_args(type=("build", "link", "run")):
         depends_on("python@3:")
         depends_on("py-torch@0.4:")
         for _arch in CudaPackage.cuda_arch_values:
diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py
index d7226b16c9b..2342fac40b8 100644
--- a/var/spack/repos/builtin/packages/rust/package.py
+++ b/var/spack/repos/builtin/packages/rust/package.py
@@ -210,3 +210,6 @@ def build(self, spec, prefix):
 
     def install(self, spec, prefix):
         python("./x.py", "install")
+
+    # known issue: https://github.com/rust-lang/rust/issues/132604
+    unresolved_libraries = ["libz.so.*"]
diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py
index 9081b5965ad..c6262f10ae3 100644
--- a/var/spack/repos/builtin/packages/tau/package.py
+++ b/var/spack/repos/builtin/packages/tau/package.py
@@ -602,3 +602,6 @@ def test_rocm(self):
         ):
             rocm_test_dir = join_path(self.test_suite.current_test_cache_dir, self.rocm_test)
             self._run_rocm_test("test_rocm", "Testing rocm", rocm_test_dir)
+
+    # tau contains various prebuilt binaries with missing system dependencies
+    unresolved_libraries = ["*"]
diff --git a/var/spack/repos/builtin/packages/visit/package.py b/var/spack/repos/builtin/packages/visit/package.py
index a3c2ea531a8..87e65ecdd65 100644
--- a/var/spack/repos/builtin/packages/visit/package.py
+++ b/var/spack/repos/builtin/packages/visit/package.py
@@ -411,3 +411,6 @@ def determine_version(cls, exe):
         output = Executable(exe)("-version", output=str, error=str)
         match = re.search(r"\s*(\d[\d\.]+)\.", output)
        return match.group(1) if match else None
+
+    # see https://github.com/visit-dav/visit/issues/20055
+    unresolved_libraries = ["*"]
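
Note: with this change in place, a user or CI configuration scope can opt in to the strict behavior via the new option added above. A minimal sketch of such a scope (any of error, warn, or ignore is accepted, per the schema change):

config:
  shared_linking:
    missing_library_policy: error

Packages that intentionally ship binaries without usable rpaths can still install cleanly by listing the affected library names (or "*") in their unresolved_libraries attribute, as the package edits above illustrate.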