Improve setup of build / run / test environment (#35737)

This adds a `SetupContext` class, which is responsible for setting
`package.py` module globals and for computing the changes to environment
variables for the build, test, or run context.
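
As a rough illustration, here is a minimal sketch of the intended use, based on
`setup_package` in the diff below (`pkg` is assumed to be a concrete package
instance):

```python
# Minimal sketch; names are taken from the diff below, `pkg` is assumed given.
from spack.build_environment import SetupContext
from spack.context import Context
from spack.util.environment import EnvironmentModifications

setup_context = SetupContext(pkg.spec, context=Context.BUILD)
setup_context.set_all_package_py_globals()  # set package.py globals first
env_mods = EnvironmentModifications()
env_mods.extend(setup_context.get_env_modifications())  # then collect env changes
```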

The class uses `effective_deptypes`, which takes a list of specs (e.g. a
single spec to build, or a list of environment roots) and a context
(build, run, test), and outputs a flat list of specs that affect the
environment, together with a flag describing in what way they do so. This
list is topologically ordered from root to leaf, so that dependents are
guaranteed to override variables set by dependencies, not the other way
around.
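
For example, the output can be consumed as in the following sketch (function and
flag names as introduced in the diff below; `spec` is assumed to be a
concretized spec):

```python
# Sketch: classify the nodes that affect a build environment.
from spack.build_environment import UseMode, effective_deptypes
from spack.context import Context

for node, mode in effective_deptypes(spec, context=Context.BUILD):
    if mode & (UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE):
        print(f"{node.name}: bin/ directories go on PATH")
    if mode & (UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE):
        print(f"{node.name}: setup_run_environment is called")
```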

This replaces the logic in `modifications_from_dependencies`, which had
several issues: missing calls to `setup_run_environment`, and an incorrect
order in which operations were applied.

Further, it should improve performance a bit in certain cases, since
`effective_deptypes` runs in O(v + e) time, whereas `spack env activate`
could previously take up to O(v^2 + e) time due to loops over roots. Each
edge in the DAG is now visited once, by calling `effective_deptypes` with
`env.concrete_roots()`.

By marking and propagating flags through the DAG, this commit also fixes
a bug where Spack wouldn't call `setup_run_environment` for runtime
dependencies of link dependencies. It also ensures that Spack correctly
sets up the runtime environment of direct build dependencies.

Regarding test dependencies: in a build context they are build-time test
deps, whereas in a test context they are install-time test deps. Since
there is no way to distinguish between the two kinds of test deps, they
are treated as both.
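
Concretely, the edge types followed from the root differ per context; a small
sketch mirroring `EnvironmentVisitor` in the diff below:

```python
import spack.deptypes as dt
from spack.context import Context

# Dependency types followed from the root spec, per context.
root_depflag = {
    Context.BUILD: dt.BUILD | dt.TEST | dt.LINK,  # test deps act as build-time test deps
    Context.TEST: dt.TEST | dt.RUN | dt.LINK,     # test deps act as install-time test deps
    Context.RUN: dt.RUN | dt.LINK,
}
```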

Further changes:

- all `package.py` module globals are guaranteed to be set before any of the
  `setup_(dependent)_(run|build)_env` functions is called
- traversal order during setup: first the group of externals, then the group
  of non-externals, with specs in each group traversed topologically
  (dependencies are set up before dependents)
- modules: only ever call `setup_dependent_run_environment` on *direct* link/run
  type deps
- the marker in `set_module_variables_for_package` is dropped, since we should
  call the method once per spec. This allows us to set only a cheap subset of
  globals on the module: for example, it's not necessary to compute the expensive
  `cmake_args` and the like if the spec under consideration is not the root node
  to be built.
- `spack load`'s `--only` is deprecated (it has no effect now), and `spack load x`
  now means: do everything that's required for `x` to work at runtime, which
  requires runtime deps to be set up -- just like `spack env activate` (see the
  sketch after this list).
- `spack load` no longer loads build deps (of build deps) ...
- `spack env activate` on partially installed or broken environments: this is all
  or nothing now. If some spec errors during setup of its runtime env, you'll only
  get the unconditional variables, plus a warning that the runtime changes for
  specs couldn't be applied.
- Remove traversal in the upward direction from `setup_dependent_*` in packages.
  Upward traversal may iterate to specs that aren't children of the roots
  (e.g. zlib / python have hundreds of dependents, of which only a small fraction
  is reachable from the roots). Packages should only modify the direct dependent
  they receive as an argument.
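
Both `spack load` and `spack env activate` now funnel through the same helper; a
minimal sketch of its use (a list of concrete `specs` is assumed, as in the
`spack load` diff below):

```python
import spack.user_environment as uenv

# Prefix inspections plus setup_(dependent_)run_environment for all link/run
# deps of the roots, applied leaf-to-root so that dependents win.
env_mod = uenv.environment_modifications_for_specs(*specs)
print(env_mod.shell_modifications("sh"))
```
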
Harmen Stoppels, 2023-10-19 20:44:05 +02:00, committed by GitHub
commit 72b36ac144 (parent 79896ee85c)
22 changed files with 544 additions and 395 deletions


@ -446,16 +446,11 @@ def ensure_executables_in_path_or_raise(
current_bootstrapper.last_search["spec"],
current_bootstrapper.last_search["command"],
)
env_mods = spack.util.environment.EnvironmentModifications()
for dep in concrete_spec.traverse(
root=True, order="post", deptype=("link", "run")
):
env_mods.extend(
spack.user_environment.environment_modifications_for_spec(
dep, set_package_py_globals=False
)
cmd.add_default_envmod(
spack.user_environment.environment_modifications_for_specs(
concrete_spec, set_package_py_globals=False
)
cmd.add_default_envmod(env_mods)
)
return cmd
assert exception_handler, (


@ -40,12 +40,15 @@
import sys
import traceback
import types
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import List, Tuple
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import join_path
from llnl.util.lang import dedupe
from llnl.util.lang import dedupe, stable_partition
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd
@ -55,17 +58,21 @@
import spack.build_systems.python
import spack.builder
import spack.config
import spack.deptypes as dt
import spack.main
import spack.package_base
import spack.paths
import spack.platforms
import spack.repo
import spack.schema.environment
import spack.spec
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.path
import spack.util.pattern
from spack import traverse
from spack.context import Context
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
@ -76,7 +83,6 @@
env_flag,
filter_system_paths,
get_path,
inspect_path,
is_system_path,
validate,
)
@ -109,7 +115,6 @@
SPACK_CCACHE_BINARY = "SPACK_CCACHE_BINARY"
SPACK_SYSTEM_DIRS = "SPACK_SYSTEM_DIRS"
# Platform-specific library suffix.
if sys.platform == "darwin":
dso_suffix = "dylib"
@ -406,19 +411,13 @@ def set_compiler_environment_variables(pkg, env):
def set_wrapper_variables(pkg, env):
"""Set environment variables used by the Spack compiler wrapper
(which have the prefix `SPACK_`) and also add the compiler wrappers
to PATH.
"""Set environment variables used by the Spack compiler wrapper (which have the prefix
`SPACK_`) and also add the compiler wrappers to PATH.
This determines the injected -L/-I/-rpath options; each
of these specifies a search order and this function computes these
options in a manner that is intended to match the DAG traversal order
in `modifications_from_dependencies`: that method uses a post-order
traversal so that `PrependPath` actions from dependencies take lower
precedence; we use a post-order traversal here to match the visitation
order of `modifications_from_dependencies` (so we are visiting the
lowest priority packages first).
"""
This determines the injected -L/-I/-rpath options; each of these specifies a search order and
this function computes these options in a manner that is intended to match the DAG traversal
order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
is using topo order."""
# Set environment variables if specified for
# the given compiler
compiler = pkg.compiler
@ -537,45 +536,42 @@ def update_compiler_args_for_dep(dep):
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
def set_module_variables_for_package(pkg):
def set_package_py_globals(pkg, context: Context = Context.BUILD):
"""Populate the Python module of a package with some useful global names.
This makes things easier for package writers.
"""
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
marker = "_set_run_already_called"
if getattr(pkg.module, marker, False):
return
module = ModuleChangePropagator(pkg)
jobs = determine_number_of_jobs(parallel=pkg.parallel)
m = module
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable("make", jobs)
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
m.nmake = Executable("nmake")
m.msbuild = Executable("msbuild")
# analog to configure for win32
m.cscript = Executable("cscript")
if context == Context.BUILD:
jobs = determine_number_of_jobs(parallel=pkg.parallel)
m.make_jobs = jobs
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable("./configure")
# TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable("make", jobs)
m.gmake = MakeExecutable("gmake", jobs)
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
m.nmake = Executable("nmake")
m.msbuild = Executable("msbuild")
# analog to configure for win32
m.cscript = Executable("cscript")
# Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable("./configure")
# Put spack compiler paths in module scope.
# Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
# Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc, so don't restrict it to context == build)
link_dir = spack.paths.build_env_path
m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
@ -599,9 +595,6 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
m.static_to_shared_library = static_to_shared_library
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
setattr(m, marker, True)
module.propagate_changes_to_mro()
@ -727,12 +720,15 @@ def load_external_modules(pkg):
load_module(external_module)
def setup_package(pkg, dirty, context="build"):
def setup_package(pkg, dirty, context: Context = Context.BUILD):
"""Execute all environment setup routines."""
if context not in ["build", "test"]:
raise ValueError("'context' must be one of ['build', 'test'] - got: {0}".format(context))
if context not in (Context.BUILD, Context.TEST):
raise ValueError(f"'context' must be Context.BUILD or Context.TEST - got {context}")
set_module_variables_for_package(pkg)
# First populate the package.py's module with the relevant globals that could be used in any
# of the setup_* functions.
setup_context = SetupContext(pkg.spec, context=context)
setup_context.set_all_package_py_globals()
# Keep track of env changes from packages separately, since we want to
# issue warnings when packages make "suspicious" modifications.
@ -740,13 +736,15 @@ def setup_package(pkg, dirty, context="build"):
env_mods = EnvironmentModifications()
# setup compilers for build contexts
need_compiler = context == "build" or (context == "test" and pkg.test_requires_compiler)
need_compiler = context == Context.BUILD or (
context == Context.TEST and pkg.test_requires_compiler
)
if need_compiler:
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
tty.debug("setup_package: grabbing modifications from dependencies")
env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
env_mods.extend(setup_context.get_env_modifications())
tty.debug("setup_package: collected all modifications from dependencies")
# architecture specific setup
@ -754,7 +752,7 @@ def setup_package(pkg, dirty, context="build"):
target = platform.target(pkg.spec.architecture.target)
platform.setup_platform_environment(pkg, env_mods)
if context == "build":
if context == Context.BUILD:
tty.debug("setup_package: setup build environment for root")
builder = spack.builder.create(pkg)
builder.setup_build_environment(env_mods)
@ -765,16 +763,7 @@ def setup_package(pkg, dirty, context="build"):
"config to assume that the package is part of the system"
" includes and omit it when invoked with '--cflags'."
)
elif context == "test":
tty.debug("setup_package: setup test environment for root")
env_mods.extend(
inspect_path(
pkg.spec.prefix,
spack.user_environment.prefix_inspections(pkg.spec.platform),
exclude=is_system_path,
)
)
pkg.setup_run_environment(env_mods)
elif context == Context.TEST:
env_mods.prepend_path("PATH", ".")
# First apply the clean environment changes
@ -813,158 +802,245 @@ def setup_package(pkg, dirty, context="build"):
return env_base
def _make_runnable(pkg, env):
# Helper method which prepends a Package's bin/ prefix to the PATH
# environment variable
prefix = pkg.prefix
class EnvironmentVisitor:
def __init__(self, *roots: spack.spec.Spec, context: Context):
# For the roots (well, marked specs) we follow different edges
# than for their deps, depending on the context.
self.root_hashes = set(s.dag_hash() for s in roots)
for dirname in ["bin", "bin64"]:
bin_dir = os.path.join(prefix, dirname)
if os.path.isdir(bin_dir):
env.prepend_path("PATH", bin_dir)
if context == Context.BUILD:
# Drop direct run deps in build context
# We don't really distinguish between install and build time test deps,
# so we include them here as build-time test deps.
self.root_depflag = dt.BUILD | dt.TEST | dt.LINK
elif context == Context.TEST:
# This is more of an extended run environment
self.root_depflag = dt.TEST | dt.RUN | dt.LINK
elif context == Context.RUN:
self.root_depflag = dt.RUN | dt.LINK
def neighbors(self, item):
spec = item.edge.spec
if spec.dag_hash() in self.root_hashes:
depflag = self.root_depflag
else:
depflag = dt.LINK | dt.RUN
return traverse.sort_edges(spec.edges_to_dependencies(depflag=depflag))
def modifications_from_dependencies(
spec, context, custom_mods_only=True, set_package_py_globals=True
):
"""Returns the environment modifications that are required by
the dependencies of a spec and also applies modifications
to this spec's package at module scope, if need be.
class UseMode(Flag):
#: Entrypoint spec (a spec to be built; an env root, etc)
ROOT = auto()
Environment modifications include:
#: A spec used at runtime, but no executables in PATH
RUNTIME = auto()
- Updating PATH so that executables can be found
- Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
tools can find Spack-built dependencies
- Running custom package environment modifications
#: A spec used at runtime, with executables in PATH
RUNTIME_EXECUTABLE = auto()
Custom package modifications can conflict with the default PATH changes
we make (specifically for the PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH
environment variables), so this applies changes in a fixed order:
#: A spec that's a direct build or test dep
BUILDTIME_DIRECT = auto()
- All modifications (custom and default) from external deps first
- All modifications from non-external deps afterwards
#: A spec that should be visible in search paths in a build env.
BUILDTIME = auto()
With that order, `PrependPath` actions from non-external default
environment modifications will take precedence over custom modifications
from external packages.
#: Flag is set when the (node, mode) is finalized
ADDED = auto()
A secondary constraint is that custom and default modifications are
grouped on a per-package basis: combined with the post-order traversal this
means that default modifications of dependents can override custom
modifications of dependencies (again, this would only occur for PATH,
CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).
Args:
spec (spack.spec.Spec): spec for which we want the modifications
context (str): either 'build' for build-time modifications or 'run'
for run-time modifications
custom_mods_only (bool): if True returns only custom modifications, if False
returns custom and default modifications
set_package_py_globals (bool): whether or not to set the global variables in the
package.py files (this may be problematic when using buildcaches that have
been built on a different but compatible OS)
"""
if context not in ["build", "run", "test"]:
raise ValueError(
"Expecting context to be one of ['build', 'run', 'test'], " "got: {0}".format(context)
def effective_deptypes(
*specs: spack.spec.Spec, context: Context = Context.BUILD
) -> List[Tuple[spack.spec.Spec, UseMode]]:
"""Given a list of input specs and a context, return a list of tuples of
all specs that contribute to (environment) modifications, together with
a flag specifying in what way they do so. The list is ordered topologically
from root to leaf, meaning that environment modifications should be applied
in reverse so that dependents override dependencies, not the other way around."""
visitor = traverse.TopoVisitor(
EnvironmentVisitor(*specs, context=context),
key=lambda x: x.dag_hash(),
root=True,
all_edges=True,
)
traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)
# Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
use_modes = defaultdict(lambda: UseMode(0))
nodes_with_type = []
for edge in visitor.edges:
parent, child, depflag = edge.parent, edge.spec, edge.depflag
# Mark the starting point
if parent is None:
use_modes[child] = UseMode.ROOT
continue
parent_mode = use_modes[parent]
# Nothing to propagate.
if not parent_mode:
continue
# Depending on the context, include particular deps from the root.
if UseMode.ROOT & parent_mode:
if context == Context.BUILD:
if (dt.BUILD | dt.TEST) & depflag:
use_modes[child] |= UseMode.BUILDTIME_DIRECT
if dt.LINK & depflag:
use_modes[child] |= UseMode.BUILDTIME
elif context == Context.TEST:
if (dt.RUN | dt.TEST) & depflag:
use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
elif dt.LINK & depflag:
use_modes[child] |= UseMode.RUNTIME
elif context == Context.RUN:
if dt.RUN & depflag:
use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
elif dt.LINK & depflag:
use_modes[child] |= UseMode.RUNTIME
# Propagate RUNTIME and RUNTIME_EXECUTABLE through link and run deps.
if (UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE | UseMode.BUILDTIME_DIRECT) & parent_mode:
if dt.LINK & depflag:
use_modes[child] |= UseMode.RUNTIME
if dt.RUN & depflag:
use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
# Propagate BUILDTIME through link deps.
if UseMode.BUILDTIME & parent_mode:
if dt.LINK & depflag:
use_modes[child] |= UseMode.BUILDTIME
# Finalize the spec; the invariant is that all in-edges are processed
# before out-edges, meaning that parent is done.
if not (UseMode.ADDED & parent_mode):
use_modes[parent] |= UseMode.ADDED
nodes_with_type.append((parent, parent_mode))
# Attach the leaf nodes, since we only added nodes with out-edges.
for spec, parent_mode in use_modes.items():
if parent_mode and not (UseMode.ADDED & parent_mode):
nodes_with_type.append((spec, parent_mode))
return nodes_with_type
class SetupContext:
"""This class encapsulates the logic to determine environment modifications, and is used as
well to set globals in modules of package.py."""
def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
"""Construct a ModificationsFromDag object.
Args:
specs: single root spec for build/test context, possibly more for run context
context: build, run, or test"""
if (context == Context.BUILD or context == Context.TEST) and not len(specs) == 1:
raise ValueError("Cannot setup build environment for multiple specs")
specs_with_type = effective_deptypes(*specs, context=context)
self.specs = specs
self.context = context
self.external: List[Tuple[spack.spec.Spec, UseMode]]
self.nonexternal: List[Tuple[spack.spec.Spec, UseMode]]
# Reverse so we go from leaf to root
self.nodes_in_subdag = set(id(s) for s, _ in specs_with_type)
# Split into non-external and external, maintaining topo order per group.
self.external, self.nonexternal = stable_partition(
reversed(specs_with_type), lambda t: t[0].external
)
self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE
self.should_setup_run_env = UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT
env = EnvironmentModifications()
if context == Context.RUN or context == Context.TEST:
self.should_be_runnable |= UseMode.ROOT
self.should_setup_run_env |= UseMode.ROOT
# Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this
# function; these sets form the building blocks of those collections.
build_deps = set(spec.dependencies(deptype=("build", "test")))
link_deps = set(spec.traverse(root=False, deptype="link"))
build_link_deps = build_deps | link_deps
build_and_supporting_deps = set()
for build_dep in build_deps:
build_and_supporting_deps.update(build_dep.traverse(deptype="run"))
run_and_supporting_deps = set(spec.traverse(root=False, deptype=("run", "link")))
test_and_supporting_deps = set()
for test_dep in set(spec.dependencies(deptype="test")):
test_and_supporting_deps.update(test_dep.traverse(deptype="run"))
# Everything that calls setup_run_environment and setup_dependent_* needs globals set.
self.should_set_package_py_globals = (
self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT
)
# In a build context, the root and direct build deps need build-specific globals set.
self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT
# All dependencies that might have environment modifications to apply
custom_mod_deps = set()
if context == "build":
custom_mod_deps.update(build_and_supporting_deps)
# Tests may be performed after build
custom_mod_deps.update(test_and_supporting_deps)
else:
# test/run context
custom_mod_deps.update(run_and_supporting_deps)
if context == "test":
custom_mod_deps.update(test_and_supporting_deps)
custom_mod_deps.update(link_deps)
def set_all_package_py_globals(self):
"""Set the globals in modules of package.py files."""
for dspec, flag in chain(self.external, self.nonexternal):
pkg = dspec.package
# Determine 'exe_deps': the set of packages with binaries we want to use
if context == "build":
exe_deps = build_and_supporting_deps | test_and_supporting_deps
elif context == "run":
exe_deps = set(spec.traverse(deptype="run"))
elif context == "test":
exe_deps = test_and_supporting_deps
if self.should_set_package_py_globals & flag:
if self.context == Context.BUILD and self.needs_build_context & flag:
set_package_py_globals(pkg, context=Context.BUILD)
else:
# This includes runtime dependencies, also runtime deps of direct build deps.
set_package_py_globals(pkg, context=Context.RUN)
def default_modifications_for_dep(dep):
if dep in build_link_deps and not is_system_path(dep.prefix) and context == "build":
prefix = dep.prefix
for spec in dspec.dependents():
# Note: some specs have dependents that are unreachable from the root, so avoid
# setting globals for those.
if id(spec) not in self.nodes_in_subdag:
continue
dependent_module = ModuleChangePropagator(spec.package)
pkg.setup_dependent_package(dependent_module, spec)
dependent_module.propagate_changes_to_mro()
env.prepend_path("CMAKE_PREFIX_PATH", prefix)
def get_env_modifications(self) -> EnvironmentModifications:
"""Returns the environment variable modifications for the given input specs and context.
Environment modifications include:
- Updating PATH for packages that are required at runtime
- Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
tools can find Spack-built dependencies (when context=build)
- Running custom package environment modifications (setup_run_environment,
setup_dependent_build_environment, setup_dependent_run_environment)
for directory in ("lib", "lib64", "share"):
pcdir = os.path.join(prefix, directory, "pkgconfig")
if os.path.isdir(pcdir):
env.prepend_path("PKG_CONFIG_PATH", pcdir)
The (partial) order imposed on the specs is externals first, then topological
from leaf to root. That way externals cannot contribute search paths that would shadow
Spack's prefixes, and dependents override variables set by dependencies."""
env = EnvironmentModifications()
for dspec, flag in chain(self.external, self.nonexternal):
tty.debug(f"Adding env modifications for {dspec.name}")
pkg = dspec.package
if dep in exe_deps and not is_system_path(dep.prefix):
_make_runnable(dep, env)
if self.should_setup_dependent_build_env & flag:
self._make_buildtime_detectable(dspec, env)
def add_modifications_for_dep(dep):
tty.debug("Adding env modifications for {0}".format(dep.name))
# Some callers of this function only want the custom modifications.
# For callers that want both custom and default modifications, we want
# to perform the default modifications here (this groups custom
# and default modifications together on a per-package basis).
if not custom_mods_only:
default_modifications_for_dep(dep)
for spec in self.specs:
builder = spack.builder.create(pkg)
builder.setup_dependent_build_environment(env, spec)
# Perform custom modifications here (PrependPath actions performed in
# the custom method override the default environment modifications
# we do to help the build, namely for PATH, CMAKE_PREFIX_PATH, and
# PKG_CONFIG_PATH)
if dep in custom_mod_deps:
dpkg = dep.package
if set_package_py_globals:
set_module_variables_for_package(dpkg)
if self.should_be_runnable & flag:
self._make_runnable(dspec, env)
current_module = ModuleChangePropagator(spec.package)
dpkg.setup_dependent_package(current_module, spec)
current_module.propagate_changes_to_mro()
if self.should_setup_run_env & flag:
# TODO: remove setup_dependent_run_environment...
for spec in dspec.dependents(deptype=dt.RUN):
if id(spec) in self.nodes_in_subdag:
pkg.setup_dependent_run_environment(env, spec)
pkg.setup_run_environment(env)
return env
if context == "build":
builder = spack.builder.create(dpkg)
builder.setup_dependent_build_environment(env, spec)
else:
dpkg.setup_dependent_run_environment(env, spec)
tty.debug("Added env modifications for {0}".format(dep.name))
def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
if is_system_path(dep.prefix):
return
# Note that we want to perform environment modifications in a fixed order.
# The Spec.traverse method provides this: i.e. in addition to
# the post-order semantics, it also guarantees a fixed traversal order
# among dependencies which are not constrained by post-order semantics.
for dspec in spec.traverse(root=False, order="post"):
if dspec.external:
add_modifications_for_dep(dspec)
env.prepend_path("CMAKE_PREFIX_PATH", dep.prefix)
for d in ("lib", "lib64", "share"):
pcdir = os.path.join(dep.prefix, d, "pkgconfig")
if os.path.isdir(pcdir):
env.prepend_path("PKG_CONFIG_PATH", pcdir)
for dspec in spec.traverse(root=False, order="post"):
# Default env modifications for non-external packages can override
# custom modifications of external packages (this can only occur
# for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH)
if not dspec.external:
add_modifications_for_dep(dspec)
def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
if is_system_path(dep.prefix):
return
return env
for d in ("bin", "bin64"):
bin_dir = os.path.join(dep.prefix, d)
if os.path.isdir(bin_dir):
env.prepend_path("PATH", bin_dir)
def get_cmake_prefix_path(pkg):
@ -996,7 +1072,7 @@ def get_cmake_prefix_path(pkg):
def _setup_pkg_and_run(
serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
):
context = kwargs.get("context", "build")
context: str = kwargs.get("context", "build")
try:
# We are in the child process. Python sets sys.stdin to
@ -1012,7 +1088,7 @@ def _setup_pkg_and_run(
if not kwargs.get("fake", False):
kwargs["unmodified_env"] = os.environ.copy()
kwargs["env_modifications"] = setup_package(
pkg, dirty=kwargs.get("dirty", False), context=context
pkg, dirty=kwargs.get("dirty", False), context=Context.from_string(context)
)
return_value = function(pkg, kwargs)
write_pipe.send(return_value)


@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
from spack.context import Context
description = (
"run a command in a spec's install environment, or dump its environment to screen or file"
@ -14,4 +15,4 @@
def build_env(parser, args):
env_utility.emulate_env_utility("build-env", "build", args)
env_utility.emulate_env_utility("build-env", Context.BUILD, args)


@ -7,7 +7,6 @@
import llnl.util.tty as tty
import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
@ -15,7 +14,8 @@
import spack.paths
import spack.spec
import spack.store
from spack import traverse
from spack import build_environment, traverse
from spack.context import Context
from spack.util.environment import dump_environment, pickle_environment
@ -42,14 +42,14 @@ def setup_parser(subparser):
class AreDepsInstalledVisitor:
def __init__(self, context="build"):
if context not in ("build", "test"):
raise ValueError("context can only be build or test")
if context == "build":
def __init__(self, context: Context = Context.BUILD):
if context == Context.BUILD:
# TODO: run deps shouldn't be required for build env.
self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
else:
elif context == Context.TEST:
self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
else:
raise ValueError("context can only be Context.BUILD or Context.TEST")
self.has_uninstalled_deps = False
@ -76,7 +76,7 @@ def neighbors(self, item):
return item.edge.spec.edges_to_dependencies(depflag=depflag)
def emulate_env_utility(cmd_name, context, args):
def emulate_env_utility(cmd_name, context: Context, args):
if not args.spec:
tty.die("spack %s requires a spec." % cmd_name)
@ -120,7 +120,7 @@ def emulate_env_utility(cmd_name, context, args):
hashes=True,
# This shows more than necessary, but we cannot dynamically change deptypes
# in Spec.tree(...).
deptypes="all" if context == "build" else ("build", "test", "link", "run"),
deptypes="all" if context == Context.BUILD else ("build", "test", "link", "run"),
),
)


@ -5,6 +5,8 @@
import sys
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.find
@ -108,16 +110,14 @@ def load(parser, args):
)
return 1
with spack.store.STORE.db.read_transaction():
if "dependencies" in args.things_to_load:
include_roots = "package" in args.things_to_load
specs = [
dep for spec in specs for dep in spec.traverse(root=include_roots, order="post")
]
if args.things_to_load != "package,dependencies":
tty.warn(
"The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
)
env_mod = spack.util.environment.EnvironmentModifications()
with spack.store.STORE.db.read_transaction():
env_mod = uenv.environment_modifications_for_specs(*specs)
for spec in specs:
env_mod.extend(uenv.environment_modifications_for_spec(spec))
env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
cmds = env_mod.shell_modifications(args.shell)


@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
from spack.context import Context
description = (
"run a command in a spec's test environment, or dump its environment to screen or file"
@ -14,4 +15,4 @@
def test_env(parser, args):
env_utility.emulate_env_utility("test-env", "test", args)
env_utility.emulate_env_utility("test-env", Context.TEST, args)


@ -88,9 +88,8 @@ def unload(parser, args):
)
return 1
env_mod = spack.util.environment.EnvironmentModifications()
env_mod = uenv.environment_modifications_for_specs(*specs).reversed()
for spec in specs:
env_mod.extend(uenv.environment_modifications_for_spec(spec).reversed())
env_mod.remove_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
cmds = env_mod.shell_modifications(args.shell)


@ -0,0 +1,29 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module provides classes used in user and build environment"""
from enum import Enum
class Context(Enum):
"""Enum used to indicate the context in which an environment has to be setup: build,
run or test."""
BUILD = 1
RUN = 2
TEST = 3
def __str__(self):
return ("build", "run", "test")[self.value - 1]
@classmethod
def from_string(cls, s: str):
if s == "build":
return Context.BUILD
elif s == "run":
return Context.RUN
elif s == "test":
return Context.TEST
raise ValueError(f"context should be one of 'build', 'run', 'test', got {s}")


@ -1690,41 +1690,18 @@ def check_views(self):
"Loading the environment view will require reconcretization." % self.name
)
def _env_modifications_for_view(self, view: ViewDescriptor, reverse: bool = False):
all_mods = spack.util.environment.EnvironmentModifications()
visited = set()
errors = []
for root_spec in self.concrete_roots():
if root_spec in view and root_spec.installed and root_spec.package:
for spec in root_spec.traverse(deptype="run", root=True):
if spec.name in visited:
# It is expected that only one instance of the package
# can be added to the environment - do not attempt to
# add multiple.
tty.debug(
"Not adding {0} to shell modifications: "
"this package has already been added".format(
spec.format("{name}/{hash:7}")
)
)
continue
else:
visited.add(spec.name)
try:
mods = uenv.environment_modifications_for_spec(spec, view)
except Exception as e:
msg = "couldn't get environment settings for %s" % spec.format(
"{name}@{version} /{hash:7}"
)
errors.append((msg, str(e)))
continue
all_mods.extend(mods.reversed() if reverse else mods)
return all_mods, errors
def _env_modifications_for_view(
self, view: ViewDescriptor, reverse: bool = False
) -> spack.util.environment.EnvironmentModifications:
try:
mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view)
except Exception as e:
# Failing to setup spec-specific changes shouldn't be a hard error.
tty.warn(
"couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e)
)
return spack.util.environment.EnvironmentModifications()
return mods.reversed() if reverse else mods
def add_view_to_env(
self, env_mod: spack.util.environment.EnvironmentModifications, view: str
@ -1740,12 +1717,7 @@ def add_view_to_env(
return env_mod
env_mod.extend(uenv.unconditional_environment_modifications(descriptor))
mods, errors = self._env_modifications_for_view(descriptor)
env_mod.extend(mods)
if errors:
for err in errors:
tty.warn(*err)
env_mod.extend(self._env_modifications_for_view(descriptor))
# deduplicate paths from specs mapped to the same location
for env_var in env_mod.group_by_name():
@ -1767,9 +1739,7 @@ def rm_view_from_env(
return env_mod
env_mod.extend(uenv.unconditional_environment_modifications(descriptor).reversed())
mods, _ = self._env_modifications_for_view(descriptor, reverse=True)
env_mod.extend(mods)
env_mod.extend(self._env_modifications_for_view(descriptor, reverse=True))
return env_mod


@ -56,6 +56,7 @@
import spack.util.file_permissions as fp
import spack.util.path
import spack.util.spack_yaml as syaml
from spack.context import Context
#: config section for this file
@ -717,10 +718,16 @@ def environment_modifications(self):
)
# Let the extendee/dependency modify their extensions/dependencies
# before asking for package-specific modifications
env.extend(spack.build_environment.modifications_from_dependencies(spec, context="run"))
# Package specific modifications
spack.build_environment.set_module_variables_for_package(spec.package)
# The only thing we care about is `setup_dependent_run_environment`, but
# for that to work, globals have to be set on the package modules, and the
# whole chain of setup_dependent_package has to be followed from leaf to spec.
# So: just run it here, but don't collect env mods.
spack.build_environment.SetupContext(context=Context.RUN).set_all_package_py_globals()
# Then run setup_dependent_run_environment before setup_run_environment.
for dep in spec.dependencies(deptype=("link", "run")):
dep.package.setup_dependent_run_environment(env, spec)
spec.package.setup_run_environment(env)
# Modifications required from modules.yaml


@ -17,7 +17,8 @@
import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml
from spack.build_environment import _static_to_shared_library, dso_suffix
from spack.build_environment import UseMode, _static_to_shared_library, dso_suffix
from spack.context import Context
from spack.paths import build_env_path
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import EnvironmentModifications
@ -438,10 +439,10 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
# b (parallel =True)
s = default_mock_concretization("a foobar=bar")
spack.build_environment.set_module_variables_for_package(s.package)
spack.build_environment.set_package_py_globals(s.package)
assert s["a"].package.module.make_jobs == 1
spack.build_environment.set_module_variables_for_package(s["b"].package)
spack.build_environment.set_package_py_globals(s["b"].package)
assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
parallel=s["b"].package.parallel
)
@ -575,3 +576,69 @@ def test_setting_attributes(self, default_mock_concretization):
if current_module == spack.package_base:
break
assert current_module.SOME_ATTRIBUTE == 1
def test_effective_deptype_build_environment(default_mock_concretization):
s = default_mock_concretization("dttop")
# [ ] dttop@1.0 #
# [b ] ^dtbuild1@1.0 # <- direct build dep
# [b ] ^dtbuild2@1.0 # <- indirect build-only dep is dropped
# [bl ] ^dtlink2@1.0 # <- linkable, and runtime dep of build dep
# [ r ] ^dtrun2@1.0 # <- non-linkable, executable runtime dep of build dep
# [bl ] ^dtlink1@1.0 # <- direct build dep
# [bl ] ^dtlink3@1.0 # <- linkable, and runtime dep of build dep
# [b ] ^dtbuild2@1.0 # <- indirect build-only dep is dropped
# [bl ] ^dtlink4@1.0 # <- linkable, and runtime dep of build dep
# [ r ] ^dtrun1@1.0 # <- run-only dep is pruned (should it be in PATH?)
# [bl ] ^dtlink5@1.0 # <- children too
# [ r ] ^dtrun3@1.0 # <- children too
# [b ] ^dtbuild3@1.0 # <- children too
expected_flags = {
"dttop": UseMode.ROOT,
"dtbuild1": UseMode.BUILDTIME_DIRECT,
"dtlink1": UseMode.BUILDTIME_DIRECT | UseMode.BUILDTIME,
"dtlink3": UseMode.BUILDTIME | UseMode.RUNTIME,
"dtlink4": UseMode.BUILDTIME | UseMode.RUNTIME,
"dtrun2": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE,
"dtlink2": UseMode.RUNTIME,
}
for spec, effective_type in spack.build_environment.effective_deptypes(
s, context=Context.BUILD
):
assert effective_type & expected_flags.pop(spec.name) == effective_type
assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes"
def test_effective_deptype_run_environment(default_mock_concretization):
s = default_mock_concretization("dttop")
# [ ] dttop@1.0 #
# [b ] ^dtbuild1@1.0 # <- direct build-only dep is pruned
# [b ] ^dtbuild2@1.0 # <- children too
# [bl ] ^dtlink2@1.0 # <- children too
# [ r ] ^dtrun2@1.0 # <- children too
# [bl ] ^dtlink1@1.0 # <- runtime, not executable
# [bl ] ^dtlink3@1.0 # <- runtime, not executable
# [b ] ^dtbuild2@1.0 # <- indirect build only dep is pruned
# [bl ] ^dtlink4@1.0 # <- runtime, not executable
# [ r ] ^dtrun1@1.0 # <- runtime and executable
# [bl ] ^dtlink5@1.0 # <- runtime, not executable
# [ r ] ^dtrun3@1.0 # <- runtime and executable
# [b ] ^dtbuild3@1.0 # <- indirect build-only dep is pruned
expected_flags = {
"dttop": UseMode.ROOT,
"dtlink1": UseMode.RUNTIME,
"dtlink3": UseMode.BUILDTIME | UseMode.RUNTIME,
"dtlink4": UseMode.BUILDTIME | UseMode.RUNTIME,
"dtrun1": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE,
"dtlink5": UseMode.RUNTIME,
"dtrun3": UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE,
}
for spec, effective_type in spack.build_environment.effective_deptypes(s, context=Context.RUN):
assert effective_type & expected_flags.pop(spec.name) == effective_type
assert not expected_flags, f"Missing {expected_flags.keys()} from effective_deptypes"


@ -168,7 +168,7 @@ def test_env_remove(capfd):
foo = ev.read("foo")
with foo:
with pytest.raises(spack.main.SpackCommandError):
with pytest.raises(SpackCommandError):
with capfd.disabled():
env("remove", "-y", "foo")
assert "foo" in env("list")
@ -283,7 +283,7 @@ def setup_error(pkg, env):
_, err = capfd.readouterr()
assert "cmake-client had issues!" in err
assert "Warning: couldn't get environment settings" in err
assert "Warning: couldn't load runtime environment" in err
def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, monkeypatch):
@ -500,11 +500,14 @@ def test_env_activate_broken_view(
# switch to a new repo that doesn't include the installed package
# test that Spack detects the missing package and fails gracefully
with spack.repo.use_repositories(mock_custom_repository):
with pytest.raises(SpackCommandError):
env("activate", "--sh", "test")
wrong_repo = env("activate", "--sh", "test")
assert "Warning: couldn't load runtime environment" in wrong_repo
assert "Unknown namespace: builtin.mock" in wrong_repo
# test replacing repo fixes it
env("activate", "--sh", "test")
normal_repo = env("activate", "--sh", "test")
assert "Warning: couldn't load runtime environment" not in normal_repo
assert "Unknown namespace: builtin.mock" not in normal_repo
def test_to_lockfile_dict():
@ -1044,7 +1047,7 @@ def test_env_commands_die_with_no_env_arg():
env("remove")
# these have an optional env arg and raise errors via tty.die
with pytest.raises(spack.main.SpackCommandError):
with pytest.raises(SpackCommandError):
env("loads")
# This should NOT raise an error with no environment


@ -9,6 +9,7 @@
import spack.spec
import spack.user_environment as uenv
import spack.util.environment
from spack.main import SpackCommand
load = SpackCommand("load")
@ -27,74 +28,63 @@ def test_manpath_trailing_colon(
manpath search path via a trailing colon"""
install("mpileaks")
sh_out = load("--sh", "--only", "package", "mpileaks")
sh_out = load("--sh", "mpileaks")
lines = sh_out.split("\n")
assert any(re.match(r"export MANPATH=.*:;", ln) for ln in lines)
os.environ["MANPATH"] = "/tmp/man:"
sh_out = load("--sh", "--only", "package", "mpileaks")
sh_out = load("--sh", "mpileaks")
lines = sh_out.split("\n")
assert any(re.match(r"export MANPATH=.*:/tmp/man:;", ln) for ln in lines)
def test_load(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test that the commands generated by load add the specified prefix
inspections. Also test that Spack records loaded specs by hash in the
user environment.
CMAKE_PREFIX_PATH is the only prefix inspection guaranteed for fake
packages, since it keys on the prefix instead of a subdir."""
install_out = install("mpileaks", output=str, fail_on_error=False)
print("spack install mpileaks")
print(install_out)
mpileaks_spec = spack.spec.Spec("mpileaks").concretized()
sh_out = load("--sh", "--only", "package", "mpileaks")
csh_out = load("--csh", "--only", "package", "mpileaks")
# Test prefix inspections
sh_out_test = "export CMAKE_PREFIX_PATH=%s" % mpileaks_spec.prefix
csh_out_test = "setenv CMAKE_PREFIX_PATH %s" % mpileaks_spec.prefix
assert sh_out_test in sh_out
assert csh_out_test in csh_out
# Test hashes recorded properly
hash_test_replacements = (uenv.spack_loaded_hashes_var, mpileaks_spec.dag_hash())
sh_hash_test = "export %s=%s" % hash_test_replacements
csh_hash_test = "setenv %s %s" % hash_test_replacements
assert sh_hash_test in sh_out
assert csh_hash_test in csh_out
def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test that the '-r' option to the load command prepends dependency prefix
inspections in post-order"""
def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages, working_env):
"""Test that `spack load` applies prefix inspections of its required runtime deps in
topo-order"""
install("mpileaks")
mpileaks_spec = spack.spec.Spec("mpileaks").concretized()
# Ensure our reference variable is clean.
os.environ["CMAKE_PREFIX_PATH"] = "/hello:/world"
sh_out = load("--sh", "mpileaks")
csh_out = load("--csh", "mpileaks")
# Test prefix inspections
prefix_test_replacement = ":".join(
reversed([s.prefix for s in mpileaks_spec.traverse(order="post")])
def extract_cmake_prefix_path(output, prefix):
return next(cmd for cmd in output.split(";") if cmd.startswith(prefix))[
len(prefix) :
].split(":")
# Map a prefix found in CMAKE_PREFIX_PATH back to a package name in mpileaks' DAG.
prefix_to_pkg = lambda prefix: next(
s.name for s in mpileaks_spec.traverse() if s.prefix == prefix
)
sh_prefix_test = "export CMAKE_PREFIX_PATH=%s" % prefix_test_replacement
csh_prefix_test = "setenv CMAKE_PREFIX_PATH %s" % prefix_test_replacement
assert sh_prefix_test in sh_out
assert csh_prefix_test in csh_out
paths_sh = extract_cmake_prefix_path(sh_out, prefix="export CMAKE_PREFIX_PATH=")
paths_csh = extract_cmake_prefix_path(csh_out, prefix="setenv CMAKE_PREFIX_PATH ")
# Test spack records loaded hashes properly
hash_test_replacement = (
uenv.spack_loaded_hashes_var,
":".join(reversed([s.dag_hash() for s in mpileaks_spec.traverse(order="post")])),
# Shouldn't be a difference between loading csh / sh, so check they're the same.
assert paths_sh == paths_csh
# We should've prepended new paths, and keep old ones.
assert paths_sh[-2:] == ["/hello", "/world"]
# All but the last two paths are added by spack load; lookup what packages they're from.
pkgs = [prefix_to_pkg(p) for p in paths_sh[:-2]]
# Do we have all the runtime packages?
assert set(pkgs) == set(
s.name for s in mpileaks_spec.traverse(deptype=("link", "run"), root=True)
)
sh_hash_test = "export %s=%s" % hash_test_replacement
csh_hash_test = "setenv %s %s" % hash_test_replacement
assert sh_hash_test in sh_out
assert csh_hash_test in csh_out
# Finally, do we list them in topo order?
for i, pkg in enumerate(pkgs):
set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i])
# Lastly, do we keep track that mpileaks was loaded?
assert f"export {uenv.spack_loaded_hashes_var}={mpileaks_spec.dag_hash()}" in sh_out
assert f"setenv {uenv.spack_loaded_hashes_var} {mpileaks_spec.dag_hash()}" in csh_out
def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive, mock_packages):


@ -4,11 +4,18 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import sys
from contextlib import contextmanager
from typing import Callable
from llnl.util.lang import nullcontext
import spack.build_environment
import spack.config
import spack.spec
import spack.util.environment as environment
import spack.util.prefix as prefix
from spack import traverse
from spack.context import Context
#: Environment variable name Spack uses to track individually loaded packages
spack_loaded_hashes_var = "SPACK_LOADED_HASHES"
@ -62,40 +69,58 @@ def unconditional_environment_modifications(view):
return env
def environment_modifications_for_spec(spec, view=None, set_package_py_globals=True):
@contextmanager
def projected_prefix(*specs: spack.spec.Spec, projection: Callable[[spack.spec.Spec], str]):
"""Temporarily replace every Spec's prefix with projection(s)"""
prefixes = dict()
for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()):
if s.external:
continue
prefixes[s.dag_hash()] = s.prefix
s.prefix = prefix.Prefix(projection(s))
yield
for s in traverse.traverse_nodes(specs, key=lambda s: s.dag_hash()):
s.prefix = prefixes.get(s.dag_hash(), s.prefix)
def environment_modifications_for_specs(
*specs: spack.spec.Spec, view=None, set_package_py_globals: bool = True
):
"""List of environment (shell) modifications to be processed for spec.
This list is specific to the location of the spec or its projection in
the view.
Args:
spec (spack.spec.Spec): spec for which to list the environment modifications
specs: spec(s) for which to list the environment modifications
view: view associated with the spec passed as first argument
set_package_py_globals (bool): whether or not to set the global variables in the
set_package_py_globals: whether or not to set the global variables in the
package.py files (this may be problematic when using buildcaches that have
been built on a different but compatible OS)
"""
spec = spec.copy()
if view and not spec.external:
spec.prefix = prefix.Prefix(view.get_projection_for_spec(spec))
env = environment.EnvironmentModifications()
topo_ordered = traverse.traverse_nodes(specs, root=True, deptype=("run", "link"), order="topo")
# generic environment modifications determined by inspecting the spec
# prefix
env = environment.inspect_path(
spec.prefix, prefix_inspections(spec.platform), exclude=environment.is_system_path
)
if view:
maybe_projected = projected_prefix(*specs, projection=view.get_projection_for_spec)
else:
maybe_projected = nullcontext()
# Let the extendee/dependency modify their extensions/dependents
# before asking for package-specific modifications
env.extend(
spack.build_environment.modifications_from_dependencies(
spec, context="run", set_package_py_globals=set_package_py_globals
)
)
with maybe_projected:
# Static environment changes (prefix inspections)
for s in reversed(list(topo_ordered)):
static = environment.inspect_path(
s.prefix, prefix_inspections(s.platform), exclude=environment.is_system_path
)
env.extend(static)
if set_package_py_globals:
spack.build_environment.set_module_variables_for_package(spec.package)
spec.package.setup_run_environment(env)
# Dynamic environment changes (setup_run_environment etc)
setup_context = spack.build_environment.SetupContext(*specs, context=Context.RUN)
if set_package_py_globals:
setup_context.set_all_package_py_globals()
dynamic = setup_context.get_env_modifications()
env.extend(dynamic)
return env


@ -335,15 +335,14 @@ set _b_bin $_b_loc"/bin"
set _a_loc (spack -m location -i shell-a)
set _a_bin $_a_loc"/bin"
spt_contains "set -gx PATH $_b_bin" spack -m load --only package --fish shell-b
spt_contains "set -gx PATH $_b_bin" spack -m load --fish shell-b
spt_succeeds spack -m load shell-b
set LIST_CONTENT (spack -m load shell-b; spack load --list)
spt_contains "shell-b@" echo $LIST_CONTENT
spt_does_not_contain "shell-a@" echo $LIST_CONTENT
# test a variable MacOS clears and one it doesn't for recursive loads
spt_contains "set -gx PATH $_a_bin:$_b_bin" spack -m load --fish shell-a
spt_succeeds spack -m load --only dependencies shell-a
spt_succeeds spack -m load --only package shell-a
spt_succeeds spack -m load shell-a
spt_fails spack -m load d
spt_contains "usage: spack load " spack -m load -h
spt_contains "usage: spack load " spack -m load -h d


@ -104,7 +104,7 @@ contains "usage: spack module " spack -m module --help
contains "usage: spack module " spack -m module
title 'Testing `spack load`'
contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --only package --sh shell-b
contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --sh shell-b
succeeds spack -m load shell-b
LIST_CONTENT=`spack -m load shell-b; spack load --list`
contains "shell-b@" echo $LIST_CONTENT
@ -113,8 +113,7 @@ fails spack -m load -l
# test a variable MacOS clears and one it doesn't for recursive loads
contains "export PATH=$(spack -m location -i shell-a)/bin" spack -m load --sh shell-a
contains "export PATH=$(spack -m location -i shell-b)/bin" spack -m load --sh shell-b
succeeds spack -m load --only dependencies shell-a
succeeds spack -m load --only package shell-a
succeeds spack -m load shell-a
fails spack -m load d
contains "usage: spack load " spack -m load -h
contains "usage: spack load " spack -m load -h d


@ -52,6 +52,7 @@ class Gptune(CMakePackage):
depends_on("py-pyaml", type=("build", "run"))
depends_on("py-statsmodels@0.13.0:", type=("build", "run"))
depends_on("py-mpi4py@3.0.3:", type=("build", "run"))
depends_on("python", type=("build", "run"))
depends_on("pygmo", type=("build", "run"))
depends_on("openturns", type=("build", "run"))
depends_on("py-pymoo", type=("build", "run"), when="@3.0.0:")


@ -752,14 +752,6 @@ def setup_build_environment(self, env):
os.symlink(bin, sym)
env.prepend_path("PATH", self.stage.path)
def setup_run_environment(self, env):
if "+clang" in self.spec:
env.set("CC", join_path(self.spec.prefix.bin, "clang"))
env.set("CXX", join_path(self.spec.prefix.bin, "clang++"))
if "+flang" in self.spec:
env.set("FC", join_path(self.spec.prefix.bin, "flang"))
env.set("F77", join_path(self.spec.prefix.bin, "flang"))
root_cmakelists_dir = "llvm"
def cmake_args(self):


@ -401,14 +401,13 @@ def install_cpanm(self):
maker()
maker("install")
def _setup_dependent_env(self, env, dependent_spec, deptype):
def _setup_dependent_env(self, env, dependent_spec):
"""Set PATH and PERL5LIB to include the extension and
any other perl extensions it depends on,
assuming they were installed with INSTALL_BASE defined."""
perl_lib_dirs = []
for d in dependent_spec.traverse(deptype=deptype):
if d.package.extends(self.spec):
perl_lib_dirs.append(d.prefix.lib.perl5)
if dependent_spec.package.extends(self.spec):
perl_lib_dirs.append(dependent_spec.prefix.lib.perl5)
if perl_lib_dirs:
perl_lib_path = ":".join(perl_lib_dirs)
env.prepend_path("PERL5LIB", perl_lib_path)
@ -416,10 +415,10 @@ def _setup_dependent_env(self, env, dependent_spec, deptype):
env.append_path("PATH", self.prefix.bin)
def setup_dependent_build_environment(self, env, dependent_spec):
self._setup_dependent_env(env, dependent_spec, deptype=("build", "run", "test"))
self._setup_dependent_env(env, dependent_spec)
def setup_dependent_run_environment(self, env, dependent_spec):
self._setup_dependent_env(env, dependent_spec, deptype=("run",))
self._setup_dependent_env(env, dependent_spec)
def setup_dependent_package(self, module, dependent_spec):
"""Called before perl modules' install() methods.


@ -1244,12 +1244,11 @@ def setup_dependent_run_environment(self, env, dependent_spec):
"""Set PYTHONPATH to include the site-packages directory for the
extension and any other python extensions it depends on.
"""
for d in dependent_spec.traverse(deptype=("run"), root=True):
if d.package.extends(self.spec):
# Packages may be installed in platform-specific or platform-independent
# site-packages directories
for directory in {self.platlib, self.purelib}:
env.prepend_path("PYTHONPATH", os.path.join(d.prefix, directory))
if dependent_spec.package.extends(self.spec):
# Packages may be installed in platform-specific or platform-independent
# site-packages directories
for directory in {self.platlib, self.purelib}:
env.prepend_path("PYTHONPATH", os.path.join(dependent_spec.prefix, directory))
def setup_dependent_package(self, module, dependent_spec):
"""Called before python modules' install() methods."""


@ -83,9 +83,8 @@ def url_for_version(self, version):
return url.format(version.up_to(2), version)
def setup_dependent_run_environment(self, env, dependent_spec):
for d in dependent_spec.traverse(deptype=("run"), root=True):
if d.package.extends(self.spec):
env.prepend_path("GEM_PATH", d.prefix)
if dependent_spec.package.extends(self.spec):
env.prepend_path("GEM_PATH", dependent_spec.prefix)
def setup_dependent_package(self, module, dependent_spec):
"""Called before ruby modules' install() methods. Sets GEM_HOME


@ -151,13 +151,12 @@ def setup_dependent_build_environment(self, env, dependent_spec):
# https://core.tcl-lang.org/tk/tktview/447bd3e4abe17452d19a80e6840dcc8a2603fcbc
env.prepend_path("TCLLIBPATH", self.spec["tcl"].libs.directories[0], separator=" ")
for d in dependent_spec.traverse(deptype=("build", "run", "test")):
if d.package.extends(self.spec):
# Tcl libraries may be installed in lib or lib64, see #19546
for lib in ["lib", "lib64"]:
tcllibpath = join_path(d.prefix, lib)
if os.path.exists(tcllibpath):
env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ")
if dependent_spec.package.extends(self.spec):
# Tcl libraries may be installed in lib or lib64, see #19546
for lib in ["lib", "lib64"]:
tcllibpath = join_path(dependent_spec.prefix, lib)
if os.path.exists(tcllibpath):
env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ")
def setup_dependent_run_environment(self, env, dependent_spec):
"""Set TCLLIBPATH to include the tcl-shipped directory for
@ -167,10 +166,9 @@ def setup_dependent_run_environment(self, env, dependent_spec):
* https://wiki.tcl-lang.org/page/TCLLIBPATH
"""
for d in dependent_spec.traverse(deptype=("build", "run", "test")):
if d.package.extends(self.spec):
# Tcl libraries may be installed in lib or lib64, see #19546
for lib in ["lib", "lib64"]:
tcllibpath = join_path(d.prefix, lib)
if os.path.exists(tcllibpath):
env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ")
if dependent_spec.package.extends(self.spec):
# Tcl libraries may be installed in lib or lib64, see #19546
for lib in ["lib", "lib64"]:
tcllibpath = join_path(dependent_spec.prefix, lib)
if os.path.exists(tcllibpath):
env.prepend_path("TCLLIBPATH", tcllibpath, separator=" ")