Compare commits
5 Commits
hs/fix/sep...hs/fix/for
| Author | SHA1 | Date | |
|---|---|---|---|
| | 4969fdf23a | | |
| | 2db654bf5a | | |
| | 9992b563db | | |
| | daba1a805e | | |
| | 832bf95aa4 | | |
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:

 .. code-block:: python

-   phases = ["configure", "build", "install"]
+   phases = ("configure", "build", "install")

 Similarly, ``cmake`` defines:

 .. code-block:: python

-   phases = ["bootstrap", "build", "install"]
+   phases = ("bootstrap", "build", "install")

 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
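The phase mechanism described in this hunk amounts to name-based dispatch over the ``phases`` tuple. A minimal, self-contained sketch (toy classes, not Spack's actual ``PackageBase`` machinery):

    class FakePackage:
        phases = ("configure", "build", "install")

        def configure(self):
            print("configuring")

        def build(self):
            print("building")

        def install(self):
            print("installing")


    def run_phases(pkg):
        for phase in pkg.phases:
            # PackageBase adds logging, callbacks, and error handling here
            getattr(pkg, phase)()


    run_phases(FakePackage())

Using a tuple also signals, better than the old list, that the phase sequence is fixed class-level data rather than something to mutate at runtime.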
@@ -91,6 +91,9 @@
 CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2

+INDEX_HASH_FILE = "index.json.hash"
+
+
 class BuildCacheDatabase(spack_db.Database):
     """A database for binary buildcaches.

@@ -502,7 +505,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
     scheme = urllib.parse.urlparse(mirror_url).scheme

     if scheme != "oci" and not web_util.url_exists(
-        url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
     ):
         return False

@@ -704,7 +707,7 @@ def _read_specs_and_push_index(
     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
-    index_json_path = os.path.join(temp_dir, "index.json")
+    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)

@@ -714,14 +717,14 @@ def _read_specs_and_push_index(
     index_hash = compute_hash(index_string)

     # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, "index.json.hash")
+    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
     with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)

     # Push the index itself
     web_util.push_to_url(
         index_json_path,
-        url_util.join(cache_prefix, "index.json"),
+        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )

@@ -729,7 +732,7 @@ def _read_specs_and_push_index(
     # Push the hash
     web_util.push_to_url(
         index_hash_path,
-        url_util.join(cache_prefix, "index.json.hash"),
+        url_util.join(cache_prefix, INDEX_HASH_FILE),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )

@@ -1785,7 +1788,7 @@ def _oci_update_index(
         db.mark(spec, "in_buildcache", True)

     # Create the index.json file
-    index_json_path = os.path.join(tmpdir, "index.json")
+    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)

@@ -2943,7 +2946,7 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):

     def get_remote_hash(self):
         # Failure to fetch index.json.hash is not fatal
-        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
+        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
         except (TimeoutError, urllib.error.URLError):

@@ -2964,7 +2967,7 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

         # Otherwise, download index.json
-        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)

         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))

@@ -3008,7 +3011,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):

     def conditional_fetch(self) -> FetchIndexResult:
         # Just do a conditional fetch immediately
-        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
         headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}

         try:
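The two fetcher hunks above both revolve around HTTP caching of the buildcache index. A hedged sketch of the conditional-fetch pattern, using plain ``urllib`` and simplified names rather than the exact Spack classes:

    import urllib.error
    import urllib.request


    def fetch_index_if_changed(url: str, etag: str):
        # Replay the ETag from the previous fetch; the server answers
        # 304 Not Modified if index.json is unchanged.
        request = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
        try:
            response = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            if e.code == 304:
                return None  # cached copy is still fresh
            raise
        return response.read()

The ``CacheControl: no-cache`` header set when pushing ``index.json`` complements this: caches must revalidate before serving a stored copy, so clients relying on the ETag or the pushed ``index.json.hash`` always see the current index.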
@@ -301,11 +301,13 @@ def clean_environment():
     env.unset("CPLUS_INCLUDE_PATH")
     env.unset("OBJC_INCLUDE_PATH")

+    # prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
+    env.set("CONFIG_SITE", os.devnull)
     env.unset("CMAKE_PREFIX_PATH")

     env.unset("PYTHONPATH")
     env.unset("R_HOME")
     env.unset("R_ENVIRON")

     env.unset("LUA_PATH")
     env.unset("LUA_CPATH")
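For context on the added ``CONFIG_SITE`` line: autoconf-generated configure scripts source the file named by ``$CONFIG_SITE`` (the ``AC_SITE_LOAD`` hook), which can silently inject cached results from a site-wide ``config.site``. Pointing it at ``os.devnull`` makes that sourcing a no-op. A hedged, standalone sketch of the same sanitization idea, outside Spack's environment-modification machinery:

    import os

    build_env = dict(os.environ)
    build_env["CONFIG_SITE"] = os.devnull      # configure sources /dev/null: a no-op
    build_env.pop("CMAKE_PREFIX_PATH", None)   # likewise, drop user CMake search paths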
@@ -6,7 +6,9 @@
 import llnl.util.filesystem as fs

 import spack.directives
+import spack.spec
 import spack.util.executable
+import spack.util.prefix

 from .autotools import AutotoolsBuilder, AutotoolsPackage

@@ -17,19 +19,18 @@ class AspellBuilder(AutotoolsBuilder):
     to the Aspell extensions.
     """

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self,
+        pkg: "AspellDictPackage",  # type: ignore[override]
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ):
         aspell = spec["aspell"].prefix.bin.aspell
         prezip = spec["aspell"].prefix.bin.prezip
         destdir = prefix

-        sh = spack.util.executable.which("sh")
-        sh(
-            "./configure",
-            "--vars",
-            "ASPELL={0}".format(aspell),
-            "PREZIP={0}".format(prezip),
-            "DESTDIR={0}".format(destdir),
-        )
+        sh = spack.util.executable.Executable("/bin/sh")
+        sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")


 # Aspell dictionaries install their bits into their prefix.lib
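The replacement of ``which("sh")`` with ``Executable("/bin/sh")`` skips the PATH lookup entirely: ``which`` can return ``None`` when the command is missing, whereas wrapping a fixed path yields an object that is always callable. A hedged usage sketch (the aspell path is illustrative):

    from spack.util.executable import Executable

    sh = Executable("/bin/sh")  # no PATH search; /bin/sh is assumed present
    sh("./configure", "--vars", "ASPELL=/path/to/aspell")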
@@ -191,6 +191,177 @@ def archive_files(self) -> List[str]:
             files.append(self._removed_la_files_log)
         return files

+    @property
+    def configure_directory(self) -> str:
+        """Return the directory where 'configure' resides."""
+        return self.pkg.stage.source_path
+
+    @property
+    def configure_abs_path(self) -> str:
+        # Absolute path to configure
+        configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
+        return configure_abs_path
+
+    @property
+    def build_directory(self) -> str:
+        """Override to provide another place to build the package"""
+        # Handle the case where the configure directory is set to a non-absolute path
+        # Non-absolute paths are always relative to the staging source path
+        build_dir = self.configure_directory
+        if not os.path.isabs(build_dir):
+            build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
+        return build_dir
+
+    @property
+    def autoreconf_search_path_args(self) -> List[str]:
+        """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
+        of build deps, skips the default path of automake, move external include
+        flags to the back, since they might pull in unrelated m4 files shadowing
+        spack dependencies."""
+        return _autoreconf_search_path_args(self.spec)
+
+    def autoreconf(
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
+        """Not needed usually, configure should be already there"""
+
+        # If configure exists nothing needs to be done
+        if os.path.exists(self.configure_abs_path):
+            return
+
+        # Else try to regenerate it, which requires a few build dependencies
+        ensure_build_dependencies_or_raise(
+            spec=spec,
+            dependencies=["autoconf", "automake", "libtool"],
+            error_msg="Cannot generate configure",
+        )
+
+        tty.msg("Configure script not found: trying to generate it")
+        tty.warn("*********************************************************")
+        tty.warn("* If the default procedure fails, consider implementing *")
+        tty.warn("*       a custom AUTORECONF phase in the package        *")
+        tty.warn("*********************************************************")
+        with fs.working_dir(self.configure_directory):
+            # This line is what is needed most of the time
+            # --install, --verbose, --force
+            autoreconf_args = ["-ivf"]
+            autoreconf_args += self.autoreconf_search_path_args
+            autoreconf_args += self.autoreconf_extra_args
+            self.pkg.module.autoreconf(*autoreconf_args)
+
+    def configure(
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
+        """Run "configure", with the arguments specified by the builder and an
+        appropriately set prefix.
+        """
+        options = getattr(self.pkg, "configure_flag_args", [])
+        options += ["--prefix={0}".format(prefix)]
+        options += self.configure_args()
+
+        with fs.working_dir(self.build_directory, create=True):
+            pkg.module.configure(*options)
+
+    def build(
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
+        """Run "make" on the build targets specified by the builder."""
+        # See https://autotools.io/automake/silent.html
+        params = ["V=1"]
+        params += self.build_targets
+        with fs.working_dir(self.build_directory):
+            pkg.module.make(*params)
+
+    def install(
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
+        """Run "make" on the install targets specified by the builder."""
+        with fs.working_dir(self.build_directory):
+            pkg.module.make(*self.install_targets)
+
+    def check(self) -> None:
+        """Run "make" on the ``test`` and ``check`` targets, if found."""
+        with fs.working_dir(self.build_directory):
+            self.pkg._if_make_target_execute("test")
+            self.pkg._if_make_target_execute("check")
+
+    def installcheck(self) -> None:
+        """Run "make" on the ``installcheck`` target, if found."""
+        with fs.working_dir(self.build_directory):
+            self.pkg._if_make_target_execute("installcheck")
+
+    def setup_build_environment(self, env):
+        if self.spec.platform == "darwin" and macos_version() >= Version("11"):
+            # Many configure files rely on matching '10.*' for macOS version
+            # detection and fail to add flags if it shows as version 11.
+            env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
+
+    def with_or_without(
+        self,
+        name: str,
+        activation_value: Optional[Union[Callable, str]] = None,
+        variant: Optional[str] = None,
+    ) -> List[str]:
+        """Inspects a variant and returns the arguments that activate
+        or deactivate the selected feature(s) for the configure options.
+
+        This function works on all type of variants. For bool-valued variants
+        it will return by default ``--with-{name}`` or ``--without-{name}``.
+        For other kinds of variants it will cycle over the allowed values and
+        return either ``--with-{value}`` or ``--without-{value}``.
+
+        If activation_value is given, then for each possible value of the
+        variant, the option ``--with-{value}=activation_value(value)`` or
+        ``--without-{value}`` will be added depending on whether or not
+        ``variant=value`` is in the spec.
+
+        Args:
+            name: name of a valid multi-valued variant
+            activation_value: callable that accepts a single value and returns the parameter to be
+                used leading to an entry of the type ``--with-{name}={parameter}``.
+
+                The special value "prefix" can also be assigned and will return
+                ``spec[name].prefix`` as activation parameter.
+
+        Returns:
+            list of arguments to configure
+        """
+        return self._activate_or_not(name, "with", "without", activation_value, variant)
+
+    def enable_or_disable(
+        self,
+        name: str,
+        activation_value: Optional[Union[Callable, str]] = None,
+        variant: Optional[str] = None,
+    ) -> List[str]:
+        """Same as
+        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
+        but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
+
+        Args:
+            name: name of a valid multi-valued variant
+            activation_value: if present accepts a single value and returns the parameter to be
+                used leading to an entry of the type ``--enable-{name}={parameter}``
+
+                The special value "prefix" can also be assigned and will return
+                ``spec[name].prefix`` as activation parameter.
+
+        Returns:
+            list of arguments to configure
+        """
+        return self._activate_or_not(name, "enable", "disable", activation_value, variant)
+
+    def configure_args(self) -> List[str]:
+        """Return the list of all the arguments that must be passed to configure,
+        except ``--prefix`` which will be pre-pended to the list.
+        """
+        return []
+
+    @spack.phase_callbacks.run_before("autoreconf")
+    def _delete_configure_to_force_update(self) -> None:
+        if self.force_autoreconf:
+            fs.force_remove(self.configure_abs_path)
+
     @spack.phase_callbacks.run_after("autoreconf")
     def _do_patch_config_files(self) -> None:
         """Some packages ship with older config.guess/config.sub files and need to
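To make the ``with_or_without`` docstring above concrete, a hedged sketch of a hypothetical package's builder (the variant names and values are illustrative, not from this diff):

    class MyBuilder(AutotoolsBuilder):
        def configure_args(self):
            args = []
            # bool variant "shared": yields --with-shared or --without-shared
            args += self.with_or_without("shared")
            # multi-valued variant "schedulers" with values (slurm, pbs) and
            # activation_value="prefix": yields e.g.
            #   ["--with-slurm=/opt/slurm", "--without-pbs"]
            args += self.with_or_without("schedulers", activation_value="prefix")
            return args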
@@ -303,6 +474,24 @@ def runs_ok(script_abs_path):
             fs.copy(substitutes[name], abs_path)
             os.chmod(abs_path, mode)

+    @spack.phase_callbacks.run_after("autoreconf")
+    def _set_configure_or_die(self) -> None:
+        """Ensure the presence of a "configure" script, or raise. If the "configure"
+        is found, a module level attribute is set.
+
+        Raises:
+            RuntimeError: if the "configure" script is not found
+        """
+        # Check if the "configure" script is there. If not raise a RuntimeError.
+        if not os.path.exists(self.configure_abs_path):
+            msg = "configure script not found in {0}"
+            raise RuntimeError(msg.format(self.configure_directory))
+
+        # Monkey-patch the configure script in the corresponding module
+        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
+        globals_for_pkg.configure = Executable(self.configure_abs_path)
+        globals_for_pkg.propagate_changes_to_mro()
+
     @spack.phase_callbacks.run_before("configure")
     def _patch_usr_bin_file(self) -> None:
         """On NixOS file is not available in /usr/bin/file. Patch configure
@@ -512,142 +701,27 @@ def _do_patch_libtool(self) -> None:
             stop_at=stop_at,
         )

-    @property
-    def configure_directory(self) -> str:
-        """Return the directory where 'configure' resides."""
-        return self.pkg.stage.source_path
+    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

-    @property
-    def configure_abs_path(self) -> str:
-        # Absolute path to configure
-        configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
-        return configure_abs_path
-
-    @property
-    def build_directory(self) -> str:
-        """Override to provide another place to build the package"""
-        # Handle the case where the configure directory is set to a non-absolute path
-        # Non-absolute paths are always relative to the staging source path
-        build_dir = self.configure_directory
-        if not os.path.isabs(build_dir):
-            build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
-        return build_dir
-
-    @spack.phase_callbacks.run_before("autoreconf")
-    def delete_configure_to_force_update(self) -> None:
-        if self.force_autoreconf:
-            fs.force_remove(self.configure_abs_path)
-
-    @property
-    def autoreconf_search_path_args(self) -> List[str]:
-        """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
-        of build deps, skips the default path of automake, move external include
-        flags to the back, since they might pull in unrelated m4 files shadowing
-        spack dependencies."""
-        return _autoreconf_search_path_args(self.spec)
-
-    @spack.phase_callbacks.run_after("autoreconf")
-    def set_configure_or_die(self) -> None:
-        """Ensure the presence of a "configure" script, or raise. If the "configure"
-        is found, a module level attribute is set.
-
-        Raises:
-            RuntimeError: if the "configure" script is not found
+    @spack.phase_callbacks.run_after("install")
+    def _remove_libtool_archives(self) -> None:
+        """Remove all .la files in prefix sub-folders if the package sets
+        ``install_libtool_archives`` to be False.
         """
-        # Check if the "configure" script is there. If not raise a RuntimeError.
-        if not os.path.exists(self.configure_abs_path):
-            msg = "configure script not found in {0}"
-            raise RuntimeError(msg.format(self.configure_directory))
-
-        # Monkey-patch the configure script in the corresponding module
-        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
-        globals_for_pkg.configure = Executable(self.configure_abs_path)
-        globals_for_pkg.propagate_changes_to_mro()
-
-    def configure_args(self) -> List[str]:
-        """Return the list of all the arguments that must be passed to configure,
-        except ``--prefix`` which will be pre-pended to the list.
-        """
-        return []
-
-    def autoreconf(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
-        """Not needed usually, configure should be already there"""
-
-        # If configure exists nothing needs to be done
-        if os.path.exists(self.configure_abs_path):
+        # If .la files are to be installed there's nothing to do
+        if self.install_libtool_archives:
             return
-
-        # Else try to regenerate it, which requires a few build dependencies
-        ensure_build_dependencies_or_raise(
-            spec=spec,
-            dependencies=["autoconf", "automake", "libtool"],
-            error_msg="Cannot generate configure",
-        )

+        # Remove the files and create a log of what was removed
+        libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
+        with fs.safe_remove(*libtool_files):
+            fs.mkdirp(os.path.dirname(self._removed_la_files_log))
+            with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
+                f.write("\n".join(libtool_files))

-        tty.msg("Configure script not found: trying to generate it")
-        tty.warn("*********************************************************")
-        tty.warn("* If the default procedure fails, consider implementing *")
-        tty.warn("*       a custom AUTORECONF phase in the package        *")
-        tty.warn("*********************************************************")
-        with fs.working_dir(self.configure_directory):
-            # This line is what is needed most of the time
-            # --install, --verbose, --force
-            autoreconf_args = ["-ivf"]
-            autoreconf_args += self.autoreconf_search_path_args
-            autoreconf_args += self.autoreconf_extra_args
-            self.pkg.module.autoreconf(*autoreconf_args)
-
-    def configure(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
-        """Run "configure", with the arguments specified by the builder and an
-        appropriately set prefix.
-        """
-        options = getattr(self.pkg, "configure_flag_args", [])
-        options += ["--prefix={0}".format(prefix)]
-        options += self.configure_args()
-
-        with fs.working_dir(self.build_directory, create=True):
-            pkg.module.configure(*options)
-
-    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
-        """Run "make" on the build targets specified by the builder."""
-        # See https://autotools.io/automake/silent.html
-        params = ["V=1"]
-        params += self.build_targets
-        with fs.working_dir(self.build_directory):
-            pkg.module.make(*params)
-
-    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
-        """Run "make" on the install targets specified by the builder."""
-        with fs.working_dir(self.build_directory):
-            pkg.module.make(*self.install_targets)
-
-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
-
-    def check(self) -> None:
-        """Run "make" on the ``test`` and ``check`` targets, if found."""
-        with fs.working_dir(self.build_directory):
-            self.pkg._if_make_target_execute("test")
-            self.pkg._if_make_target_execute("check")
+    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
+    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

     def _activate_or_not(
         self,
@@ -769,93 +843,6 @@ def _default_generator(is_activated):
             args.append(line_generator(activated))
         return args

-    def with_or_without(
-        self,
-        name: str,
-        activation_value: Optional[Union[Callable, str]] = None,
-        variant: Optional[str] = None,
-    ) -> List[str]:
-        """Inspects a variant and returns the arguments that activate
-        or deactivate the selected feature(s) for the configure options.
-
-        This function works on all type of variants. For bool-valued variants
-        it will return by default ``--with-{name}`` or ``--without-{name}``.
-        For other kinds of variants it will cycle over the allowed values and
-        return either ``--with-{value}`` or ``--without-{value}``.
-
-        If activation_value is given, then for each possible value of the
-        variant, the option ``--with-{value}=activation_value(value)`` or
-        ``--without-{value}`` will be added depending on whether or not
-        ``variant=value`` is in the spec.
-
-        Args:
-            name: name of a valid multi-valued variant
-            activation_value: callable that accepts a single value and returns the parameter to be
-                used leading to an entry of the type ``--with-{name}={parameter}``.
-
-                The special value "prefix" can also be assigned and will return
-                ``spec[name].prefix`` as activation parameter.
-
-        Returns:
-            list of arguments to configure
-        """
-        return self._activate_or_not(name, "with", "without", activation_value, variant)
-
-    def enable_or_disable(
-        self,
-        name: str,
-        activation_value: Optional[Union[Callable, str]] = None,
-        variant: Optional[str] = None,
-    ) -> List[str]:
-        """Same as
-        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
-        but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
-
-        Args:
-            name: name of a valid multi-valued variant
-            activation_value: if present accepts a single value and returns the parameter to be
-                used leading to an entry of the type ``--enable-{name}={parameter}``
-
-                The special value "prefix" can also be assigned and will return
-                ``spec[name].prefix`` as activation parameter.
-
-        Returns:
-            list of arguments to configure
-        """
-        return self._activate_or_not(name, "enable", "disable", activation_value, variant)
-
-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
-
-    def installcheck(self) -> None:
-        """Run "make" on the ``installcheck`` target, if found."""
-        with fs.working_dir(self.build_directory):
-            self.pkg._if_make_target_execute("installcheck")
-
-    @spack.phase_callbacks.run_after("install")
-    def remove_libtool_archives(self) -> None:
-        """Remove all .la files in prefix sub-folders if the package sets
-        ``install_libtool_archives`` to be False.
-        """
-        # If .la files are to be installed there's nothing to do
-        if self.install_libtool_archives:
-            return
-
-        # Remove the files and create a log of what was removed
-        libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
-        with fs.safe_remove(*libtool_files):
-            fs.mkdirp(os.path.dirname(self._removed_la_files_log))
-            with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
-                f.write("\n".join(libtool_files))
-
-    def setup_build_environment(self, env):
-        if self.spec.platform == "darwin" and macos_version() >= Version("11"):
-            # Many configure files rely on matching '10.*' for macOS version
-            # detection and fail to add flags if it shows as version 11.
-            env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
-
-    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)


 def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
     dirs_seen: Set[Tuple[int, int]] = set()
@@ -10,6 +10,8 @@
 import llnl.util.tty as tty

 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix

 from .cmake import CMakeBuilder, CMakePackage

@@ -330,7 +332,9 @@ def initconfig_package_entries(self):
         """This method is to be overwritten by the package"""
         return []

-    def initconfig(self, pkg, spec, prefix):
+    def initconfig(
+        self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         cache_entries = (
             self.std_initconfig_entries()
             + self.initconfig_compiler_entries()
@@ -7,6 +7,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when

@@ -81,12 +83,16 @@ def check_args(self):
     def setup_build_environment(self, env):
         env.set("CARGO_HOME", self.stage.path)

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Copy build files into package prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree("out", prefix)
@@ -454,10 +454,7 @@ def cmake_args(self) -> List[str]:
         return []

     def cmake(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Runs ``cmake`` in the build directory"""

@@ -474,10 +471,7 @@ def cmake(
         pkg.module.cmake(*options)

     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the build targets"""
         with fs.working_dir(self.build_directory):

@@ -488,10 +482,7 @@ def build(
         pkg.module.ninja(*self.build_targets)

     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
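The three CMake hunks above follow the pattern repeated across every builder in this compare: the phase methods keep the ``(pkg, spec, prefix)`` calling convention, but the loose ``spack.package_base.PackageBase`` annotation narrows to the concrete package class. A self-contained sketch with stand-in types (not the real Spack classes):

    class Spec: ...          # stands in for spack.spec.Spec
    class Prefix(str): ...   # stands in for spack.util.prefix.Prefix
    class CMakePackage: ...  # stands in for the real package class


    class CMakeBuilderSketch:
        # before: def build(self, pkg, spec, prefix):
        def build(self, pkg: CMakePackage, spec: Spec, prefix: Prefix) -> None:
            """Make the build targets (body elided)."""

With the concrete class in the signature, a type checker can flag a builder paired with the wrong package type instead of deferring the error to runtime.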
@@ -7,6 +7,8 @@
 import spack.directives
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix

 from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests

@@ -48,3 +50,8 @@ class GenericBuilder(BuilderWithDefaults):

     # unconditionally perform any post-install phase tests
     spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+
+    def install(
+        self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
+        raise NotImplementedError
@@ -7,6 +7,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when

@@ -88,12 +90,16 @@ def check_args(self):
         """Argument for ``go test`` during check phase"""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Runs ``go build`` in the source directory"""
         with fs.working_dir(self.build_directory):
             pkg.module.go("build", *self.build_args)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install built binaries into prefix bin."""
         with fs.working_dir(self.build_directory):
             fs.mkdirp(prefix.bin)
@@ -7,7 +7,9 @@

 import spack.builder
 import spack.package_base
+import spack.spec
 import spack.util.executable
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when

@@ -55,7 +57,9 @@ class LuaBuilder(spack.builder.Builder):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()

-    def unpack(self, pkg, spec, prefix):
+    def unpack(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
             directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
             dirlines = directory.split("\n")

@@ -66,15 +70,16 @@ def unpack(self, pkg, spec, prefix):
     def _generate_tree_line(name, prefix):
         return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

-    def generate_luarocks_config(self, pkg, spec, prefix):
-        spec = self.pkg.spec
+    def generate_luarocks_config(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         table_entries = []
         for d in spec.traverse(deptype=("build", "run")):
             if d.package.extends(self.pkg.extendee_spec):
                 table_entries.append(self._generate_tree_line(d.name, d.prefix))

-        path = self._luarocks_config_path()
-        with open(path, "w", encoding="utf-8") as config:
+        with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
             config.write(
                 """
                 deps_mode="all"

@@ -85,23 +90,26 @@ def generate_luarocks_config(self, pkg, spec, prefix):
                     "\n".join(table_entries)
                 )
             )
-        return path

-    def preprocess(self, pkg, spec, prefix):
+    def preprocess(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Override this to preprocess source before building with luarocks"""
         pass

     def luarocks_args(self):
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         rock = "."
         specs = find(".", "*.rockspec", recursive=False)
         if specs:
             rock = specs[0]
         rocks_args = self.luarocks_args()
         rocks_args.append(rock)
-        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
+        pkg.luarocks("--tree=" + prefix, "make", *rocks_args)

     def _luarocks_config_path(self):
         return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
@@ -98,29 +98,20 @@ def build_directory(self) -> str:
         return self.pkg.stage.source_path

     def edit(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Edit the Makefile before calling make. The default is a no-op."""
         pass

     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.build_targets)

     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
@@ -5,6 +5,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
 from spack.util.executable import which

@@ -58,16 +60,20 @@ def build_args(self):
         """List of args to pass to build phase."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Compile code and package into a JAR file."""
         with fs.working_dir(self.build_directory):
-            mvn = which("mvn")
+            mvn = which("mvn", required=True)
             if self.pkg.run_tests:
                 mvn("verify", *self.build_args())
             else:
                 mvn("package", "-DskipTests", *self.build_args())

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Copy to installation prefix."""
         with fs.working_dir(self.build_directory):
             fs.install_tree(".", prefix)
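The ``required=True`` change is a small robustness fix: Spack's ``which`` returns ``None`` when the executable is absent, so the old code would only fail later with an opaque "'NoneType' object is not callable"; with ``required=True`` the lookup itself raises a clear error. Hedged usage:

    from spack.util.executable import which

    mvn = which("mvn", required=True)  # raises immediately if mvn is not on PATH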
@@ -188,10 +188,7 @@ def meson_args(self) -> List[str]:
         return []

     def meson(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Run ``meson`` in the build directory"""
         options = []

@@ -204,10 +201,7 @@ def meson(
         pkg.module.meson(*options)

     def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the build targets"""
         options = ["-v"]

@@ -216,10 +210,7 @@ def build(
         pkg.module.ninja(*options)

     def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
     ) -> None:
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts

 from ._checks import BuilderWithDefaults

@@ -99,7 +101,9 @@ def msbuild_install_args(self):
         as `msbuild_args` by default."""
         return self.msbuild_args()

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "msbuild" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.msbuild(

@@ -108,7 +112,9 @@ def build(self, pkg, spec, prefix):
                 self.define_targets(*self.build_targets),
             )

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "msbuild" on the install targets specified by the builder.
         This is INSTALL by default"""
         with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts

 from ._checks import BuilderWithDefaults

@@ -123,7 +125,9 @@ def nmake_install_args(self):
         Individual packages should override to specify NMake args to command line"""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()

@@ -132,7 +136,9 @@ def build(self, pkg, spec, prefix):
         with fs.working_dir(self.build_directory):
             pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run "nmake" on the install targets specified by the builder.
         This is INSTALL by default"""
         opts = self.std_nmake_args
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when

@@ -42,7 +44,9 @@ class OctaveBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package from the archive file"""
         pkg.module.octave(
             "--quiet",
@@ -10,6 +10,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.install_test import SkipTest, test_part
 from spack.multimethod import when

@@ -149,7 +151,9 @@ def configure_args(self):
         """
         return []

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run Makefile.PL or Build.PL with arguments consisting of
         an appropriate installation base directory followed by the
         list returned by :py:meth:`~.PerlBuilder.configure_args`.

@@ -173,7 +177,9 @@ def fix_shebang(self):
         repl = "#!/usr/bin/env perl"
         filter_file(pattern, repl, "Build", backup=False)

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Builds a Perl package."""
         self.build_executable()

@@ -184,6 +190,8 @@ def check(self):
         """Runs built-in tests of a Perl package."""
         self.build_executable("test")

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs a Perl package."""
         self.build_executable("install")
@@ -28,6 +28,7 @@
 import spack.repo
 import spack.spec
 import spack.store
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import test_part
@@ -6,6 +6,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests

@@ -62,17 +64,23 @@ def qmake_args(self):
         """List of arguments passed to qmake."""
         return []

-    def qmake(self, pkg, spec, prefix):
+    def qmake(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
             pkg.module.qmake(*self.qmake_args())

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Make the build targets"""
         with working_dir(self.build_directory):
             pkg.module.make()

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Make the install targets"""
         with working_dir(self.build_directory):
             pkg.module.make("install")
@@ -9,6 +9,8 @@
 import llnl.util.tty as tty

 import spack.builder
+import spack.spec
+import spack.util.prefix
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.config import determine_number_of_jobs
 from spack.directives import build_system, extends, maintainers

@@ -74,18 +76,22 @@ def build_directory(self):
             ret = os.path.join(ret, self.subdirectory)
         return ret

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install everything from build directory."""
         raco = Executable("raco")
         with fs.working_dir(self.build_directory):
-            parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+            parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+            name = pkg.racket_name
+            assert name is not None, "Racket package name is not set"
             args = [
                 "pkg",
                 "install",
                 "-t",
                 "dir",
                 "-n",
-                self.pkg.racket_name,
+                name,
                 "--deps",
                 "fail",
                 "--ignore-implies",

@@ -101,8 +107,7 @@ def install(self, pkg, spec, prefix):
             except ProcessError:
                 args.insert(-2, "--skip-installed")
                 raco(*args)
-                msg = (
-                    "Racket package {0} was already installed, uninstalling via "
+                tty.warn(
+                    f"Racket package {name} was already installed, uninstalling via "
                     "Spack may make someone unhappy!"
                 )
-                tty.warn(msg.format(self.pkg.racket_name))
@@ -5,6 +5,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends, maintainers

 from ._checks import BuilderWithDefaults

@@ -42,7 +44,9 @@ class RubyBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build a Ruby gem."""

         # ruby-rake provides both rake.gemspec and Rakefile, but only

@@ -58,7 +62,9 @@ def build(self, pkg, spec, prefix):
             # Some Ruby packages only ship `*.gem` files, so nothing to build
             pass

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install a Ruby gem.

         The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
@@ -4,6 +4,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests

@@ -59,7 +61,9 @@ def build_args(self, spec, prefix):
         """Arguments to pass to build."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         pkg.module.scons(*self.build_args(spec, prefix))

@@ -67,7 +71,9 @@ def install_args(self, spec, prefix):
         """Arguments to pass to install."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         pkg.module.scons("install", *self.install_args(spec, prefix))
@@ -11,6 +11,8 @@
 import spack.install_test
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable

@@ -130,7 +132,9 @@ class SIPBuilder(BuilderWithDefaults):

     build_directory = "build"

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configure the package."""

         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html

@@ -148,7 +152,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         args = self.build_args()

@@ -159,7 +165,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         args = self.install_args()
@@ -6,6 +6,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests

@@ -97,7 +99,9 @@ def waf(self, *args, **kwargs):
         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configures the project."""
         args = ["--prefix={0}".format(self.pkg.prefix)]
         args += self.configure_args()

@@ -108,7 +112,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Executes the build."""
         args = self.build_args()

@@ -118,7 +124,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs the targets on the system."""
         args = self.install_args()
@@ -26,7 +26,6 @@
 import spack.paths
 import spack.repo
 import spack.spec
-import spack.spec_lookup
 import spack.spec_parser
 import spack.store
 import spack.traverse as traverse

@@ -212,8 +211,7 @@ def _concretize_spec_pairs(
     ):
         # Get all the concrete specs
         ret = [
-            concrete
-            or (abstract if abstract.concrete else spack.spec_lookup.lookup_hash(abstract))
+            concrete or (abstract if abstract.concrete else abstract.lookup_hash())
            for abstract, concrete in to_concretize
        ]
@@ -11,7 +11,6 @@
 import spack.cmd
 import spack.environment as ev
 import spack.solver.asp as asp
-import spack.spec_lookup
 import spack.util.spack_json as sjson
 from spack.cmd.common import arguments

@@ -211,7 +210,7 @@ def diff(parser, args):
     specs = []
     for spec in spack.cmd.parse_specs(args.specs):
         # If the spec has a hash, check it before disambiguating
-        spack.spec_lookup.replace_hash(spec)
+        spec.replace_hash()
         if spec.concrete:
             specs.append(spec)
         else:
@@ -199,12 +199,10 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
         the packages in the list, if True activate 'test' dependencies for all packages.
     """
     from spack.solver.asp import Solver, SpecBuilder
-    from spack.spec_lookup import replace_hash

     if isinstance(spec, str):
         spec = Spec(spec)
-
-    replace_hash(spec)
+    spec = spec.lookup_hash()

     if spec.concrete:
         return spec.copy()
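The switch from ``replace_hash(spec)`` to ``spec = spec.lookup_hash()`` is more than a spelling change: per the spec.py hunk later in this compare, ``replace_hash`` mutates the spec in place while ``lookup_hash`` is non-destructive and returns a resolved copy, so the result must be rebound. A hedged one-line illustration (hash is made up):

    spec = Spec("zlib/abc123")   # illustrative abstract-hash spec
    spec = spec.lookup_hash()    # rebinding required: the original is not modified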
@@ -123,6 +123,15 @@
     "deprecated_for",
 )

+#: File where the database is written
+INDEX_JSON_FILE = "index.json"
+
+# Verifier file to check last modification of the DB
+_INDEX_VERIFIER_FILE = "index_verifier"
+
+# Lockfile for the database
+_LOCK_FILE = "lock"
+

 @llnl.util.lang.memoized
 def _getfqdn():

@@ -260,7 +269,7 @@ class ForbiddenLockError(SpackError):

 class ForbiddenLock:
     def __getattr__(self, name):
-        raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
+        raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")

     def __reduce__(self):
         return ForbiddenLock, tuple()

@@ -589,9 +598,9 @@ def __init__(
         self.layout = layout

         # Set up layout of database files within the db dir
-        self._index_path = self.database_directory / "index.json"
-        self._verifier_path = self.database_directory / "index_verifier"
-        self._lock_path = self.database_directory / "lock"
+        self._index_path = self.database_directory / INDEX_JSON_FILE
+        self._verifier_path = self.database_directory / _INDEX_VERIFIER_FILE
+        self._lock_path = self.database_directory / _LOCK_FILE

         self.is_upstream = is_upstream
         self.last_seen_verifier = ""

@@ -606,7 +615,7 @@ def __init__(

         # initialize rest of state.
         self.db_lock_timeout = lock_cfg.database_timeout
-        tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
+        tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s")

         self.lock: Union[ForbiddenLock, lk.Lock]
         if self.is_upstream:

@@ -1090,7 +1099,7 @@ def _read(self):
             self._state_is_inconsistent = False
             return
         elif self.is_upstream:
-            tty.warn("upstream not found: {0}".format(self._index_path))
+            tty.warn(f"upstream not found: {self._index_path}")

     def _add(
         self,
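A side note on the ``/`` in ``self.database_directory / INDEX_JSON_FILE``: the database directory is evidently a pathlib-style path, and ``/`` is pathlib's join operator. A standalone illustration (the directory path is hypothetical):

    from pathlib import Path

    INDEX_JSON_FILE = "index.json"
    db_dir = Path("/opt/spack/.spack-db")
    index_path = db_dir / INDEX_JSON_FILE  # PosixPath('/opt/spack/.spack-db/index.json')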
@@ -202,10 +202,3 @@ class MirrorError(SpackError):

     def __init__(self, msg, long_msg=None):
         super().__init__(msg, long_msg)
-
-
-class InvalidHashError(SpecError):
-    def __init__(self, spec, hash):
-        msg = f"No spec with hash {hash} could be found to match {spec}."
-        msg += " Either the hash does not exist, or it does not match other spec constraints."
-        super().__init__(msg)
@@ -39,7 +39,6 @@
 import spack.repo
 import spack.solver.splicing
 import spack.spec
-import spack.spec_lookup
 import spack.store
 import spack.util.crypto
 import spack.util.libc

@@ -3775,7 +3774,7 @@ def execute_explicit_splices(self):

             # The first iteration, we need to replace the abstract hash
             if not replacement.concrete:
-                spack.spec_lookup.replace_hash(replacement)
+                replacement.replace_hash()
             current_spec = current_spec.splice(replacement, transitive)
             new_key = NodeArgument(id=key.id, pkg=current_spec.name)
             specs[new_key] = current_spec

@@ -4134,7 +4133,7 @@ def solve_with_stats(
             setup_only (bool): if True, stop after setup and don't solve (default False).
             allow_deprecated (bool): allow deprecated version in the solve
         """
-        specs = [spack.spec_lookup.lookup_hash(s) for s in specs]
+        specs = [s.lookup_hash() for s in specs]
         reusable_specs = self._check_input_and_extract_concrete_specs(specs)
         reusable_specs.extend(self.selector.reusable_specs(specs))
         setup = SpackSolverSetup(tests=tests)

@@ -4171,7 +4170,7 @@ def solve_in_rounds(
             tests (bool): add test dependencies to the solve
             allow_deprecated (bool): allow deprecated version in the solve
         """
-        specs = [spack.spec_lookup.lookup_hash(s) for s in specs]
+        specs = [s.lookup_hash() for s in specs]
         reusable_specs = self._check_input_and_extract_concrete_specs(specs)
         reusable_specs.extend(self.selector.reusable_specs(specs))
         setup = SpackSolverSetup(tests=tests)
@@ -106,6 +106,8 @@
import spack.version as vn
import spack.version.git_ref_lookup

from .enums import InstallRecordStatus

__all__ = [
    "CompilerSpec",
    "Spec",
@@ -126,6 +128,8 @@
    "UnsatisfiableArchitectureSpecError",
    "UnsatisfiableProviderSpecError",
    "UnsatisfiableDependencySpecError",
    "AmbiguousHashError",
    "InvalidHashError",
    "SpecDeprecatedError",
]
@@ -2166,6 +2170,66 @@ def process_hash_bit_prefix(self, bits):
        """Get the first <bits> bits of the DAG hash as an integer type."""
        return spack.util.hash.base32_prefix_bits(self.process_hash(), bits)

    def _lookup_hash(self):
        """Look up just one spec with an abstract hash, returning a spec from the environment,
        store, or finally, binary caches."""
        import spack.binary_distribution
        import spack.environment

        active_env = spack.environment.active_environment()

        # First env, then store, then binary cache
        matches = (
            (active_env.all_matching_specs(self) if active_env else [])
            or spack.store.STORE.db.query(self, installed=InstallRecordStatus.ANY)
            or spack.binary_distribution.BinaryCacheQuery(True)(self)
        )

        if not matches:
            raise InvalidHashError(self, self.abstract_hash)

        if len(matches) != 1:
            raise AmbiguousHashError(
                f"Multiple packages specify hash beginning '{self.abstract_hash}'.", *matches
            )

        return matches[0]

    def lookup_hash(self):
        """Given a spec with an abstract hash, return a copy of the spec with all properties and
        dependencies by looking up the hash in the environment, store, or finally, binary caches.
        This is non-destructive."""
        if self.concrete or not any(node.abstract_hash for node in self.traverse()):
            return self

        spec = self.copy(deps=False)
        # root spec is replaced
        if spec.abstract_hash:
            spec._dup(self._lookup_hash())
            return spec

        # Get dependencies that need to be replaced
        for node in self.traverse(root=False):
            if node.abstract_hash:
                spec._add_dependency(node._lookup_hash(), depflag=0, virtuals=())

        # reattach nodes that were not otherwise satisfied by new dependencies
        for node in self.traverse(root=False):
            if not any(n.satisfies(node) for n in spec.traverse()):
                spec._add_dependency(node.copy(), depflag=0, virtuals=())

        return spec

    def replace_hash(self):
        """Given a spec with an abstract hash, attempt to populate all properties and dependencies
        by looking up the hash in the environment, store, or finally, binary caches.
        This is destructive."""
        if not any(node for node in self.traverse(order="post") if node.abstract_hash):
            return

        self._dup(self.lookup_hash())

    def to_node_dict(self, hash=ht.dag_hash):
        """Create a dictionary representing the state of this Spec.
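The two public methods added above differ only in mutation semantics: ``lookup_hash`` returns a resolved copy, ``replace_hash`` resolves in place. A minimal usage sketch (the hash prefix is hypothetical, and a configured Spack store or environment is assumed):

.. code-block:: python

    import spack.spec

    s = spack.spec.Spec("/abc1234")  # abstract spec carrying only a hash prefix

    # Non-destructive: returns a resolved copy, consulting the active
    # environment, then the local store, then binary caches.
    resolved = s.lookup_hash()

    # Destructive: resolves s in place through the same lookup chain.
    s.replace_hash()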
@@ -3068,7 +3132,7 @@ def constrain(self, other, deps=True):
        if not self.abstract_hash or other.abstract_hash.startswith(self.abstract_hash):
            self.abstract_hash = other.abstract_hash
        elif not self.abstract_hash.startswith(other.abstract_hash):
            raise spack.error.InvalidHashError(self, other.abstract_hash)
            raise InvalidHashError(self, other.abstract_hash)

        if not (self.name == other.name or (not self.name) or (not other.name)):
            raise UnsatisfiableSpecNameError(self.name, other.name)
@@ -5275,6 +5339,21 @@ def __init__(self, spec):
        super().__init__(msg)


class AmbiguousHashError(spack.error.SpecError):
    def __init__(self, msg, *specs):
        spec_fmt = "{namespace}.{name}{@version}{%compiler}{compiler_flags}"
        spec_fmt += "{variants}{ arch=architecture}{/hash:7}"
        specs_str = "\n " + "\n ".join(spec.format(spec_fmt) for spec in specs)
        super().__init__(msg + specs_str)


class InvalidHashError(spack.error.SpecError):
    def __init__(self, spec, hash):
        msg = f"No spec with hash {hash} could be found to match {spec}."
        msg += " Either the hash does not exist, or it does not match other spec constraints."
        super().__init__(msg)


class SpecFilenameError(spack.error.SpecError):
    """Raised when a spec file name is invalid."""
@@ -5,7 +5,6 @@
from typing import List

import spack.spec
import spack.spec_lookup
import spack.variant
from spack.error import SpackError
from spack.spec import Spec
@@ -231,7 +230,7 @@ def _expand_matrix_constraints(matrix_config):
                pass

            # Resolve abstract hashes for exclusion criteria
            if any(spack.spec_lookup.lookup_hash(test_spec).satisfies(x) for x in excludes):
            if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
                continue

            if sigil:
@@ -1,79 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.binary_distribution
import spack.environment
import spack.error
import spack.spec
import spack.store

from .enums import InstallRecordStatus


def _lookup_hash(spec: spack.spec.Spec):
    """Look up just one spec with an abstract hash, returning a spec from the environment,
    store, or finally, binary caches."""

    active_env = spack.environment.active_environment()

    # First env, then store, then binary cache
    matches = (
        (active_env.all_matching_specs(spec) if active_env else [])
        or spack.store.STORE.db.query(spec, installed=InstallRecordStatus.ANY)
        or spack.binary_distribution.BinaryCacheQuery(True)(spec)
    )

    if not matches:
        raise spack.error.InvalidHashError(spec, spec.abstract_hash)

    if len(matches) != 1:
        raise AmbiguousHashError(
            f"Multiple packages specify hash beginning '{spec.abstract_hash}'.", *matches
        )

    return matches[0]


def lookup_hash(spec: spack.spec.Spec) -> spack.spec.Spec:
    """Given a spec with an abstract hash, return a copy of the spec with all properties and
    dependencies by looking up the hash in the environment, store, or finally, binary caches.
    This is non-destructive."""
    if spec.concrete or not any(node.abstract_hash for node in spec.traverse()):
        return spec

    spec = spec.copy(deps=False)
    # root spec is replaced
    if spec.abstract_hash:
        spec._dup(_lookup_hash(spec))
        return spec

    # Get dependencies that need to be replaced
    for node in spec.traverse(root=False):
        if node.abstract_hash:
            spec._add_dependency(_lookup_hash(node), depflag=0, virtuals=())

    # reattach nodes that were not otherwise satisfied by new dependencies
    for node in spec.traverse(root=False):
        if not any(n.satisfies(node) for n in spec.traverse()):
            spec._add_dependency(node.copy(), depflag=0, virtuals=())

    return spec


def replace_hash(spec: spack.spec.Spec) -> None:
    """Given a spec with an abstract hash, attempt to populate all properties and dependencies
    by looking up the hash in the environment, store, or finally, binary caches.
    This is destructive."""

    if not any(node for node in spec.traverse(order="post") if node.abstract_hash):
        return

    spec._dup(lookup_hash(spec))


class AmbiguousHashError(spack.error.SpecError):
    def __init__(self, msg, *specs):
        spec_fmt = "{namespace}.{name}{@version}{%compiler}{compiler_flags}"
        spec_fmt += "{variants}{ arch=architecture}{/hash:7}"
        specs_str = "\n " + "\n ".join(spec.format(spec_fmt) for spec in specs)
        super().__init__(msg + specs_str)
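With ``spack/spec_lookup.py`` deleted outright, every symbol it exported moves to ``spack.spec``, as the surrounding hunks show. A quick sketch of the mapping:

.. code-block:: python

    import spack.spec

    # spack.spec_lookup.lookup_hash(spec)   -> spec.lookup_hash()
    # spack.spec_lookup.replace_hash(spec)  -> spec.replace_hash()
    # spack.spec_lookup.AmbiguousHashError  -> spack.spec.AmbiguousHashError
    assert hasattr(spack.spec.Spec, "lookup_hash")
    assert hasattr(spack.spec, "AmbiguousHashError")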
@@ -42,7 +42,8 @@
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.binary_distribution import CannotListKeys, GenerateIndexError
from spack.binary_distribution import INDEX_HASH_FILE, CannotListKeys, GenerateIndexError
from spack.database import INDEX_JSON_FILE
from spack.installer import PackageInstaller
from spack.paths import test_path
from spack.spec import Spec
@@ -606,7 +607,7 @@ def test_etag_fetching_304():
    # handled as success, since it means the local cache is up-to-date.
    def response_304(request: urllib.request.Request):
        url = request.get_full_url()
        if url == "https://www.example.com/build_cache/index.json":
        if url == f"https://www.example.com/build_cache/{INDEX_JSON_FILE}":
            assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
            raise urllib.error.HTTPError(
                url, 304, "Not Modified", hdrs={}, fp=None  # type: ignore[arg-type]
@@ -628,7 +629,7 @@ def test_etag_fetching_200():
    # Test conditional fetch with etags. The remote has modified the file.
    def response_200(request: urllib.request.Request):
        url = request.get_full_url()
        if url == "https://www.example.com/build_cache/index.json":
        if url == f"https://www.example.com/build_cache/{INDEX_JSON_FILE}":
            assert request.get_header("If-none-match") == '"112a8bbc1b3f7f185621c1ee335f0502"'
            return urllib.response.addinfourl(
                io.BytesIO(b"Result"),
@@ -679,7 +680,7 @@ def test_default_index_fetch_200():

    def urlopen(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith("index.json.hash"):
        if url.endswith(INDEX_HASH_FILE):
            return urllib.response.addinfourl(  # type: ignore[arg-type]
                io.BytesIO(index_json_hash.encode()),
                headers={},  # type: ignore[arg-type]
@@ -687,7 +688,7 @@ def urlopen(request: urllib.request.Request):
                code=200,
            )

        elif url.endswith("index.json"):
        elif url.endswith(INDEX_JSON_FILE):
            return urllib.response.addinfourl(
                io.BytesIO(index_json.encode()),
                headers={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
@@ -718,7 +719,7 @@ def test_default_index_dont_fetch_index_json_hash_if_no_local_hash():

    def urlopen(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith("index.json"):
        if url.endswith(INDEX_JSON_FILE):
            return urllib.response.addinfourl(
                io.BytesIO(index_json.encode()),
                headers={"Etag": '"59bcc3ad6775562f845953cf01624225"'},  # type: ignore[arg-type]
@@ -747,7 +748,7 @@ def test_default_index_not_modified():

    def urlopen(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith("index.json.hash"):
        if url.endswith(INDEX_HASH_FILE):
            return urllib.response.addinfourl(
                io.BytesIO(index_json_hash.encode()),
                headers={},  # type: ignore[arg-type]
@@ -792,7 +793,7 @@ def test_default_index_json_404():

    def urlopen(request: urllib.request.Request):
        url = request.get_full_url()
        if url.endswith("index.json.hash"):
        if url.endswith(INDEX_HASH_FILE):
            return urllib.response.addinfourl(
                io.BytesIO(index_json_hash.encode()),
                headers={},  # type: ignore[arg-type]
@@ -800,7 +801,7 @@ def urlopen(request: urllib.request.Request):
                code=200,
            )

        elif url.endswith("index.json"):
        elif url.endswith(INDEX_JSON_FILE):
            raise urllib.error.HTTPError(
                url,
                code=404,
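For context on what these stubs simulate: the index fetcher sends its cached ETag with the request and treats a 304 as "local index is current", while a 200 delivers a new body plus a new Etag to cache. A minimal sketch of the request side, reusing the header value asserted in the tests (the URL is the tests' mock host):

.. code-block:: python

    import urllib.request

    # Conditional fetch: the server compares the ETag and answers 304 if the
    # cached index.json is still valid, or 200 with a fresh Etag otherwise.
    req = urllib.request.Request(
        "https://www.example.com/build_cache/index.json",
        headers={"If-None-Match": '"112a8bbc1b3f7f185621c1ee335f0502"'},
    )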
@@ -28,6 +28,7 @@
from spack.ci.common import PipelineDag, PipelineOptions, SpackCIConfig
from spack.ci.generator_registry import generator
from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
from spack.database import INDEX_JSON_FILE
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.spec import Spec
@@ -847,7 +848,7 @@ def test_push_to_build_cache(

    # Test generating buildcache index while we have bin mirror
    buildcache_cmd("update-index", mirror_url)
    with open(mirror_dir / "build_cache" / "index.json", encoding="utf-8") as idx_fd:
    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as idx_fd:
        index_object = json.load(idx_fd)
        jsonschema.validate(index_object, db_idx_schema)
@@ -1065,7 +1066,7 @@ def test_ci_rebuild_index(
    buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")
    ci_cmd("rebuild-index")

    with open(mirror_dir / "build_cache" / "index.json", encoding="utf-8") as f:
    with open(mirror_dir / "build_cache" / INDEX_JSON_FILE, encoding="utf-8") as f:
        jsonschema.validate(json.load(f), db_idx_schema)
@@ -11,6 +11,7 @@
import spack
import spack.platforms
import spack.spec
from spack.database import INDEX_JSON_FILE
from spack.main import SpackCommand
from spack.util.executable import which

@@ -36,7 +37,7 @@ def test_create_db_tarball(tmpdir, database):
    contents = tar("tzf", tarball_name, output=str)

    # DB file is included
    assert "index.json" in contents
    assert INDEX_JSON_FILE in contents

    # specfiles from all installs are included
    for spec in database.query():
@@ -476,8 +476,8 @@ def test_default_queries(database):

def test_005_db_exists(database):
    """Make sure db cache file exists after creating."""
    index_file = os.path.join(database.root, ".spack-db", "index.json")
    lock_file = os.path.join(database.root, ".spack-db", "lock")
    index_file = os.path.join(database.root, ".spack-db", spack.database.INDEX_JSON_FILE)
    lock_file = os.path.join(database.root, ".spack-db", spack.database._LOCK_FILE)
    assert os.path.exists(str(index_file))
    # Lockfiles not currently supported on Windows
    if sys.platform != "win32":
@@ -982,7 +982,7 @@ def test_database_works_with_empty_dir(tmpdir):
    # Create the lockfile and failures directory otherwise
    # we'll get a permission error on Database creation
    db_dir = tmpdir.ensure_dir(".spack-db")
    db_dir.ensure("lock")
    db_dir.ensure(spack.database._LOCK_FILE)
    db_dir.ensure_dir("failures")
    tmpdir.chmod(mode=0o555)
    db = spack.database.Database(str(tmpdir))
@@ -427,9 +427,9 @@ def test_mismatched_constrain_spec_by_hash(self, default_mock_concretization, da
        """Test that Specs specified only by their incompatible hashes fail appropriately."""
        lhs = "/" + database.query_one("callpath ^mpich").dag_hash()
        rhs = "/" + database.query_one("callpath ^mpich2").dag_hash()
        with pytest.raises(spack.error.InvalidHashError):
        with pytest.raises(spack.spec.InvalidHashError):
            Spec(lhs).constrain(Spec(rhs))
        with pytest.raises(spack.error.InvalidHashError):
        with pytest.raises(spack.spec.InvalidHashError):
            Spec(lhs[:7]).constrain(Spec(rhs))

    @pytest.mark.parametrize(
@@ -11,11 +11,9 @@
import spack.binary_distribution
import spack.cmd
import spack.concretize
import spack.error
import spack.platforms.test
import spack.repo
import spack.spec
from spack.spec_lookup import AmbiguousHashError, lookup_hash, replace_hash
from spack.spec_parser import (
    UNIX_FILENAME,
    WINDOWS_FILENAME,
@@ -28,7 +26,7 @@

FAIL_ON_WINDOWS = pytest.mark.xfail(
    sys.platform == "win32",
    raises=(SpecTokenizationError, spack.error.InvalidHashError),
    raises=(SpecTokenizationError, spack.spec.InvalidHashError),
    reason="Unix style path on Windows",
)

@@ -784,22 +782,22 @@ def test_spec_by_hash(database, monkeypatch, config):

    hash_str = f"/{mpileaks.dag_hash()}"
    parsed_spec = SpecParser(hash_str).next_spec()
    replace_hash(parsed_spec)
    parsed_spec.replace_hash()
    assert parsed_spec == mpileaks

    short_hash_str = f"/{mpileaks.dag_hash()[:5]}"
    parsed_spec = SpecParser(short_hash_str).next_spec()
    replace_hash(parsed_spec)
    parsed_spec.replace_hash()
    assert parsed_spec == mpileaks

    name_version_and_hash = f"{mpileaks.name}@{mpileaks.version} /{mpileaks.dag_hash()[:5]}"
    parsed_spec = SpecParser(name_version_and_hash).next_spec()
    replace_hash(parsed_spec)
    parsed_spec.replace_hash()
    assert parsed_spec == mpileaks

    b_hash = f"/{b.dag_hash()}"
    parsed_spec = SpecParser(b_hash).next_spec()
    replace_hash(parsed_spec)
    parsed_spec.replace_hash()
    assert parsed_spec == b


@@ -813,7 +811,7 @@ def test_dep_spec_by_hash(database, config):
    assert "zmpi" in mpileaks_zmpi

    mpileaks_hash_fake = SpecParser(f"mpileaks ^/{fake.dag_hash()} ^zmpi").next_spec()
    replace_hash(mpileaks_hash_fake)
    mpileaks_hash_fake.replace_hash()
    assert "fake" in mpileaks_hash_fake
    assert mpileaks_hash_fake["fake"] == fake
    assert "zmpi" in mpileaks_hash_fake
@@ -822,7 +820,7 @@ def test_dep_spec_by_hash(database, config):
    mpileaks_hash_zmpi = SpecParser(
        f"mpileaks %{mpileaks_zmpi.compiler} ^ /{zmpi.dag_hash()}"
    ).next_spec()
    replace_hash(mpileaks_hash_zmpi)
    mpileaks_hash_zmpi.replace_hash()
    assert "zmpi" in mpileaks_hash_zmpi
    assert mpileaks_hash_zmpi["zmpi"] == zmpi
    assert mpileaks_zmpi.compiler.satisfies(mpileaks_hash_zmpi.compiler)
@@ -830,7 +828,7 @@ def test_dep_spec_by_hash(database, config):
    mpileaks_hash_fake_and_zmpi = SpecParser(
        f"mpileaks ^/{fake.dag_hash()[:4]} ^ /{zmpi.dag_hash()[:5]}"
    ).next_spec()
    replace_hash(mpileaks_hash_fake_and_zmpi)
    mpileaks_hash_fake_and_zmpi.replace_hash()
    assert "zmpi" in mpileaks_hash_fake_and_zmpi
    assert mpileaks_hash_fake_and_zmpi["zmpi"] == zmpi

@@ -890,13 +888,13 @@ def test_ambiguous_hash(mutable_database):

    # ambiguity in first hash character
    s1 = SpecParser("/x").next_spec()
    with pytest.raises(AmbiguousHashError):
        lookup_hash(s1)
    with pytest.raises(spack.spec.AmbiguousHashError):
        s1.lookup_hash()

    # ambiguity in first hash character AND spec name
    s2 = SpecParser("pkg-a/x").next_spec()
    with pytest.raises(AmbiguousHashError):
        lookup_hash(s2)
    with pytest.raises(spack.spec.AmbiguousHashError):
        s2.lookup_hash()


@pytest.mark.db
@@ -905,24 +903,24 @@ def test_invalid_hash(database, config):
    mpich = database.query_one("mpich")

    # name + incompatible hash
    with pytest.raises(spack.error.InvalidHashError):
    with pytest.raises(spack.spec.InvalidHashError):
        parsed_spec = SpecParser(f"zmpi /{mpich.dag_hash()}").next_spec()
        replace_hash(parsed_spec)
        parsed_spec.replace_hash()
    with pytest.raises(spack.error.InvalidHashError):
    with pytest.raises(spack.spec.InvalidHashError):
        parsed_spec = SpecParser(f"mpich /{zmpi.dag_hash()}").next_spec()
        replace_hash(parsed_spec)
        parsed_spec.replace_hash()

    # name + dep + incompatible hash
    with pytest.raises(spack.error.InvalidHashError):
    with pytest.raises(spack.spec.InvalidHashError):
        parsed_spec = SpecParser(f"mpileaks ^zmpi /{mpich.dag_hash()}").next_spec()
        replace_hash(parsed_spec)
        parsed_spec.replace_hash()


def test_invalid_hash_dep(database, config):
    mpich = database.query_one("mpich")
    hash = mpich.dag_hash()
    with pytest.raises(spack.error.InvalidHashError):
        replace_hash(spack.spec.Spec(f"callpath ^zlib/{hash}"))
    with pytest.raises(spack.spec.InvalidHashError):
        spack.spec.Spec(f"callpath ^zlib/{hash}").replace_hash()


@pytest.mark.db
@@ -935,8 +933,9 @@ def test_nonexistent_hash(database, config):
    hashes = [s._hash for s in specs]
    assert no_such_hash not in [h[: len(no_such_hash)] for h in hashes]

    with pytest.raises(spack.error.InvalidHashError):
        replace_hash(SpecParser(f"/{no_such_hash}").next_spec())
    with pytest.raises(spack.spec.InvalidHashError):
        parsed_spec = SpecParser(f"/{no_such_hash}").next_spec()
        parsed_spec.replace_hash()


@pytest.mark.parametrize(
@@ -967,7 +966,7 @@ def test_disambiguate_hash_by_spec(spec1, spec2, constraint, mock_packages, monk
    else:
        spec = spack.spec.Spec("/spec" + constraint)

    assert lookup_hash(spec) == spec1_concrete
    assert spec.lookup_hash() == spec1_concrete


@pytest.mark.parametrize(
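Taken together, these tests pin down the lookup contract: a prefix matching several installed hashes raises AmbiguousHashError, while a hash that matches nothing, or contradicts the rest of the spec, raises InvalidHashError. A condensed sketch (hash prefixes are hypothetical, and an installed database is assumed):

.. code-block:: python

    import pytest

    import spack.spec

    # Several installed specs share the prefix -> ambiguous
    with pytest.raises(spack.spec.AmbiguousHashError):
        spack.spec.Spec("/x").lookup_hash()

    # Hash exists but belongs to a different package -> invalid
    with pytest.raises(spack.spec.InvalidHashError):
        spack.spec.Spec("zmpi /0123456").replace_hash()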
@@ -17,6 +17,8 @@ class Fmt(CMakePackage):

    license("MIT")

    version("11.1.2", sha256="ef54df1d4ba28519e31bf179f6a4fb5851d684c328ca051ce5da1b52bf8b1641")
    version("11.1.1", sha256="a25124e41c15c290b214c4dec588385153c91b47198dbacda6babce27edc4b45")
    version("11.0.2", sha256="40fc58bebcf38c759e11a7bd8fdc163507d2423ef5058bba7f26280c5b9c5465")
    version("11.0.1", sha256="62ca45531814109b5d6cef0cf2fd17db92c32a30dd23012976e768c685534814")
    version("11.0.0", sha256="583ce480ef07fad76ef86e1e2a639fc231c3daa86c4aa6bcba524ce908f30699")
@@ -13,6 +13,7 @@ class Spdlog(CMakePackage):

    license("MIT")

    version("1.15.0", sha256="9962648c9b4f1a7bbc76fd8d9172555bad1871fdb14ff4f842ef87949682caa5")
    version("1.14.1", sha256="1586508029a7d0670dfcb2d97575dcdc242d3868a259742b69f100801ab4e16b")
    version("1.13.0", sha256="534f2ee1a4dcbeb22249856edfb2be76a1cf4f708a20b0ac2ed090ee24cfdbc9")
    version("1.12.0", sha256="4dccf2d10f410c1e2feaff89966bfc49a1abb29ef6f08246335b110e001e09a9")
@@ -69,6 +70,20 @@ class Spdlog(CMakePackage):
        when="@1.11.0 ^fmt@10:",
    )

    # spdlog@1.15.0 with fmt@11 https://github.com/gabime/spdlog/pull/3314
    # (with https://github.com/gabime/spdlog/pull/3301 as a prerequisite)
    patch(
        "https://github.com/gabime/spdlog/commit/276ee5f5c0eb13626bd367b006ace5eae9526d8a.patch?full_index=1",
        sha256="fd4cbb10a795a03c7182a4070056c2b004d47b120a86e1958ff82316627bb565",
        when="@1.13.0:1.15.0",
    )
    patch(
        "https://github.com/gabime/spdlog/commit/96a8f6250cbf4e8c76387c614f666710a2fa9bad.patch?full_index=1",
        sha256="5ed92f4c131fd31eb3d28390615ecff3ade3789cdecfd3db18cadb07cc8095e3",
        when="@1.13.0:1.15.0",
    )
    conflicts("^fmt@11.1:", when="@:1.12")

    def cmake_args(self):
        args = []
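A note on the version logic above: Spack ranges like @1.13.0:1.15.0 are inclusive on both ends, so the new 1.15.0 release receives both patches, while the conflicts() line keeps fmt 11.1 and newer away from spdlog 1.12 and older. A small sketch of how those ranges evaluate, assuming a Spack session:

.. code-block:: python

    import spack.spec

    # Patch range is inclusive: 1.13.0, 1.14.x, and 1.15.0 all match.
    assert spack.spec.Spec("spdlog@1.15.0").satisfies("@1.13.0:1.15.0")
    assert not spack.spec.Spec("spdlog@1.12.0").satisfies("@1.13.0:1.15.0")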